hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
7a9689f8d65b8c67f2eec82038020f4c92ebe057
| 265
|
py
|
Python
|
lab5/lab/zad1.py
|
BartlomiejRasztabiga/PIPR
|
2d0efd57b3b84855b5a2de335493100d2682d292
|
[
"MIT"
] | null | null | null |
lab5/lab/zad1.py
|
BartlomiejRasztabiga/PIPR
|
2d0efd57b3b84855b5a2de335493100d2682d292
|
[
"MIT"
] | null | null | null |
lab5/lab/zad1.py
|
BartlomiejRasztabiga/PIPR
|
2d0efd57b3b84855b5a2de335493100d2682d292
|
[
"MIT"
] | null | null | null |
def generate_fuzz_list(n):
    """Return the numbers 1..n with every multiple of 3 replaced by "fuzz"."""
    result = []
    for number in range(1, n + 1):
        result.append("fuzz" if number % 3 == 0 else number)
    return result
def generate_fuzz_list_map(n):
    """Return the numbers 1..n with every multiple of 3 replaced by "fuzz".

    Functionally identical to generate_fuzz_list; kept as a second
    implementation of the same mapping.
    """
    def fuzzify(value):
        # Multiples of 3 become the literal string "fuzz".
        return "fuzz" if value % 3 == 0 else value

    return [fuzzify(value) for value in range(1, n + 1)]
# Demonstrate that both implementations produce the same list for n = 12.
for demo in (generate_fuzz_list(12), generate_fuzz_list_map(12)):
    print(demo)
| 24.090909
| 74
| 0.671698
| 53
| 265
| 3.169811
| 0.358491
| 0.285714
| 0.380952
| 0.22619
| 0.166667
| 0.166667
| 0.166667
| 0
| 0
| 0
| 0
| 0.055046
| 0.177358
| 265
| 10
| 75
| 26.5
| 0.715596
| 0
| 0
| 0
| 1
| 0
| 0.030189
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.333333
| 0.666667
| 0.333333
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
7aacf8e2b7f05bfa85feb37caec3830da0f8cb6b
| 30,032
|
py
|
Python
|
rsscrawler/url.py
|
9Mad-Max5/RSScrawler
|
f9f690ed990edb524be0d4921a3bc569365eafca
|
[
"MIT"
] | 1
|
2020-01-23T16:54:44.000Z
|
2020-01-23T16:54:44.000Z
|
rsscrawler/url.py
|
9Mad-Max5/RSScrawler
|
f9f690ed990edb524be0d4921a3bc569365eafca
|
[
"MIT"
] | null | null | null |
rsscrawler/url.py
|
9Mad-Max5/RSScrawler
|
f9f690ed990edb524be0d4921a3bc569365eafca
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# RSScrawler
# Projekt von https://github.com/rix1337
import concurrent.futures
import cloudscraper
import requests
from rsscrawler.common import check_is_site
from rsscrawler.config import RssConfig
from rsscrawler.db import RssDb
def check_url(configfile, dbfile, scraper=False):
    """Probe every configured hostname and persist which sites are blocked.

    Each known site is probed with a cheap HTTP request: first through the
    configured proxy (results recorded in the 'proxystatus' table), then —
    when no proxy is set, or the proxy is blocked and fallback is enabled —
    directly (results recorded in the 'normalstatus' table).  A "Blocked"
    marker lets later fetch helpers skip unreachable sites.

    :param configfile: path to the RSScrawler configuration file
    :param dbfile: path to the status database
    :param scraper: optional cloudscraper instance to reuse; a fresh one is
        created when falsy
    :return: the (possibly recreated) cloudscraper instance
    """
    hostnames = RssConfig('Hostnames', configfile)
    sj = hostnames.get('sj')
    dj = hostnames.get('dj')
    sf = hostnames.get('sf')
    mb = hostnames.get('mb')
    hw = hostnames.get('hw')
    hs = hostnames.get('hs')
    fx = hostnames.get('fx')
    nk = hostnames.get('nk')
    dd = hostnames.get('dd')
    fc = hostnames.get('fc')
    if not scraper:
        scraper = cloudscraper.create_scraper()
    sj_url = 'https://' + sj
    # BUGFIX: was "'https://' + sj" — the DJ availability check probed the
    # SJ hostname instead of the DJ hostname.
    dj_url = 'https://' + dj
    sf_url = 'https://' + sf
    mb_url = 'https://' + mb
    hw_url = 'https://' + hw
    fx_url = 'https://' + fx
    hs_url = 'https://' + hs + '/collection/neuerscheinungen/'
    nk_url = 'https://' + nk
    dd_url = 'https://' + dd
    fc_url = 'https://' + fc
    sj_blocked_proxy = False
    dj_blocked_proxy = False
    sf_blocked_proxy = False
    mb_blocked_proxy = False
    hw_blocked_proxy = False
    fx_blocked_proxy = False
    hs_blocked_proxy = False
    nk_blocked_proxy = False
    dd_blocked_proxy = False
    fc_blocked_proxy = False
    sj_blocked = False
    dj_blocked = False
    sf_blocked = False
    mb_blocked = False
    hw_blocked = False
    fx_blocked = False
    hs_blocked = False
    nk_blocked = False
    dd_blocked = False
    fc_blocked = False
    # Clear stale "Blocked" markers before re-probing.
    db = RssDb(dbfile, 'proxystatus')
    db.delete("SJ")
    db.delete("DJ")
    db.delete("SF")
    db.delete("MB")
    db.delete("HW")
    db.delete("FX")
    db.delete("HS")
    db.delete("NK")
    db.delete("DD")
    db.delete("FC")
    db_normal = RssDb(dbfile, 'normalstatus')
    db_normal.delete("SJ")
    db_normal.delete("DJ")
    db_normal.delete("SF")
    db_normal.delete("MB")
    db_normal.delete("HW")
    db_normal.delete("FX")
    db_normal.delete("HS")
    db_normal.delete("NK")
    db_normal.delete("DD")
    db_normal.delete("FC")
    proxy = RssConfig('RSScrawler', configfile).get('proxy')
    fallback = RssConfig('RSScrawler', configfile).get('fallback')
    if proxy:
        proxies = {'http': proxy, 'https': proxy}
        # SJ/DJ/SF are considered blocked when they redirect to a "block."
        # location header.
        if not sj:
            db.store("SJ", "Blocked")
        else:
            try:
                if "block." in str(
                        scraper.get(sj_url, proxies=proxies, timeout=30,
                                    allow_redirects=False).headers.get("location")):
                    sj_blocked_proxy = True
                else:
                    db.delete("SJ")
            except Exception:
                sj_blocked_proxy = True
        if sj_blocked_proxy:
            print(u"Der Zugriff auf SJ ist mit der aktuellen Proxy-IP nicht möglich!")
            db.store("SJ", "Blocked")
            # Recreate the scraper after a failed probe so later probes
            # start from a clean session.
            scraper = cloudscraper.create_scraper()
        if not dj:
            db.store("DJ", "Blocked")
        else:
            try:
                if "block." in str(
                        scraper.get(dj_url, proxies=proxies, timeout=30,
                                    allow_redirects=False).headers.get("location")):
                    dj_blocked_proxy = True
                else:
                    db.delete("DJ")
            except Exception:
                dj_blocked_proxy = True
        if dj_blocked_proxy:
            print(u"Der Zugriff auf DJ ist mit der aktuellen Proxy-IP nicht möglich!")
            db.store("DJ", "Blocked")
            scraper = cloudscraper.create_scraper()
        if not sf:
            db.store("SF", "Blocked")
        else:
            try:
                if "block." in str(
                        scraper.get(sf_url, proxies=proxies, timeout=30,
                                    allow_redirects=False).headers.get("location")):
                    sf_blocked_proxy = True
                else:
                    db.delete("SF")
            except Exception:
                sf_blocked_proxy = True
        if sf_blocked_proxy:
            print(u"Der Zugriff auf SF ist mit der aktuellen Proxy-IP nicht möglich!")
            db.store("SF", "Blocked")
            scraper = cloudscraper.create_scraper()
        # MB/HW/FX are considered blocked on an HTTP 403 response.
        if not mb:
            db.store("MB", "Blocked")
        else:
            try:
                if "<Response [403]>" in str(
                        scraper.get(mb_url, proxies=proxies, timeout=30,
                                    allow_redirects=False)):
                    mb_blocked_proxy = True
                else:
                    db.delete("MB")
            except Exception:
                mb_blocked_proxy = True
        if mb_blocked_proxy:
            print(u"Der Zugriff auf MB ist mit der aktuellen Proxy-IP nicht möglich!")
            db.store("MB", "Blocked")
            scraper = cloudscraper.create_scraper()
        if not hw:
            db.store("HW", "Blocked")
        else:
            try:
                if "<Response [403]>" in str(
                        scraper.get(hw_url, proxies=proxies, timeout=30,
                                    allow_redirects=False)):
                    hw_blocked_proxy = True
                else:
                    db.delete("HW")
            except Exception:
                hw_blocked_proxy = True
        if hw_blocked_proxy:
            print(u"Der Zugriff auf HW ist mit der aktuellen Proxy-IP nicht möglich!")
            db.store("HW", "Blocked")
            scraper = cloudscraper.create_scraper()
        if not fx:
            db.store("FX", "Blocked")
        else:
            try:
                if "<Response [403]>" in str(
                        scraper.get(fx_url, proxies=proxies, timeout=30,
                                    allow_redirects=False)):
                    fx_blocked_proxy = True
                else:
                    db.delete("FX")
            except Exception:
                fx_blocked_proxy = True
            # Second opinion for FX: retry with a plain requests session that
            # reuses the scraper's headers/cookies; a 200 overrides the block.
            session = requests.session()
            session.headers = scraper.headers
            session.cookies = scraper.cookies
            session.verify = False
            if "<Response [200]>" in str(
                    session.get(fx_url, proxies=proxies, timeout=30,
                                allow_redirects=False)):
                fx_blocked_proxy = False
        if fx_blocked_proxy:
            print(u"Der Zugriff auf FX ist mit der aktuellen Proxy-IP nicht möglich!")
            db.store("FX", "Blocked")
            scraper = cloudscraper.create_scraper()
        # HS/NK/DD/FC are considered blocked unless they answer 200.
        # NOTE(review): these requests do not pass proxies=, so despite
        # running in the proxy branch they go out directly — confirm intent.
        if not hs:
            db.store("HS", "Blocked")
        else:
            try:
                if "200" not in str(
                        scraper.get(hs_url, timeout=30, allow_redirects=False).status_code):
                    hs_blocked_proxy = True
                else:
                    db.delete("HS")
            except Exception:
                hs_blocked_proxy = True
        if hs_blocked_proxy:
            print(u"Der Zugriff auf HS ist mit der aktuellen Proxy-IP nicht möglich!")
            db.store("HS", "Blocked")
            scraper = cloudscraper.create_scraper()
        if not nk:
            db.store("NK", "Blocked")
        else:
            try:
                if "200" not in str(
                        scraper.get(nk_url, timeout=30, allow_redirects=False).status_code):
                    nk_blocked_proxy = True
                else:
                    db.delete("NK")
            except Exception:
                nk_blocked_proxy = True
        if nk_blocked_proxy:
            print(u"Der Zugriff auf NK ist mit der aktuellen Proxy-IP nicht möglich!")
            db.store("NK", "Blocked")
            scraper = cloudscraper.create_scraper()
        if not dd:
            db.store("DD", "Blocked")
        else:
            try:
                if "200" not in str(
                        scraper.get(dd_url, timeout=30, allow_redirects=False).status_code):
                    dd_blocked_proxy = True
                else:
                    db.delete("DD")
            except Exception:
                dd_blocked_proxy = True
        if dd_blocked_proxy:
            print(u"Der Zugriff auf DD ist mit der aktuellen Proxy-IP nicht möglich!")
            db.store("DD", "Blocked")
            scraper = cloudscraper.create_scraper()
        if not fc:
            db.store("FC", "Blocked")
        else:
            try:
                if "200" not in str(
                        scraper.get(fc_url, timeout=30).status_code):
                    fc_blocked_proxy = True
                else:
                    db.delete("FC")
            except Exception:
                fc_blocked_proxy = True
        if fc_blocked_proxy:
            print(u"Der Zugriff auf FC ist mit der aktuellen Proxy-IP nicht möglich!")
            db.store("FC", "Blocked")
            scraper = cloudscraper.create_scraper()
    # Direct (no-proxy) probes: run when no proxy is configured, or when the
    # proxy is blocked for a site and fallback is enabled.
    if not proxy or (proxy and sj_blocked_proxy and fallback):
        if not sj:
            # NOTE(review): stores into the proxystatus table (db) rather
            # than db_normal here — looks inconsistent; confirm before
            # changing.
            db.store("SJ", "Blocked")
        else:
            try:
                if "block." in str(
                        scraper.get(sj_url, timeout=30, allow_redirects=False).headers.get(
                            "location")):
                    sj_blocked = True
            except Exception:
                sj_blocked = True
        if sj_blocked:
            db_normal.store("SJ", "Blocked")
            print(u"Der Zugriff auf SJ ist mit der aktuellen IP nicht möglich!")
    if not proxy or (proxy and dj_blocked_proxy and fallback):
        if not dj:
            db.store("DJ", "Blocked")
        else:
            try:
                if "block." in str(
                        scraper.get(dj_url, timeout=30, allow_redirects=False).headers.get(
                            "location")):
                    dj_blocked = True
            except Exception:
                dj_blocked = True
        if dj_blocked:
            db_normal.store("DJ", "Blocked")
            print(u"Der Zugriff auf DJ ist mit der aktuellen IP nicht möglich!")
    # NOTE(review): unlike every other site, the direct SF check is not
    # guarded by "if not proxy or (proxy and sf_blocked_proxy and fallback)";
    # it always runs.  Preserved as-is — confirm whether the guard is missing.
    if not sf:
        db.store("SF", "Blocked")
    else:
        try:
            if "block." in str(
                    scraper.get(sf_url, timeout=30, allow_redirects=False).headers.get(
                        "location")):
                sf_blocked = True
        except Exception:
            sf_blocked = True
    if sf_blocked:
        db_normal.store("SF", "Blocked")
        print(u"Der Zugriff auf SF ist mit der aktuellen IP nicht möglich!")
    if not proxy or (proxy and mb_blocked_proxy and fallback):
        if not mb:
            db.store("MB", "Blocked")
        else:
            try:
                if "<Response [403]>" in str(
                        scraper.get(mb_url, timeout=30, allow_redirects=False)):
                    mb_blocked = True
            except Exception:
                mb_blocked = True
        if mb_blocked:
            db_normal.store("MB", "Blocked")
            print(u"Der Zugriff auf MB ist mit der aktuellen IP nicht möglich!")
    if not proxy or (proxy and hw_blocked_proxy and fallback):
        if not hw:
            db.store("HW", "Blocked")
        else:
            try:
                if "<Response [403]>" in str(
                        scraper.get(hw_url, timeout=30, allow_redirects=False)):
                    hw_blocked = True
            except Exception:
                hw_blocked = True
        if hw_blocked:
            db_normal.store("HW", "Blocked")
            print(u"Der Zugriff auf HW ist mit der aktuellen IP nicht möglich!")
    if not proxy or (proxy and fx_blocked_proxy and fallback):
        if not fx:
            db.store("FX", "Blocked")
        else:
            try:
                if "<Response [403]>" in str(
                        scraper.get(fx_url, timeout=30, allow_redirects=False)):
                    fx_blocked = True
            except Exception:
                fx_blocked = True
            # Same FX second-opinion check as in the proxy branch.
            session = requests.session()
            session.headers = scraper.headers
            session.cookies = scraper.cookies
            session.verify = False
            if "<Response [200]>" in str(
                    session.get(fx_url, timeout=30, allow_redirects=False)):
                fx_blocked = False
        if fx_blocked:
            db_normal.store("FX", "Blocked")
            print(u"Der Zugriff auf FX ist mit der aktuellen IP nicht möglich!")
    if not proxy or (proxy and hs_blocked_proxy and fallback):
        if not hs:
            db.store("HS", "Blocked")
        else:
            try:
                if "200" not in str(
                        scraper.get(hs_url, timeout=30, allow_redirects=False).status_code):
                    hs_blocked = True
            except Exception:
                hs_blocked = True
        if hs_blocked:
            db_normal.store("HS", "Blocked")
            print(u"Der Zugriff auf HS ist mit der aktuellen IP nicht möglich!")
    if not proxy or (proxy and nk_blocked_proxy and fallback):
        if not nk:
            db.store("NK", "Blocked")
        else:
            try:
                if "200" not in str(
                        scraper.get(nk_url, timeout=30, allow_redirects=False).status_code):
                    nk_blocked = True
            except Exception:
                nk_blocked = True
        if nk_blocked:
            db_normal.store("NK", "Blocked")
            print(u"Der Zugriff auf NK ist mit der aktuellen IP nicht möglich!")
    if not proxy or (proxy and dd_blocked_proxy and fallback):
        if not dd:
            db.store("DD", "Blocked")
        else:
            try:
                if "200" not in str(
                        scraper.get(dd_url, timeout=30, allow_redirects=False).status_code):
                    dd_blocked = True
            except Exception:
                dd_blocked = True
        if dd_blocked:
            db_normal.store("DD", "Blocked")
            print(u"Der Zugriff auf DD ist mit der aktuellen IP nicht möglich!")
    if not proxy or (proxy and fc_blocked_proxy and fallback):
        if not fc:
            db.store("FC", "Blocked")
        else:
            try:
                if "200" not in str(
                        scraper.get(fc_url, timeout=30).status_code):
                    fc_blocked = True
            except Exception:
                fc_blocked = True
        if fc_blocked:
            db_normal.store("FC", "Blocked")
            print(u"Der Zugriff auf FC ist mit der aktuellen IP nicht möglich!")
    return scraper
def get_url(url, configfile, dbfile, scraper=False):
    """Fetch *url* and return the response body text.

    Respects the per-site block status recorded by check_url: a site that is
    blocked for the current connection mode returns "" (or falls back from
    proxy to direct access when the fallback option is set).  Any request
    error is reported and mapped to "".
    """
    config = RssConfig('RSScrawler', configfile)
    proxy = config.get('proxy')
    if not scraper:
        scraper = cloudscraper.create_scraper()
    db = RssDb(dbfile, 'proxystatus')
    db_normal = RssDb(dbfile, 'normalstatus')
    site = check_is_site(url, configfile)
    site_keys = ("SJ", "DJ", "SF", "MB", "HW", "FX", "HS", "NK", "DD", "FC")
    if proxy:
        try:
            # First key contained in the detected site name, if any.
            matched = next((key for key in site_keys if site and key in site), None)
            if matched and db.retrieve(matched):
                # Proxy is blocked for this site: fall back to a direct
                # request only when allowed and the site is reachable.
                if config.get("fallback") and not db_normal.retrieve(matched):
                    return scraper.get(url, timeout=30).text
                return ""
            proxies = {'http': proxy, 'https': proxy}
            return scraper.get(url, proxies=proxies, timeout=30).text
        except Exception as e:
            print(u"Fehler beim Abruf von: " + url + " " + str(e))
            return ""
    else:
        try:
            for key in site_keys:
                # Site marked unreachable for direct access: skip it.
                if site and key in site and db_normal.retrieve(key):
                    return ""
            return scraper.get(url, timeout=30).text
        except Exception as e:
            print(u"Fehler beim Abruf von: " + url + " " + str(e))
            return ""
def get_url_headers(url, configfile, dbfile, headers, scraper=False):
    """Fetch *url* with explicit request *headers*.

    Returns a two-element list [response, scraper]; the response element is
    "" when the site is blocked for the current connection mode or the
    request fails.  Block handling mirrors get_url.
    """
    config = RssConfig('RSScrawler', configfile)
    proxy = config.get('proxy')
    if not scraper:
        scraper = cloudscraper.create_scraper()
    db = RssDb(dbfile, 'proxystatus')
    db_normal = RssDb(dbfile, 'normalstatus')
    site = check_is_site(url, configfile)
    site_keys = ("SJ", "DJ", "SF", "MB", "HW", "FX", "HS", "NK", "DD", "FC")
    if proxy:
        try:
            # First key contained in the detected site name, if any.
            matched = next((key for key in site_keys if site and key in site), None)
            if matched and db.retrieve(matched):
                # Proxy blocked for this site: direct request only when the
                # fallback option allows it and the site is reachable.
                if config.get("fallback") and not db_normal.retrieve(matched):
                    return [scraper.get(url, headers=headers, timeout=30), scraper]
                return ["", scraper]
            proxies = {'http': proxy, 'https': proxy}
            response = scraper.get(url, headers=headers, proxies=proxies, timeout=30)
            return [response, scraper]
        except Exception as e:
            print(u"Fehler beim Abruf von: " + url + " " + str(e))
            return ["", scraper]
    else:
        try:
            for key in site_keys:
                # Site marked unreachable for direct access: skip it.
                if site and key in site and db_normal.retrieve(key):
                    return ["", scraper]
            response = scraper.get(url, headers=headers, timeout=30)
            return [response, scraper]
        except Exception as e:
            print(u"Fehler beim Abruf von: " + url + " " + str(e))
            return ["", scraper]
def post_url(url, configfile, dbfile, data, scraper=False):
    """POST *data* to *url* and return the raw response content.

    Respects the per-site block status recorded by check_url, exactly like
    get_url: blocked sites yield "" (with an optional proxy-to-direct
    fallback), and request errors are reported and mapped to "".
    """
    config = RssConfig('RSScrawler', configfile)
    proxy = config.get('proxy')
    if not scraper:
        scraper = cloudscraper.create_scraper()
    db = RssDb(dbfile, 'proxystatus')
    db_normal = RssDb(dbfile, 'normalstatus')
    site = check_is_site(url, configfile)
    site_keys = ("SJ", "DJ", "SF", "MB", "HW", "FX", "HS", "NK", "DD", "FC")
    if proxy:
        try:
            # First key contained in the detected site name, if any.
            matched = next((key for key in site_keys if site and key in site), None)
            if matched and db.retrieve(matched):
                # Proxy blocked for this site: direct request only when the
                # fallback option allows it and the site is reachable.
                if config.get("fallback") and not db_normal.retrieve(matched):
                    return scraper.post(url, data, timeout=30).content
                return ""
            proxies = {'http': proxy, 'https': proxy}
            return scraper.post(url, data, proxies=proxies, timeout=30).content
        except Exception as e:
            print(u"Fehler beim Abruf von: " + url + " " + str(e))
            return ""
    else:
        try:
            for key in site_keys:
                # Site marked unreachable for direct access: skip it.
                if site and key in site and db_normal.retrieve(key):
                    return ""
            return scraper.post(url, data, timeout=30).content
        except Exception as e:
            print(u"Fehler beim Abruf von: " + url + " " + str(e))
            return ""
def get_urls_async(urls, configfile, dbfile, scraper=False):
    """Fetch several URLs concurrently via get_url.

    :param urls: iterable of URLs to fetch
    :param scraper: optional cloudscraper instance shared by all workers
    :return: [results, scraper] — results in completion order, not input
        order, because concurrent.futures.as_completed yields finished
        futures first
    """
    if not scraper:
        scraper = cloudscraper.create_scraper()
    results = []

    def load_url(url):
        # One worker task; get_url already maps request errors to "".
        return get_url(url, configfile, dbfile, scraper)

    with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
        # BUGFIX: the original built a future->url dict and then evaluated
        # "future_to_url[future]" as a dead statement; a plain list suffices.
        futures = [executor.submit(load_url, url) for url in urls]
        for future in concurrent.futures.as_completed(futures):
            try:
                results.append(future.result())
            except Exception:
                # Deliberate best effort: one failed URL must not abort the
                # whole batch.
                pass
    return [results, scraper]
| 39.672391
| 92
| 0.488412
| 3,286
| 30,032
| 4.364577
| 0.037127
| 0.043927
| 0.066936
| 0.042672
| 0.828057
| 0.816344
| 0.775275
| 0.723191
| 0.712941
| 0.700948
| 0
| 0.009584
| 0.409363
| 30,032
| 756
| 93
| 39.724868
| 0.798963
| 0.002364
| 0
| 0.78267
| 0
| 0
| 0.096939
| 0.000968
| 0
| 0
| 0
| 0
| 0
| 1
| 0.008523
| false
| 0.00142
| 0.008523
| 0.00142
| 0.166193
| 0.036932
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8fccd44713ff21d25e2713c95ef5e6e0dfb0bdd0
| 102
|
py
|
Python
|
ttarray/core/truncate.py
|
sonnerm/ttarray
|
c962cb2be303dfdb6743aa802bd11b89043e7b71
|
[
"MIT"
] | null | null | null |
ttarray/core/truncate.py
|
sonnerm/ttarray
|
c962cb2be303dfdb6743aa802bd11b89043e7b71
|
[
"MIT"
] | null | null | null |
ttarray/core/truncate.py
|
sonnerm/ttarray
|
c962cb2be303dfdb6743aa802bd11b89043e7b71
|
[
"MIT"
] | null | null | null |
from ..dispatch import implement_function
def truncate(a, **kwargs):
    """Dispatch ``truncate`` to *a*'s own implementation, forwarding kwargs.

    NOTE(review): ``a`` is forwarded both as the bound receiver and as an
    explicit positional argument (``a.truncate(a, ...)``). Confirm the
    method signature really expects the array itself as its first
    parameter — this may be an accidental duplication.
    """
    impl = a.truncate
    return impl(a, **kwargs)
| 25.5
| 41
| 0.745098
| 14
| 102
| 5.357143
| 0.714286
| 0.24
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 102
| 3
| 42
| 34
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
8fdc30c10b84528fd44b245fa7c7231736b54151
| 6,454
|
py
|
Python
|
Chapter_7_code/build/hector_uav_msgs/cmake/hector_uav_msgs-genmsg-context.py
|
crepuscularlight/ROSbyExample
|
fa7b1a60cacca9b1034e318a2ac16ce4c8530d7c
|
[
"MIT"
] | 1
|
2021-04-23T10:01:22.000Z
|
2021-04-23T10:01:22.000Z
|
Chapter_7_code/build/hector_uav_msgs/cmake/hector_uav_msgs-genmsg-context.py
|
crepuscularlight/ROSbyExample
|
fa7b1a60cacca9b1034e318a2ac16ce4c8530d7c
|
[
"MIT"
] | null | null | null |
Chapter_7_code/build/hector_uav_msgs/cmake/hector_uav_msgs-genmsg-context.py
|
crepuscularlight/ROSbyExample
|
fa7b1a60cacca9b1034e318a2ac16ce4c8530d7c
|
[
"MIT"
] | null | null | null |
# generated from genmsg/cmake/pkg-genmsg.context.in
# Build-time context for catkin's genmsg machinery (hector_uav_msgs
# package). Values are plain strings; note the hard-coded absolute
# workspace paths, so this file is machine- and checkout-specific.

# Semicolon-separated absolute paths of every .msg file to generate code
# for: hand-written messages live under src/, the Pose/Landing/Takeoff
# action messages under devel/.private.
messages_str = "/home/liudiyang1998/Git/ROS-Robotics-By-Example/Chapter_7_code/src/hector_quadrotor/hector_uav_msgs/msg/Altimeter.msg;/home/liudiyang1998/Git/ROS-Robotics-By-Example/Chapter_7_code/src/hector_quadrotor/hector_uav_msgs/msg/AttitudeCommand.msg;/home/liudiyang1998/Git/ROS-Robotics-By-Example/Chapter_7_code/src/hector_quadrotor/hector_uav_msgs/msg/Compass.msg;/home/liudiyang1998/Git/ROS-Robotics-By-Example/Chapter_7_code/src/hector_quadrotor/hector_uav_msgs/msg/ControllerState.msg;/home/liudiyang1998/Git/ROS-Robotics-By-Example/Chapter_7_code/src/hector_quadrotor/hector_uav_msgs/msg/HeadingCommand.msg;/home/liudiyang1998/Git/ROS-Robotics-By-Example/Chapter_7_code/src/hector_quadrotor/hector_uav_msgs/msg/HeightCommand.msg;/home/liudiyang1998/Git/ROS-Robotics-By-Example/Chapter_7_code/src/hector_quadrotor/hector_uav_msgs/msg/MotorCommand.msg;/home/liudiyang1998/Git/ROS-Robotics-By-Example/Chapter_7_code/src/hector_quadrotor/hector_uav_msgs/msg/MotorPWM.msg;/home/liudiyang1998/Git/ROS-Robotics-By-Example/Chapter_7_code/src/hector_quadrotor/hector_uav_msgs/msg/MotorStatus.msg;/home/liudiyang1998/Git/ROS-Robotics-By-Example/Chapter_7_code/src/hector_quadrotor/hector_uav_msgs/msg/PositionXYCommand.msg;/home/liudiyang1998/Git/ROS-Robotics-By-Example/Chapter_7_code/src/hector_quadrotor/hector_uav_msgs/msg/RawImu.msg;/home/liudiyang1998/Git/ROS-Robotics-By-Example/Chapter_7_code/src/hector_quadrotor/hector_uav_msgs/msg/RawMagnetic.msg;/home/liudiyang1998/Git/ROS-Robotics-By-Example/Chapter_7_code/src/hector_quadrotor/hector_uav_msgs/msg/RawRC.msg;/home/liudiyang1998/Git/ROS-Robotics-By-Example/Chapter_7_code/src/hector_quadrotor/hector_uav_msgs/msg/RC.msg;/home/liudiyang1998/Git/ROS-Robotics-By-Example/Chapter_7_code/src/hector_quadrotor/hector_uav_msgs/msg/RuddersCommand.msg;/home/liudiyang1998/Git/ROS-Robotics-By-Example/Chapter_7_code/src/hector_quadrotor/hector_uav_msgs/msg/ServoCommand.msg;/home/liudiyang1998/Git/ROS-Robotics-By-Example/Chapter_7_code/src/hector_quadrotor/hector_uav_msgs/msg/Supply.msg;/home/liudiyang1998/Git/ROS-Robotics-By-Example/Chapter_7_code/src/hector_quadrotor/hector_uav_msgs/msg/ThrustCommand.msg;/home/liudiyang1998/Git/ROS-Robotics-By-Example/Chapter_7_code/src/hector_quadrotor/hector_uav_msgs/msg/VelocityXYCommand.msg;/home/liudiyang1998/Git/ROS-Robotics-By-Example/Chapter_7_code/src/hector_quadrotor/hector_uav_msgs/msg/VelocityZCommand.msg;/home/liudiyang1998/Git/ROS-Robotics-By-Example/Chapter_7_code/src/hector_quadrotor/hector_uav_msgs/msg/YawrateCommand.msg;/home/liudiyang1998/Git/ROS-Robotics-By-Example/Chapter_7_code/devel/.private/hector_uav_msgs/share/hector_uav_msgs/msg/PoseAction.msg;/home/liudiyang1998/Git/ROS-Robotics-By-Example/Chapter_7_code/devel/.private/hector_uav_msgs/share/hector_uav_msgs/msg/PoseActionGoal.msg;/home/liudiyang1998/Git/ROS-Robotics-By-Example/Chapter_7_code/devel/.private/hector_uav_msgs/share/hector_uav_msgs/msg/PoseActionResult.msg;/home/liudiyang1998/Git/ROS-Robotics-By-Example/Chapter_7_code/devel/.private/hector_uav_msgs/share/hector_uav_msgs/msg/PoseActionFeedback.msg;/home/liudiyang1998/Git/ROS-Robotics-By-Example/Chapter_7_code/devel/.private/hector_uav_msgs/share/hector_uav_msgs/msg/PoseGoal.msg;/home/liudiyang1998/Git/ROS-Robotics-By-Example/Chapter_7_code/devel/.private/hector_uav_msgs/share/hector_uav_msgs/msg/PoseResult.msg;/home/liudiyang1998/Git/ROS-Robotics-By-Example/Chapter_7_code/devel/.private/hector_uav_msgs/share/hector_uav_msgs/msg/PoseFeedback.msg;/home/liudiyang1998/Git/ROS-Robotics-By-Example/Chapter_7_code/devel/.private/hector_uav_msgs/share/hector_uav_msgs/msg/LandingAction.msg;/home/liudiyang1998/Git/ROS-Robotics-By-Example/Chapter_7_code/devel/.private/hector_uav_msgs/share/hector_uav_msgs/msg/LandingActionGoal.msg;/home/liudiyang1998/Git/ROS-Robotics-By-Example/Chapter_7_code/devel/.private/hector_uav_msgs/share/hector_uav_msgs/msg/LandingActionResult.msg;/home/liudiyang1998/Git/ROS-Robotics-By-Example/Chapter_7_code/devel/.private/hector_uav_msgs/share/hector_uav_msgs/msg/LandingActionFeedback.msg;/home/liudiyang1998/Git/ROS-Robotics-By-Example/Chapter_7_code/devel/.private/hector_uav_msgs/share/hector_uav_msgs/msg/LandingGoal.msg;/home/liudiyang1998/Git/ROS-Robotics-By-Example/Chapter_7_code/devel/.private/hector_uav_msgs/share/hector_uav_msgs/msg/LandingResult.msg;/home/liudiyang1998/Git/ROS-Robotics-By-Example/Chapter_7_code/devel/.private/hector_uav_msgs/share/hector_uav_msgs/msg/LandingFeedback.msg;/home/liudiyang1998/Git/ROS-Robotics-By-Example/Chapter_7_code/devel/.private/hector_uav_msgs/share/hector_uav_msgs/msg/TakeoffAction.msg;/home/liudiyang1998/Git/ROS-Robotics-By-Example/Chapter_7_code/devel/.private/hector_uav_msgs/share/hector_uav_msgs/msg/TakeoffActionGoal.msg;/home/liudiyang1998/Git/ROS-Robotics-By-Example/Chapter_7_code/devel/.private/hector_uav_msgs/share/hector_uav_msgs/msg/TakeoffActionResult.msg;/home/liudiyang1998/Git/ROS-Robotics-By-Example/Chapter_7_code/devel/.private/hector_uav_msgs/share/hector_uav_msgs/msg/TakeoffActionFeedback.msg;/home/liudiyang1998/Git/ROS-Robotics-By-Example/Chapter_7_code/devel/.private/hector_uav_msgs/share/hector_uav_msgs/msg/TakeoffGoal.msg;/home/liudiyang1998/Git/ROS-Robotics-By-Example/Chapter_7_code/devel/.private/hector_uav_msgs/share/hector_uav_msgs/msg/TakeoffResult.msg;/home/liudiyang1998/Git/ROS-Robotics-By-Example/Chapter_7_code/devel/.private/hector_uav_msgs/share/hector_uav_msgs/msg/TakeoffFeedback.msg"
# Semicolon-separated .srv files of this package.
services_str = "/home/liudiyang1998/Git/ROS-Robotics-By-Example/Chapter_7_code/src/hector_quadrotor/hector_uav_msgs/srv/EnableMotors.srv"
pkg_name = "hector_uav_msgs"
# Message packages whose definitions these messages reference.
dependencies_str = "actionlib_msgs;std_msgs;geometry_msgs"
# Generator back-ends to run (C++, EusLisp, Common Lisp, Node.js, Python).
langs = "gencpp;geneus;genlisp;gennodejs;genpy"
# Alternating package-name;include-path pairs used to resolve message
# dependencies at generation time.
dep_include_paths_str = "hector_uav_msgs;/home/liudiyang1998/Git/ROS-Robotics-By-Example/Chapter_7_code/src/hector_quadrotor/hector_uav_msgs/msg;hector_uav_msgs;/home/liudiyang1998/Git/ROS-Robotics-By-Example/Chapter_7_code/devel/.private/hector_uav_msgs/share/hector_uav_msgs/msg;actionlib_msgs;/opt/ros/melodic/share/actionlib_msgs/cmake/../msg;std_msgs;/opt/ros/melodic/share/std_msgs/cmake/../msg;geometry_msgs;/opt/ros/melodic/share/geometry_msgs/cmake/../msg"
PYTHON_EXECUTABLE = "/usr/bin/python2"
# '' == 'TRUE' evaluates to False: this package ships no static sources.
package_has_static_sources = '' == 'TRUE'
genmsg_check_deps_script = "/opt/ros/melodic/share/genmsg/cmake/../../../lib/genmsg/genmsg_check_deps.py"
| 537.833333
| 5,473
| 0.865045
| 1,014
| 6,454
| 5.231755
| 0.102564
| 0.118756
| 0.171536
| 0.195099
| 0.820358
| 0.807917
| 0.807917
| 0.807917
| 0.807917
| 0.807917
| 0
| 0.035202
| 0.005268
| 6,454
| 11
| 5,474
| 586.727273
| 0.791122
| 0.007592
| 0
| 0
| 1
| 0.333333
| 0.968296
| 0.96283
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.111111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 14
|
8fe027e664646c2323cdc729f2844c238f677135
| 125
|
py
|
Python
|
django_sorcery/db/meta/__init__.py
|
shosca/django-sorcery
|
1d16c7affe7b8cc8185b7c2ff312ee13efe8f23a
|
[
"MIT"
] | 73
|
2018-05-04T12:44:49.000Z
|
2022-02-16T23:32:04.000Z
|
django_sorcery/db/meta/__init__.py
|
shosca/django-sorcery
|
1d16c7affe7b8cc8185b7c2ff312ee13efe8f23a
|
[
"MIT"
] | 119
|
2018-05-07T14:15:59.000Z
|
2022-03-27T02:29:03.000Z
|
django_sorcery/db/meta/__init__.py
|
shosca/django-sorcery
|
1d16c7affe7b8cc8185b7c2ff312ee13efe8f23a
|
[
"MIT"
] | 9
|
2018-08-06T18:50:09.000Z
|
2021-07-30T08:01:25.000Z
|
from .column import * # noqa
from .composite import * # noqa
from .model import * # noqa
from .relations import * # noqa
| 25
| 32
| 0.68
| 16
| 125
| 5.3125
| 0.4375
| 0.470588
| 0.494118
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.224
| 125
| 4
| 33
| 31.25
| 0.876289
| 0.152
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
64e444c7d811b3d6b2171d60abb0967268b04194
| 32,297
|
py
|
Python
|
obs-thrift-api/tests/obsthriftserver/ObsThriftServer.py
|
pashkal/obs-thrift-api
|
80635f9466d2c819c0d3099492b2e23fd9876821
|
[
"Apache-2.0"
] | 39
|
2020-05-03T08:48:07.000Z
|
2022-02-04T21:12:24.000Z
|
obs-thrift-api/tests/obsthriftserver/ObsThriftServer.py
|
pashkal/obs-thrift-api
|
80635f9466d2c819c0d3099492b2e23fd9876821
|
[
"Apache-2.0"
] | 6
|
2020-07-07T10:20:20.000Z
|
2021-07-30T18:21:28.000Z
|
obs-thrift-api/tests/obsthriftserver/ObsThriftServer.py
|
pashkal/obs-thrift-api
|
80635f9466d2c819c0d3099492b2e23fd9876821
|
[
"Apache-2.0"
] | null | null | null |
#
# Autogenerated by Thrift Compiler (0.14.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#
from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.protocol.TProtocol import TProtocolException
from thrift.TRecursive import fix_spec
import sys
import logging
from .ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
all_structs = []
class Iface(object):
    """Abstract service interface generated by Thrift; Client and
    Processor below both implement it. All methods are no-op stubs."""

    def launchVideo(self, path, layer, sceneName, sourceName, dimensions, clearOnMediaEnd):
        """
        Parameters:
         - path
         - layer
         - sceneName
         - sourceName
         - dimensions
         - clearOnMediaEnd

        """
        pass

    def removeSource(self, sceneName, sourceName):
        """
        Parameters:
         - sceneName
         - sourceName

        """
        pass

    def muteSource(self, sourceName):
        """
        Parameters:
         - sourceName

        """
        pass

    def unmuteSource(self, sourceName):
        """
        Parameters:
         - sourceName

        """
        pass

    def heartbeat(self):
        pass
class Client(Iface):
    """Synchronous RPC client: every call serializes a *_args struct,
    flushes it over the output protocol, then blocks reading the matching
    *_result from the input protocol."""

    def __init__(self, iprot, oprot=None):
        # With a single protocol argument, the same protocol is used for
        # both input and output.
        self._iprot = self._oprot = iprot
        if oprot is not None:
            self._oprot = oprot
        self._seqid = 0

    def launchVideo(self, path, layer, sceneName, sourceName, dimensions, clearOnMediaEnd):
        """
        Parameters:
         - path
         - layer
         - sceneName
         - sourceName
         - dimensions
         - clearOnMediaEnd

        """
        self.send_launchVideo(path, layer, sceneName, sourceName, dimensions, clearOnMediaEnd)
        self.recv_launchVideo()

    def send_launchVideo(self, path, layer, sceneName, sourceName, dimensions, clearOnMediaEnd):
        # Serialize the argument struct and flush it as a CALL message.
        self._oprot.writeMessageBegin('launchVideo', TMessageType.CALL, self._seqid)
        args = launchVideo_args()
        args.path = path
        args.layer = layer
        args.sceneName = sceneName
        args.sourceName = sourceName
        args.dimensions = dimensions
        args.clearOnMediaEnd = clearOnMediaEnd
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_launchVideo(self):
        # Block for the server's reply; re-raise server-side failures that
        # arrive as EXCEPTION messages.
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = launchVideo_result()
        result.read(iprot)
        iprot.readMessageEnd()
        return

    def removeSource(self, sceneName, sourceName):
        """
        Parameters:
         - sceneName
         - sourceName

        """
        self.send_removeSource(sceneName, sourceName)
        self.recv_removeSource()

    def send_removeSource(self, sceneName, sourceName):
        self._oprot.writeMessageBegin('removeSource', TMessageType.CALL, self._seqid)
        args = removeSource_args()
        args.sceneName = sceneName
        args.sourceName = sourceName
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_removeSource(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = removeSource_result()
        result.read(iprot)
        iprot.readMessageEnd()
        return

    def muteSource(self, sourceName):
        """
        Parameters:
         - sourceName

        """
        self.send_muteSource(sourceName)
        self.recv_muteSource()

    def send_muteSource(self, sourceName):
        self._oprot.writeMessageBegin('muteSource', TMessageType.CALL, self._seqid)
        args = muteSource_args()
        args.sourceName = sourceName
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_muteSource(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = muteSource_result()
        result.read(iprot)
        iprot.readMessageEnd()
        return

    def unmuteSource(self, sourceName):
        """
        Parameters:
         - sourceName

        """
        self.send_unmuteSource(sourceName)
        self.recv_unmuteSource()

    def send_unmuteSource(self, sourceName):
        self._oprot.writeMessageBegin('unmuteSource', TMessageType.CALL, self._seqid)
        args = unmuteSource_args()
        args.sourceName = sourceName
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_unmuteSource(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = unmuteSource_result()
        result.read(iprot)
        iprot.readMessageEnd()
        return

    def heartbeat(self):
        self.send_heartbeat()
        self.recv_heartbeat()

    def send_heartbeat(self):
        self._oprot.writeMessageBegin('heartbeat', TMessageType.CALL, self._seqid)
        args = heartbeat_args()
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def recv_heartbeat(self):
        iprot = self._iprot
        (fname, mtype, rseqid) = iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(iprot)
            iprot.readMessageEnd()
            raise x
        result = heartbeat_result()
        result.read(iprot)
        iprot.readMessageEnd()
        return
class Processor(Iface, TProcessor):
    """Server-side dispatcher: reads incoming CALL messages, routes them to
    the wrapped handler via _processMap, and writes back a REPLY or an
    EXCEPTION message."""

    def __init__(self, handler):
        # handler is the user object implementing the Iface methods.
        self._handler = handler
        self._processMap = {}
        self._processMap["launchVideo"] = Processor.process_launchVideo
        self._processMap["removeSource"] = Processor.process_removeSource
        self._processMap["muteSource"] = Processor.process_muteSource
        self._processMap["unmuteSource"] = Processor.process_unmuteSource
        self._processMap["heartbeat"] = Processor.process_heartbeat
        self._on_message_begin = None

    def on_message_begin(self, func):
        # Optional hook invoked with (name, type, seqid) for every message.
        self._on_message_begin = func

    def process(self, iprot, oprot):
        (name, type, seqid) = iprot.readMessageBegin()
        if self._on_message_begin:
            self._on_message_begin(name, type, seqid)
        if name not in self._processMap:
            # Unknown method: drain the payload and report UNKNOWN_METHOD.
            iprot.skip(TType.STRUCT)
            iprot.readMessageEnd()
            x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
            oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
            x.write(oprot)
            oprot.writeMessageEnd()
            oprot.trans.flush()
            return
        else:
            self._processMap[name](self, seqid, iprot, oprot)
        return True

    def process_launchVideo(self, seqid, iprot, oprot):
        args = launchVideo_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = launchVideo_result()
        try:
            self._handler.launchVideo(args.path, args.layer, args.sceneName, args.sourceName, args.dimensions, args.clearOnMediaEnd)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            # Transport failures must propagate; the connection is unusable.
            raise
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            # Any other handler error is reported to the caller as an
            # INTERNAL_ERROR rather than killing the server loop.
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("launchVideo", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_removeSource(self, seqid, iprot, oprot):
        args = removeSource_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = removeSource_result()
        try:
            self._handler.removeSource(args.sceneName, args.sourceName)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("removeSource", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_muteSource(self, seqid, iprot, oprot):
        args = muteSource_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = muteSource_result()
        try:
            self._handler.muteSource(args.sourceName)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("muteSource", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_unmuteSource(self, seqid, iprot, oprot):
        args = unmuteSource_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = unmuteSource_result()
        try:
            self._handler.unmuteSource(args.sourceName)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("unmuteSource", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_heartbeat(self, seqid, iprot, oprot):
        args = heartbeat_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = heartbeat_result()
        try:
            self._handler.heartbeat()
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("heartbeat", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
# HELPER FUNCTIONS AND STRUCTURES
class launchVideo_args(object):
    """
    Argument struct for the launchVideo RPC.

    Attributes:
     - path
     - layer
     - sceneName
     - sourceName
     - dimensions
     - clearOnMediaEnd

    """

    def __init__(self, path=None, layer=None, sceneName=None, sourceName=None, dimensions=None, clearOnMediaEnd=None,):
        self.path = path
        self.layer = layer
        self.sceneName = sceneName
        self.sourceName = sourceName
        self.dimensions = dimensions
        self.clearOnMediaEnd = clearOnMediaEnd

    def read(self, iprot):
        # Fast path: C-accelerated decoding when transport and spec allow it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.path = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.I32:
                    self.layer = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRING:
                    self.sceneName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.STRING:
                    self.sourceName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 5:
                if ftype == TType.STRUCT:
                    self.dimensions = SourceDimensions()
                    self.dimensions.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 6:
                if ftype == TType.BOOL:
                    self.clearOnMediaEnd = iprot.readBool()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: C-accelerated encoding when available.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('launchVideo_args')
        # None-valued fields are treated as unset and omitted from the wire.
        if self.path is not None:
            oprot.writeFieldBegin('path', TType.STRING, 1)
            oprot.writeString(self.path.encode('utf-8') if sys.version_info[0] == 2 else self.path)
            oprot.writeFieldEnd()
        if self.layer is not None:
            oprot.writeFieldBegin('layer', TType.I32, 2)
            oprot.writeI32(self.layer)
            oprot.writeFieldEnd()
        if self.sceneName is not None:
            oprot.writeFieldBegin('sceneName', TType.STRING, 3)
            oprot.writeString(self.sceneName.encode('utf-8') if sys.version_info[0] == 2 else self.sceneName)
            oprot.writeFieldEnd()
        if self.sourceName is not None:
            oprot.writeFieldBegin('sourceName', TType.STRING, 4)
            oprot.writeString(self.sourceName.encode('utf-8') if sys.version_info[0] == 2 else self.sourceName)
            oprot.writeFieldEnd()
        if self.dimensions is not None:
            oprot.writeFieldBegin('dimensions', TType.STRUCT, 5)
            self.dimensions.write(oprot)
            oprot.writeFieldEnd()
        if self.clearOnMediaEnd is not None:
            oprot.writeFieldBegin('clearOnMediaEnd', TType.BOOL, 6)
            oprot.writeBool(self.clearOnMediaEnd)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


all_structs.append(launchVideo_args)
# Field spec consumed by the accelerated encoder/decoder above.
launchVideo_args.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'path', 'UTF8', None, ),  # 1
    (2, TType.I32, 'layer', None, None, ),  # 2
    (3, TType.STRING, 'sceneName', 'UTF8', None, ),  # 3
    (4, TType.STRING, 'sourceName', 'UTF8', None, ),  # 4
    (5, TType.STRUCT, 'dimensions', [SourceDimensions, None], None, ),  # 5
    (6, TType.BOOL, 'clearOnMediaEnd', None, None, ),  # 6
)
class launchVideo_result(object):
    """Result struct for launchVideo; the method declares no return value
    or exceptions, so the struct carries no fields."""

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            else:
                # No known fields: skip everything until STOP.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('launchVideo_result')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


all_structs.append(launchVideo_result)
launchVideo_result.thrift_spec = (
)
class removeSource_args(object):
    """
    Argument struct for the removeSource RPC.

    Attributes:
     - sceneName
     - sourceName

    """

    def __init__(self, sceneName=None, sourceName=None,):
        self.sceneName = sceneName
        self.sourceName = sourceName

    def read(self, iprot):
        # Fast path: C-accelerated decoding when transport and spec allow it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.sceneName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.sourceName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('removeSource_args')
        if self.sceneName is not None:
            oprot.writeFieldBegin('sceneName', TType.STRING, 1)
            oprot.writeString(self.sceneName.encode('utf-8') if sys.version_info[0] == 2 else self.sceneName)
            oprot.writeFieldEnd()
        if self.sourceName is not None:
            oprot.writeFieldBegin('sourceName', TType.STRING, 2)
            oprot.writeString(self.sourceName.encode('utf-8') if sys.version_info[0] == 2 else self.sourceName)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


all_structs.append(removeSource_args)
# Field spec consumed by the accelerated encoder/decoder above.
removeSource_args.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'sceneName', 'UTF8', None, ),  # 1
    (2, TType.STRING, 'sourceName', 'UTF8', None, ),  # 2
)
class removeSource_result(object):
    """Result struct for removeSource; the method declares no return value
    or exceptions, so the struct carries no fields."""

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            else:
                # No known fields: skip everything until STOP.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('removeSource_result')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


all_structs.append(removeSource_result)
removeSource_result.thrift_spec = (
)
class muteSource_args(object):
    """
    Argument struct for the muteSource RPC.

    Attributes:
     - sourceName

    """

    def __init__(self, sourceName=None,):
        self.sourceName = sourceName

    def read(self, iprot):
        # Fast path: C-accelerated decoding when transport and spec allow it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.sourceName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('muteSource_args')
        if self.sourceName is not None:
            oprot.writeFieldBegin('sourceName', TType.STRING, 1)
            oprot.writeString(self.sourceName.encode('utf-8') if sys.version_info[0] == 2 else self.sourceName)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


all_structs.append(muteSource_args)
# Field spec consumed by the accelerated encoder/decoder above.
muteSource_args.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'sourceName', 'UTF8', None, ),  # 1
)
class muteSource_result(object):
    """Result struct for muteSource; the method declares no return value
    or exceptions, so the struct carries no fields."""

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            else:
                # No known fields: skip everything until STOP.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('muteSource_result')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


all_structs.append(muteSource_result)
muteSource_result.thrift_spec = (
)
class unmuteSource_args(object):
    """
    Argument struct for the unmuteSource RPC.

    Attributes:
     - sourceName

    """

    def __init__(self, sourceName=None,):
        self.sourceName = sourceName

    def read(self, iprot):
        # Fast path: C-accelerated decoding when transport and spec allow it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.sourceName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('unmuteSource_args')
        if self.sourceName is not None:
            oprot.writeFieldBegin('sourceName', TType.STRING, 1)
            oprot.writeString(self.sourceName.encode('utf-8') if sys.version_info[0] == 2 else self.sourceName)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


all_structs.append(unmuteSource_args)
# Field spec consumed by the accelerated encoder/decoder above.
unmuteSource_args.thrift_spec = (
    None,  # 0
    (1, TType.STRING, 'sourceName', 'UTF8', None, ),  # 1
)
class unmuteSource_result(object):
    """Result struct for unmuteSource; the method declares no return value
    or exceptions, so the struct carries no fields."""

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            else:
                # No known fields: skip everything until STOP.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('unmuteSource_result')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)


all_structs.append(unmuteSource_result)
unmuteSource_result.thrift_spec = (
)
class heartbeat_args(object):
    """Generated argument struct for the heartbeat call (takes no arguments)."""

    def read(self, iprot):
        """Deserialize from *iprot*; every field is skipped (struct is empty)."""
        # Fast path: accelerated C decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            else:
                # No declared fields: skip anything a newer peer may send.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize to *oprot*; emits an empty struct."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('heartbeat_args')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields: nothing to validate.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
all_structs.append(heartbeat_args)
# Empty field spec; fix_spec() fills in codec defaults.
heartbeat_args.thrift_spec = (
)
class heartbeat_result(object):
    """Generated result struct for the void heartbeat call (no fields)."""

    def read(self, iprot):
        """Deserialize from *iprot*; every field is skipped (struct is empty)."""
        # Fast path: accelerated C decoder when available.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            else:
                # No declared fields: skip anything a newer peer may send.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize to *oprot*; emits an empty struct."""
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('heartbeat_result')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields: nothing to validate.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
all_structs.append(heartbeat_result)
# Empty field spec; fix_spec() fills in codec defaults.
heartbeat_result.thrift_spec = (
)
# Resolve forward references in every registered spec, then drop the
# registration list so it does not linger at module level.
fix_spec(all_structs)
del all_structs
| 33.748171
| 134
| 0.605629
| 3,344
| 32,297
| 5.609749
| 0.052931
| 0.013593
| 0.024468
| 0.012794
| 0.801162
| 0.770457
| 0.748228
| 0.724879
| 0.700517
| 0.700517
| 0
| 0.004725
| 0.292349
| 32,297
| 956
| 135
| 33.783473
| 0.816058
| 0.02251
| 0
| 0.760811
| 1
| 0
| 0.03702
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125676
| false
| 0.006757
| 0.010811
| 0.040541
| 0.244595
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
8f268bf25223bf60b1756877a318bd413bc9e404
| 3,992
|
py
|
Python
|
qap/test_dvars.py
|
manwithadodla/quality-assessment-protocol
|
9f4d660bd67eb20d4b4a28ae7e837e6d396f0318
|
[
"BSD-3-Clause"
] | 38
|
2015-01-23T20:07:22.000Z
|
2021-11-08T07:08:27.000Z
|
qap/test_dvars.py
|
manwithadodla/quality-assessment-protocol
|
9f4d660bd67eb20d4b4a28ae7e837e6d396f0318
|
[
"BSD-3-Clause"
] | 107
|
2015-01-09T00:34:34.000Z
|
2022-02-28T07:44:10.000Z
|
qap/test_dvars.py
|
manwithadodla/quality-assessment-protocol
|
9f4d660bd67eb20d4b4a28ae7e837e6d396f0318
|
[
"BSD-3-Clause"
] | 24
|
2015-09-14T16:11:12.000Z
|
2021-10-04T08:09:16.000Z
|
import pytest
test_sub_dir = "test_data"
@pytest.mark.quick
def test_remove_zero_variance_voxels():
    """remove_zero_variance_voxels must reproduce the pickled reference mask."""
    import os
    import pickle
    import pkg_resources as p
    import nibabel as nb
    import numpy as np

    from qap.dvars import remove_zero_variance_voxels

    func_reorient = p.resource_filename("qap", os.path.join(test_sub_dir,
                                        "func_reorient.nii.gz"))
    func_mask = p.resource_filename("qap", os.path.join(test_sub_dir,
                                    "functional_brain_mask.nii.gz"))
    ref_out = p.resource_filename("qap", os.path.join(test_sub_dir,
                                  "no_zero_variance_voxels_mask.p"))

    func_img = nb.load(func_reorient)
    mask_img = nb.load(func_mask)
    # NOTE(review): get_data() is deprecated in recent nibabel; kept because
    # get_fdata() always returns float64 and could change the exact comparison.
    func_data = func_img.get_data()
    mask_data = mask_img.get_data()

    out_mask_data = remove_zero_variance_voxels(func_data, mask_data)

    # Bug fix: pickle files must be opened in binary mode on Python 3;
    # text mode ("r") makes pickle.load() raise TypeError.
    with open(ref_out, "rb") as f:
        ref_mask_data = pickle.load(f)

    np.testing.assert_array_equal(ref_mask_data, out_mask_data)
@pytest.mark.quick
def test_load():
    """load() must reproduce the first ten rows of the pickled reference array."""
    import os
    import pickle
    import pkg_resources as p
    import numpy as np

    from qap.dvars import load

    func_reorient = p.resource_filename("qap", os.path.join(test_sub_dir,
                                        "func_reorient.nii.gz"))
    func_mask = p.resource_filename("qap", os.path.join(test_sub_dir,
                                    "functional_brain_mask.nii.gz"))
    ref_out = p.resource_filename("qap", os.path.join(test_sub_dir,
                                  "loaded_func.p"))

    func_out_data = load(func_reorient, func_mask)
    # match the reference array
    func_out_data = func_out_data[0:10]

    # Bug fix: pickle files must be opened in binary mode on Python 3;
    # text mode ("r") makes pickle.load() raise TypeError.
    with open(ref_out, "rb") as f:
        ref_out_data = pickle.load(f)

    np.testing.assert_array_equal(ref_out_data, func_out_data)
@pytest.mark.quick
def test_robust_stdev():
    """robust_stdev must reproduce the pickled reference output."""
    import os
    import pickle
    import pkg_resources as p
    import numpy as np

    from qap.dvars import robust_stdev

    func_data_file = p.resource_filename("qap", os.path.join(test_sub_dir,
                                         "loaded_func.p"))
    ref_out = p.resource_filename("qap", os.path.join(test_sub_dir,
                                  "robust_stdev_output.p"))

    # Bug fix: pickle files must be opened in binary mode on Python 3;
    # text mode ("r") makes pickle.load() raise TypeError.
    with open(func_data_file, "rb") as f:
        func_data = pickle.load(f)
    with open(ref_out, "rb") as f:
        ref_mask_data = pickle.load(f)

    func_out_data = robust_stdev(func_data)

    np.testing.assert_array_equal(ref_mask_data, func_out_data)
@pytest.mark.quick
def test_ar1():
    """ar1 must reproduce the pickled reference output (to float precision)."""
    import os
    import pickle
    import pkg_resources as p
    import numpy as np

    from qap.dvars import ar1

    func_data_file = p.resource_filename("qap", os.path.join(test_sub_dir,
                                         "loaded_func.p"))
    ref_out = p.resource_filename("qap", os.path.join(test_sub_dir,
                                  "ar1_output.p"))

    # Bug fix: pickle files must be opened in binary mode on Python 3;
    # text mode ("r") makes pickle.load() raise TypeError.
    with open(func_data_file, "rb") as f:
        func_data = pickle.load(f)
    with open(ref_out, "rb") as f:
        ref_out_data = pickle.load(f)

    func_out_data = ar1(func_data)

    np.testing.assert_array_almost_equal(ref_out_data, func_out_data)
| 30.707692
| 96
| 0.522295
| 480
| 3,992
| 4.027083
| 0.13125
| 0.037248
| 0.056906
| 0.103466
| 0.829798
| 0.793585
| 0.74806
| 0.721676
| 0.700983
| 0.6627
| 0
| 0.002923
| 0.40005
| 3,992
| 129
| 97
| 30.945736
| 0.804175
| 0.006263
| 0
| 0.670886
| 0
| 0
| 0.061302
| 0.026993
| 0
| 0
| 0
| 0
| 0.050633
| 1
| 0.050633
| false
| 0
| 0.316456
| 0
| 0.367089
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
56b98a6825ad04d1cefea30733acb09c4f23f793
| 12,508
|
py
|
Python
|
unittests/tools/test_mozilla_observatory_parser.py
|
M-Rod101/django-DefectDojo
|
7b09a00b1a526abaf40455c2ddec16aaa06b16e2
|
[
"BSD-3-Clause"
] | 249
|
2016-09-06T21:04:40.000Z
|
2018-01-19T15:59:44.000Z
|
unittests/tools/test_mozilla_observatory_parser.py
|
OWASP/django-DefectDojo
|
c101e47b294863877cd68a82d0cc60f8017b45b1
|
[
"BSD-3-Clause"
] | 255
|
2016-09-06T21:36:37.000Z
|
2018-01-19T19:57:57.000Z
|
unittests/tools/test_mozilla_observatory_parser.py
|
M-Rod101/django-DefectDojo
|
7b09a00b1a526abaf40455c2ddec16aaa06b16e2
|
[
"BSD-3-Clause"
] | 152
|
2016-09-06T21:04:54.000Z
|
2018-01-18T08:52:24.000Z
|
from ..dojo_test_case import DojoTestCase
from dojo.models import Test
from dojo.tools.mozilla_observatory.parser import MozillaObservatoryParser
class TestMozillaObservatoryParser(DojoTestCase):
    """Unit tests for MozillaObservatoryParser against bundled scan fixtures.

    Fix applied throughout: every fixture was opened with a bare open() and
    never closed (file-handle leak / ResourceWarning). Each open is now a
    context manager; the parser consumes the file inside the `with` block.
    """

    def test_parse_file_with_no_vuln_has_no_findings(self):
        with open("unittests/scans/mozilla_observatory/mozilla_no_vuln.json") as testfile:
            parser = MozillaObservatoryParser()
            findings = parser.get_findings(testfile, Test())
        self.assertEqual(4, len(findings))
        # test that all findings are not active
        for finding in findings:
            self.assertFalse(finding.active)
            if "strict-transport-security" == finding.vuln_id_from_tool:
                with self.subTest(vuln_id_from_tool=finding.vuln_id_from_tool):
                    self.assertEqual("Preloaded via the HTTP Strict Transport Security (HSTS) preloading process", finding.title)
                    self.assertEqual("Info", finding.severity)
                    self.assertIn("Preloaded via the HTTP Strict Transport Security (HSTS) preloading process", finding.description)

    def test_parse_file_with_two_vuln_has_two_findings(self):
        with open("unittests/scans/mozilla_observatory/mozilla_gitlab_two_vuln.json") as testfile:
            parser = MozillaObservatoryParser()
            findings = parser.get_findings(testfile, Test())
        self.assertEqual(2, len(findings))

    def test_parse_file_with_multiple_vuln_has_multiple_finding(self):
        with open("unittests/scans/mozilla_observatory/mozilla_google_many_vuln.json") as testfile:
            parser = MozillaObservatoryParser()
            findings = parser.get_findings(testfile, Test())
        self.assertEqual(6, len(findings))

    def test_parse_file_cli_mozilla_org(self):
        """Test from the CLI"""
        with open("unittests/scans/mozilla_observatory/mozilla_org.json") as testfile:
            parser = MozillaObservatoryParser()
            findings = parser.get_findings(testfile, Test())
        self.assertEqual(12, len(findings))
        for finding in findings:
            if "content-security-policy" == finding.vuln_id_from_tool:
                with self.subTest(vuln_id_from_tool=finding.vuln_id_from_tool):
                    self.assertTrue(finding.active)
                    self.assertEqual("Content Security Policy (CSP) implemented unsafely. This includes 'unsafe-inline' or data: inside script-src, overly broad sources such as https: inside object-src or script-src, or not restricting the sources for object-src or script-src.", finding.title)
                    self.assertEqual("Medium", finding.severity)
                    self.assertIn("Content Security Policy (CSP) implemented unsafely. This includes 'unsafe-inline' or data: inside script-src, overly broad sources such as https: inside object-src or script-src, or not restricting the sources for object-src or script-src.", finding.description)
            else:
                with self.subTest(vuln_id_from_tool=finding.vuln_id_from_tool):
                    self.assertFalse(finding.active)

    def test_parse_file_cli_demo(self):
        """Test from the CLI"""
        with open("unittests/scans/mozilla_observatory/demo.json") as testfile:
            parser = MozillaObservatoryParser()
            findings = parser.get_findings(testfile, Test())
        self.assertEqual(12, len(findings))
        for finding in findings:
            if "content-security-policy" == finding.vuln_id_from_tool:
                with self.subTest(vuln_id_from_tool="content-security-policy"):
                    self.assertTrue(finding.active)
                    self.assertEqual("Content Security Policy (CSP) header not implemented", finding.title)
                    self.assertEqual("Medium", finding.severity)
                    self.assertIn("Content Security Policy (CSP) header not implemented", finding.description)
                    self.assertEqual("content-security-policy", finding.vuln_id_from_tool)
            elif "cookies" == finding.vuln_id_from_tool:
                with self.subTest(vuln_id_from_tool="cookies"):
                    self.assertTrue(finding.active)
                    self.assertEqual("Cookies set without using the Secure flag or set over HTTP", finding.title)
                    self.assertEqual("Medium", finding.severity)
                    self.assertIn("Cookies set without using the Secure flag or set over HTTP", finding.description)
            elif "strict-transport-security" == finding.vuln_id_from_tool:
                with self.subTest(vuln_id_from_tool="strict-transport-security"):
                    self.assertTrue(finding.active)
                    self.assertEqual("HTTP Strict Transport Security (HSTS) header not implemented", finding.title)
                    self.assertEqual("Medium", finding.severity)
                    self.assertIn("HTTP Strict Transport Security (HSTS) header not implemented", finding.description)
            else:
                with self.subTest(vuln_id_from_tool=finding.vuln_id_from_tool):
                    self.assertFalse(finding.active)

    def test_parse_file_cli_juicy(self):
        """Test from the CLI"""
        with open("unittests/scans/mozilla_observatory/juicy.json") as testfile:
            parser = MozillaObservatoryParser()
            findings = parser.get_findings(testfile, Test())
        self.assertEqual(12, len(findings))
        for finding in findings:
            if "content-security-policy" == finding.vuln_id_from_tool:
                with self.subTest(vuln_id_from_tool=finding.vuln_id_from_tool):
                    self.assertTrue(finding.active)
                    self.assertEqual("Content Security Policy (CSP) header not implemented", finding.title)
                    self.assertEqual("Medium", finding.severity)
                    self.assertIn("Content Security Policy (CSP) header not implemented", finding.description)
            elif "strict-transport-security" == finding.vuln_id_from_tool:
                with self.subTest(vuln_id_from_tool=finding.vuln_id_from_tool):
                    self.assertTrue(finding.active)
                    self.assertEqual("HTTP Strict Transport Security (HSTS) header not implemented", finding.title)
                    self.assertEqual("Medium", finding.severity)
                    self.assertIn("HTTP Strict Transport Security (HSTS) header not implemented", finding.description)
            elif "x-xss-protection" == finding.vuln_id_from_tool:
                with self.subTest(vuln_id_from_tool=finding.vuln_id_from_tool):
                    self.assertTrue(finding.active)
                    self.assertEqual("X-XSS-Protection header not implemented", finding.title)
                    self.assertEqual("Low", finding.severity)
                    self.assertIn("X-XSS-Protection header not implemented", finding.description)
            elif "subresource-integrity" == finding.vuln_id_from_tool:
                with self.subTest(vuln_id_from_tool=finding.vuln_id_from_tool):
                    self.assertTrue(finding.active)
                    self.assertEqual("Subresource Integrity (SRI) not implemented, and external scripts are loaded over HTTP or use protocol-relative URLs via src=\"//...\"", finding.title)
                    self.assertEqual("High", finding.severity)
                    self.assertIn("Subresource Integrity (SRI) not implemented", finding.description)
            elif "redirection" == finding.vuln_id_from_tool:
                with self.subTest(vuln_id_from_tool=finding.vuln_id_from_tool):
                    self.assertTrue(finding.active)
                    self.assertEqual("Does not redirect to an HTTPS site", finding.title)
                    self.assertEqual("Medium", finding.severity)
                    self.assertIn("Does not redirect to an HTTPS site", finding.description)
            else:
                with self.subTest(vuln_id_from_tool=finding.vuln_id_from_tool):
                    self.assertFalse(finding.active)

    def test_parse_file_cli_nmap_scanme(self):
        """Test from the CLI"""
        with open("unittests/scans/mozilla_observatory/nmap_scanme.json") as testfile:
            parser = MozillaObservatoryParser()
            findings = parser.get_findings(testfile, Test())
        self.assertEqual(12, len(findings))
        for finding in findings:
            if "content-security-policy" == finding.vuln_id_from_tool:
                with self.subTest(vuln_id_from_tool=finding.vuln_id_from_tool):
                    self.assertTrue(finding.active)
                    self.assertEqual("Content Security Policy (CSP) header not implemented", finding.title)
                    self.assertEqual("Medium", finding.severity)
                    self.assertIn("Content Security Policy (CSP) header not implemented", finding.description)
            elif "strict-transport-security" == finding.vuln_id_from_tool:
                with self.subTest(vuln_id_from_tool=finding.vuln_id_from_tool):
                    self.assertTrue(finding.active)
                    self.assertEqual("HTTP Strict Transport Security (HSTS) header cannot be set, as site contains an invalid certificate chain", finding.title)
                    self.assertEqual("Medium", finding.severity)
                    self.assertIn("HTTP Strict Transport Security (HSTS) header cannot be set, as site contains an invalid certificate chain", finding.description)
            elif "x-xss-protection" == finding.vuln_id_from_tool:
                with self.subTest(vuln_id_from_tool=finding.vuln_id_from_tool):
                    self.assertTrue(finding.active)
                    self.assertEqual("X-XSS-Protection header not implemented", finding.title)
                    self.assertEqual("Low", finding.severity)
                    self.assertIn("X-XSS-Protection header not implemented", finding.description)
            elif "x-frame-options" == finding.vuln_id_from_tool:
                with self.subTest(vuln_id_from_tool=finding.vuln_id_from_tool):
                    self.assertTrue(finding.active)
                    self.assertEqual("X-Frame-Options (XFO) header not implemented", finding.title)
                    self.assertEqual("Medium", finding.severity)
                    self.assertIn("X-Frame-Options (XFO) header not implemented", finding.description)
            elif "x-content-type-options" == finding.vuln_id_from_tool:
                with self.subTest(vuln_id_from_tool=finding.vuln_id_from_tool):
                    self.assertTrue(finding.active)
                    self.assertEqual("X-Content-Type-Options header not implemented", finding.title)
                    self.assertEqual("Low", finding.severity)
                    self.assertIn("X-Content-Type-Options header not implemented", finding.description)
            elif "subresource-integrity" == finding.vuln_id_from_tool:
                with self.subTest(vuln_id_from_tool=finding.vuln_id_from_tool):
                    self.assertTrue(finding.active)
                    self.assertEqual("Subresource Integrity (SRI) not implemented, and external scripts are loaded over HTTP or use protocol-relative URLs via src=\"//...\"", finding.title)
                    self.assertEqual("High", finding.severity)
                    self.assertIn("Subresource Integrity (SRI) not implemented", finding.description)
            elif "redirection" == finding.vuln_id_from_tool:
                with self.subTest(vuln_id_from_tool=finding.vuln_id_from_tool):
                    self.assertTrue(finding.active)
                    self.assertEqual("Initial redirection from HTTP to HTTPS is to a different host, preventing HSTS", finding.title)
                    self.assertEqual("Low", finding.severity)
                    self.assertIn("Initial redirection from HTTP to HTTPS is to a different host, preventing HSTS", finding.description)
            elif "referrer-policy-private" == finding.vuln_id_from_tool:
                with self.subTest(vuln_id_from_tool=finding.vuln_id_from_tool):
                    self.assertTrue(finding.active)
                    self.assertEqual("Referrer-Policy header not implemented", finding.title)
                    self.assertEqual("Info", finding.severity)
                    self.assertIn("Referrer-Policy header not implemented", finding.description)
            else:
                with self.subTest(vuln_id_from_tool=finding.vuln_id_from_tool):
                    self.assertFalse(finding.active)
| 69.877095
| 297
| 0.655421
| 1,406
| 12,508
| 5.653627
| 0.105263
| 0.045289
| 0.075481
| 0.105674
| 0.933702
| 0.925903
| 0.910303
| 0.904768
| 0.873695
| 0.823626
| 0
| 0.001182
| 0.255836
| 12,508
| 178
| 298
| 70.269663
| 0.852815
| 0.008794
| 0
| 0.709091
| 0
| 0.024242
| 0.272845
| 0.062535
| 0
| 0
| 0
| 0
| 0.509091
| 1
| 0.042424
| false
| 0
| 0.018182
| 0
| 0.066667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
56c1d3f2c0415aefbfd48de08f03f96170cb5a02
| 33,235
|
py
|
Python
|
sdk/python/pulumi_commercetools/cart_discount.py
|
unplatform-io/pulumi-commercetools
|
b81b998f99995c2ab7eb05a45220d414ae414da3
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2021-03-05T10:13:36.000Z
|
2021-03-05T10:13:36.000Z
|
sdk/python/pulumi_commercetools/cart_discount.py
|
unplatform-io/pulumi-commercetools
|
b81b998f99995c2ab7eb05a45220d414ae414da3
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2021-09-17T07:23:39.000Z
|
2021-09-20T12:34:51.000Z
|
sdk/python/pulumi_commercetools/cart_discount.py
|
unplatform-io/pulumi-commercetools
|
b81b998f99995c2ab7eb05a45220d414ae414da3
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
from . import outputs
from ._inputs import *
__all__ = ['CartDiscountArgs', 'CartDiscount']
@pulumi.input_type
class CartDiscountArgs:
    # NOTE: generated by the Pulumi Terraform Bridge (tfgen); per the file
    # header this class must not be hand-edited — only comments are added here.
    def __init__(__self__, *,
                 predicate: pulumi.Input[str],
                 sort_order: pulumi.Input[str],
                 value: pulumi.Input['CartDiscountValueArgs'],
                 description: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 is_active: Optional[pulumi.Input[bool]] = None,
                 key: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 requires_discount_code: Optional[pulumi.Input[bool]] = None,
                 stacking_mode: Optional[pulumi.Input[str]] = None,
                 target: Optional[pulumi.Input['CartDiscountTargetArgs']] = None,
                 valid_from: Optional[pulumi.Input[str]] = None,
                 valid_until: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a CartDiscount resource.
        :param pulumi.Input[str] predicate: A valid [Cart Predicate](https://docs.commercetools.com/api/projects/predicates#cart-predicates)
        :param pulumi.Input[str] sort_order: The string must contain a number between 0 and 1. All matching cart discounts are applied to a cart in the order defined
               by this field. A discount with greater sort order is prioritized higher than a discount with lower sort order. The sort
               order is unambiguous among all cart discounts
        :param pulumi.Input['CartDiscountValueArgs'] value: Defines the effect the discount will have.
               [CartDiscountValue](https://docs.commercetools.com/api/projects/cartDiscounts#cartdiscountvalue)
        :param pulumi.Input[Mapping[str, Any]] description: [LocalizedString](https://docs.commercetools.com/api/types#localizedstring)
        :param pulumi.Input[bool] is_active: Only active discount can be applied to the cart
        :param pulumi.Input[str] key: User-specific unique identifier for a cart discount. Must be unique across a project
        :param pulumi.Input[Mapping[str, Any]] name: [LocalizedString](https://docs.commercetools.com/api/types#localizedstring)
        :param pulumi.Input[bool] requires_discount_code: States whether the discount can only be used in a connection with a
               [DiscountCode](https://docs.commercetools.com/api/projects/discountCodes#discountcode)
        :param pulumi.Input[str] stacking_mode: Specifies whether the application of this discount causes the following discounts to be ignored
        :param pulumi.Input['CartDiscountTargetArgs'] target: Empty when the value has type giftLineItem, otherwise a
               [CartDiscountTarget](https://docs.commercetools.com/api/projects/cartDiscounts#cartdiscounttarget)
        """
        # Required inputs are always recorded; optional ones only when the
        # caller supplied them, so Pulumi can tell "unset" from an explicit value.
        pulumi.set(__self__, "predicate", predicate)
        pulumi.set(__self__, "sort_order", sort_order)
        pulumi.set(__self__, "value", value)
        if description is not None:
            pulumi.set(__self__, "description", description)
        if is_active is not None:
            pulumi.set(__self__, "is_active", is_active)
        if key is not None:
            pulumi.set(__self__, "key", key)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if requires_discount_code is not None:
            pulumi.set(__self__, "requires_discount_code", requires_discount_code)
        if stacking_mode is not None:
            pulumi.set(__self__, "stacking_mode", stacking_mode)
        if target is not None:
            pulumi.set(__self__, "target", target)
        if valid_from is not None:
            pulumi.set(__self__, "valid_from", valid_from)
        if valid_until is not None:
            pulumi.set(__self__, "valid_until", valid_until)

    # Property/setter pairs below delegate to pulumi.get/pulumi.set; the
    # getter name= argument maps the snake_case attribute to its camelCase
    # provider-schema key.
    @property
    @pulumi.getter
    def predicate(self) -> pulumi.Input[str]:
        """
        A valid [Cart Predicate](https://docs.commercetools.com/api/projects/predicates#cart-predicates)
        """
        return pulumi.get(self, "predicate")

    @predicate.setter
    def predicate(self, value: pulumi.Input[str]):
        pulumi.set(self, "predicate", value)

    @property
    @pulumi.getter(name="sortOrder")
    def sort_order(self) -> pulumi.Input[str]:
        """
        The string must contain a number between 0 and 1. All matching cart discounts are applied to a cart in the order defined
        by this field. A discount with greater sort order is prioritized higher than a discount with lower sort order. The sort
        order is unambiguous among all cart discounts
        """
        return pulumi.get(self, "sort_order")

    @sort_order.setter
    def sort_order(self, value: pulumi.Input[str]):
        pulumi.set(self, "sort_order", value)

    @property
    @pulumi.getter
    def value(self) -> pulumi.Input['CartDiscountValueArgs']:
        """
        Defines the effect the discount will have.
        [CartDiscountValue](https://docs.commercetools.com/api/projects/cartDiscounts#cartdiscountvalue)
        """
        return pulumi.get(self, "value")

    @value.setter
    def value(self, value: pulumi.Input['CartDiscountValueArgs']):
        pulumi.set(self, "value", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        [LocalizedString](https://docs.commercetools.com/api/types#localizedstring)
        """
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter(name="isActive")
    def is_active(self) -> Optional[pulumi.Input[bool]]:
        """
        Only active discount can be applied to the cart
        """
        return pulumi.get(self, "is_active")

    @is_active.setter
    def is_active(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "is_active", value)

    @property
    @pulumi.getter
    def key(self) -> Optional[pulumi.Input[str]]:
        """
        User-specific unique identifier for a cart discount. Must be unique across a project
        """
        return pulumi.get(self, "key")

    @key.setter
    def key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "key", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        [LocalizedString](https://docs.commercetools.com/api/types#localizedstring)
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="requiresDiscountCode")
    def requires_discount_code(self) -> Optional[pulumi.Input[bool]]:
        """
        States whether the discount can only be used in a connection with a
        [DiscountCode](https://docs.commercetools.com/api/projects/discountCodes#discountcode)
        """
        return pulumi.get(self, "requires_discount_code")

    @requires_discount_code.setter
    def requires_discount_code(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "requires_discount_code", value)

    @property
    @pulumi.getter(name="stackingMode")
    def stacking_mode(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies whether the application of this discount causes the following discounts to be ignored
        """
        return pulumi.get(self, "stacking_mode")

    @stacking_mode.setter
    def stacking_mode(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "stacking_mode", value)

    @property
    @pulumi.getter
    def target(self) -> Optional[pulumi.Input['CartDiscountTargetArgs']]:
        """
        Empty when the value has type giftLineItem, otherwise a
        [CartDiscountTarget](https://docs.commercetools.com/api/projects/cartDiscounts#cartdiscounttarget)
        """
        return pulumi.get(self, "target")

    @target.setter
    def target(self, value: Optional[pulumi.Input['CartDiscountTargetArgs']]):
        pulumi.set(self, "target", value)

    @property
    @pulumi.getter(name="validFrom")
    def valid_from(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "valid_from")

    @valid_from.setter
    def valid_from(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "valid_from", value)

    @property
    @pulumi.getter(name="validUntil")
    def valid_until(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "valid_until")

    @valid_until.setter
    def valid_until(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "valid_until", value)
@pulumi.input_type
class _CartDiscountState:
def __init__(__self__, *,
description: Optional[pulumi.Input[Mapping[str, Any]]] = None,
is_active: Optional[pulumi.Input[bool]] = None,
key: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[Mapping[str, Any]]] = None,
predicate: Optional[pulumi.Input[str]] = None,
requires_discount_code: Optional[pulumi.Input[bool]] = None,
sort_order: Optional[pulumi.Input[str]] = None,
stacking_mode: Optional[pulumi.Input[str]] = None,
target: Optional[pulumi.Input['CartDiscountTargetArgs']] = None,
valid_from: Optional[pulumi.Input[str]] = None,
valid_until: Optional[pulumi.Input[str]] = None,
value: Optional[pulumi.Input['CartDiscountValueArgs']] = None,
version: Optional[pulumi.Input[int]] = None):
"""
Input properties used for looking up and filtering CartDiscount resources.
:param pulumi.Input[Mapping[str, Any]] description: [LocalizedString](https://docs.commercetools.com/api/types#localizedstring)
:param pulumi.Input[bool] is_active: Only active discount can be applied to the cart
:param pulumi.Input[str] key: User-specific unique identifier for a cart discount. Must be unique across a project
:param pulumi.Input[Mapping[str, Any]] name: [LocalizedString](https://docs.commercetools.com/api/types#localizedstring)
:param pulumi.Input[str] predicate: A valid [Cart Predicate](https://docs.commercetools.com/api/projects/predicates#cart-predicates)
:param pulumi.Input[bool] requires_discount_code: States whether the discount can only be used in a connection with a
[DiscountCode](https://docs.commercetools.com/api/projects/discountCodes#discountcode)
:param pulumi.Input[str] sort_order: The string must contain a number between 0 and 1. All matching cart discounts are applied to a cart in the order defined
by this field. A discount with greater sort order is prioritized higher than a discount with lower sort order. The sort
order is unambiguous among all cart discounts
:param pulumi.Input[str] stacking_mode: Specifies whether the application of this discount causes the following discounts to be ignored
:param pulumi.Input['CartDiscountTargetArgs'] target: Empty when the value has type giftLineItem, otherwise a
[CartDiscountTarget](https://docs.commercetools.com/api/projects/cartDiscounts#cartdiscounttarget)
:param pulumi.Input['CartDiscountValueArgs'] value: Defines the effect the discount will have.
[CartDiscountValue](https://docs.commercetools.com/api/projects/cartDiscounts#cartdiscountvalue)
"""
if description is not None:
pulumi.set(__self__, "description", description)
if is_active is not None:
pulumi.set(__self__, "is_active", is_active)
if key is not None:
pulumi.set(__self__, "key", key)
if name is not None:
pulumi.set(__self__, "name", name)
if predicate is not None:
pulumi.set(__self__, "predicate", predicate)
if requires_discount_code is not None:
pulumi.set(__self__, "requires_discount_code", requires_discount_code)
if sort_order is not None:
pulumi.set(__self__, "sort_order", sort_order)
if stacking_mode is not None:
pulumi.set(__self__, "stacking_mode", stacking_mode)
if target is not None:
pulumi.set(__self__, "target", target)
if valid_from is not None:
pulumi.set(__self__, "valid_from", valid_from)
if valid_until is not None:
pulumi.set(__self__, "valid_until", valid_until)
if value is not None:
pulumi.set(__self__, "value", value)
if version is not None:
pulumi.set(__self__, "version", version)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
[LocalizedString](https://docs.commercetools.com/api/types#localizedstring)
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter(name="isActive")
def is_active(self) -> Optional[pulumi.Input[bool]]:
"""
Only active discount can be applied to the cart
"""
return pulumi.get(self, "is_active")
@is_active.setter
def is_active(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "is_active", value)
@property
@pulumi.getter
def key(self) -> Optional[pulumi.Input[str]]:
"""
User-specific unique identifier for a cart discount. Must be unique across a project
"""
return pulumi.get(self, "key")
@key.setter
def key(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "key", value)
    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        [LocalizedString](https://docs.commercetools.com/api/types#localizedstring)
        """
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        # Localized (locale -> text mapping) display name of the cart discount.
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter
    def predicate(self) -> Optional[pulumi.Input[str]]:
        """
        A valid [Cart Predicate](https://docs.commercetools.com/api/projects/predicates#cart-predicates)
        """
        return pulumi.get(self, "predicate")
    @predicate.setter
    def predicate(self, value: Optional[pulumi.Input[str]]):
        # Predicate expression string; validated server-side by commercetools.
        pulumi.set(self, "predicate", value)
    @property
    @pulumi.getter(name="requiresDiscountCode")
    def requires_discount_code(self) -> Optional[pulumi.Input[bool]]:
        """
        States whether the discount can only be used in a connection with a
        [DiscountCode](https://docs.commercetools.com/api/projects/discountCodes#discountcode)
        """
        return pulumi.get(self, "requires_discount_code")
    @requires_discount_code.setter
    def requires_discount_code(self, value: Optional[pulumi.Input[bool]]):
        # Wire/provider property name is "requiresDiscountCode".
        pulumi.set(self, "requires_discount_code", value)
    @property
    @pulumi.getter(name="sortOrder")
    def sort_order(self) -> Optional[pulumi.Input[str]]:
        """
        The string must contain a number between 0 and 1. All matching cart discounts are applied to a cart in the order defined
        by this field. A discount with greater sort order is prioritized higher than a discount with lower sort order. The sort
        order is unambiguous among all cart discounts
        """
        return pulumi.get(self, "sort_order")
    @sort_order.setter
    def sort_order(self, value: Optional[pulumi.Input[str]]):
        # Decimal-in-a-string in (0, 1); wire name is "sortOrder".
        pulumi.set(self, "sort_order", value)
    @property
    @pulumi.getter(name="stackingMode")
    def stacking_mode(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies whether the application of this discount causes the following discounts to be ignored
        """
        return pulumi.get(self, "stacking_mode")
    @stacking_mode.setter
    def stacking_mode(self, value: Optional[pulumi.Input[str]]):
        # Wire/provider property name is "stackingMode".
        pulumi.set(self, "stacking_mode", value)
    @property
    @pulumi.getter
    def target(self) -> Optional[pulumi.Input['CartDiscountTargetArgs']]:
        """
        Empty when the value has type giftLineItem, otherwise a
        [CartDiscountTarget](https://docs.commercetools.com/api/projects/cartDiscounts#cartdiscounttarget)
        """
        return pulumi.get(self, "target")
    @target.setter
    def target(self, value: Optional[pulumi.Input['CartDiscountTargetArgs']]):
        # Mutually constrained with `value`: left empty for giftLineItem values.
        pulumi.set(self, "target", value)
    # NOTE(review): presumably a datetime string marking when the discount
    # starts being valid -- confirm format against the provider schema.
    @property
    @pulumi.getter(name="validFrom")
    def valid_from(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "valid_from")
    @valid_from.setter
    def valid_from(self, value: Optional[pulumi.Input[str]]):
        # Wire/provider property name is "validFrom".
        pulumi.set(self, "valid_from", value)
    # NOTE(review): presumably a datetime string marking when the discount
    # stops being valid -- confirm format against the provider schema.
    @property
    @pulumi.getter(name="validUntil")
    def valid_until(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "valid_until")
    @valid_until.setter
    def valid_until(self, value: Optional[pulumi.Input[str]]):
        # Wire/provider property name is "validUntil".
        pulumi.set(self, "valid_until", value)
    @property
    @pulumi.getter
    def value(self) -> Optional[pulumi.Input['CartDiscountValueArgs']]:
        """
        Defines the effect the discount will have.
        [CartDiscountValue](https://docs.commercetools.com/api/projects/cartDiscounts#cartdiscountvalue)
        """
        return pulumi.get(self, "value")
    @value.setter
    def value(self, value: Optional[pulumi.Input['CartDiscountValueArgs']]):
        # The discount effect itself (e.g. relative/absolute/giftLineItem).
        pulumi.set(self, "value", value)
    # NOTE(review): the resource's _internal_init forces this to None at
    # creation time, so it appears to be a provider-assigned revision counter.
    @property
    @pulumi.getter
    def version(self) -> Optional[pulumi.Input[int]]:
        return pulumi.get(self, "version")
    @version.setter
    def version(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "version", value)
class CartDiscount(pulumi.CustomResource):
    """Generated Pulumi resource wrapping the provider type
    'commercetools:index/cartDiscount:CartDiscount'.

    May be constructed either from a ``CartDiscountArgs`` object or from the
    individual keyword arguments documented on the overloads below.
    """
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 description: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 is_active: Optional[pulumi.Input[bool]] = None,
                 key: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 predicate: Optional[pulumi.Input[str]] = None,
                 requires_discount_code: Optional[pulumi.Input[bool]] = None,
                 sort_order: Optional[pulumi.Input[str]] = None,
                 stacking_mode: Optional[pulumi.Input[str]] = None,
                 target: Optional[pulumi.Input[pulumi.InputType['CartDiscountTargetArgs']]] = None,
                 valid_from: Optional[pulumi.Input[str]] = None,
                 valid_until: Optional[pulumi.Input[str]] = None,
                 value: Optional[pulumi.Input[pulumi.InputType['CartDiscountValueArgs']]] = None,
                 __props__=None):
        """
        Create a CartDiscount resource with the given unique name, props, and options.
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[Mapping[str, Any]] description: [LocalizedString](https://docs.commercetools.com/api/types#localizedstring)
        :param pulumi.Input[bool] is_active: Only active discount can be applied to the cart
        :param pulumi.Input[str] key: User-specific unique identifier for a cart discount. Must be unique across a project
        :param pulumi.Input[Mapping[str, Any]] name: [LocalizedString](https://docs.commercetools.com/api/types#localizedstring)
        :param pulumi.Input[str] predicate: A valid [Cart Predicate](https://docs.commercetools.com/api/projects/predicates#cart-predicates)
        :param pulumi.Input[bool] requires_discount_code: States whether the discount can only be used in a connection with a
               [DiscountCode](https://docs.commercetools.com/api/projects/discountCodes#discountcode)
        :param pulumi.Input[str] sort_order: The string must contain a number between 0 and 1. All matching cart discounts are applied to a cart in the order defined
               by this field. A discount with greater sort order is prioritized higher than a discount with lower sort order. The sort
               order is unambiguous among all cart discounts
        :param pulumi.Input[str] stacking_mode: Specifies whether the application of this discount causes the following discounts to be ignored
        :param pulumi.Input[pulumi.InputType['CartDiscountTargetArgs']] target: Empty when the value has type giftLineItem, otherwise a
               [CartDiscountTarget](https://docs.commercetools.com/api/projects/cartDiscounts#cartdiscounttarget)
        :param pulumi.Input[pulumi.InputType['CartDiscountValueArgs']] value: Defines the effect the discount will have.
               [CartDiscountValue](https://docs.commercetools.com/api/projects/cartDiscounts#cartdiscountvalue)
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: CartDiscountArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Create a CartDiscount resource with the given unique name, props, and options.
        :param str resource_name: The name of the resource.
        :param CartDiscountArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads above: an args object, if one was
        # passed, is unpacked into keyword arguments for _internal_init.
        resource_args, opts = _utilities.get_resource_args_opts(CartDiscountArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       description: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                       is_active: Optional[pulumi.Input[bool]] = None,
                       key: Optional[pulumi.Input[str]] = None,
                       name: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                       predicate: Optional[pulumi.Input[str]] = None,
                       requires_discount_code: Optional[pulumi.Input[bool]] = None,
                       sort_order: Optional[pulumi.Input[str]] = None,
                       stacking_mode: Optional[pulumi.Input[str]] = None,
                       target: Optional[pulumi.Input[pulumi.InputType['CartDiscountTargetArgs']]] = None,
                       valid_from: Optional[pulumi.Input[str]] = None,
                       valid_until: Optional[pulumi.Input[str]] = None,
                       value: Optional[pulumi.Input[pulumi.InputType['CartDiscountValueArgs']]] = None,
                       __props__=None):
        # Shared implementation backing both __init__ overloads.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        # When no opts.id is given we are creating a new resource, so the
        # props must be assembled here; __props__ is only legal for lookups.
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = CartDiscountArgs.__new__(CartDiscountArgs)
            __props__.__dict__["description"] = description
            __props__.__dict__["is_active"] = is_active
            __props__.__dict__["key"] = key
            __props__.__dict__["name"] = name
            # 'predicate', 'sort_order' and 'value' are required by the
            # provider schema unless the resource is identified by URN.
            if predicate is None and not opts.urn:
                raise TypeError("Missing required property 'predicate'")
            __props__.__dict__["predicate"] = predicate
            __props__.__dict__["requires_discount_code"] = requires_discount_code
            if sort_order is None and not opts.urn:
                raise TypeError("Missing required property 'sort_order'")
            __props__.__dict__["sort_order"] = sort_order
            __props__.__dict__["stacking_mode"] = stacking_mode
            __props__.__dict__["target"] = target
            __props__.__dict__["valid_from"] = valid_from
            __props__.__dict__["valid_until"] = valid_until
            if value is None and not opts.urn:
                raise TypeError("Missing required property 'value'")
            __props__.__dict__["value"] = value
            # 'version' is provider-assigned output; never set on creation.
            __props__.__dict__["version"] = None
        super(CartDiscount, __self__).__init__(
            'commercetools:index/cartDiscount:CartDiscount',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            description: Optional[pulumi.Input[Mapping[str, Any]]] = None,
            is_active: Optional[pulumi.Input[bool]] = None,
            key: Optional[pulumi.Input[str]] = None,
            name: Optional[pulumi.Input[Mapping[str, Any]]] = None,
            predicate: Optional[pulumi.Input[str]] = None,
            requires_discount_code: Optional[pulumi.Input[bool]] = None,
            sort_order: Optional[pulumi.Input[str]] = None,
            stacking_mode: Optional[pulumi.Input[str]] = None,
            target: Optional[pulumi.Input[pulumi.InputType['CartDiscountTargetArgs']]] = None,
            valid_from: Optional[pulumi.Input[str]] = None,
            valid_until: Optional[pulumi.Input[str]] = None,
            value: Optional[pulumi.Input[pulumi.InputType['CartDiscountValueArgs']]] = None,
            version: Optional[pulumi.Input[int]] = None) -> 'CartDiscount':
        """
        Get an existing CartDiscount resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.
        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[Mapping[str, Any]] description: [LocalizedString](https://docs.commercetools.com/api/types#localizedstring)
        :param pulumi.Input[bool] is_active: Only active discount can be applied to the cart
        :param pulumi.Input[str] key: User-specific unique identifier for a cart discount. Must be unique across a project
        :param pulumi.Input[Mapping[str, Any]] name: [LocalizedString](https://docs.commercetools.com/api/types#localizedstring)
        :param pulumi.Input[str] predicate: A valid [Cart Predicate](https://docs.commercetools.com/api/projects/predicates#cart-predicates)
        :param pulumi.Input[bool] requires_discount_code: States whether the discount can only be used in a connection with a
               [DiscountCode](https://docs.commercetools.com/api/projects/discountCodes#discountcode)
        :param pulumi.Input[str] sort_order: The string must contain a number between 0 and 1. All matching cart discounts are applied to a cart in the order defined
               by this field. A discount with greater sort order is prioritized higher than a discount with lower sort order. The sort
               order is unambiguous among all cart discounts
        :param pulumi.Input[str] stacking_mode: Specifies whether the application of this discount causes the following discounts to be ignored
        :param pulumi.Input[pulumi.InputType['CartDiscountTargetArgs']] target: Empty when the value has type giftLineItem, otherwise a
               [CartDiscountTarget](https://docs.commercetools.com/api/projects/cartDiscounts#cartdiscounttarget)
        :param pulumi.Input[pulumi.InputType['CartDiscountValueArgs']] value: Defines the effect the discount will have.
               [CartDiscountValue](https://docs.commercetools.com/api/projects/cartDiscounts#cartdiscountvalue)
        """
        # Rehydrate the resource from existing provider state: merge the id
        # into the options, then populate a fresh state object.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = _CartDiscountState.__new__(_CartDiscountState)
        __props__.__dict__["description"] = description
        __props__.__dict__["is_active"] = is_active
        __props__.__dict__["key"] = key
        __props__.__dict__["name"] = name
        __props__.__dict__["predicate"] = predicate
        __props__.__dict__["requires_discount_code"] = requires_discount_code
        __props__.__dict__["sort_order"] = sort_order
        __props__.__dict__["stacking_mode"] = stacking_mode
        __props__.__dict__["target"] = target
        __props__.__dict__["valid_from"] = valid_from
        __props__.__dict__["valid_until"] = valid_until
        __props__.__dict__["value"] = value
        __props__.__dict__["version"] = version
        return CartDiscount(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter
    def description(self) -> pulumi.Output[Optional[Mapping[str, Any]]]:
        """
        [LocalizedString](https://docs.commercetools.com/api/types#localizedstring)
        """
        return pulumi.get(self, "description")
    @property
    @pulumi.getter(name="isActive")
    def is_active(self) -> pulumi.Output[Optional[bool]]:
        """
        Only active discount can be applied to the cart
        """
        return pulumi.get(self, "is_active")
    @property
    @pulumi.getter
    def key(self) -> pulumi.Output[Optional[str]]:
        """
        User-specific unique identifier for a cart discount. Must be unique across a project
        """
        return pulumi.get(self, "key")
    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[Mapping[str, Any]]:
        """
        [LocalizedString](https://docs.commercetools.com/api/types#localizedstring)
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def predicate(self) -> pulumi.Output[str]:
        """
        A valid [Cart Predicate](https://docs.commercetools.com/api/projects/predicates#cart-predicates)
        """
        return pulumi.get(self, "predicate")
    @property
    @pulumi.getter(name="requiresDiscountCode")
    def requires_discount_code(self) -> pulumi.Output[Optional[bool]]:
        """
        States whether the discount can only be used in a connection with a
        [DiscountCode](https://docs.commercetools.com/api/projects/discountCodes#discountcode)
        """
        return pulumi.get(self, "requires_discount_code")
    @property
    @pulumi.getter(name="sortOrder")
    def sort_order(self) -> pulumi.Output[str]:
        """
        The string must contain a number between 0 and 1. All matching cart discounts are applied to a cart in the order defined
        by this field. A discount with greater sort order is prioritized higher than a discount with lower sort order. The sort
        order is unambiguous among all cart discounts
        """
        return pulumi.get(self, "sort_order")
    @property
    @pulumi.getter(name="stackingMode")
    def stacking_mode(self) -> pulumi.Output[Optional[str]]:
        """
        Specifies whether the application of this discount causes the following discounts to be ignored
        """
        return pulumi.get(self, "stacking_mode")
    @property
    @pulumi.getter
    def target(self) -> pulumi.Output[Optional['outputs.CartDiscountTarget']]:
        """
        Empty when the value has type giftLineItem, otherwise a
        [CartDiscountTarget](https://docs.commercetools.com/api/projects/cartDiscounts#cartdiscounttarget)
        """
        return pulumi.get(self, "target")
    # NOTE(review): validFrom/validUntil look like the discount's validity
    # window -- the provider schema does not document them here; confirm.
    @property
    @pulumi.getter(name="validFrom")
    def valid_from(self) -> pulumi.Output[Optional[str]]:
        return pulumi.get(self, "valid_from")
    @property
    @pulumi.getter(name="validUntil")
    def valid_until(self) -> pulumi.Output[Optional[str]]:
        return pulumi.get(self, "valid_until")
    @property
    @pulumi.getter
    def value(self) -> pulumi.Output['outputs.CartDiscountValue']:
        """
        Defines the effect the discount will have.
        [CartDiscountValue](https://docs.commercetools.com/api/projects/cartDiscounts#cartdiscountvalue)
        """
        return pulumi.get(self, "value")
    # Provider-assigned (set to None during creation in _internal_init).
    @property
    @pulumi.getter
    def version(self) -> pulumi.Output[int]:
        return pulumi.get(self, "version")
| 47.682927
| 165
| 0.66108
| 3,840
| 33,235
| 5.548698
| 0.053646
| 0.080537
| 0.091848
| 0.049561
| 0.908715
| 0.891303
| 0.87666
| 0.855963
| 0.849298
| 0.837424
| 0
| 0.000587
| 0.231292
| 33,235
| 696
| 166
| 47.751437
| 0.833412
| 0.348488
| 0
| 0.789227
| 1
| 0
| 0.101676
| 0.03376
| 0
| 0
| 0
| 0
| 0
| 1
| 0.163934
| false
| 0.002342
| 0.016393
| 0.018735
| 0.278689
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
85af2759f83bda562e42fb63b42a05dee1b29bd4
| 1,088
|
py
|
Python
|
spaces/managers.py
|
dhavall13/fehler_core
|
dd27802d5b227a32aebcc8bfde68e78a69a36d66
|
[
"MIT"
] | null | null | null |
spaces/managers.py
|
dhavall13/fehler_core
|
dd27802d5b227a32aebcc8bfde68e78a69a36d66
|
[
"MIT"
] | null | null | null |
spaces/managers.py
|
dhavall13/fehler_core
|
dd27802d5b227a32aebcc8bfde68e78a69a36d66
|
[
"MIT"
] | null | null | null |
from django.db import models
class OwnerManager(models.Manager):
    """Manager that restricts the queryset to members with the OWNER type.

    Mirrors AdminManager below, which filters on the uppercase constant
    ``self.model.ADMIN``; the original filtered on ``self.model.Owner``,
    which does not match that naming convention (see also the commented-out
    PROJECT_MANAGER / TEAM_LEAD managers) and would raise AttributeError.
    """
    use_in_migrations = True

    def get_queryset(self, *args, **kwargs):
        # Fixed: use the uppercase OWNER constant, consistent with ADMIN.
        return (
            super().get_queryset(*args, **kwargs).filter(member_type=self.model.OWNER)
        )
class AdminManager(models.Manager):
    """Manager whose queryset contains only ADMIN-typed members."""
    use_in_migrations = True

    def get_queryset(self, *args, **kwargs):
        # Start from the default queryset, then keep only admin members.
        base_qs = super().get_queryset(*args, **kwargs)
        return base_qs.filter(member_type=self.model.ADMIN)
# class ProjectManagerManager(models.Manager):
# use_in_migrations = True
# def get_queryset(self, *args, **kwargs):
# return (
# super()
# .get_queryset(*args, **kwargs)
# .filter(member_type=self.model.PROJECT_MANAGER)
# )
# class TeamLeadManager(models.Manager):
# use_in_migrations = True
# def get_queryset(self, *args, **kwargs):
# return (
# super()
# .get_queryset(*args, **kwargs)
# .filter(member_type=self.model.TEAM_LEAD)
# )
| 25.904762
| 87
| 0.575368
| 111
| 1,088
| 5.441441
| 0.288288
| 0.145695
| 0.10596
| 0.119205
| 0.774834
| 0.774834
| 0.774834
| 0.774834
| 0.774834
| 0.774834
| 0
| 0
| 0.298713
| 1,088
| 41
| 88
| 26.536585
| 0.791612
| 0.482537
| 0
| 0.461538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.153846
| false
| 0
| 0.076923
| 0.153846
| 0.692308
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
85bc690aaa418d186e01a7a0bdce0df9fa058469
| 38,402
|
py
|
Python
|
GenerateSyntheticData.py
|
eugenewickett/logistigateanalysis
|
5174f40db5f79bfd12491850cef53edde825b71b
|
[
"MIT"
] | null | null | null |
GenerateSyntheticData.py
|
eugenewickett/logistigateanalysis
|
5174f40db5f79bfd12491850cef53edde825b71b
|
[
"MIT"
] | null | null | null |
GenerateSyntheticData.py
|
eugenewickett/logistigateanalysis
|
5174f40db5f79bfd12491850cef53edde825b71b
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
'''
Script that generates and analyzes a synthetic set of PMS data. These data differ from the data used in the paper but
capture important elements of what is presented in the paper.
Inference generation requires use of the logistigate package, available at https://logistigate.readthedocs.io/en/main/. Running
the generateSyntheticData() function generates Figures 2, 3, and 4, as well as the interval widths for Tables 1 and 2,
that are analogous to the items produced using the de-identified data.
'''
from logistigate.logistigate import utilities as util # Pull from the submodule "develop" branch
from logistigate.logistigate import methods
from logistigate.logistigate import lg
def generateSyntheticData():
'''
Script for forming a synthetic data set of 25 test nodes and 25 supply nodes.
'''
'''
Use a generated sourcing-probability matrix to produce 500 samples under specified random seeds
'''
import numpy as np
import random
Qrow = np.array([.01, .01, .01, .01, .01, .01, .01, .01, .01, .01, .01, .01,
.02, .02, .02, .03, .03, .05, .05, .07, .07, .07, .10, .15, .20])
random.seed(3)
random.shuffle(Qrow)
# Qrow: [0.01, 0.03, 0.1 , 0.02, 0.01, 0.01, 0.07, 0.01, 0.01, 0.02, 0.2, 0.02,
# 0.01, 0.01, 0.07, 0.15, 0.01, 0.01, 0.03, 0.07, 0.01, 0.01, 0.05, 0.05, 0.01])
# SN rates: 1% baseline; 20% node: 25%, 5% node: ~25/30%, 7% node: 10%, 2% node: 40%
# TN rates: 1% baseline; 1 major node: 25%, 1 minor node: 30%; 3 minor nodes: 10%; 1 minor minor node: 50%
numTN, numSN = 25, 25
numSamples = 500
s, r = 1.0, 1.0
SNnames = ['Manufacturer ' + str(i + 1) for i in range(numSN)]
TNnames = ['District ' + str(i + 1) for i in range(numTN)]
trueRates = np.zeros(numSN + numTN) # importers first, outlets second
SNtrueRates = [.02 for i in range(numSN)]
SN1ind = 3 # 40% SFP rate
SN2ind = 10 # 25% SFP rate, major node
SN3ind = 14 # 10% SFP rate, minor node
SN4ind = 22 # 20% SFP rate, minor node
SNtrueRates[SN1ind], SNtrueRates[SN2ind] = 0.35, 0.25
SNtrueRates[SN3ind], SNtrueRates[SN4ind] = 0.1, 0.25
trueRates[:numSN] = SNtrueRates # SN SFP rates
TN1ind = 5 # 20% sampled node, 25% SFP rate
TN2inds = [2, 11, 14, 22] # 10% sampled
TN3inds = [3, 6, 8, 10, 16, 17, 24] # 3% sampled
TN4inds = [0, 1, 9, 12, 18, 23] # 2% sampled
TNsampProbs = [.01 for i in range(numTN)] # Update sampling probs
TNsampProbs[TN1ind] = 0.20
for j in TN2inds:
TNsampProbs[j] = 0.10
for j in TN3inds:
TNsampProbs[j] = 0.03
for j in TN4inds:
TNsampProbs[j] = 0.02
#print(np.sum(TNsampProbs)) # sampling probability should add up to 1.0
TNtrueRates = [.02 for i in range(numTN)] # Update SFP rates for TNs
TNtrueRates[TN1ind] = 0.2
TNtrueRates[TN2inds[1]] = 0.1
TNtrueRates[TN2inds[2]] = 0.1
TNtrueRates[TN3inds[1]] = 0.4
trueRates[numSN:] = TNtrueRates # Put TN rates in main vector
rseed = 56 # Change the seed here to get a different set of tests
random.seed(rseed)
np.random.seed(rseed+1)
testingDataList = []
for currSamp in range(numSamples):
currTN = random.choices(TNnames, weights=TNsampProbs, k=1)[0]
#if not currTN == 'District '
currSN = random.choices(SNnames, weights=Qrow, k=1)[0] #[TNnames.index(currTN)] to index Q
currTNrate = trueRates[numSN + TNnames.index(currTN)]
currSNrate = trueRates[SNnames.index(currSN)]
realRate = currTNrate + currSNrate - currTNrate * currSNrate
realResult = np.random.binomial(1, p=realRate)
if realResult == 1:
result = np.random.binomial(1, p = s)
if realResult == 0:
result = np.random.binomial(1, p = 1. - r)
testingDataList.append([currTN, currSN, result])
# Inspect testing data; check: (1) overall SFP rate, (2) plots, (3) N, Y matrices align more or less with
# statements from case-study section
priorMean, priorScale = -2.5, 1.3
numPostSamps = 1000
MCMCdict = {'MCMCtype': 'NUTS', 'Madapt': 5000, 'delta': 0.4}
lowerQuant, upperQuant = 0.05, 0.95
import scipy.special as spsp
import scipy.stats as sps
import matplotlib.pyplot as plt
priorLower = spsp.expit(sps.laplace.ppf(lowerQuant, loc=priorMean, scale=priorScale))
priorUpper = spsp.expit(sps.laplace.ppf(upperQuant, loc=priorMean, scale=priorScale))
lgDict = util.testresultsfiletotable(testingDataList, csvName=False)
print('size: '+str(lgDict['N'].shape)+', obsvns: '+str(lgDict['N'].sum())+', propor pos: '+str(lgDict['Y'].sum() / lgDict['N'].sum()))
lgDict.update({'diagSens': 1.0, 'diagSpec': 1.0, 'numPostSamples': numPostSamps,
'prior': methods.prior_laplace(mu=priorMean, scale=priorScale), 'MCMCdict': MCMCdict})
lgDict = lg.runlogistigate(lgDict)
numSN, numTN = lgDict['importerNum'], lgDict['outletNum']
floorVal = 0.05 # Classification lines
ceilVal = 0.25
# Supply-node plot
SNindsSubset = range(numSN)
SNnames = [lgDict['importerNames'][i] for i in SNindsSubset]
SNlowers = [np.quantile(lgDict['postSamples'][:, l], lowerQuant) for l in SNindsSubset]
SNuppers = [np.quantile(lgDict['postSamples'][:, l], upperQuant) for l in SNindsSubset]
# First group
SNlowers1 = [i for i in SNlowers if i > floorVal]
SNuppers1 = [SNuppers[ind] for ind, i in enumerate(SNlowers) if i > floorVal]
SNnames1 = [SNnames[ind] for ind, i in enumerate(SNlowers) if i > floorVal]
midpoints1 = [SNuppers1[i] - (SNuppers1[i] - SNlowers1[i]) / 2 for i in range(len(SNuppers1))]
zippedList1 = zip(midpoints1, SNuppers1, SNlowers1, SNnames1)
sorted_pairs1 = sorted(zippedList1, reverse=True)
SNnamesSorted1 = [tup[-1] for tup in sorted_pairs1]
# Second group
SNuppers2 = [i for ind, i in enumerate(SNuppers) if (i > ceilVal and SNlowers[ind] <= floorVal)]
SNlowers2 = [SNlowers[ind] for ind, i in enumerate(SNuppers) if (i > ceilVal and SNlowers[ind] <= floorVal)]
SNnames2 = [SNnames[ind] for ind, i in enumerate(SNuppers) if (i > ceilVal and SNlowers[ind] <= floorVal)]
midpoints2 = [SNuppers2[i] - (SNuppers2[i] - SNlowers2[i]) / 2 for i in range(len(SNuppers2))]
zippedList2 = zip(midpoints2, SNuppers2, SNlowers2, SNnames2)
sorted_pairs2 = sorted(zippedList2, reverse=True)
SNnamesSorted2 = [tup[-1] for tup in sorted_pairs2]
# Third group
SNuppers3 = [i for ind, i in enumerate(SNuppers) if (i <= ceilVal and SNlowers[ind] <= floorVal)]
SNlowers3 = [SNlowers[ind] for ind, i in enumerate(SNuppers) if (i <= ceilVal and SNlowers[ind] <= floorVal)]
SNnames3 = [SNnames[ind] for ind, i in enumerate(SNuppers) if (i <= ceilVal and SNlowers[ind] <= floorVal)]
midpoints3 = [SNuppers3[i] - (SNuppers3[i] - SNlowers3[i]) / 2 for i in range(len(SNuppers3))]
zippedList3 = zip(midpoints3, SNuppers3, SNlowers3, SNnames3)
sorted_pairs3 = sorted(zippedList3, reverse=True)
SNnamesSorted3 = [tup[-1] for tup in sorted_pairs3]
# Combine groups
SNnamesSorted = SNnamesSorted1.copy()
SNnamesSorted.append(' ')
SNnamesSorted = SNnamesSorted + SNnamesSorted2
SNnamesSorted.append(' ')
SNnamesSorted = SNnamesSorted + SNnamesSorted3
SNnamesSorted.append(' ')
SNnamesSorted.append('(Prior)')
fig, (ax) = plt.subplots(figsize=(10, 6), ncols=1)
for _, upper, lower, name in sorted_pairs1:
plt.plot((name, name), (lower, upper), 'o-', color='red')
plt.plot(('', ''), (np.nan, np.nan), 'o-', color='red')
for _, upper, lower, name in sorted_pairs2:
plt.plot((name, name), (lower, upper), 'o--', color='orange')
plt.plot((' ', ' '), (np.nan, np.nan), 'o--', color='orange')
for _, upper, lower, name in sorted_pairs3:
plt.plot((name, name), (lower, upper), 'o:', color='green')
plt.plot((' ', ' '), (np.nan, np.nan), 'o:', color='green')
plt.plot((SNnamesSorted[-1], SNnamesSorted[-1]), (priorLower, priorUpper), 'o-', color='gray')
plt.ylim([0, 1])
plt.xticks(range(len(SNnamesSorted)), SNnamesSorted, rotation=90)
plt.title('Supply Node 90% Intervals\nManufacturer-District Analysis, Tracked Setting',
fontdict={'fontsize': 18, 'fontname': 'Trebuchet MS'})
plt.xlabel('Supply Node Name', fontdict={'fontsize': 16, 'fontname': 'Trebuchet MS'})
plt.ylabel('Interval value', fontdict={'fontsize': 16, 'fontname': 'Trebuchet MS'})
for label in (ax.get_xticklabels() + ax.get_yticklabels()):
label.set_fontname('Times New Roman')
label.set_fontsize(12)
plt.axhline(y=floorVal, color='r', linestyle='-', alpha=0.1) # line for 'l'
plt.axhline(y=ceilVal, color='blue', linestyle='-', alpha=0.1) # line for 'u'
plt.text(26.3, ceilVal + .015, 'u=0.25', color='blue', alpha=0.5, size=9)
plt.text(26.3, floorVal + .015, 'l=0.05', color='r', alpha=0.5, size=9)
fig.tight_layout()
plt.show()
plt.close()
# Test-node plot
TNindsSubset = range(numTN)
TNnames = [lgDict['outletNames'][i] for i in TNindsSubset]
TNlowers = [np.quantile(lgDict['postSamples'][:, numSN + l], lowerQuant) for l in TNindsSubset]
TNuppers = [np.quantile(lgDict['postSamples'][:, numSN + l], upperQuant) for l in TNindsSubset]
# First group
TNlowers1 = [i for i in TNlowers if i > floorVal]
TNuppers1 = [TNuppers[ind] for ind, i in enumerate(TNlowers) if i > floorVal]
TNnames1 = [TNnames[ind] for ind, i in enumerate(TNlowers) if i > floorVal]
midpoints1 = [TNuppers1[i] - (TNuppers1[i] - TNlowers1[i]) / 2 for i in range(len(TNuppers1))]
zippedList1 = zip(midpoints1, TNuppers1, TNlowers1, TNnames1)
sorted_pairs1 = sorted(zippedList1, reverse=True)
TNnamesSorted1 = [tup[-1] for tup in sorted_pairs1]
# Second group
TNuppers2 = [i for ind, i in enumerate(TNuppers) if (i > ceilVal and TNlowers[ind] <= floorVal)]
TNlowers2 = [TNlowers[ind] for ind, i in enumerate(TNuppers) if (i > ceilVal and TNlowers[ind] <= floorVal)]
TNnames2 = [TNnames[ind] for ind, i in enumerate(TNuppers) if (i > ceilVal and TNlowers[ind] <= floorVal)]
midpoints2 = [TNuppers2[i] - (TNuppers2[i] - TNlowers2[i]) / 2 for i in range(len(TNuppers2))]
zippedList2 = zip(midpoints2, TNuppers2, TNlowers2, TNnames2)
sorted_pairs2 = sorted(zippedList2, reverse=True)
TNnamesSorted2 = [tup[-1] for tup in sorted_pairs2]
# Third group
TNuppers3 = [i for ind, i in enumerate(TNuppers) if (i <= ceilVal and TNlowers[ind] <= floorVal)]
TNlowers3 = [TNlowers[ind] for ind, i in enumerate(TNuppers) if (i <= ceilVal and TNlowers[ind] <= floorVal)]
TNnames3 = [TNnames[ind] for ind, i in enumerate(TNuppers) if (i <= ceilVal and TNlowers[ind] <= floorVal)]
midpoints3 = [TNuppers3[i] - (TNuppers3[i] - TNlowers3[i]) / 2 for i in range(len(TNuppers3))]
zippedList3 = zip(midpoints3, TNuppers3, TNlowers3, TNnames3)
sorted_pairs3 = sorted(zippedList3, reverse=True)
TNnamesSorted3 = [tup[-1] for tup in sorted_pairs3]
# Combine groups
TNnamesSorted = TNnamesSorted1.copy()
TNnamesSorted.append(' ')
TNnamesSorted = TNnamesSorted + TNnamesSorted2
TNnamesSorted.append(' ')
TNnamesSorted = TNnamesSorted + TNnamesSorted3
TNnamesSorted.append(' ')
TNnamesSorted.append('(Prior)')
fig, (ax) = plt.subplots(figsize=(10, 6), ncols=1)
for _, upper, lower, name in sorted_pairs1:
plt.plot((name, name), (lower, upper), 'o-', color='red')
plt.plot(('', ''), (np.nan, np.nan), 'o-', color='red')
for _, upper, lower, name in sorted_pairs2:
plt.plot((name, name), (lower, upper), 'o--', color='orange')
plt.plot((' ', ' '), (np.nan, np.nan), 'o--', color='orange')
for _, upper, lower, name in sorted_pairs3:
plt.plot((name, name), (lower, upper), 'o:', color='green')
plt.plot((' ', ' '), (np.nan, np.nan), 'o:', color='green')
plt.plot((TNnamesSorted[-1], TNnamesSorted[-1]), (priorLower, priorUpper), 'o-', color='gray')
plt.ylim([0, 1])
plt.xticks(range(len(TNnamesSorted)), TNnamesSorted, rotation=90)
plt.title('Test Node 90% Intervals\nManufacturer-District Analysis, Tracked Setting',
fontdict={'fontsize': 18, 'fontname': 'Trebuchet MS'})
plt.xlabel('Test Node Name', fontdict={'fontsize': 16, 'fontname': 'Trebuchet MS'})
plt.ylabel('Interval value', fontdict={'fontsize': 16, 'fontname': 'Trebuchet MS'})
for label in (ax.get_xticklabels() + ax.get_yticklabels()):
label.set_fontname('Times New Roman')
label.set_fontsize(12)
plt.axhline(y=floorVal, color='r', linestyle='-', alpha=0.1) # line for 'l'
plt.axhline(y=ceilVal, color='blue', linestyle='-', alpha=0.1) # line for 'u'
plt.text(26.4, ceilVal + .015, 'u=0.25', color='blue', alpha=0.5, size=9)
plt.text(26.4, floorVal + .015, 'l=0.05', color='r', alpha=0.5, size=9)
fig.tight_layout()
plt.show()
plt.close()
# How many observed arcs are there?
#np.count_nonzero(lgDict['N'])
'''
# Inspect raw data totals
# Supply nodes
for i in range(numSN): # sum across TNs to see totals for SNs
currTotal = np.sum(lgDict['N'],axis=0)[i]
currPos = np.sum(lgDict['Y'],axis=0)[i]
print(lgDict['importerNames'][i]+': ' +str(currTotal)[:-2]+' samples, '
+ str(currPos)[:-2] + ' positives, ' + str(currPos/currTotal)[:5] + ' rate')
# Test nodes
for i in range(numTN): # sum across SNs to see totals for TNs
currTotal = np.sum(lgDict['N'],axis=1)[i]
currPos = np.sum(lgDict['Y'],axis=1)[i]
print(lgDict['outletNames'][i]+': ' +str(currTotal)[:-2]+' samples, '
+ str(currPos)[:-2] + ' positives, ' + str(currPos/currTotal)[:5] + ' rate')
# SNs, TNs with at least ten samples and 10% SFP rate
for i in range(numSN): # sum across TNs to see totals for SNs
currTotal = np.sum(lgDict['N'],axis=0)[i]
currPos = np.sum(lgDict['Y'],axis=0)[i]
if currPos/currTotal>0.1 and currTotal>10:
print(lgDict['importerNames'][i]+': ' +str(currTotal)[:-2]+' samples, '
+ str(currPos)[:-2] + ' positives, ' + str(currPos/currTotal)[:5] + ' rate')
# Test nodes
for i in range(numTN): # sum across SNs to see totals for TNs
currTotal = np.sum(lgDict['N'],axis=1)[i]
currPos = np.sum(lgDict['Y'],axis=1)[i]
if currPos / currTotal > 0.1 and currTotal > 10:
print(lgDict['outletNames'][i]+': ' +str(currTotal)[:-2]+' samples, '
+ str(currPos)[:-2] + ' positives, ' + str(currPos/currTotal)[:5] + ' rate')
# 90% intervals for SFP rates at SNs, TNs, using proportion CI
for i in range(numSN): # sum across TNs to see totals for SNs
currTotal = np.sum(lgDict['N'], axis=0)[i]
currPos = np.sum(lgDict['Y'], axis=0)[i]
pHat = currPos/currTotal
lowerBd = pHat-(1.645*np.sqrt(pHat*(1-pHat)/currTotal))
upperBd = pHat+(1.645*np.sqrt(pHat*(1-pHat)/currTotal))
print(lgDict['importerNames'][i]+': ('+str(lowerBd)[:5]+', '+str(upperBd)[:5]+')')
# Test nodes
for i in range(numTN): # sum across SNs to see totals for TNs
currTotal = np.sum(lgDict['N'], axis=1)[i]
currPos = np.sum(lgDict['Y'], axis=1)[i]
pHat = currPos / currTotal
lowerBd = pHat - (1.645 * np.sqrt(pHat * (1 - pHat) / currTotal))
upperBd = pHat + (1.645 * np.sqrt(pHat * (1 - pHat) / currTotal))
print(lgDict['outletNames'][i] + ': (' + str(lowerBd)[:5] + ', ' + str(upperBd)[:5] + ')')
# Print quantiles for analysis tables
SNinds = lgDict['importerNames'].index('Manufacturer 4')
print('Manufacturer 4: (' + str(np.quantile(lgDict['postSamples'][:, SNinds], 0.05))[:5] + ',' + str(
np.quantile(lgDict['postSamples'][:, SNinds], 0.95))[:5] + ')')
SNinds = lgDict['importerNames'].index('Manufacturer 11')
print('Manufacturer 11: (' + str(np.quantile(lgDict['postSamples'][:, SNinds], 0.05))[:5] + ',' + str(
np.quantile(lgDict['postSamples'][:, SNinds], 0.95))[:5] + ')')
SNinds = lgDict['importerNames'].index('Manufacturer 23')
print('Manufacturer 23: (' + str(np.quantile(lgDict['postSamples'][:, SNinds], 0.05))[:5] + ',' + str(
np.quantile(lgDict['postSamples'][:, SNinds], 0.95))[:5] + ')')
TNinds = lgDict['outletNames'].index('District 6')
print('District 6: (' + str(np.quantile(lgDict['postSamples'][:, len(lgDict['importerNames']) + TNinds], 0.05))[
:5] + ',' + str(np.quantile(lgDict['postSamples'][:, len(lgDict['importerNames']) + TNinds], 0.95))[:5] + ')')
TNinds = lgDict['outletNames'].index('District 7')
print('District 7: (' + str(np.quantile(lgDict['postSamples'][:, len(lgDict['importerNames']) + TNinds], 0.05))[
:5] + ',' + str(np.quantile(lgDict['postSamples'][:, len(lgDict['importerNames']) + TNinds], 0.95))[:5] + ')')
'''
# Untracked
lgDict = {}
lgDict = util.testresultsfiletotable(testingDataList, csvName=False)
Qest = lgDict['N'].copy() # Generate Q
for i, Nrow in enumerate(lgDict['N']):
Qest[i] = Nrow / np.sum(Nrow)
# Update N and Y
lgDict.update({'N': np.sum(lgDict['N'], axis=1), 'Y': np.sum(lgDict['Y'], axis=1)})
print('size: ' + str(lgDict['N'].shape) + ', obsvns: ' + str(lgDict['N'].sum()) + ', propor pos: ' + str(
lgDict['Y'].sum() / lgDict['N'].sum()))
lgDict.update({'type': 'Untracked','diagSens': 1.0, 'diagSpec': 1.0, 'numPostSamples': numPostSamps,
'prior': methods.prior_laplace(mu=priorMean, scale=priorScale), 'MCMCdict': MCMCdict,
'transMat': Qest, 'importerNum': Qest.shape[1], 'outletNum': Qest.shape[0]})
lgDict = methods.GeneratePostSamples(lgDict)
numSN, numTN = lgDict['importerNum'], lgDict['outletNum']
SNindsSubset = range(numSN)
SNnames = [lgDict['importerNames'][i] for i in SNindsSubset]
SNlowers = [np.quantile(lgDict['postSamples'][:, l], lowerQuant) for l in SNindsSubset]
SNuppers = [np.quantile(lgDict['postSamples'][:, l], upperQuant) for l in SNindsSubset]
# First group
SNlowers1 = [i for i in SNlowers if i > floorVal]
SNuppers1 = [SNuppers[ind] for ind, i in enumerate(SNlowers) if i > floorVal]
SNnames1 = [SNnames[ind] for ind, i in enumerate(SNlowers) if i > floorVal]
midpoints1 = [SNuppers1[i] - (SNuppers1[i] - SNlowers1[i]) / 2 for i in range(len(SNuppers1))]
zippedList1 = zip(midpoints1, SNuppers1, SNlowers1, SNnames1)
sorted_pairs1 = sorted(zippedList1, reverse=True)
SNnamesSorted1 = [tup[-1] for tup in sorted_pairs1]
# Second group
SNuppers2 = [i for ind, i in enumerate(SNuppers) if (i > ceilVal and SNlowers[ind] <= floorVal)]
SNlowers2 = [SNlowers[ind] for ind, i in enumerate(SNuppers) if (i > ceilVal and SNlowers[ind] <= floorVal)]
SNnames2 = [SNnames[ind] for ind, i in enumerate(SNuppers) if (i > ceilVal and SNlowers[ind] <= floorVal)]
midpoints2 = [SNuppers2[i] - (SNuppers2[i] - SNlowers2[i]) / 2 for i in range(len(SNuppers2))]
zippedList2 = zip(midpoints2, SNuppers2, SNlowers2, SNnames2)
sorted_pairs2 = sorted(zippedList2, reverse=True)
SNnamesSorted2 = [tup[-1] for tup in sorted_pairs2]
# Third group
SNuppers3 = [i for ind, i in enumerate(SNuppers) if (i <= ceilVal and SNlowers[ind] <= floorVal)]
SNlowers3 = [SNlowers[ind] for ind, i in enumerate(SNuppers) if (i <= ceilVal and SNlowers[ind] <= floorVal)]
SNnames3 = [SNnames[ind] for ind, i in enumerate(SNuppers) if (i <= ceilVal and SNlowers[ind] <= floorVal)]
midpoints3 = [SNuppers3[i] - (SNuppers3[i] - SNlowers3[i]) / 2 for i in range(len(SNuppers3))]
zippedList3 = zip(midpoints3, SNuppers3, SNlowers3, SNnames3)
sorted_pairs3 = sorted(zippedList3, reverse=True)
SNnamesSorted3 = [tup[-1] for tup in sorted_pairs3]
# Combine groups
SNnamesSorted = SNnamesSorted1.copy()
SNnamesSorted.append(' ')
SNnamesSorted = SNnamesSorted + SNnamesSorted2
SNnamesSorted.append(' ')
SNnamesSorted = SNnamesSorted + SNnamesSorted3
SNnamesSorted.append(' ')
SNnamesSorted.append('(Prior)')
fig, (ax) = plt.subplots(figsize=(10, 6), ncols=1)
for _, upper, lower, name in sorted_pairs1:
plt.plot((name, name), (lower, upper), 'o-', color='red')
plt.plot(('', ''), (np.nan, np.nan), 'o-', color='red')
for _, upper, lower, name in sorted_pairs2:
plt.plot((name, name), (lower, upper), 'o--', color='orange')
plt.plot((' ', ' '), (np.nan, np.nan), 'o--', color='orange')
for _, upper, lower, name in sorted_pairs3:
plt.plot((name, name), (lower, upper), 'o:', color='green')
plt.plot((' ', ' '), (np.nan, np.nan), 'o:', color='green')
plt.plot((SNnamesSorted[-1], SNnamesSorted[-1]), (priorLower, priorUpper), 'o-', color='gray')
plt.ylim([0, 1])
plt.xticks(range(len(SNnamesSorted)), SNnamesSorted, rotation=90)
plt.title('Supply Node 90% Intervals\nManufacturer-District Analysis, Untracked Setting',
fontdict={'fontsize': 18, 'fontname': 'Trebuchet MS'})
plt.xlabel('Supply Node Name', fontdict={'fontsize': 16, 'fontname': 'Trebuchet MS'})
plt.ylabel('Interval value', fontdict={'fontsize': 16, 'fontname': 'Trebuchet MS'})
for label in (ax.get_xticklabels() + ax.get_yticklabels()):
label.set_fontname('Times New Roman')
label.set_fontsize(12)
plt.axhline(y=floorVal, color='r', linestyle='-', alpha=0.1) # line for 'l'
plt.axhline(y=ceilVal, color='blue', linestyle='-', alpha=0.1) # line for 'u'
plt.text(26.3, ceilVal + .015, 'u=0.25', color='blue', alpha=0.5, size=9)
plt.text(26.3, floorVal + .015, 'l=0.05', color='r', alpha=0.5, size=9)
fig.tight_layout()
plt.show()
plt.close()
# Test-node plot
TNindsSubset = range(numTN)
TNnames = [lgDict['outletNames'][i] for i in TNindsSubset]
TNlowers = [np.quantile(lgDict['postSamples'][:, numSN + l], lowerQuant) for l in TNindsSubset]
TNuppers = [np.quantile(lgDict['postSamples'][:, numSN + l], upperQuant) for l in TNindsSubset]
# First group
TNlowers1 = [i for i in TNlowers if i > floorVal]
TNuppers1 = [TNuppers[ind] for ind, i in enumerate(TNlowers) if i > floorVal]
TNnames1 = [TNnames[ind] for ind, i in enumerate(TNlowers) if i > floorVal]
midpoints1 = [TNuppers1[i] - (TNuppers1[i] - TNlowers1[i]) / 2 for i in range(len(TNuppers1))]
zippedList1 = zip(midpoints1, TNuppers1, TNlowers1, TNnames1)
sorted_pairs1 = sorted(zippedList1, reverse=True)
TNnamesSorted1 = [tup[-1] for tup in sorted_pairs1]
# Second group
TNuppers2 = [i for ind, i in enumerate(TNuppers) if (i > ceilVal and TNlowers[ind] <= floorVal)]
TNlowers2 = [TNlowers[ind] for ind, i in enumerate(TNuppers) if (i > ceilVal and TNlowers[ind] <= floorVal)]
TNnames2 = [TNnames[ind] for ind, i in enumerate(TNuppers) if (i > ceilVal and TNlowers[ind] <= floorVal)]
midpoints2 = [TNuppers2[i] - (TNuppers2[i] - TNlowers2[i]) / 2 for i in range(len(TNuppers2))]
zippedList2 = zip(midpoints2, TNuppers2, TNlowers2, TNnames2)
sorted_pairs2 = sorted(zippedList2, reverse=True)
TNnamesSorted2 = [tup[-1] for tup in sorted_pairs2]
# Third group
TNuppers3 = [i for ind, i in enumerate(TNuppers) if (i <= ceilVal and TNlowers[ind] <= floorVal)]
TNlowers3 = [TNlowers[ind] for ind, i in enumerate(TNuppers) if (i <= ceilVal and TNlowers[ind] <= floorVal)]
TNnames3 = [TNnames[ind] for ind, i in enumerate(TNuppers) if (i <= ceilVal and TNlowers[ind] <= floorVal)]
midpoints3 = [TNuppers3[i] - (TNuppers3[i] - TNlowers3[i]) / 2 for i in range(len(TNuppers3))]
zippedList3 = zip(midpoints3, TNuppers3, TNlowers3, TNnames3)
sorted_pairs3 = sorted(zippedList3, reverse=True)
TNnamesSorted3 = [tup[-1] for tup in sorted_pairs3]
# Combine groups
TNnamesSorted = TNnamesSorted1.copy()
TNnamesSorted.append(' ')
TNnamesSorted = TNnamesSorted + TNnamesSorted2
TNnamesSorted.append(' ')
TNnamesSorted = TNnamesSorted + TNnamesSorted3
TNnamesSorted.append(' ')
TNnamesSorted.append('(Prior)')
fig, (ax) = plt.subplots(figsize=(10, 6), ncols=1)
for _, upper, lower, name in sorted_pairs1:
plt.plot((name, name), (lower, upper), 'o-', color='red')
plt.plot(('', ''), (np.nan, np.nan), 'o-', color='red')
for _, upper, lower, name in sorted_pairs2:
plt.plot((name, name), (lower, upper), 'o--', color='orange')
plt.plot((' ', ' '), (np.nan, np.nan), 'o--', color='orange')
for _, upper, lower, name in sorted_pairs3:
plt.plot((name, name), (lower, upper), 'o:', color='green')
plt.plot((' ', ' '), (np.nan, np.nan), 'o:', color='green')
plt.plot((TNnamesSorted[-1], TNnamesSorted[-1]), (priorLower, priorUpper), 'o-', color='gray')
plt.ylim([0, 1])
plt.xticks(range(len(TNnamesSorted)), TNnamesSorted, rotation=90)
plt.title('Test Node 90% Intervals\nManufacturer-District Analysis, Untracked Setting',
fontdict={'fontsize': 18, 'fontname': 'Trebuchet MS'})
plt.xlabel('Test Node Name', fontdict={'fontsize': 16, 'fontname': 'Trebuchet MS'})
plt.ylabel('Interval value', fontdict={'fontsize': 16, 'fontname': 'Trebuchet MS'})
for label in (ax.get_xticklabels() + ax.get_yticklabels()):
label.set_fontname('Times New Roman')
label.set_fontsize(12)
plt.axhline(y=floorVal, color='r', linestyle='-', alpha=0.1) # line for 'l'
plt.axhline(y=ceilVal, color='blue', linestyle='-', alpha=0.1) # line for 'u'
plt.text(26.4, ceilVal + .015, 'u=0.25', color='blue', alpha=0.5, size=9)
plt.text(26.4, floorVal + .015, 'l=0.05', color='r', alpha=0.5, size=9)
fig.tight_layout()
plt.show()
plt.close()
# Generate data with a different Q, same underlying SFP rates, UNTRACKED
Qrow = np.array([.01, .01, .01, .01, .01, .01, .01, .01, .01, .01, .01, .01,
.02, .02, .02, .03, .03, .05, .05, .07, .07, .07, .10, .15, .20])
Q = np.zeros(shape=(numTN,numSN))
#base = 4
for i in range(numTN):
Qrow = np.array([.0, .0, .0, .0, .0, .0, .0, .0, .0, .0, .0, .0,
.0, .0, .0, .0, .0, .0, .0, .0, .0, .1, .1, .2, .6])
'''
if i < 4:
random.seed(4+base)
elif i >= 4 and i < 8:
random.seed(5+base)
elif i >= 8 and i < 12:
random.seed(6+base)
elif i >= 12 and i < 16:
random.seed(7+base)
elif i >= 16:
random.seed(8+base)
'''
random.seed(i+10)
random.shuffle(Qrow)
random.shuffle(Qrow)
Q[i] = Qrow
'''
for i in range(numSN):
if np.sum(Q[:,i]) == 0.0:
print(i)
print(np.sum(Q[:,i]))
'''
# Overall SFP rate: 10-20%
# SN rates: 1% baseline; 20% node: 25%, 5% node: ~25/30%, 7% node: 10%, 2% node: 40%
# TN rates: 1% baseline; 1 major node: 25%, 1 minor node: 30%; 3 minor nodes: 10%; 1 minor minor node: 50%
numTN, numSN = 25, 25
numSamples = 500
s, r = 1.0, 1.0
SNnames = ['Manufacturer ' + str(i + 1) for i in range(numSN)]
TNnames = ['District ' + str(i + 1) for i in range(numTN)]
trueRates = np.zeros(numSN + numTN) # importers first, outlets second
SNtrueRates = [.02 for i in range(numSN)]
SN1ind = 3 # 40% SFP rate
SN2ind = 10 # 25% SFP rate, major node
SN3ind = 14 # 10% SFP rate, minor node
SN4ind = 22 # 20% SFP rate, minor node
SNtrueRates[SN1ind], SNtrueRates[SN2ind] = 0.35, 0.25
SNtrueRates[SN3ind], SNtrueRates[SN4ind] = 0.1, 0.25
trueRates[:numSN] = SNtrueRates # SN SFP rates
TN1ind = 5 # 20% sampled node, 25% SFP rate
TN2inds = [2, 11, 14, 22] # 10% sampled
TN3inds = [3, 6, 8, 10, 16, 17, 24] # 3% sampled
TN4inds = [0, 1, 9, 12, 18, 23] # 2% sampled
TNsampProbs = [.01 for i in range(numTN)] # Update sampling probs
TNsampProbs[TN1ind] = 0.20
for j in TN2inds:
TNsampProbs[j] = 0.10
for j in TN3inds:
TNsampProbs[j] = 0.03
for j in TN4inds:
TNsampProbs[j] = 0.02
print(np.sum(TNsampProbs)) # sampling probability should add up to 1.0
TNtrueRates = [.01 for i in range(numTN)] # Update SFP rates for TNs
TNtrueRates[TN1ind] = 0.2
TNtrueRates[TN2inds[1]] = 0.1
TNtrueRates[TN2inds[2]] = 0.1
TNtrueRates[TN3inds[1]] = 0.4
trueRates[numSN:] = TNtrueRates # Put TN rates in main vector
rseed = 56 # Change the seed here to get a different set of tests
random.seed(rseed)
np.random.seed(rseed + 1)
testingDataList = []
for currSamp in range(numSamples):
currTN = random.choices(TNnames, weights=TNsampProbs, k=1)[0]
currTNind = TNnames.index(currTN)
# if not currTN == 'District '
currSN = random.choices(SNnames, weights=Q[currTNind], k=1)[0] # [TNnames.index(currTN)] to index Q
currTNrate = trueRates[numSN + TNnames.index(currTN)]
currSNrate = trueRates[SNnames.index(currSN)]
realRate = currTNrate + currSNrate - currTNrate * currSNrate
realResult = np.random.binomial(1, p=realRate)
if realResult == 1:
result = np.random.binomial(1, p=s)
if realResult == 0:
result = np.random.binomial(1, p=1. - r)
testingDataList.append([currTN, currSN, result])
lgDict = {}
lgDict = util.testresultsfiletotable(testingDataList, csvName=False)
Qest = lgDict['N'].copy() # Generate Q
for i, Nrow in enumerate(lgDict['N']):
Qest[i] = Nrow / np.sum(Nrow)
# Update N and Y
lgDict.update({'N': np.sum(lgDict['N'], axis=1), 'Y': np.sum(lgDict['Y'], axis=1)})
print('size: ' + str(lgDict['N'].shape) + ', obsvns: ' + str(lgDict['N'].sum()) + ', propor pos: ' + str(
lgDict['Y'].sum() / lgDict['N'].sum()))
lgDict.update({'type': 'Untracked', 'diagSens': 1.0, 'diagSpec': 1.0, 'numPostSamples': numPostSamps,
'prior': methods.prior_laplace(mu=priorMean, scale=priorScale), 'MCMCdict': MCMCdict,
'transMat': Qest, 'importerNum': Qest.shape[1], 'outletNum': Qest.shape[0]})
lgDict = methods.GeneratePostSamples(lgDict)
numSN, numTN = lgDict['importerNum'], lgDict['outletNum']
SNindsSubset = range(numSN)
SNnames = [lgDict['importerNames'][i] for i in SNindsSubset]
SNlowers = [np.quantile(lgDict['postSamples'][:, l], lowerQuant) for l in SNindsSubset]
SNuppers = [np.quantile(lgDict['postSamples'][:, l], upperQuant) for l in SNindsSubset]
# First group
SNlowers1 = [i for i in SNlowers if i > floorVal]
SNuppers1 = [SNuppers[ind] for ind, i in enumerate(SNlowers) if i > floorVal]
SNnames1 = [SNnames[ind] for ind, i in enumerate(SNlowers) if i > floorVal]
midpoints1 = [SNuppers1[i] - (SNuppers1[i] - SNlowers1[i]) / 2 for i in range(len(SNuppers1))]
zippedList1 = zip(midpoints1, SNuppers1, SNlowers1, SNnames1)
sorted_pairs1 = sorted(zippedList1, reverse=True)
SNnamesSorted1 = [tup[-1] for tup in sorted_pairs1]
# Second group
SNuppers2 = [i for ind, i in enumerate(SNuppers) if (i > ceilVal and SNlowers[ind] <= floorVal)]
SNlowers2 = [SNlowers[ind] for ind, i in enumerate(SNuppers) if (i > ceilVal and SNlowers[ind] <= floorVal)]
SNnames2 = [SNnames[ind] for ind, i in enumerate(SNuppers) if (i > ceilVal and SNlowers[ind] <= floorVal)]
midpoints2 = [SNuppers2[i] - (SNuppers2[i] - SNlowers2[i]) / 2 for i in range(len(SNuppers2))]
zippedList2 = zip(midpoints2, SNuppers2, SNlowers2, SNnames2)
sorted_pairs2 = sorted(zippedList2, reverse=True)
SNnamesSorted2 = [tup[-1] for tup in sorted_pairs2]
# Third group
SNuppers3 = [i for ind, i in enumerate(SNuppers) if (i <= ceilVal and SNlowers[ind] <= floorVal)]
SNlowers3 = [SNlowers[ind] for ind, i in enumerate(SNuppers) if (i <= ceilVal and SNlowers[ind] <= floorVal)]
SNnames3 = [SNnames[ind] for ind, i in enumerate(SNuppers) if (i <= ceilVal and SNlowers[ind] <= floorVal)]
midpoints3 = [SNuppers3[i] - (SNuppers3[i] - SNlowers3[i]) / 2 for i in range(len(SNuppers3))]
zippedList3 = zip(midpoints3, SNuppers3, SNlowers3, SNnames3)
sorted_pairs3 = sorted(zippedList3, reverse=True)
SNnamesSorted3 = [tup[-1] for tup in sorted_pairs3]
# Combine groups
SNnamesSorted = SNnamesSorted1.copy()
SNnamesSorted.append(' ')
SNnamesSorted = SNnamesSorted + SNnamesSorted2
SNnamesSorted.append(' ')
SNnamesSorted = SNnamesSorted + SNnamesSorted3
SNnamesSorted.append(' ')
SNnamesSorted.append('(Prior)')
fig, (ax) = plt.subplots(figsize=(10, 6), ncols=1)
for _, upper, lower, name in sorted_pairs1:
plt.plot((name, name), (lower, upper), 'o-', color='red')
plt.plot(('', ''), (np.nan, np.nan), 'o-', color='red')
for _, upper, lower, name in sorted_pairs2:
plt.plot((name, name), (lower, upper), 'o--', color='orange')
plt.plot((' ', ' '), (np.nan, np.nan), 'o--', color='orange')
for _, upper, lower, name in sorted_pairs3:
plt.plot((name, name), (lower, upper), 'o:', color='green')
plt.plot((' ', ' '), (np.nan, np.nan), 'o:', color='green')
plt.plot((SNnamesSorted[-1], SNnamesSorted[-1]), (priorLower, priorUpper), 'o-', color='gray')
plt.ylim([0, 1])
plt.xticks(range(len(SNnamesSorted)), SNnamesSorted, rotation=90)
plt.title('Supply Node 90% Intervals\nManufacturer-District Analysis, Untracked Setting',
fontdict={'fontsize': 18, 'fontname': 'Trebuchet MS'})
plt.xlabel('Supply Node Name', fontdict={'fontsize': 16, 'fontname': 'Trebuchet MS'})
plt.ylabel('Interval value', fontdict={'fontsize': 16, 'fontname': 'Trebuchet MS'})
for label in (ax.get_xticklabels() + ax.get_yticklabels()):
label.set_fontname('Times New Roman')
label.set_fontsize(12)
plt.axhline(y=floorVal, color='r', linestyle='-', alpha=0.1) # line for 'l'
plt.axhline(y=ceilVal, color='blue', linestyle='-', alpha=0.1) # line for 'u'
plt.text(26.3, ceilVal + .015, 'u=0.25', color='blue', alpha=0.5, size=9)
plt.text(26.3, floorVal + .015, 'l=0.05', color='r', alpha=0.5, size=9)
fig.tight_layout()
plt.show()
plt.close()
# Test-node plot
TNindsSubset = range(numTN)
TNnames = [lgDict['outletNames'][i] for i in TNindsSubset]
TNlowers = [np.quantile(lgDict['postSamples'][:, numSN + l], lowerQuant) for l in TNindsSubset]
TNuppers = [np.quantile(lgDict['postSamples'][:, numSN + l], upperQuant) for l in TNindsSubset]
# First group
TNlowers1 = [i for i in TNlowers if i > floorVal]
TNuppers1 = [TNuppers[ind] for ind, i in enumerate(TNlowers) if i > floorVal]
TNnames1 = [TNnames[ind] for ind, i in enumerate(TNlowers) if i > floorVal]
midpoints1 = [TNuppers1[i] - (TNuppers1[i] - TNlowers1[i]) / 2 for i in range(len(TNuppers1))]
zippedList1 = zip(midpoints1, TNuppers1, TNlowers1, TNnames1)
sorted_pairs1 = sorted(zippedList1, reverse=True)
TNnamesSorted1 = [tup[-1] for tup in sorted_pairs1]
# Second group
TNuppers2 = [i for ind, i in enumerate(TNuppers) if (i > ceilVal and TNlowers[ind] <= floorVal)]
TNlowers2 = [TNlowers[ind] for ind, i in enumerate(TNuppers) if (i > ceilVal and TNlowers[ind] <= floorVal)]
TNnames2 = [TNnames[ind] for ind, i in enumerate(TNuppers) if (i > ceilVal and TNlowers[ind] <= floorVal)]
midpoints2 = [TNuppers2[i] - (TNuppers2[i] - TNlowers2[i]) / 2 for i in range(len(TNuppers2))]
zippedList2 = zip(midpoints2, TNuppers2, TNlowers2, TNnames2)
sorted_pairs2 = sorted(zippedList2, reverse=True)
TNnamesSorted2 = [tup[-1] for tup in sorted_pairs2]
# Third group
TNuppers3 = [i for ind, i in enumerate(TNuppers) if (i <= ceilVal and TNlowers[ind] <= floorVal)]
TNlowers3 = [TNlowers[ind] for ind, i in enumerate(TNuppers) if (i <= ceilVal and TNlowers[ind] <= floorVal)]
TNnames3 = [TNnames[ind] for ind, i in enumerate(TNuppers) if (i <= ceilVal and TNlowers[ind] <= floorVal)]
midpoints3 = [TNuppers3[i] - (TNuppers3[i] - TNlowers3[i]) / 2 for i in range(len(TNuppers3))]
zippedList3 = zip(midpoints3, TNuppers3, TNlowers3, TNnames3)
sorted_pairs3 = sorted(zippedList3, reverse=True)
TNnamesSorted3 = [tup[-1] for tup in sorted_pairs3]
# Combine groups
TNnamesSorted = TNnamesSorted1.copy()
TNnamesSorted.append(' ')
TNnamesSorted = TNnamesSorted + TNnamesSorted2
TNnamesSorted.append(' ')
TNnamesSorted = TNnamesSorted + TNnamesSorted3
TNnamesSorted.append(' ')
TNnamesSorted.append('(Prior)')
fig, (ax) = plt.subplots(figsize=(10, 6), ncols=1)
for _, upper, lower, name in sorted_pairs1:
plt.plot((name, name), (lower, upper), 'o-', color='red')
plt.plot(('', ''), (np.nan, np.nan), 'o-', color='red')
for _, upper, lower, name in sorted_pairs2:
plt.plot((name, name), (lower, upper), 'o--', color='orange')
plt.plot((' ', ' '), (np.nan, np.nan), 'o--', color='orange')
for _, upper, lower, name in sorted_pairs3:
plt.plot((name, name), (lower, upper), 'o:', color='green')
plt.plot((' ', ' '), (np.nan, np.nan), 'o:', color='green')
plt.plot((TNnamesSorted[-1], TNnamesSorted[-1]), (priorLower, priorUpper), 'o-', color='gray')
plt.ylim([0, 1])
plt.xticks(range(len(TNnamesSorted)), TNnamesSorted, rotation=90)
plt.title('Test Node 90% Intervals\nManufacturer-District Analysis, Untracked Setting',
fontdict={'fontsize': 18, 'fontname': 'Trebuchet MS'})
plt.xlabel('Test Node Name', fontdict={'fontsize': 16, 'fontname': 'Trebuchet MS'})
plt.ylabel('Interval value', fontdict={'fontsize': 16, 'fontname': 'Trebuchet MS'})
for label in (ax.get_xticklabels() + ax.get_yticklabels()):
label.set_fontname('Times New Roman')
label.set_fontsize(12)
plt.axhline(y=floorVal, color='r', linestyle='-', alpha=0.1) # line for 'l'
plt.axhline(y=ceilVal, color='blue', linestyle='-', alpha=0.1) # line for 'u'
plt.text(26.4, ceilVal + .015, 'u=0.25', color='blue', alpha=0.5, size=9)
plt.text(26.4, floorVal + .015, 'l=0.05', color='r', alpha=0.5, size=9)
fig.tight_layout()
plt.show()
plt.close()
return
# Run the full synthetic-data generation and plotting study defined above.
# The function ends with a bare `return`, so the (None) result is discarded.
_ = generateSyntheticData()
| 54.470922
| 138
| 0.632207
| 5,126
| 38,402
| 4.715373
| 0.08135
| 0.011915
| 0.011915
| 0.017873
| 0.919821
| 0.912995
| 0.910554
| 0.90592
| 0.902942
| 0.897563
| 0
| 0.045805
| 0.202385
| 38,402
| 705
| 139
| 54.470922
| 0.743324
| 0.077782
| 0
| 0.926499
| 1
| 0
| 0.077736
| 0.006224
| 0
| 0
| 0
| 0
| 0
| 1
| 0.001934
| false
| 0
| 0.030948
| 0
| 0.034816
| 0.007737
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a43ec7d45496409ed9eb2eee6f49394938cc5d61
| 9,895
|
py
|
Python
|
optimtool/constrain/unequal.py
|
linjing-lab/optimtool
|
9ca298b91ba755b4dab4028879af2c5a06c2e6d6
|
[
"MIT"
] | 3
|
2022-01-24T14:16:07.000Z
|
2022-02-18T20:02:50.000Z
|
optimtool/constrain/unequal.py
|
linjing-lab/optimtool
|
9ca298b91ba755b4dab4028879af2c5a06c2e6d6
|
[
"MIT"
] | null | null | null |
optimtool/constrain/unequal.py
|
linjing-lab/optimtool
|
9ca298b91ba755b4dab4028879af2c5a06c2e6d6
|
[
"MIT"
] | null | null | null |
# Quadratic penalty method (inequality constraints)
def penalty_quadratic(funcs, args, cons, x_0, draw=True, output_f=False, method="gradient_descent", sigma=10, p=0.4, epsilon=1e-10, k=0):
    '''
    Quadratic-penalty method for an inequality-constrained problem.

    Parameters
    ----------
    funcs : sympy.matrices.dense.MutableDenseMatrix
        objective function
    args : sympy.matrices.dense.MutableDenseMatrix
        list of symbolic parameters
    cons : sympy.matrices.dense.MutableDenseMatrix
        list of inequality constraints
    x_0 : list
        initial iteration point
    draw : bool
        whether to plot the iteration history
    output_f : bool
        whether to also return the list of objective values
    method : string
        kernel used for the unconstrained subproblems
    sigma : double
        penalty factor
    p : double
        factor applied to sigma after each outer iteration (0 < p < 1)
    epsilon : double
        stopping tolerance on the step norm
    k : int
        iteration counter (starting value)

    Returns
    -------
    tuple
        final point, iteration count, (list of objective values)
    '''
    import numpy as np
    import sympy as sp
    from optimtool.functions.tools import function_f_x_k, function_plot_iteration, function_data_convert
    from optimtool.unconstrain.gradient_descent import barzilar_borwein
    from optimtool.unconstrain.newton import CG
    from optimtool.unconstrain.newton_quasi import L_BFGS
    from optimtool.unconstrain.trust_region import steihaug_CG
    assert sigma > 0
    assert p > 0
    assert p < 1
    funcs, args, _, cons = function_data_convert(funcs, args, None, cons)
    # Dispatch table for the unconstrained subproblem solver.
    kernels = {
        "gradient_descent": barzilar_borwein,
        "newton": CG,
        "newton_quasi": L_BFGS,
        "trust_region": steihaug_CG,
    }
    history = []
    f = []
    while True:
        history.append(np.array(x_0))
        f.append(function_f_x_k(funcs, args, x_0))
        substitutions = dict(zip(args, x_0))
        violation = np.array(cons.subs(substitutions)).astype(np.float64)
        # Two-pass masking: entries > 0 (violated constraints) become 1,
        # everything else becomes 0, so only violated terms are penalized.
        violation = np.where(violation <= 0, violation, 1)
        violation = np.where(violation > 0, violation, 0)
        pe = sp.Matrix([funcs + (sigma / 2) * cons.T * violation])
        solver = kernels.get(method)
        if solver is not None:
            x_0, _ = solver(pe, args, tuple(x_0), draw=False)
        k = k + 1
        # Stop once the outer step is small enough.
        if np.linalg.norm(x_0 - history[k - 1]) < epsilon:
            history.append(np.array(x_0))
            f.append(function_f_x_k(funcs, args, x_0))
            break
        sigma = p * sigma
    function_plot_iteration(f, draw, "penalty_quadratic_unequal")
    if output_f is True:
        return x_0, k, f
    else:
        return x_0, k
# Interior-point penalty method (inequality constraints), logarithmic barrier.
# NOTE: iterates must remain inside the feasible domain for the barrier to be defined.
def penalty_interior_log(funcs, args, cons, x_0, draw=True, output_f=False, sigma=12, p=0.6, epsilon=1e-10, k=0):
    '''
    Log-barrier interior penalty method for an inequality-constrained problem.

    Parameters
    ----------
    funcs : sympy.matrices.dense.MutableDenseMatrix
        objective function
    args : sympy.matrices.dense.MutableDenseMatrix
        list of symbolic parameters
    cons : sympy.matrices.dense.MutableDenseMatrix
        list of inequality constraints
    x_0 : list
        initial iteration point
    draw : bool
        whether to plot the iteration history
    output_f : bool
        whether to also return the list of objective values
    sigma : double
        penalty (barrier) factor
    p : double
        factor applied to sigma after each outer iteration (0 < p < 1)
    epsilon : double
        stopping tolerance on the step norm
    k : int
        iteration counter (starting value)

    Returns
    -------
    tuple
        final point, iteration count, (list of objective values)
    '''
    import numpy as np
    from optimtool.functions.tools import function_f_x_k, function_plot_iteration, function_data_convert
    from optimtool.hybrid.approximate_point_gradient import neg_log
    assert sigma > 0
    assert p > 0
    assert p < 1
    funcs, args, _, cons = function_data_convert(funcs, args, None, cons)
    point = []
    f = []
    while 1:
        point.append(np.array(x_0))
        f.append(function_f_x_k(funcs, args, x_0))
        # cons <= 0 is feasible, so -cons > 0 inside the domain of the log barrier.
        x_0, _ = neg_log(funcs, sigma, -cons, args, tuple(x_0), draw=False)
        k = k + 1
        sigma = p * sigma
        # Fixed: removed a leftover debug print of the step norm here.
        if np.linalg.norm(x_0 - point[k - 1]) < epsilon:
            point.append(np.array(x_0))
            f.append(function_f_x_k(funcs, args, x_0))
            break
    # Fixed: label previously read "penalty_interior_fraction" (copy-paste from
    # the fraction variant below); this is the logarithmic variant.
    function_plot_iteration(f, draw, "penalty_interior_log")
    if output_f is True:
        return x_0, k, f
    else:
        return x_0, k
# Fraction (inverse-barrier) interior penalty variant
def penalty_interior_fraction(funcs, args, cons, x_0, draw=True, output_f=False, method="gradient_descent", sigma=12, p=0.6, epsilon=1e-6, k=0):
    '''
    Inverse-fraction barrier interior penalty method for an
    inequality-constrained problem.

    Parameters
    ----------
    funcs : sympy.matrices.dense.MutableDenseMatrix
        objective function
    args : sympy.matrices.dense.MutableDenseMatrix
        list of symbolic parameters
    cons : sympy.matrices.dense.MutableDenseMatrix
        list of inequality constraints
    x_0 : list
        initial iteration point
    draw : bool
        whether to plot the iteration history
    output_f : bool
        whether to also return the list of objective values
    method : string
        kernel used for the unconstrained subproblems
    sigma : double
        penalty (barrier) factor
    p : double
        factor applied to sigma after each outer iteration (0 < p < 1)
    epsilon : double
        stopping tolerance on the step norm
    k : int
        iteration counter (starting value)

    Returns
    -------
    tuple
        final point, iteration count, (list of objective values)
    '''
    import numpy as np
    import sympy as sp
    from optimtool.functions.tools import function_f_x_k, function_plot_iteration, function_data_convert
    from optimtool.unconstrain.gradient_descent import barzilar_borwein
    from optimtool.unconstrain.newton import CG
    from optimtool.unconstrain.newton_quasi import L_BFGS
    from optimtool.unconstrain.trust_region import steihaug_CG
    assert sigma > 0
    assert p > 0
    assert p < 1
    funcs, args, _, cons = function_data_convert(funcs, args, None, cons)
    # Dispatch table for the unconstrained subproblem solver.
    kernels = {
        "gradient_descent": barzilar_borwein,
        "newton": CG,
        "newton_quasi": L_BFGS,
        "trust_region": steihaug_CG,
    }
    # Inverse-fraction barrier sum_i 1/cons_i, assembled once outside the loop.
    barrier = sp.Matrix([sum(1 / c for c in cons)])
    trail = []
    f = []
    while True:
        trail.append(np.array(x_0))
        f.append(function_f_x_k(funcs, args, x_0))
        pe = sp.Matrix([funcs - sigma * barrier])
        solver = kernels.get(method)
        if solver is not None:
            x_0, _ = solver(pe, args, tuple(x_0), draw=False)
        k = k + 1
        sigma = p * sigma
        # Stop once the outer step is small enough.
        if np.linalg.norm(x_0 - trail[k - 1]) < epsilon:
            trail.append(np.array(x_0))
            f.append(function_f_x_k(funcs, args, x_0))
            break
    function_plot_iteration(f, draw, "penalty_interior_fraction")
    if output_f is True:
        return x_0, k, f
    else:
        return x_0, k
# Augmented Lagrangian method (inequality constraints)
def lagrange_augmented(funcs, args, cons, x_0, draw=True, output_f=False, method="gradient_descent", muk=10, sigma=8, alpha=0.2, beta=0.7, p=2, eta=1e-1, epsilon=1e-4, k=0):
    '''
    Augmented Lagrangian method for an inequality-constrained problem.

    Parameters
    ----------
    funcs : sympy.matrices.dense.MutableDenseMatrix
        objective function
    args : sympy.matrices.dense.MutableDenseMatrix
        list of symbolic parameters
    cons : sympy.matrices.dense.MutableDenseMatrix
        list of inequality constraints
    x_0 : list
        initial iteration point
    draw : bool
        whether to plot the iteration history
    output_f : bool
        whether to also return the list of objective values
    method : string
        kernel used for the unconstrained subproblems
    muk : float
        initial value of every Lagrange-multiplier estimate
    sigma : float
        penalty factor
    alpha : float
        exponent used in the inner tolerance epsilonk = 1 / sigma**alpha
        (original docs called it "initial step size" -- the code uses it
        only as an exponent)
    beta : float
        exponent used when tightening epsilonk after a multiplier update
    p : float
        growth factor applied to sigma when the violation test fails (p > 1)
    eta : float
        tolerance on the gradient norm of the augmented Lagrangian
    epsilon : float
        tolerance on the constraint-violation measure
    k : int
        iteration counter (starting value)

    Returns
    -------
    tuple
        final point, iteration count, (list of objective values)
    '''
    import sympy as sp
    import numpy as np
    from optimtool.functions.tools import function_f_x_k, function_plot_iteration, function_cons_unequal_L, function_renew_mu_k, function_v_k, function_data_convert
    from optimtool.unconstrain.gradient_descent import barzilar_borwein
    from optimtool.unconstrain.newton import CG
    from optimtool.unconstrain.newton_quasi import L_BFGS
    from optimtool.unconstrain.trust_region import steihaug_CG
    assert sigma > 0
    assert p > 1
    assert alpha > 0
    assert alpha <= beta
    assert beta < 1
    funcs, args, _, cons = function_data_convert(funcs, args, None, cons)
    f = []
    # One multiplier estimate per constraint, stored as a column vector.
    muk = np.array([muk for i in range(cons.shape[0])]).reshape(cons.shape[0], 1)
    while 1:
        # NOTE(review): etak/epsilonk are recomputed here every iteration, so the
        # updates to them at the bottom of both branches below are dead stores --
        # confirm against the intended reference algorithm before changing.
        etak = 1 / sigma
        epsilonk = 1 / sigma**alpha
        cons_uneuqal_modifyed = function_cons_unequal_L(cons, args, muk, sigma, x_0)
        # Augmented Lagrangian for the current multipliers/penalty.
        L = sp.Matrix([funcs + (sigma / 2) * cons_uneuqal_modifyed])
        f.append(function_f_x_k(funcs, args, x_0))
        # Solve the unconstrained subproblem to inner tolerance etak.
        if method == "gradient_descent":
            x_0, _ = barzilar_borwein(L, args, x_0, draw=False, epsilon=etak)
        elif method == "newton":
            x_0, _ = CG(L, args, x_0, draw=False, epsilon=etak)
        elif method == "newton_quasi":
            x_0, _ = L_BFGS(L, args, x_0, draw=False, epsilon=etak)
        elif method == "trust_region":
            x_0, _ = steihaug_CG(L, args, x_0, draw=False, epsilon=etak)
        k = k + 1
        # Constraint-violation measure at the new point.
        vkx = function_v_k(None, cons, args, muk, sigma, x_0)
        if vkx <= epsilonk:
            res = L.jacobian(args)
            # Converged when both the violation and the gradient of L are small.
            if (vkx <= epsilon) and (np.linalg.norm(np.array(res.subs(dict(zip(args, x_0)))).astype(np.float64)) <= eta):
                f.append(function_f_x_k(funcs, args, x_0))
                break
            else:
                # Violation acceptable but not converged: update multipliers,
                # keep sigma unchanged.
                muk = function_renew_mu_k(cons, args, muk, sigma, x_0)
                sigma = sigma  # NOTE(review): no-op assignment, kept byte-identical
                etak = etak / sigma
                epsilonk = epsilonk / sigma**beta
        else:
            # Violation too large: increase the penalty factor.
            sigma = p * sigma
            etak = 1 / sigma
            epsilonk = 1 / sigma**alpha
    function_plot_iteration(f, draw, "lagrange_augmented_unequal")
    if output_f is True:
        return x_0, k, f
    else:
        return x_0, k
| 28.190883
| 173
| 0.571501
| 1,269
| 9,895
| 4.263987
| 0.115051
| 0.024025
| 0.01885
| 0.026428
| 0.838477
| 0.828867
| 0.791166
| 0.776012
| 0.759009
| 0.75328
| 0
| 0.021834
| 0.328853
| 9,895
| 351
| 174
| 28.190883
| 0.792953
| 0.212734
| 0
| 0.75625
| 0
| 0
| 0.040703
| 0.014324
| 0
| 0
| 0
| 0
| 0.0875
| 1
| 0.025
| false
| 0
| 0.15625
| 0
| 0.23125
| 0.00625
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f14499bca711ac795f29debc02ddefc538c908be
| 150
|
py
|
Python
|
loldib/getratings/models/NA/na_leesin/__init__.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_leesin/__init__.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_leesin/__init__.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from .na_leesin_top import *
from .na_leesin_jng import *
from .na_leesin_mid import *
from .na_leesin_bot import *
from .na_leesin_sup import *
| 25
| 29
| 0.766667
| 25
| 150
| 4.2
| 0.36
| 0.285714
| 0.571429
| 0.685714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 150
| 5
| 30
| 30
| 0.84
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
f17f55bb63ac6daf2621b5e2a549585752177a34
| 240
|
py
|
Python
|
resources/util/python/__init__.py
|
feliciatrinh/rhino
|
adafc0df0c8ed0451e610f2cdac562199515288e
|
[
"Apache-2.0"
] | null | null | null |
resources/util/python/__init__.py
|
feliciatrinh/rhino
|
adafc0df0c8ed0451e610f2cdac562199515288e
|
[
"Apache-2.0"
] | null | null | null |
resources/util/python/__init__.py
|
feliciatrinh/rhino
|
adafc0df0c8ed0451e610f2cdac562199515288e
|
[
"Apache-2.0"
] | null | null | null |
from .util import CONTEXT_FILE_PATH
from .util import RHINO_LIBRARY_PATH
from .util import PORCUPINE_LIBRARY_PATH
from .util import RHINO_MODEL_FILE_PATH
from .util import PORCUPINE_MODEL_FILE_PATH
from .util import KEYWORD_FILE_PATH
| 34.285714
| 44
| 0.85
| 38
| 240
| 5
| 0.289474
| 0.252632
| 0.442105
| 0.473684
| 0.810526
| 0.284211
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 240
| 6
| 45
| 40
| 0.904762
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
2d4393d244b50df55875999d6748fcea063f2f5a
| 970,224
|
py
|
Python
|
sdk/python/pulumi_gcp/compute/outputs.py
|
dimpu47/pulumi-gcp
|
38355de300a5768e11c49d344a8165ba0735deed
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_gcp/compute/outputs.py
|
dimpu47/pulumi-gcp
|
38355de300a5768e11c49d344a8165ba0735deed
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_gcp/compute/outputs.py
|
dimpu47/pulumi-gcp
|
38355de300a5768e11c49d344a8165ba0735deed
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Dict, List, Mapping, Optional, Tuple, Union
from .. import _utilities, _tables
from . import outputs
__all__ = [
'AutoscalarAutoscalingPolicy',
'AutoscalarAutoscalingPolicyCpuUtilization',
'AutoscalarAutoscalingPolicyLoadBalancingUtilization',
'AutoscalarAutoscalingPolicyMetric',
'AutoscalarAutoscalingPolicyScaleDownControl',
'AutoscalarAutoscalingPolicyScaleDownControlMaxScaledDownReplicas',
'AutoscalerAutoscalingPolicy',
'AutoscalerAutoscalingPolicyCpuUtilization',
'AutoscalerAutoscalingPolicyLoadBalancingUtilization',
'AutoscalerAutoscalingPolicyMetric',
'AutoscalerAutoscalingPolicyScaleDownControl',
'AutoscalerAutoscalingPolicyScaleDownControlMaxScaledDownReplicas',
'BackendBucketCdnPolicy',
'BackendServiceBackend',
'BackendServiceCdnPolicy',
'BackendServiceCdnPolicyCacheKeyPolicy',
'BackendServiceCircuitBreakers',
'BackendServiceCircuitBreakersConnectTimeout',
'BackendServiceConsistentHash',
'BackendServiceConsistentHashHttpCookie',
'BackendServiceConsistentHashHttpCookieTtl',
'BackendServiceIap',
'BackendServiceLogConfig',
'BackendServiceOutlierDetection',
'BackendServiceOutlierDetectionBaseEjectionTime',
'BackendServiceOutlierDetectionInterval',
'DiskDiskEncryptionKey',
'DiskIamBindingCondition',
'DiskIamMemberCondition',
'DiskSourceImageEncryptionKey',
'DiskSourceSnapshotEncryptionKey',
'ExternalVpnGatewayInterface',
'FirewallAllow',
'FirewallDeny',
'FirewallLogConfig',
'GlobalForwardingRuleMetadataFilter',
'GlobalForwardingRuleMetadataFilterFilterLabel',
'HaVpnGatewayVpnInterface',
'HealthCheckGrpcHealthCheck',
'HealthCheckHttp2HealthCheck',
'HealthCheckHttpHealthCheck',
'HealthCheckHttpsHealthCheck',
'HealthCheckLogConfig',
'HealthCheckSslHealthCheck',
'HealthCheckTcpHealthCheck',
'ImageGuestOsFeature',
'ImageIamBindingCondition',
'ImageIamMemberCondition',
'ImageRawDisk',
'InstanceAttachedDisk',
'InstanceBootDisk',
'InstanceBootDiskInitializeParams',
'InstanceConfidentialInstanceConfig',
'InstanceFromTemplateAttachedDisk',
'InstanceFromTemplateBootDisk',
'InstanceFromTemplateBootDiskInitializeParams',
'InstanceFromTemplateConfidentialInstanceConfig',
'InstanceFromTemplateGuestAccelerator',
'InstanceFromTemplateNetworkInterface',
'InstanceFromTemplateNetworkInterfaceAccessConfig',
'InstanceFromTemplateNetworkInterfaceAliasIpRange',
'InstanceFromTemplateScheduling',
'InstanceFromTemplateSchedulingNodeAffinity',
'InstanceFromTemplateScratchDisk',
'InstanceFromTemplateServiceAccount',
'InstanceFromTemplateShieldedInstanceConfig',
'InstanceGroupManagerAutoHealingPolicies',
'InstanceGroupManagerNamedPort',
'InstanceGroupManagerStatefulDisk',
'InstanceGroupManagerUpdatePolicy',
'InstanceGroupManagerVersion',
'InstanceGroupManagerVersionTargetSize',
'InstanceGroupNamedPort',
'InstanceGuestAccelerator',
'InstanceIAMBindingCondition',
'InstanceIAMMemberCondition',
'InstanceNetworkInterface',
'InstanceNetworkInterfaceAccessConfig',
'InstanceNetworkInterfaceAliasIpRange',
'InstanceScheduling',
'InstanceSchedulingNodeAffinity',
'InstanceScratchDisk',
'InstanceServiceAccount',
'InstanceShieldedInstanceConfig',
'InstanceTemplateConfidentialInstanceConfig',
'InstanceTemplateDisk',
'InstanceTemplateDiskDiskEncryptionKey',
'InstanceTemplateGuestAccelerator',
'InstanceTemplateNetworkInterface',
'InstanceTemplateNetworkInterfaceAccessConfig',
'InstanceTemplateNetworkInterfaceAliasIpRange',
'InstanceTemplateScheduling',
'InstanceTemplateSchedulingNodeAffinity',
'InstanceTemplateServiceAccount',
'InstanceTemplateShieldedInstanceConfig',
'InterconnectAttachmentPrivateInterconnectInfo',
'ManagedSslCertificateManaged',
'MangedSslCertificateManaged',
'NodeGroupAutoscalingPolicy',
'NodeTemplateNodeTypeFlexibility',
'NodeTemplateServerBinding',
'OrganizationSecurityPolicyRuleMatch',
'OrganizationSecurityPolicyRuleMatchConfig',
'OrganizationSecurityPolicyRuleMatchConfigLayer4Config',
'PacketMirroringCollectorIlb',
'PacketMirroringFilter',
'PacketMirroringMirroredResources',
'PacketMirroringMirroredResourcesInstance',
'PacketMirroringMirroredResourcesSubnetwork',
'PacketMirroringNetwork',
'PerInstanceConfigPreservedState',
'PerInstanceConfigPreservedStateDisk',
'RegionAutoscalerAutoscalingPolicy',
'RegionAutoscalerAutoscalingPolicyCpuUtilization',
'RegionAutoscalerAutoscalingPolicyLoadBalancingUtilization',
'RegionAutoscalerAutoscalingPolicyMetric',
'RegionAutoscalerAutoscalingPolicyScaleDownControl',
'RegionAutoscalerAutoscalingPolicyScaleDownControlMaxScaledDownReplicas',
'RegionBackendServiceBackend',
'RegionBackendServiceCircuitBreakers',
'RegionBackendServiceCircuitBreakersConnectTimeout',
'RegionBackendServiceConsistentHash',
'RegionBackendServiceConsistentHashHttpCookie',
'RegionBackendServiceConsistentHashHttpCookieTtl',
'RegionBackendServiceFailoverPolicy',
'RegionBackendServiceLogConfig',
'RegionBackendServiceOutlierDetection',
'RegionBackendServiceOutlierDetectionBaseEjectionTime',
'RegionBackendServiceOutlierDetectionInterval',
'RegionDiskDiskEncryptionKey',
'RegionDiskIamBindingCondition',
'RegionDiskIamMemberCondition',
'RegionDiskSourceSnapshotEncryptionKey',
'RegionHealthCheckGrpcHealthCheck',
'RegionHealthCheckHttp2HealthCheck',
'RegionHealthCheckHttpHealthCheck',
'RegionHealthCheckHttpsHealthCheck',
'RegionHealthCheckLogConfig',
'RegionHealthCheckSslHealthCheck',
'RegionHealthCheckTcpHealthCheck',
'RegionInstanceGroupManagerAutoHealingPolicies',
'RegionInstanceGroupManagerNamedPort',
'RegionInstanceGroupManagerStatefulDisk',
'RegionInstanceGroupManagerUpdatePolicy',
'RegionInstanceGroupManagerVersion',
'RegionInstanceGroupManagerVersionTargetSize',
'RegionNetworkEndpointGroupAppEngine',
'RegionNetworkEndpointGroupCloudFunction',
'RegionNetworkEndpointGroupCloudRun',
'RegionPerInstanceConfigPreservedState',
'RegionPerInstanceConfigPreservedStateDisk',
'RegionUrlMapDefaultUrlRedirect',
'RegionUrlMapHostRule',
'RegionUrlMapPathMatcher',
'RegionUrlMapPathMatcherDefaultUrlRedirect',
'RegionUrlMapPathMatcherPathRule',
'RegionUrlMapPathMatcherPathRuleRouteAction',
'RegionUrlMapPathMatcherPathRuleRouteActionCorsPolicy',
'RegionUrlMapPathMatcherPathRuleRouteActionFaultInjectionPolicy',
'RegionUrlMapPathMatcherPathRuleRouteActionFaultInjectionPolicyAbort',
'RegionUrlMapPathMatcherPathRuleRouteActionFaultInjectionPolicyDelay',
'RegionUrlMapPathMatcherPathRuleRouteActionFaultInjectionPolicyDelayFixedDelay',
'RegionUrlMapPathMatcherPathRuleRouteActionRequestMirrorPolicy',
'RegionUrlMapPathMatcherPathRuleRouteActionRetryPolicy',
'RegionUrlMapPathMatcherPathRuleRouteActionRetryPolicyPerTryTimeout',
'RegionUrlMapPathMatcherPathRuleRouteActionTimeout',
'RegionUrlMapPathMatcherPathRuleRouteActionUrlRewrite',
'RegionUrlMapPathMatcherPathRuleRouteActionWeightedBackendService',
'RegionUrlMapPathMatcherPathRuleRouteActionWeightedBackendServiceHeaderAction',
'RegionUrlMapPathMatcherPathRuleRouteActionWeightedBackendServiceHeaderActionRequestHeadersToAdd',
'RegionUrlMapPathMatcherPathRuleRouteActionWeightedBackendServiceHeaderActionResponseHeadersToAdd',
'RegionUrlMapPathMatcherPathRuleUrlRedirect',
'RegionUrlMapPathMatcherRouteRule',
'RegionUrlMapPathMatcherRouteRuleHeaderAction',
'RegionUrlMapPathMatcherRouteRuleHeaderActionRequestHeadersToAdd',
'RegionUrlMapPathMatcherRouteRuleHeaderActionResponseHeadersToAdd',
'RegionUrlMapPathMatcherRouteRuleMatchRule',
'RegionUrlMapPathMatcherRouteRuleMatchRuleHeaderMatch',
'RegionUrlMapPathMatcherRouteRuleMatchRuleHeaderMatchRangeMatch',
'RegionUrlMapPathMatcherRouteRuleMatchRuleMetadataFilter',
'RegionUrlMapPathMatcherRouteRuleMatchRuleMetadataFilterFilterLabel',
'RegionUrlMapPathMatcherRouteRuleMatchRuleQueryParameterMatch',
'RegionUrlMapPathMatcherRouteRuleRouteAction',
'RegionUrlMapPathMatcherRouteRuleRouteActionCorsPolicy',
'RegionUrlMapPathMatcherRouteRuleRouteActionFaultInjectionPolicy',
'RegionUrlMapPathMatcherRouteRuleRouteActionFaultInjectionPolicyAbort',
'RegionUrlMapPathMatcherRouteRuleRouteActionFaultInjectionPolicyDelay',
'RegionUrlMapPathMatcherRouteRuleRouteActionFaultInjectionPolicyDelayFixedDelay',
'RegionUrlMapPathMatcherRouteRuleRouteActionRequestMirrorPolicy',
'RegionUrlMapPathMatcherRouteRuleRouteActionRetryPolicy',
'RegionUrlMapPathMatcherRouteRuleRouteActionRetryPolicyPerTryTimeout',
'RegionUrlMapPathMatcherRouteRuleRouteActionTimeout',
'RegionUrlMapPathMatcherRouteRuleRouteActionUrlRewrite',
'RegionUrlMapPathMatcherRouteRuleRouteActionWeightedBackendService',
'RegionUrlMapPathMatcherRouteRuleRouteActionWeightedBackendServiceHeaderAction',
'RegionUrlMapPathMatcherRouteRuleRouteActionWeightedBackendServiceHeaderActionRequestHeadersToAdd',
'RegionUrlMapPathMatcherRouteRuleRouteActionWeightedBackendServiceHeaderActionResponseHeadersToAdd',
'RegionUrlMapPathMatcherRouteRuleUrlRedirect',
'RegionUrlMapTest',
'ReservationSpecificReservation',
'ReservationSpecificReservationInstanceProperties',
'ReservationSpecificReservationInstancePropertiesGuestAccelerator',
'ReservationSpecificReservationInstancePropertiesLocalSsd',
'ResourcePolicyGroupPlacementPolicy',
'ResourcePolicySnapshotSchedulePolicy',
'ResourcePolicySnapshotSchedulePolicyRetentionPolicy',
'ResourcePolicySnapshotSchedulePolicySchedule',
'ResourcePolicySnapshotSchedulePolicyScheduleDailySchedule',
'ResourcePolicySnapshotSchedulePolicyScheduleHourlySchedule',
'ResourcePolicySnapshotSchedulePolicyScheduleWeeklySchedule',
'ResourcePolicySnapshotSchedulePolicyScheduleWeeklyScheduleDayOfWeek',
'ResourcePolicySnapshotSchedulePolicySnapshotProperties',
'RouterBgp',
'RouterBgpAdvertisedIpRange',
'RouterNatLogConfig',
'RouterNatSubnetwork',
'RouterPeerAdvertisedIpRange',
'SecurityPolicyRule',
'SecurityPolicyRuleMatch',
'SecurityPolicyRuleMatchConfig',
'SecurityPolicyRuleMatchExpr',
'SecurityScanConfigAuthentication',
'SecurityScanConfigAuthenticationCustomAccount',
'SecurityScanConfigAuthenticationGoogleAccount',
'SecurityScanConfigSchedule',
'SnapshotSnapshotEncryptionKey',
'SnapshotSourceDiskEncryptionKey',
'SubnetworkIAMBindingCondition',
'SubnetworkIAMMemberCondition',
'SubnetworkLogConfig',
'SubnetworkSecondaryIpRange',
'URLMapDefaultRouteAction',
'URLMapDefaultRouteActionCorsPolicy',
'URLMapDefaultRouteActionFaultInjectionPolicy',
'URLMapDefaultRouteActionFaultInjectionPolicyAbort',
'URLMapDefaultRouteActionFaultInjectionPolicyDelay',
'URLMapDefaultRouteActionFaultInjectionPolicyDelayFixedDelay',
'URLMapDefaultRouteActionRequestMirrorPolicy',
'URLMapDefaultRouteActionRetryPolicy',
'URLMapDefaultRouteActionRetryPolicyPerTryTimeout',
'URLMapDefaultRouteActionTimeout',
'URLMapDefaultRouteActionUrlRewrite',
'URLMapDefaultRouteActionWeightedBackendService',
'URLMapDefaultRouteActionWeightedBackendServiceHeaderAction',
'URLMapDefaultRouteActionWeightedBackendServiceHeaderActionRequestHeadersToAdd',
'URLMapDefaultRouteActionWeightedBackendServiceHeaderActionResponseHeadersToAdd',
'URLMapDefaultUrlRedirect',
'URLMapHeaderAction',
'URLMapHeaderActionRequestHeadersToAdd',
'URLMapHeaderActionResponseHeadersToAdd',
'URLMapHostRule',
'URLMapPathMatcher',
'URLMapPathMatcherDefaultRouteAction',
'URLMapPathMatcherDefaultRouteActionCorsPolicy',
'URLMapPathMatcherDefaultRouteActionFaultInjectionPolicy',
'URLMapPathMatcherDefaultRouteActionFaultInjectionPolicyAbort',
'URLMapPathMatcherDefaultRouteActionFaultInjectionPolicyDelay',
'URLMapPathMatcherDefaultRouteActionFaultInjectionPolicyDelayFixedDelay',
'URLMapPathMatcherDefaultRouteActionRequestMirrorPolicy',
'URLMapPathMatcherDefaultRouteActionRetryPolicy',
'URLMapPathMatcherDefaultRouteActionRetryPolicyPerTryTimeout',
'URLMapPathMatcherDefaultRouteActionTimeout',
'URLMapPathMatcherDefaultRouteActionUrlRewrite',
'URLMapPathMatcherDefaultRouteActionWeightedBackendService',
'URLMapPathMatcherDefaultRouteActionWeightedBackendServiceHeaderAction',
'URLMapPathMatcherDefaultRouteActionWeightedBackendServiceHeaderActionRequestHeadersToAdd',
'URLMapPathMatcherDefaultRouteActionWeightedBackendServiceHeaderActionResponseHeadersToAdd',
'URLMapPathMatcherDefaultUrlRedirect',
'URLMapPathMatcherHeaderAction',
'URLMapPathMatcherHeaderActionRequestHeadersToAdd',
'URLMapPathMatcherHeaderActionResponseHeadersToAdd',
'URLMapPathMatcherPathRule',
'URLMapPathMatcherPathRuleRouteAction',
'URLMapPathMatcherPathRuleRouteActionCorsPolicy',
'URLMapPathMatcherPathRuleRouteActionFaultInjectionPolicy',
'URLMapPathMatcherPathRuleRouteActionFaultInjectionPolicyAbort',
'URLMapPathMatcherPathRuleRouteActionFaultInjectionPolicyDelay',
'URLMapPathMatcherPathRuleRouteActionFaultInjectionPolicyDelayFixedDelay',
'URLMapPathMatcherPathRuleRouteActionRequestMirrorPolicy',
'URLMapPathMatcherPathRuleRouteActionRetryPolicy',
'URLMapPathMatcherPathRuleRouteActionRetryPolicyPerTryTimeout',
'URLMapPathMatcherPathRuleRouteActionTimeout',
'URLMapPathMatcherPathRuleRouteActionUrlRewrite',
'URLMapPathMatcherPathRuleRouteActionWeightedBackendService',
'URLMapPathMatcherPathRuleRouteActionWeightedBackendServiceHeaderAction',
'URLMapPathMatcherPathRuleRouteActionWeightedBackendServiceHeaderActionRequestHeadersToAdd',
'URLMapPathMatcherPathRuleRouteActionWeightedBackendServiceHeaderActionResponseHeadersToAdd',
'URLMapPathMatcherPathRuleUrlRedirect',
'URLMapPathMatcherRouteRule',
'URLMapPathMatcherRouteRuleHeaderAction',
'URLMapPathMatcherRouteRuleHeaderActionRequestHeadersToAdd',
'URLMapPathMatcherRouteRuleHeaderActionResponseHeadersToAdd',
'URLMapPathMatcherRouteRuleMatchRule',
'URLMapPathMatcherRouteRuleMatchRuleHeaderMatch',
'URLMapPathMatcherRouteRuleMatchRuleHeaderMatchRangeMatch',
'URLMapPathMatcherRouteRuleMatchRuleMetadataFilter',
'URLMapPathMatcherRouteRuleMatchRuleMetadataFilterFilterLabel',
'URLMapPathMatcherRouteRuleMatchRuleQueryParameterMatch',
'URLMapPathMatcherRouteRuleRouteAction',
'URLMapPathMatcherRouteRuleRouteActionCorsPolicy',
'URLMapPathMatcherRouteRuleRouteActionFaultInjectionPolicy',
'URLMapPathMatcherRouteRuleRouteActionFaultInjectionPolicyAbort',
'URLMapPathMatcherRouteRuleRouteActionFaultInjectionPolicyDelay',
'URLMapPathMatcherRouteRuleRouteActionFaultInjectionPolicyDelayFixedDelay',
'URLMapPathMatcherRouteRuleRouteActionRequestMirrorPolicy',
'URLMapPathMatcherRouteRuleRouteActionRetryPolicy',
'URLMapPathMatcherRouteRuleRouteActionRetryPolicyPerTryTimeout',
'URLMapPathMatcherRouteRuleRouteActionTimeout',
'URLMapPathMatcherRouteRuleRouteActionUrlRewrite',
'URLMapPathMatcherRouteRuleRouteActionWeightedBackendService',
'URLMapPathMatcherRouteRuleRouteActionWeightedBackendServiceHeaderAction',
'URLMapPathMatcherRouteRuleRouteActionWeightedBackendServiceHeaderActionRequestHeadersToAdd',
'URLMapPathMatcherRouteRuleRouteActionWeightedBackendServiceHeaderActionResponseHeadersToAdd',
'URLMapPathMatcherRouteRuleUrlRedirect',
'URLMapTest',
'GetBackendBucketCdnPolicyResult',
'GetBackendServiceBackendResult',
'GetBackendServiceCdnPolicyResult',
'GetBackendServiceCdnPolicyCacheKeyPolicyResult',
'GetBackendServiceCircuitBreakerResult',
'GetBackendServiceCircuitBreakerConnectTimeoutResult',
'GetBackendServiceConsistentHashResult',
'GetBackendServiceConsistentHashHttpCookyResult',
'GetBackendServiceConsistentHashHttpCookyTtlResult',
'GetBackendServiceIapResult',
'GetBackendServiceLogConfigResult',
'GetBackendServiceOutlierDetectionResult',
'GetBackendServiceOutlierDetectionBaseEjectionTimeResult',
'GetBackendServiceOutlierDetectionIntervalResult',
'GetInstanceAttachedDiskResult',
'GetInstanceBootDiskResult',
'GetInstanceBootDiskInitializeParamResult',
'GetInstanceConfidentialInstanceConfigResult',
'GetInstanceGroupNamedPortResult',
'GetInstanceGuestAcceleratorResult',
'GetInstanceNetworkInterfaceResult',
'GetInstanceNetworkInterfaceAccessConfigResult',
'GetInstanceNetworkInterfaceAliasIpRangeResult',
'GetInstanceSchedulingResult',
'GetInstanceSchedulingNodeAffinityResult',
'GetInstanceScratchDiskResult',
'GetInstanceServiceAccountResult',
'GetInstanceShieldedInstanceConfigResult',
'GetRegionInstanceGroupInstanceResult',
'GetRegionInstanceGroupInstanceNamedPortResult',
'GetRouterBgpResult',
'GetRouterBgpAdvertisedIpRangeResult',
'GetSubnetworkSecondaryIpRangeResult',
]
@pulumi.output_type
class AutoscalarAutoscalingPolicy(dict):
    """Autoscaling policy attached to a Compute Engine autoscalar resource.

    Carries the replica bounds plus the optional scaling signals
    (CPU utilization, load-balancer utilization, custom metrics) and the
    scale-down control settings.
    """
    def __init__(__self__, *,
                 max_replicas: float,
                 min_replicas: float,
                 cooldown_period: Optional[float] = None,
                 cpu_utilization: Optional['outputs.AutoscalarAutoscalingPolicyCpuUtilization'] = None,
                 load_balancing_utilization: Optional['outputs.AutoscalarAutoscalingPolicyLoadBalancingUtilization'] = None,
                 metrics: Optional[List['outputs.AutoscalarAutoscalingPolicyMetric']] = None,
                 mode: Optional[str] = None,
                 scale_down_control: Optional['outputs.AutoscalarAutoscalingPolicyScaleDownControl'] = None):
        """
        :param float max_replicas: Upper bound on instances the autoscaler may
               scale up to; must not be lower than ``min_replicas``.
        :param float min_replicas: Lower bound on instances the autoscaler may
               scale down to; cannot be less than 0.
        :param float cooldown_period: Seconds the autoscaler waits before it
               starts collecting usage from a new instance (default 60).
        :param 'AutoscalarAutoscalingPolicyCpuUtilizationArgs' cpu_utilization:
               CPU-utilization scaling signal. Structure is documented below.
        :param 'AutoscalarAutoscalingPolicyLoadBalancingUtilizationArgs' load_balancing_utilization:
               Load-balancer-based scaling signal. Structure is documented below.
        :param List['AutoscalarAutoscalingPolicyMetricArgs'] metrics:
               Custom-metric scaling signals. Structure is documented below.
        :param str mode: Operating mode; one of ``OFF``, ``ONLY_UP``, ``ON``
               (default ``ON``).
        :param 'AutoscalarAutoscalingPolicyScaleDownControlArgs' scale_down_control:
               Limits on abrupt scale-in events. Structure is documented below.
        """
        # Required fields are always stored.
        pulumi.set(__self__, "max_replicas", max_replicas)
        pulumi.set(__self__, "min_replicas", min_replicas)
        # Optional fields are stored only when explicitly provided.
        for key, value in (
                ("cooldown_period", cooldown_period),
                ("cpu_utilization", cpu_utilization),
                ("load_balancing_utilization", load_balancing_utilization),
                ("metrics", metrics),
                ("mode", mode),
                ("scale_down_control", scale_down_control)):
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="maxReplicas")
    def max_replicas(self) -> float:
        """Upper bound on instances the autoscaler may scale up to."""
        return pulumi.get(self, "max_replicas")

    @property
    @pulumi.getter(name="minReplicas")
    def min_replicas(self) -> float:
        """Lower bound on instances the autoscaler may scale down to."""
        return pulumi.get(self, "min_replicas")

    @property
    @pulumi.getter(name="cooldownPeriod")
    def cooldown_period(self) -> Optional[float]:
        """Seconds to wait before sampling usage from a new instance (default 60)."""
        return pulumi.get(self, "cooldown_period")

    @property
    @pulumi.getter(name="cpuUtilization")
    def cpu_utilization(self) -> Optional['outputs.AutoscalarAutoscalingPolicyCpuUtilization']:
        """CPU-utilization scaling signal, if configured."""
        return pulumi.get(self, "cpu_utilization")

    @property
    @pulumi.getter(name="loadBalancingUtilization")
    def load_balancing_utilization(self) -> Optional['outputs.AutoscalarAutoscalingPolicyLoadBalancingUtilization']:
        """Load-balancer-based scaling signal, if configured."""
        return pulumi.get(self, "load_balancing_utilization")

    @property
    @pulumi.getter
    def metrics(self) -> Optional[List['outputs.AutoscalarAutoscalingPolicyMetric']]:
        """Custom-metric scaling signals, if configured."""
        return pulumi.get(self, "metrics")

    @property
    @pulumi.getter
    def mode(self) -> Optional[str]:
        """Operating mode: ``OFF``, ``ONLY_UP``, or ``ON`` (the default)."""
        return pulumi.get(self, "mode")

    @property
    @pulumi.getter(name="scaleDownControl")
    def scale_down_control(self) -> Optional['outputs.AutoscalarAutoscalingPolicyScaleDownControl']:
        """Scale-in limiting configuration, if configured."""
        return pulumi.get(self, "scale_down_control")

    def _translate_property(self, prop):
        # Map a camelCase key to snake_case; fall back to the key itself.
        mapped = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return mapped if mapped else prop
@pulumi.output_type
class AutoscalarAutoscalingPolicyCpuUtilization(dict):
    """Target CPU-utilization signal for an autoscaling policy."""
    def __init__(__self__, *,
                 target: float):
        """
        :param float target: Fraction of backend capacity utilization (from
               the HTTP(s) load balancing configuration) the autoscaler
               should maintain; must be positive (default 0.8).
        """
        pulumi.set(__self__, "target", target)

    @property
    @pulumi.getter
    def target(self) -> float:
        """Utilization fraction the autoscaler maintains (default 0.8)."""
        return pulumi.get(self, "target")

    def _translate_property(self, prop):
        # Map a camelCase key to snake_case; fall back to the key itself.
        mapped = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return mapped if mapped else prop
@pulumi.output_type
class AutoscalarAutoscalingPolicyLoadBalancingUtilization(dict):
    """Load-balancer-based utilization signal for an autoscaling policy."""
    def __init__(__self__, *,
                 target: float):
        """
        :param float target: Fraction of backend capacity utilization (from
               the HTTP(s) load balancing configuration) the autoscaler
               should maintain; must be positive (default 0.8).
        """
        pulumi.set(__self__, "target", target)

    @property
    @pulumi.getter
    def target(self) -> float:
        """Utilization fraction the autoscaler maintains (default 0.8)."""
        return pulumi.get(self, "target")

    def _translate_property(self, prop):
        # Map a camelCase key to snake_case; fall back to the key itself.
        mapped = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return mapped if mapped else prop
@pulumi.output_type
class AutoscalarAutoscalingPolicyMetric(dict):
def __init__(__self__, *,
name: str,
filter: Optional[str] = None,
single_instance_assignment: Optional[float] = None,
target: Optional[float] = None,
type: Optional[str] = None):
"""
:param str name: The identifier (type) of the Stackdriver Monitoring metric.
The metric cannot have negative values.
The metric must have a value type of INT64 or DOUBLE.
:param str filter: A filter string to be used as the filter string for
a Stackdriver Monitoring TimeSeries.list API call.
This filter is used to select a specific TimeSeries for
the purpose of autoscaling and to determine whether the metric
is exporting per-instance or per-group data.
You can only use the AND operator for joining selectors.
You can only use direct equality comparison operator (=) without
any functions for each selector.
You can specify the metric in both the filter string and in the
metric field. However, if specified in both places, the metric must
be identical.
The monitored resource type determines what kind of values are
expected for the metric. If it is a gce_instance, the autoscaler
expects the metric to include a separate TimeSeries for each
instance in a group. In such a case, you cannot filter on resource
labels.
If the resource type is any other value, the autoscaler expects
this metric to contain values that apply to the entire autoscaled
instance group and resource label filtering can be performed to
point autoscaler at the correct TimeSeries to scale upon.
This is called a per-group metric for the purpose of autoscaling.
If not specified, the type defaults to gce_instance.
You should provide a filter that is selective enough to pick just
one TimeSeries for the autoscaled group or for each of the instances
(if you are using gce_instance resource type). If multiple
TimeSeries are returned upon the query execution, the autoscaler
will sum their respective values to obtain its scaling value.
:param float single_instance_assignment: If scaling is based on a per-group metric value that represents the
total amount of work to be done or resource usage, set this value to
an amount assigned for a single instance of the scaled group.
The autoscaler will keep the number of instances proportional to the
value of this metric, the metric itself should not change value due
to group resizing.
For example, a good metric to use with the target is
`pubsub.googleapis.com/subscription/num_undelivered_messages`
or a custom metric exporting the total number of requests coming to
your instances.
A bad example would be a metric exporting an average or median
latency, since this value can't include a chunk assignable to a
single instance, it could be better used with utilization_target
instead.
:param float target: Fraction of backend capacity utilization (set in HTTP(s) load
balancing configuration) that autoscaler should maintain. Must
be a positive float value. If not defined, the default is 0.8.
:param str type: Defines how target utilization value is expressed for a
Stackdriver Monitoring metric.
Possible values are `GAUGE`, `DELTA_PER_SECOND`, and `DELTA_PER_MINUTE`.
"""
pulumi.set(__self__, "name", name)
if filter is not None:
pulumi.set(__self__, "filter", filter)
if single_instance_assignment is not None:
pulumi.set(__self__, "single_instance_assignment", single_instance_assignment)
if target is not None:
pulumi.set(__self__, "target", target)
if type is not None:
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def name(self) -> str:
"""
The identifier (type) of the Stackdriver Monitoring metric.
The metric cannot have negative values.
The metric must have a value type of INT64 or DOUBLE.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def filter(self) -> Optional[str]:
"""
A filter string to be used as the filter string for
a Stackdriver Monitoring TimeSeries.list API call.
This filter is used to select a specific TimeSeries for
the purpose of autoscaling and to determine whether the metric
is exporting per-instance or per-group data.
You can only use the AND operator for joining selectors.
You can only use direct equality comparison operator (=) without
any functions for each selector.
You can specify the metric in both the filter string and in the
metric field. However, if specified in both places, the metric must
be identical.
The monitored resource type determines what kind of values are
expected for the metric. If it is a gce_instance, the autoscaler
expects the metric to include a separate TimeSeries for each
instance in a group. In such a case, you cannot filter on resource
labels.
If the resource type is any other value, the autoscaler expects
this metric to contain values that apply to the entire autoscaled
instance group and resource label filtering can be performed to
point autoscaler at the correct TimeSeries to scale upon.
This is called a per-group metric for the purpose of autoscaling.
If not specified, the type defaults to gce_instance.
You should provide a filter that is selective enough to pick just
one TimeSeries for the autoscaled group or for each of the instances
(if you are using gce_instance resource type). If multiple
TimeSeries are returned upon the query execution, the autoscaler
will sum their respective values to obtain its scaling value.
"""
return pulumi.get(self, "filter")
@property
@pulumi.getter(name="singleInstanceAssignment")
def single_instance_assignment(self) -> Optional[float]:
"""
If scaling is based on a per-group metric value that represents the
total amount of work to be done or resource usage, set this value to
an amount assigned for a single instance of the scaled group.
The autoscaler will keep the number of instances proportional to the
value of this metric, the metric itself should not change value due
to group resizing.
For example, a good metric to use with the target is
`pubsub.googleapis.com/subscription/num_undelivered_messages`
or a custom metric exporting the total number of requests coming to
your instances.
A bad example would be a metric exporting an average or median
latency, since this value can't include a chunk assignable to a
single instance, it could be better used with utilization_target
instead.
"""
return pulumi.get(self, "single_instance_assignment")
    @property
    @pulumi.getter
    def target(self) -> Optional[float]:
        """
        Fraction of backend capacity utilization (set in HTTP(s) load
        balancing configuration) that the autoscaler should maintain. Must
        be a positive float value. If not defined, the default is 0.8.
        """
        return pulumi.get(self, "target")
    @property
    @pulumi.getter
    def type(self) -> Optional[str]:
        """
        Defines how the target utilization value is expressed for a
        Stackdriver Monitoring metric.
        Possible values are `GAUGE`, `DELTA_PER_SECOND`, and `DELTA_PER_MINUTE`.
        """
        return pulumi.get(self, "type")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AutoscalarAutoscalingPolicyScaleDownControl(dict):
    def __init__(__self__, *,
                 max_scaled_down_replicas: Optional['outputs.AutoscalarAutoscalingPolicyScaleDownControlMaxScaledDownReplicas'] = None,
                 time_window_sec: Optional[float] = None):
        """
        :param 'AutoscalarAutoscalingPolicyScaleDownControlMaxScaledDownReplicasArgs' max_scaled_down_replicas: A nested object resource.
               Structure is documented below.
        :param float time_window_sec: How far back autoscaling should look when computing
               recommendations to include directives regarding slower scale down.
        """
        # Persist only the fields the caller actually supplied, in
        # declaration order (matches the generated one-if-per-field form).
        for key, value in (
                ("max_scaled_down_replicas", max_scaled_down_replicas),
                ("time_window_sec", time_window_sec),
        ):
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="maxScaledDownReplicas")
    def max_scaled_down_replicas(self) -> Optional['outputs.AutoscalarAutoscalingPolicyScaleDownControlMaxScaledDownReplicas']:
        """A nested object resource. Structure is documented below."""
        return pulumi.get(self, "max_scaled_down_replicas")

    @property
    @pulumi.getter(name="timeWindowSec")
    def time_window_sec(self) -> Optional[float]:
        """How far back autoscaling should look when computing recommendations
        to include directives regarding slower scale down."""
        return pulumi.get(self, "time_window_sec")

    def _translate_property(self, prop):
        # camelCase -> snake_case; unknown names pass through unchanged.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class AutoscalarAutoscalingPolicyScaleDownControlMaxScaledDownReplicas(dict):
    def __init__(__self__, *,
                 fixed: Optional[float] = None,
                 percent: Optional[float] = None):
        """
        :param float fixed: A fixed number of VM instances; must be a
               positive integer.
        :param float percent: A percentage of instances between 0 and 100,
               inclusive (for example, specify 80 for 80%).
        """
        # Persist only the fields the caller actually supplied.
        for key, value in (("fixed", fixed), ("percent", percent)):
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter
    def fixed(self) -> Optional[float]:
        """A fixed number of VM instances; must be a positive integer."""
        return pulumi.get(self, "fixed")

    @property
    @pulumi.getter
    def percent(self) -> Optional[float]:
        """A percentage of instances between 0 and 100, inclusive
        (for example, 80 for 80%)."""
        return pulumi.get(self, "percent")

    def _translate_property(self, prop):
        # camelCase -> snake_case; unknown names pass through unchanged.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class AutoscalerAutoscalingPolicy(dict):
    def __init__(__self__, *,
                 max_replicas: float,
                 min_replicas: float,
                 cooldown_period: Optional[float] = None,
                 cpu_utilization: Optional['outputs.AutoscalerAutoscalingPolicyCpuUtilization'] = None,
                 load_balancing_utilization: Optional['outputs.AutoscalerAutoscalingPolicyLoadBalancingUtilization'] = None,
                 metrics: Optional[List['outputs.AutoscalerAutoscalingPolicyMetric']] = None,
                 mode: Optional[str] = None,
                 scale_down_control: Optional['outputs.AutoscalerAutoscalingPolicyScaleDownControl'] = None):
        """
        :param float max_replicas: Maximum number of instances the autoscaler can
               scale up to; required when creating or updating an autoscaler and
               must not be lower than the minimum number of replicas.
        :param float min_replicas: Minimum number of replicas the autoscaler can
               scale down to; cannot be less than 0. If not provided, the
               autoscaler chooses a default based on the maximum allowed.
        :param float cooldown_period: Seconds the autoscaler waits before it starts
               collecting information from a new instance, so that data gathered
               while the instance is initializing is not used (default 60).
               Test how long your instances take to initialize and size this
               accordingly.
        :param 'AutoscalerAutoscalingPolicyCpuUtilizationArgs' cpu_utilization: CPU utilization policy that lets the
               autoscaler scale on the managed instance group's average CPU
               utilization. Structure is documented below.
        :param 'AutoscalerAutoscalingPolicyLoadBalancingUtilizationArgs' load_balancing_utilization: Configuration for
               autoscaling based on a load balancer. Structure is documented below.
        :param List['AutoscalerAutoscalingPolicyMetricArgs'] metrics: Configuration for autoscaling based on a
               custom metric. Structure is documented below.
        :param str mode: Operating mode for this policy. Default value is `ON`.
               Possible values are `OFF`, `ONLY_UP`, and `ON`.
        :param 'AutoscalerAutoscalingPolicyScaleDownControlArgs' scale_down_control: Scale-down controls that reduce the
               risk of response latency and outages from abrupt scale-in events.
               Structure is documented below.
        """
        # Required fields are always stored.
        pulumi.set(__self__, "max_replicas", max_replicas)
        pulumi.set(__self__, "min_replicas", min_replicas)
        # Optional fields are stored only when supplied, in declaration order.
        for key, value in (
                ("cooldown_period", cooldown_period),
                ("cpu_utilization", cpu_utilization),
                ("load_balancing_utilization", load_balancing_utilization),
                ("metrics", metrics),
                ("mode", mode),
                ("scale_down_control", scale_down_control),
        ):
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="maxReplicas")
    def max_replicas(self) -> float:
        """Maximum number of instances the autoscaler can scale up to; must
        not be lower than the minimum number of replicas."""
        return pulumi.get(self, "max_replicas")

    @property
    @pulumi.getter(name="minReplicas")
    def min_replicas(self) -> float:
        """Minimum number of replicas the autoscaler can scale down to;
        cannot be less than 0. A default is chosen from the maximum when
        not provided."""
        return pulumi.get(self, "min_replicas")

    @property
    @pulumi.getter(name="cooldownPeriod")
    def cooldown_period(self) -> Optional[float]:
        """Seconds the autoscaler waits before collecting information from a
        new instance, so data gathered during initialization is not used
        (default 60)."""
        return pulumi.get(self, "cooldown_period")

    @property
    @pulumi.getter(name="cpuUtilization")
    def cpu_utilization(self) -> Optional['outputs.AutoscalerAutoscalingPolicyCpuUtilization']:
        """CPU utilization policy for scaling on the group's average CPU
        utilization. Structure is documented below."""
        return pulumi.get(self, "cpu_utilization")

    @property
    @pulumi.getter(name="loadBalancingUtilization")
    def load_balancing_utilization(self) -> Optional['outputs.AutoscalerAutoscalingPolicyLoadBalancingUtilization']:
        """Configuration for autoscaling based on a load balancer.
        Structure is documented below."""
        return pulumi.get(self, "load_balancing_utilization")

    @property
    @pulumi.getter
    def metrics(self) -> Optional[List['outputs.AutoscalerAutoscalingPolicyMetric']]:
        """Configuration for autoscaling based on a custom metric.
        Structure is documented below."""
        return pulumi.get(self, "metrics")

    @property
    @pulumi.getter
    def mode(self) -> Optional[str]:
        """Operating mode for this policy. Default value is `ON`.
        Possible values are `OFF`, `ONLY_UP`, and `ON`."""
        return pulumi.get(self, "mode")

    @property
    @pulumi.getter(name="scaleDownControl")
    def scale_down_control(self) -> Optional['outputs.AutoscalerAutoscalingPolicyScaleDownControl']:
        """Scale-down controls that reduce the risk of response latency and
        outages from abrupt scale-in events. Structure is documented below."""
        return pulumi.get(self, "scale_down_control")

    def _translate_property(self, prop):
        # camelCase -> snake_case; unknown names pass through unchanged.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class AutoscalerAutoscalingPolicyCpuUtilization(dict):
    def __init__(__self__, *,
                 target: float):
        """
        :param float target: Fraction of backend capacity utilization (set in
               HTTP(s) load balancing configuration) that the autoscaler should
               maintain. Must be a positive float; the default is 0.8 when
               not defined.
        """
        pulumi.set(__self__, "target", target)

    @property
    @pulumi.getter
    def target(self) -> float:
        """Fraction of backend capacity utilization (from the HTTP(s) load
        balancing configuration) the autoscaler should maintain; a positive
        float, defaulting to 0.8 when not defined."""
        return pulumi.get(self, "target")

    def _translate_property(self, prop):
        # camelCase -> snake_case; unknown names pass through unchanged.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class AutoscalerAutoscalingPolicyLoadBalancingUtilization(dict):
    def __init__(__self__, *,
                 target: float):
        """
        :param float target: Fraction of backend capacity utilization (set in
               HTTP(s) load balancing configuration) that the autoscaler should
               maintain. Must be a positive float; the default is 0.8 when
               not defined.
        """
        pulumi.set(__self__, "target", target)

    @property
    @pulumi.getter
    def target(self) -> float:
        """Fraction of backend capacity utilization (from the HTTP(s) load
        balancing configuration) the autoscaler should maintain; a positive
        float, defaulting to 0.8 when not defined."""
        return pulumi.get(self, "target")

    def _translate_property(self, prop):
        # camelCase -> snake_case; unknown names pass through unchanged.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class AutoscalerAutoscalingPolicyMetric(dict):
    def __init__(__self__, *,
                 name: str,
                 filter: Optional[str] = None,
                 single_instance_assignment: Optional[float] = None,
                 target: Optional[float] = None,
                 type: Optional[str] = None):
        """
        :param str name: Identifier (type) of the Stackdriver Monitoring metric.
               The metric cannot have negative values and must have a value
               type of INT64 or DOUBLE.
        :param str filter: Filter string for the Stackdriver Monitoring
               TimeSeries.list API call. Selects the specific TimeSeries used
               for autoscaling and determines whether the metric exports
               per-instance or per-group data. Only the AND operator and
               direct equality (=) selectors are allowed. The metric may be
               given in both the filter and the metric field, but then must
               be identical. With a gce_instance resource type the metric
               must include a separate TimeSeries per instance and resource
               labels cannot be filtered on; any other resource type is
               treated as a per-group metric covering the whole group, where
               resource-label filtering may be used. The type defaults to
               gce_instance when unspecified. Provide a filter selective
               enough to pick a single TimeSeries (per group, or per
               instance for gce_instance); if several are returned, their
               values are summed to obtain the scaling value.
        :param float single_instance_assignment: For per-group metrics that
               represent total work or resource usage, the amount assigned to
               a single instance of the scaled group; the autoscaler keeps the
               instance count proportional to the metric value, which should
               not change due to group resizing. A good example is
               `pubsub.googleapis.com/subscription/num_undelivered_messages`
               or a custom metric exporting the total number of requests; an
               average or median latency metric is a poor fit and better used
               with utilization_target.
        :param float target: Fraction of backend capacity utilization (set in
               HTTP(s) load balancing configuration) that the autoscaler should
               maintain; a positive float, defaulting to 0.8 when not defined.
        :param str type: How the target utilization value is expressed for a
               Stackdriver Monitoring metric. Possible values are `GAUGE`,
               `DELTA_PER_SECOND`, and `DELTA_PER_MINUTE`.
        """
        # The metric name is required; everything else is optional.
        pulumi.set(__self__, "name", name)
        for key, value in (
                ("filter", filter),
                ("single_instance_assignment", single_instance_assignment),
                ("target", target),
                ("type", type),
        ):
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter
    def name(self) -> str:
        """Identifier (type) of the Stackdriver Monitoring metric; cannot
        have negative values and must have a value type of INT64 or
        DOUBLE."""
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def filter(self) -> Optional[str]:
        """Filter string for the Stackdriver Monitoring TimeSeries.list API
        call (AND-joined, direct-equality selectors only). Selects the
        TimeSeries used for autoscaling and determines per-instance
        (gce_instance, the default) vs per-group semantics; when multiple
        TimeSeries match, their values are summed to obtain the scaling
        value."""
        return pulumi.get(self, "filter")

    @property
    @pulumi.getter(name="singleInstanceAssignment")
    def single_instance_assignment(self) -> Optional[float]:
        """For per-group metrics representing total work or resource usage,
        the amount assigned to a single instance; the autoscaler keeps the
        instance count proportional to the metric value. A good example is
        `pubsub.googleapis.com/subscription/num_undelivered_messages`;
        average/median latency metrics are a poor fit."""
        return pulumi.get(self, "single_instance_assignment")

    @property
    @pulumi.getter
    def target(self) -> Optional[float]:
        """Fraction of backend capacity utilization (from the HTTP(s) load
        balancing configuration) the autoscaler should maintain; a positive
        float, defaulting to 0.8 when not defined."""
        return pulumi.get(self, "target")

    @property
    @pulumi.getter
    def type(self) -> Optional[str]:
        """How the target utilization value is expressed for a Stackdriver
        Monitoring metric: `GAUGE`, `DELTA_PER_SECOND`, or
        `DELTA_PER_MINUTE`."""
        return pulumi.get(self, "type")

    def _translate_property(self, prop):
        # camelCase -> snake_case; unknown names pass through unchanged.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class AutoscalerAutoscalingPolicyScaleDownControl(dict):
    def __init__(__self__, *,
                 max_scaled_down_replicas: Optional['outputs.AutoscalerAutoscalingPolicyScaleDownControlMaxScaledDownReplicas'] = None,
                 time_window_sec: Optional[float] = None):
        """
        :param 'AutoscalerAutoscalingPolicyScaleDownControlMaxScaledDownReplicasArgs' max_scaled_down_replicas: A nested object resource.
               Structure is documented below.
        :param float time_window_sec: How far back autoscaling should look when computing
               recommendations to include directives regarding slower scale down.
        """
        # Persist only the fields the caller actually supplied, in
        # declaration order (matches the generated one-if-per-field form).
        for key, value in (
                ("max_scaled_down_replicas", max_scaled_down_replicas),
                ("time_window_sec", time_window_sec),
        ):
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="maxScaledDownReplicas")
    def max_scaled_down_replicas(self) -> Optional['outputs.AutoscalerAutoscalingPolicyScaleDownControlMaxScaledDownReplicas']:
        """A nested object resource. Structure is documented below."""
        return pulumi.get(self, "max_scaled_down_replicas")

    @property
    @pulumi.getter(name="timeWindowSec")
    def time_window_sec(self) -> Optional[float]:
        """How far back autoscaling should look when computing recommendations
        to include directives regarding slower scale down."""
        return pulumi.get(self, "time_window_sec")

    def _translate_property(self, prop):
        # camelCase -> snake_case; unknown names pass through unchanged.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class AutoscalerAutoscalingPolicyScaleDownControlMaxScaledDownReplicas(dict):
    def __init__(__self__, *,
                 fixed: Optional[float] = None,
                 percent: Optional[float] = None):
        """
        :param float fixed: A fixed number of VM instances; must be a
               positive integer.
        :param float percent: A percentage of instances between 0 and 100,
               inclusive (for example, specify 80 for 80%).
        """
        # Persist only the fields the caller actually supplied.
        for key, value in (("fixed", fixed), ("percent", percent)):
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter
    def fixed(self) -> Optional[float]:
        """A fixed number of VM instances; must be a positive integer."""
        return pulumi.get(self, "fixed")

    @property
    @pulumi.getter
    def percent(self) -> Optional[float]:
        """A percentage of instances between 0 and 100, inclusive
        (for example, 80 for 80%)."""
        return pulumi.get(self, "percent")

    def _translate_property(self, prop):
        # camelCase -> snake_case; unknown names pass through unchanged.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class BackendBucketCdnPolicy(dict):
    def __init__(__self__, *,
                 signed_url_cache_max_age_sec: float):
        """
        :param float signed_url_cache_max_age_sec: Maximum number of seconds the
               response to a signed URL request will be considered fresh; after
               this period the response is revalidated before being served.
               When serving signed URL requests, Cloud CDN internally behaves
               as though all responses from this backend had a
               "Cache-Control: public, max-age=[TTL]" header, regardless of any
               existing Cache-Control header; the headers actually served are
               not altered.
        """
        pulumi.set(__self__, "signed_url_cache_max_age_sec", signed_url_cache_max_age_sec)

    @property
    @pulumi.getter(name="signedUrlCacheMaxAgeSec")
    def signed_url_cache_max_age_sec(self) -> float:
        """Maximum number of seconds a signed URL response stays fresh before
        revalidation. Cloud CDN internally treats signed URL responses as if
        they carried "Cache-Control: public, max-age=[TTL]", regardless of
        any existing Cache-Control header; served headers are not altered."""
        return pulumi.get(self, "signed_url_cache_max_age_sec")

    def _translate_property(self, prop):
        # camelCase -> snake_case; unknown names pass through unchanged.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class BackendServiceBackend(dict):
    def __init__(__self__, *,
                 group: str,
                 balancing_mode: Optional[str] = None,
                 capacity_scaler: Optional[float] = None,
                 description: Optional[str] = None,
                 max_connections: Optional[float] = None,
                 max_connections_per_endpoint: Optional[float] = None,
                 max_connections_per_instance: Optional[float] = None,
                 max_rate: Optional[float] = None,
                 max_rate_per_endpoint: Optional[float] = None,
                 max_rate_per_instance: Optional[float] = None,
                 max_utilization: Optional[float] = None):
        """
        :param str group: Fully-qualified URL (not a partial URL) of an Instance
               Group or Network Endpoint Group resource. For an instance group
               this is the list of instances serving traffic; member instances
               must live in the same zone as the group, and no two backends of a
               backend service may use the same Instance Group. For a Network
               Endpoint Group this defines the endpoints, which must be hosted
               on instances in the NEG's zone. A backend service cannot mix
               Instance Group and Network Endpoint Group backends.
        :param str balancing_mode: Balancing mode for this backend. For global
               HTTP(S) or TCP/SSL load balancing the default is UTILIZATION;
               valid values are UTILIZATION, RATE (for HTTP(S)) and CONNECTION
               (for TCP/SSL). Default value is `UTILIZATION`.
               Possible values are `UTILIZATION`, `RATE`, and `CONNECTION`.
        :param float capacity_scaler: Multiplier applied to the group's maximum
               servicing capacity (based on UTILIZATION, RATE or CONNECTION).
               Default 1 means up to 100% of configured capacity (depending on
               balancingMode); 0 means completely drained. Valid range
               [0.0,1.0].
        :param str description: Optional description of this resource, provided
               at creation time.
        :param float max_connections: Maximum number of connections to the
               backend cluster. Defaults to 1024.
        :param float max_connections_per_endpoint: Maximum simultaneous
               connections a single backend network endpoint can handle; used to
               compute group capacity in CONNECTION or UTILIZATION mode. In
               CONNECTION mode, either maxConnections or
               maxConnectionsPerEndpoint must be set.
        :param float max_connections_per_instance: Maximum simultaneous
               connections a single backend instance can handle; used to compute
               group capacity in CONNECTION or UTILIZATION mode. In CONNECTION
               mode, either maxConnections or maxConnectionsPerInstance must be
               set.
        :param float max_rate: Maximum requests per second (RPS) of the group.
               Usable with RATE or UTILIZATION mode, required in RATE mode: set
               either maxRate or one of maxRatePerInstance /
               maxRatePerEndpoint as appropriate for the group type.
        :param float max_rate_per_endpoint: Maximum RPS a single backend network
               endpoint can handle; used to compute group capacity in either
               mode. In RATE mode, either maxRate or maxRatePerEndpoint must be
               set.
        :param float max_rate_per_instance: Maximum RPS a single backend
               instance can handle; used to compute group capacity in either
               mode. In RATE mode, either maxRate or maxRatePerInstance must be
               set.
        :param float max_utilization: CPU utilization target for the group when
               balancingMode is UTILIZATION. Default 0.8; valid range
               [0.0, 1.0].
        """
        # The backing group URL is required; all tuning knobs are optional
        # and stored only when supplied, in declaration order.
        pulumi.set(__self__, "group", group)
        for key, value in (
                ("balancing_mode", balancing_mode),
                ("capacity_scaler", capacity_scaler),
                ("description", description),
                ("max_connections", max_connections),
                ("max_connections_per_endpoint", max_connections_per_endpoint),
                ("max_connections_per_instance", max_connections_per_instance),
                ("max_rate", max_rate),
                ("max_rate_per_endpoint", max_rate_per_endpoint),
                ("max_rate_per_instance", max_rate_per_instance),
                ("max_utilization", max_utilization),
        ):
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter
    def group(self) -> str:
        """Fully-qualified URL (not a partial URL) of the Instance Group or
        Network Endpoint Group backing this backend. Instance-group members
        must live in the group's zone and no two backends of a backend
        service may share an Instance Group; NEG endpoints must be hosted in
        the NEG's zone. Instance Group and NEG backends cannot be mixed."""
        return pulumi.get(self, "group")

    @property
    @pulumi.getter(name="balancingMode")
    def balancing_mode(self) -> Optional[str]:
        """Balancing mode for this backend; for global HTTP(S) or TCP/SSL
        load balancing the default is UTILIZATION, with RATE for HTTP(S)
        and CONNECTION for TCP/SSL. Default value is `UTILIZATION`.
        Possible values are `UTILIZATION`, `RATE`, and `CONNECTION`."""
        return pulumi.get(self, "balancing_mode")

    @property
    @pulumi.getter(name="capacityScaler")
    def capacity_scaler(self) -> Optional[float]:
        """Multiplier on the group's maximum servicing capacity (per
        UTILIZATION, RATE or CONNECTION). 1 (default) serves up to 100% of
        configured capacity; 0 drains the group. Valid range [0.0,1.0]."""
        return pulumi.get(self, "capacity_scaler")

    @property
    @pulumi.getter
    def description(self) -> Optional[str]:
        """Optional description of this resource, provided at creation
        time."""
        return pulumi.get(self, "description")

    @property
    @pulumi.getter(name="maxConnections")
    def max_connections(self) -> Optional[float]:
        """Maximum number of connections to the backend cluster; defaults
        to 1024."""
        return pulumi.get(self, "max_connections")

    @property
    @pulumi.getter(name="maxConnectionsPerEndpoint")
    def max_connections_per_endpoint(self) -> Optional[float]:
        """Maximum simultaneous connections a single backend network
        endpoint can handle; used to compute group capacity in CONNECTION
        or UTILIZATION mode. In CONNECTION mode, either maxConnections or
        maxConnectionsPerEndpoint must be set."""
        return pulumi.get(self, "max_connections_per_endpoint")

    @property
    @pulumi.getter(name="maxConnectionsPerInstance")
    def max_connections_per_instance(self) -> Optional[float]:
        """Maximum simultaneous connections a single backend instance can
        handle; used to compute group capacity in CONNECTION or UTILIZATION
        mode. In CONNECTION mode, either maxConnections or
        maxConnectionsPerInstance must be set."""
        return pulumi.get(self, "max_connections_per_instance")

    @property
    @pulumi.getter(name="maxRate")
    def max_rate(self) -> Optional[float]:
        """Maximum requests per second (RPS) of the group; usable with RATE
        or UTILIZATION mode, required in RATE mode (set either maxRate or
        one of maxRatePerInstance / maxRatePerEndpoint as appropriate for
        the group type)."""
        return pulumi.get(self, "max_rate")

    @property
    @pulumi.getter(name="maxRatePerEndpoint")
    def max_rate_per_endpoint(self) -> Optional[float]:
        """Maximum RPS a single backend network endpoint can handle; used to
        compute group capacity in either mode. In RATE mode, either maxRate
        or maxRatePerEndpoint must be set."""
        return pulumi.get(self, "max_rate_per_endpoint")

    @property
    @pulumi.getter(name="maxRatePerInstance")
    def max_rate_per_instance(self) -> Optional[float]:
        """Maximum RPS a single backend instance can handle; used to compute
        group capacity in either mode. In RATE mode, either maxRate or
        maxRatePerInstance must be set."""
        return pulumi.get(self, "max_rate_per_instance")

    @property
    @pulumi.getter(name="maxUtilization")
    def max_utilization(self) -> Optional[float]:
        """CPU utilization target for the group when balancingMode is
        UTILIZATION; default 0.8, valid range [0.0, 1.0]."""
        return pulumi.get(self, "max_utilization")

    def _translate_property(self, prop):
        # camelCase -> snake_case; unknown names pass through unchanged.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class BackendServiceCdnPolicy(dict):
    def __init__(__self__, *,
                 cache_key_policy: Optional['outputs.BackendServiceCdnPolicyCacheKeyPolicy'] = None,
                 signed_url_cache_max_age_sec: Optional[float] = None):
        """
        :param 'BackendServiceCdnPolicyCacheKeyPolicyArgs' cache_key_policy: The CacheKeyPolicy for this CdnPolicy.
               Structure is documented below.
        :param float signed_url_cache_max_age_sec: Maximum number of seconds the
               response to a signed URL request will be considered fresh;
               defaults to 1hr (3600s). After this period the response is
               revalidated before being served. When serving signed URL
               requests, Cloud CDN internally behaves as though all responses
               from this backend had a "Cache-Control: public, max-age=[TTL]"
               header, regardless of any existing Cache-Control header; the
               headers actually served are not altered.
        """
        # Persist only the fields the caller actually supplied.
        for key, value in (
                ("cache_key_policy", cache_key_policy),
                ("signed_url_cache_max_age_sec", signed_url_cache_max_age_sec),
        ):
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="cacheKeyPolicy")
    def cache_key_policy(self) -> Optional['outputs.BackendServiceCdnPolicyCacheKeyPolicy']:
        """The CacheKeyPolicy for this CdnPolicy. Structure is documented
        below."""
        return pulumi.get(self, "cache_key_policy")

    @property
    @pulumi.getter(name="signedUrlCacheMaxAgeSec")
    def signed_url_cache_max_age_sec(self) -> Optional[float]:
        """Maximum number of seconds a signed URL response stays fresh before
        revalidation; defaults to 1hr (3600s). Cloud CDN internally treats
        signed URL responses as if they carried "Cache-Control: public,
        max-age=[TTL]", regardless of any existing Cache-Control header;
        served headers are not altered."""
        return pulumi.get(self, "signed_url_cache_max_age_sec")

    def _translate_property(self, prop):
        # camelCase -> snake_case; unknown names pass through unchanged.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class BackendServiceCdnPolicyCacheKeyPolicy(dict):
    def __init__(__self__, *,
                 include_host: Optional[bool] = None,
                 include_protocol: Optional[bool] = None,
                 include_query_string: Optional[bool] = None,
                 query_string_blacklists: Optional[List[str]] = None,
                 query_string_whitelists: Optional[List[str]] = None):
        """
        :param bool include_host: If true, requests to different hosts are cached separately.
        :param bool include_protocol: If true, http and https requests are cached separately.
        :param bool include_query_string: If true, query string parameters are included in the
               cache key according to query_string_whitelist and
               query_string_blacklist; when neither is set, the whole query
               string is included. If false, the query string is excluded
               from the cache key entirely.
        :param List[str] query_string_blacklists: Names of query string parameters to exclude from
               cache keys; every other parameter is included. Specify either
               query_string_whitelist or query_string_blacklist, not both.
               '&' and '=' are percent encoded and not treated as delimiters.
        :param List[str] query_string_whitelists: Names of query string parameters to include in
               cache keys; every other parameter is excluded. Specify either
               query_string_whitelist or query_string_blacklist, not both.
               '&' and '=' are percent encoded and not treated as delimiters.
        """
        # Persist only explicitly-provided fields.
        for key, value in {
            "include_host": include_host,
            "include_protocol": include_protocol,
            "include_query_string": include_query_string,
            "query_string_blacklists": query_string_blacklists,
            "query_string_whitelists": query_string_whitelists,
        }.items():
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="includeHost")
    def include_host(self) -> Optional[bool]:
        """If true, requests to different hosts are cached separately."""
        return pulumi.get(self, "include_host")

    @property
    @pulumi.getter(name="includeProtocol")
    def include_protocol(self) -> Optional[bool]:
        """If true, http and https requests are cached separately."""
        return pulumi.get(self, "include_protocol")

    @property
    @pulumi.getter(name="includeQueryString")
    def include_query_string(self) -> Optional[bool]:
        """
        If true, query string parameters are included in the cache key
        according to query_string_whitelist and query_string_blacklist;
        when neither is set, the whole query string is included.
        If false, the query string is excluded from the cache key
        entirely.
        """
        return pulumi.get(self, "include_query_string")

    @property
    @pulumi.getter(name="queryStringBlacklists")
    def query_string_blacklists(self) -> Optional[List[str]]:
        """
        Names of query string parameters to exclude from cache keys;
        every other parameter is included. Specify either
        query_string_whitelist or query_string_blacklist, not both.
        '&' and '=' are percent encoded and not treated as delimiters.
        """
        return pulumi.get(self, "query_string_blacklists")

    @property
    @pulumi.getter(name="queryStringWhitelists")
    def query_string_whitelists(self) -> Optional[List[str]]:
        """
        Names of query string parameters to include in cache keys;
        every other parameter is excluded. Specify either
        query_string_whitelist or query_string_blacklist, not both.
        '&' and '=' are percent encoded and not treated as delimiters.
        """
        return pulumi.get(self, "query_string_whitelists")

    def _translate_property(self, prop):
        # camelCase wire key -> snake_case attribute name.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class BackendServiceCircuitBreakers(dict):
    def __init__(__self__, *,
                 connect_timeout: Optional['outputs.BackendServiceCircuitBreakersConnectTimeout'] = None,
                 max_connections: Optional[float] = None,
                 max_pending_requests: Optional[float] = None,
                 max_requests: Optional[float] = None,
                 max_requests_per_connection: Optional[float] = None,
                 max_retries: Optional[float] = None):
        """
        :param 'BackendServiceCircuitBreakersConnectTimeoutArgs' connect_timeout: The timeout for new network connections to hosts. Structure is documented below.
        :param float max_connections: Maximum number of connections to the backend cluster.
               Defaults to 1024.
        :param float max_pending_requests: Maximum number of pending requests to the backend cluster.
               Defaults to 1024.
        :param float max_requests: Maximum number of parallel requests to the backend cluster.
               Defaults to 1024.
        :param float max_requests_per_connection: Maximum requests for a single backend connection,
               honored by both the HTTP/1.1 and HTTP/2 implementations. If
               unspecified, there is no limit; a value of 1 effectively
               disables keep alive.
        :param float max_retries: Maximum number of parallel retries to the backend cluster.
               Defaults to 3.
        """
        # Persist only explicitly-provided fields.
        for key, value in {
            "connect_timeout": connect_timeout,
            "max_connections": max_connections,
            "max_pending_requests": max_pending_requests,
            "max_requests": max_requests,
            "max_requests_per_connection": max_requests_per_connection,
            "max_retries": max_retries,
        }.items():
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="connectTimeout")
    def connect_timeout(self) -> Optional['outputs.BackendServiceCircuitBreakersConnectTimeout']:
        """The timeout for new network connections to hosts. Structure is documented below."""
        return pulumi.get(self, "connect_timeout")

    @property
    @pulumi.getter(name="maxConnections")
    def max_connections(self) -> Optional[float]:
        """
        Maximum number of connections to the backend cluster.
        Defaults to 1024.
        """
        return pulumi.get(self, "max_connections")

    @property
    @pulumi.getter(name="maxPendingRequests")
    def max_pending_requests(self) -> Optional[float]:
        """
        Maximum number of pending requests to the backend cluster.
        Defaults to 1024.
        """
        return pulumi.get(self, "max_pending_requests")

    @property
    @pulumi.getter(name="maxRequests")
    def max_requests(self) -> Optional[float]:
        """
        Maximum number of parallel requests to the backend cluster.
        Defaults to 1024.
        """
        return pulumi.get(self, "max_requests")

    @property
    @pulumi.getter(name="maxRequestsPerConnection")
    def max_requests_per_connection(self) -> Optional[float]:
        """
        Maximum requests for a single backend connection, honored by
        both the HTTP/1.1 and HTTP/2 implementations. If unspecified,
        there is no limit; a value of 1 effectively disables keep alive.
        """
        return pulumi.get(self, "max_requests_per_connection")

    @property
    @pulumi.getter(name="maxRetries")
    def max_retries(self) -> Optional[float]:
        """
        Maximum number of parallel retries to the backend cluster.
        Defaults to 3.
        """
        return pulumi.get(self, "max_retries")

    def _translate_property(self, prop):
        # camelCase wire key -> snake_case attribute name.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class BackendServiceCircuitBreakersConnectTimeout(dict):
    def __init__(__self__, *,
                 seconds: float,
                 nanos: Optional[float] = None):
        """
        :param float seconds: Span of time at one-second resolution; must be between
               0 and 315,576,000,000 inclusive.
        :param float nanos: Fractional second of the span at nanosecond resolution;
               must be between 0 and 999,999,999 inclusive. Durations under one
               second are represented by a 0 `seconds` field together with a
               positive `nanos` field.
        """
        pulumi.set(__self__, "seconds", seconds)
        if nanos is not None:
            # Record the fractional part only when supplied.
            pulumi.set(__self__, "nanos", nanos)

    @property
    @pulumi.getter
    def seconds(self) -> float:
        """
        Span of time at one-second resolution; between 0 and
        315,576,000,000 inclusive.
        """
        return pulumi.get(self, "seconds")

    @property
    @pulumi.getter
    def nanos(self) -> Optional[float]:
        """
        Fractional second of the span at nanosecond resolution; between
        0 and 999,999,999 inclusive. Durations under one second carry a
        0 `seconds` field together with a positive `nanos` field.
        """
        return pulumi.get(self, "nanos")

    def _translate_property(self, prop):
        # camelCase wire key -> snake_case attribute name.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class BackendServiceConsistentHash(dict):
    def __init__(__self__, *,
                 http_cookie: Optional['outputs.BackendServiceConsistentHashHttpCookie'] = None,
                 http_header_name: Optional[str] = None,
                 minimum_ring_size: Optional[float] = None):
        """
        :param 'BackendServiceConsistentHashHttpCookieArgs' http_cookie: Hash is based on an HTTP cookie. Describes the HTTP
               cookie used as the hash key for the consistent hash load
               balancer; generated when not present. Applicable when
               sessionAffinity is HTTP_COOKIE.
               Structure is documented below.
        :param str http_header_name: Hash based on the value of the named header field.
               Applicable when sessionAffinity is HEADER_FIELD.
        :param float minimum_ring_size: Minimum number of virtual nodes in the hash ring.
               Larger rings give more granular load distributions. When the
               load balancing pool has more hosts than the ring size, each
               host gets a single virtual node.
               Defaults to 1024.
        """
        # Persist only explicitly-provided fields.
        for key, value in {
            "http_cookie": http_cookie,
            "http_header_name": http_header_name,
            "minimum_ring_size": minimum_ring_size,
        }.items():
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="httpCookie")
    def http_cookie(self) -> Optional['outputs.BackendServiceConsistentHashHttpCookie']:
        """
        Hash is based on an HTTP cookie. Describes the HTTP cookie used
        as the hash key for the consistent hash load balancer; generated
        when not present. Applicable when sessionAffinity is HTTP_COOKIE.
        Structure is documented below.
        """
        return pulumi.get(self, "http_cookie")

    @property
    @pulumi.getter(name="httpHeaderName")
    def http_header_name(self) -> Optional[str]:
        """
        Hash based on the value of the named header field. Applicable
        when sessionAffinity is HEADER_FIELD.
        """
        return pulumi.get(self, "http_header_name")

    @property
    @pulumi.getter(name="minimumRingSize")
    def minimum_ring_size(self) -> Optional[float]:
        """
        Minimum number of virtual nodes in the hash ring. Larger rings
        give more granular load distributions. When the load balancing
        pool has more hosts than the ring size, each host gets a single
        virtual node.
        Defaults to 1024.
        """
        return pulumi.get(self, "minimum_ring_size")

    def _translate_property(self, prop):
        # camelCase wire key -> snake_case attribute name.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class BackendServiceConsistentHashHttpCookie(dict):
    def __init__(__self__, *,
                 name: Optional[str] = None,
                 path: Optional[str] = None,
                 ttl: Optional['outputs.BackendServiceConsistentHashHttpCookieTtl'] = None):
        """
        :param str name: Name of the cookie.
        :param str path: Path to set for the cookie.
        :param 'BackendServiceConsistentHashHttpCookieTtlArgs' ttl: Lifetime of the cookie.
               Structure is documented below.
        """
        # Persist only explicitly-provided fields.
        for key, value in {"name": name, "path": path, "ttl": ttl}.items():
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter
    def name(self) -> Optional[str]:
        """Name of the cookie."""
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def path(self) -> Optional[str]:
        """Path to set for the cookie."""
        return pulumi.get(self, "path")

    @property
    @pulumi.getter
    def ttl(self) -> Optional['outputs.BackendServiceConsistentHashHttpCookieTtl']:
        """
        Lifetime of the cookie.
        Structure is documented below.
        """
        return pulumi.get(self, "ttl")

    def _translate_property(self, prop):
        # camelCase wire key -> snake_case attribute name.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class BackendServiceConsistentHashHttpCookieTtl(dict):
    def __init__(__self__, *,
                 seconds: float,
                 nanos: Optional[float] = None):
        """
        :param float seconds: Span of time at one-second resolution; must be between
               0 and 315,576,000,000 inclusive.
        :param float nanos: Fractional second of the span at nanosecond resolution;
               must be between 0 and 999,999,999 inclusive. Durations under one
               second are represented by a 0 `seconds` field together with a
               positive `nanos` field.
        """
        pulumi.set(__self__, "seconds", seconds)
        if nanos is not None:
            # Record the fractional part only when supplied.
            pulumi.set(__self__, "nanos", nanos)

    @property
    @pulumi.getter
    def seconds(self) -> float:
        """
        Span of time at one-second resolution; between 0 and
        315,576,000,000 inclusive.
        """
        return pulumi.get(self, "seconds")

    @property
    @pulumi.getter
    def nanos(self) -> Optional[float]:
        """
        Fractional second of the span at nanosecond resolution; between
        0 and 999,999,999 inclusive. Durations under one second carry a
        0 `seconds` field together with a positive `nanos` field.
        """
        return pulumi.get(self, "nanos")

    def _translate_property(self, prop):
        # camelCase wire key -> snake_case attribute name.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class BackendServiceIap(dict):
    def __init__(__self__, *,
                 oauth2_client_id: str,
                 oauth2_client_secret: str,
                 oauth2_client_secret_sha256: Optional[str] = None):
        """
        :param str oauth2_client_id: OAuth2 Client ID for IAP
        :param str oauth2_client_secret: OAuth2 Client Secret for IAP
               **Note**: This property is sensitive and will not be displayed in the plan.
        :param str oauth2_client_secret_sha256: -
               OAuth2 Client Secret SHA-256 for IAP
               **Note**: This property is sensitive and will not be displayed in the plan.
        """
        # Client id and secret are required; the SHA-256 digest is optional.
        pulumi.set(__self__, "oauth2_client_id", oauth2_client_id)
        pulumi.set(__self__, "oauth2_client_secret", oauth2_client_secret)
        if oauth2_client_secret_sha256 is not None:
            pulumi.set(__self__, "oauth2_client_secret_sha256", oauth2_client_secret_sha256)

    @property
    @pulumi.getter(name="oauth2ClientId")
    def oauth2_client_id(self) -> str:
        """OAuth2 Client ID for IAP"""
        return pulumi.get(self, "oauth2_client_id")

    @property
    @pulumi.getter(name="oauth2ClientSecret")
    def oauth2_client_secret(self) -> str:
        """
        OAuth2 Client Secret for IAP
        **Note**: This property is sensitive and will not be displayed in the plan.
        """
        return pulumi.get(self, "oauth2_client_secret")

    @property
    @pulumi.getter(name="oauth2ClientSecretSha256")
    def oauth2_client_secret_sha256(self) -> Optional[str]:
        """
        -
        OAuth2 Client Secret SHA-256 for IAP
        **Note**: This property is sensitive and will not be displayed in the plan.
        """
        return pulumi.get(self, "oauth2_client_secret_sha256")

    def _translate_property(self, prop):
        # camelCase wire key -> snake_case attribute name.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class BackendServiceLogConfig(dict):
    def __init__(__self__, *,
                 enable: Optional[bool] = None,
                 sample_rate: Optional[float] = None):
        """
        :param bool enable: Whether to enable logging for the load balancer traffic served by this backend service.
        :param float sample_rate: Only specifiable when logging is enabled for this backend
               service. Must lie in [0, 1] and sets the sampling rate of requests
               to the load balancer: 1.0 reports every logged request, 0.0
               reports none. The default value is 1.0.
        """
        # Persist only explicitly-provided fields.
        for key, value in {"enable": enable, "sample_rate": sample_rate}.items():
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter
    def enable(self) -> Optional[bool]:
        """Whether to enable logging for the load balancer traffic served by this backend service."""
        return pulumi.get(self, "enable")

    @property
    @pulumi.getter(name="sampleRate")
    def sample_rate(self) -> Optional[float]:
        """
        Only specifiable when logging is enabled for this backend
        service. Must lie in [0, 1] and sets the sampling rate of
        requests to the load balancer: 1.0 reports every logged request,
        0.0 reports none. The default value is 1.0.
        """
        return pulumi.get(self, "sample_rate")

    def _translate_property(self, prop):
        # camelCase wire key -> snake_case attribute name.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class BackendServiceOutlierDetection(dict):
    def __init__(__self__, *,
                 base_ejection_time: Optional['outputs.BackendServiceOutlierDetectionBaseEjectionTime'] = None,
                 consecutive_errors: Optional[float] = None,
                 consecutive_gateway_failure: Optional[float] = None,
                 enforcing_consecutive_errors: Optional[float] = None,
                 enforcing_consecutive_gateway_failure: Optional[float] = None,
                 enforcing_success_rate: Optional[float] = None,
                 interval: Optional['outputs.BackendServiceOutlierDetectionInterval'] = None,
                 max_ejection_percent: Optional[float] = None,
                 success_rate_minimum_hosts: Optional[float] = None,
                 success_rate_request_volume: Optional[float] = None,
                 success_rate_stdev_factor: Optional[float] = None):
        """
        :param 'BackendServiceOutlierDetectionBaseEjectionTimeArgs' base_ejection_time: Base time a host is ejected for. The real time equals the
               base time multiplied by how many times the host has been ejected.
               Defaults to 30000ms or 30s.
               Structure is documented below.
        :param float consecutive_errors: Number of errors before a host is ejected from the
               connection pool. Over HTTP, a 5xx return code counts as an error.
               Defaults to 5.
        :param float consecutive_gateway_failure: Number of consecutive gateway failures (502, 503, 504
               status or connection errors mapped to one of those status codes)
               before a consecutive gateway failure ejection occurs. Defaults to 5.
        :param float enforcing_consecutive_errors: Percentage chance a host is actually ejected when an
               outlier status is detected through consecutive 5xx; can disable
               ejection or ramp it up slowly. Defaults to 100.
        :param float enforcing_consecutive_gateway_failure: Percentage chance a host is actually ejected when an
               outlier status is detected through consecutive gateway failures;
               can disable ejection or ramp it up slowly. Defaults to 0.
        :param float enforcing_success_rate: Percentage chance a host is actually ejected when an
               outlier status is detected through success rate statistics; can
               disable ejection or ramp it up slowly. Defaults to 100.
        :param 'BackendServiceOutlierDetectionIntervalArgs' interval: Time interval between ejection sweep analysis, which can
               produce new ejections as well as return hosts to service.
               Defaults to 10 seconds.
               Structure is documented below.
        :param float max_ejection_percent: Maximum percentage of hosts in the backend service's load
               balancing pool that can be ejected. Defaults to 10%.
        :param float success_rate_minimum_hosts: Number of hosts in a cluster that must have enough
               request volume to detect success rate outliers. With fewer hosts
               than this, success-rate outlier detection is skipped for every
               host in the cluster. Defaults to 5.
        :param float success_rate_request_volume: Minimum total requests that must be collected in one
               interval (as defined by the interval duration above) to include a
               host in success-rate-based outlier detection. Below this volume,
               success-rate outlier detection is skipped for that host. Defaults
               to 100.
        :param float success_rate_stdev_factor: Factor determining the ejection threshold for success
               rate outlier ejection: mean success rate minus the product of
               this factor and the standard deviation of the mean success rate,
               i.e. mean - (stdev * success_rate_stdev_factor). The factor is
               divided by a thousand to get a double, so a desired factor of 1.9
               is given as a runtime value of 1900. Defaults to 1900.
        """
        # Persist only explicitly-provided fields.
        for key, value in {
            "base_ejection_time": base_ejection_time,
            "consecutive_errors": consecutive_errors,
            "consecutive_gateway_failure": consecutive_gateway_failure,
            "enforcing_consecutive_errors": enforcing_consecutive_errors,
            "enforcing_consecutive_gateway_failure": enforcing_consecutive_gateway_failure,
            "enforcing_success_rate": enforcing_success_rate,
            "interval": interval,
            "max_ejection_percent": max_ejection_percent,
            "success_rate_minimum_hosts": success_rate_minimum_hosts,
            "success_rate_request_volume": success_rate_request_volume,
            "success_rate_stdev_factor": success_rate_stdev_factor,
        }.items():
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="baseEjectionTime")
    def base_ejection_time(self) -> Optional['outputs.BackendServiceOutlierDetectionBaseEjectionTime']:
        """
        Base time a host is ejected for. The real time equals the base
        time multiplied by how many times the host has been ejected.
        Defaults to 30000ms or 30s.
        Structure is documented below.
        """
        return pulumi.get(self, "base_ejection_time")

    @property
    @pulumi.getter(name="consecutiveErrors")
    def consecutive_errors(self) -> Optional[float]:
        """
        Number of errors before a host is ejected from the connection
        pool. Over HTTP, a 5xx return code counts as an error.
        Defaults to 5.
        """
        return pulumi.get(self, "consecutive_errors")

    @property
    @pulumi.getter(name="consecutiveGatewayFailure")
    def consecutive_gateway_failure(self) -> Optional[float]:
        """
        Number of consecutive gateway failures (502, 503, 504 status or
        connection errors mapped to one of those status codes) before a
        consecutive gateway failure ejection occurs. Defaults to 5.
        """
        return pulumi.get(self, "consecutive_gateway_failure")

    @property
    @pulumi.getter(name="enforcingConsecutiveErrors")
    def enforcing_consecutive_errors(self) -> Optional[float]:
        """
        Percentage chance a host is actually ejected when an outlier
        status is detected through consecutive 5xx; can disable ejection
        or ramp it up slowly. Defaults to 100.
        """
        return pulumi.get(self, "enforcing_consecutive_errors")

    @property
    @pulumi.getter(name="enforcingConsecutiveGatewayFailure")
    def enforcing_consecutive_gateway_failure(self) -> Optional[float]:
        """
        Percentage chance a host is actually ejected when an outlier
        status is detected through consecutive gateway failures; can
        disable ejection or ramp it up slowly. Defaults to 0.
        """
        return pulumi.get(self, "enforcing_consecutive_gateway_failure")

    @property
    @pulumi.getter(name="enforcingSuccessRate")
    def enforcing_success_rate(self) -> Optional[float]:
        """
        Percentage chance a host is actually ejected when an outlier
        status is detected through success rate statistics; can disable
        ejection or ramp it up slowly. Defaults to 100.
        """
        return pulumi.get(self, "enforcing_success_rate")

    @property
    @pulumi.getter
    def interval(self) -> Optional['outputs.BackendServiceOutlierDetectionInterval']:
        """
        Time interval between ejection sweep analysis, which can produce
        new ejections as well as return hosts to service. Defaults to
        10 seconds.
        Structure is documented below.
        """
        return pulumi.get(self, "interval")

    @property
    @pulumi.getter(name="maxEjectionPercent")
    def max_ejection_percent(self) -> Optional[float]:
        """
        Maximum percentage of hosts in the backend service's load
        balancing pool that can be ejected. Defaults to 10%.
        """
        return pulumi.get(self, "max_ejection_percent")

    @property
    @pulumi.getter(name="successRateMinimumHosts")
    def success_rate_minimum_hosts(self) -> Optional[float]:
        """
        Number of hosts in a cluster that must have enough request
        volume to detect success rate outliers. With fewer hosts than
        this, success-rate outlier detection is skipped for every host
        in the cluster. Defaults to 5.
        """
        return pulumi.get(self, "success_rate_minimum_hosts")

    @property
    @pulumi.getter(name="successRateRequestVolume")
    def success_rate_request_volume(self) -> Optional[float]:
        """
        Minimum total requests that must be collected in one interval
        (as defined by the interval duration above) to include a host in
        success-rate-based outlier detection. Below this volume,
        success-rate outlier detection is skipped for that host.
        Defaults to 100.
        """
        return pulumi.get(self, "success_rate_request_volume")

    @property
    @pulumi.getter(name="successRateStdevFactor")
    def success_rate_stdev_factor(self) -> Optional[float]:
        """
        Factor determining the ejection threshold for success rate
        outlier ejection: mean success rate minus the product of this
        factor and the standard deviation of the mean success rate,
        i.e. mean - (stdev * success_rate_stdev_factor). The factor is
        divided by a thousand to get a double, so a desired factor of
        1.9 is given as a runtime value of 1900. Defaults to 1900.
        """
        return pulumi.get(self, "success_rate_stdev_factor")

    def _translate_property(self, prop):
        # camelCase wire key -> snake_case attribute name.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class BackendServiceOutlierDetectionBaseEjectionTime(dict):
    def __init__(__self__, *,
                 seconds: float,
                 nanos: Optional[float] = None):
        """
        :param float seconds: Span of time at one-second resolution; must be between
               0 and 315,576,000,000 inclusive.
        :param float nanos: Fractional second of the span at nanosecond resolution;
               must be between 0 and 999,999,999 inclusive. Durations under one
               second are represented by a 0 `seconds` field together with a
               positive `nanos` field.
        """
        pulumi.set(__self__, "seconds", seconds)
        if nanos is not None:
            # Record the fractional part only when supplied.
            pulumi.set(__self__, "nanos", nanos)

    @property
    @pulumi.getter
    def seconds(self) -> float:
        """
        Span of time at one-second resolution; between 0 and
        315,576,000,000 inclusive.
        """
        return pulumi.get(self, "seconds")

    @property
    @pulumi.getter
    def nanos(self) -> Optional[float]:
        """
        Fractional second of the span at nanosecond resolution; between
        0 and 999,999,999 inclusive. Durations under one second carry a
        0 `seconds` field together with a positive `nanos` field.
        """
        return pulumi.get(self, "nanos")

    def _translate_property(self, prop):
        # camelCase wire key -> snake_case attribute name.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class BackendServiceOutlierDetectionInterval(dict):
    def __init__(__self__, *,
                 seconds: float,
                 nanos: Optional[float] = None):
        """
        :param float seconds: Span of time at one-second resolution; must be between
               0 and 315,576,000,000 inclusive.
        :param float nanos: Fractional second of the span at nanosecond resolution;
               must be between 0 and 999,999,999 inclusive. Durations under one
               second are represented by a 0 `seconds` field together with a
               positive `nanos` field.
        """
        pulumi.set(__self__, "seconds", seconds)
        if nanos is not None:
            # Record the fractional part only when supplied.
            pulumi.set(__self__, "nanos", nanos)

    @property
    @pulumi.getter
    def seconds(self) -> float:
        """
        Span of time at one-second resolution; between 0 and
        315,576,000,000 inclusive.
        """
        return pulumi.get(self, "seconds")

    @property
    @pulumi.getter
    def nanos(self) -> Optional[float]:
        """
        Fractional second of the span at nanosecond resolution; between
        0 and 999,999,999 inclusive. Durations under one second carry a
        0 `seconds` field together with a positive `nanos` field.
        """
        return pulumi.get(self, "nanos")

    def _translate_property(self, prop):
        # camelCase wire key -> snake_case attribute name.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class DiskDiskEncryptionKey(dict):
def __init__(__self__, *,
kms_key_self_link: Optional[str] = None,
kms_key_service_account: Optional[str] = None,
raw_key: Optional[str] = None,
sha256: Optional[str] = None):
"""
:param str kms_key_self_link: The self link of the encryption key used to encrypt the disk. Also called KmsKeyName
in the cloud console. Your project's Compute Engine System service account
(`service-{{PROJECT_NUMBER}}@compute-system.iam.gserviceaccount.com`) must have
`roles/cloudkms.cryptoKeyEncrypterDecrypter` to use this feature.
See https://cloud.google.com/compute/docs/disks/customer-managed-encryption#encrypt_a_new_persistent_disk_with_your_own_keys
:param str kms_key_service_account: The service account used for the encryption request for the given KMS key.
If absent, the Compute Engine Service Agent service account is used.
:param str raw_key: Specifies a 256-bit customer-supplied encryption key, encoded in
RFC 4648 base64 to either encrypt or decrypt this resource.
:param str sha256: -
The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
encryption key that protects this resource.
"""
if kms_key_self_link is not None:
pulumi.set(__self__, "kms_key_self_link", kms_key_self_link)
if kms_key_service_account is not None:
pulumi.set(__self__, "kms_key_service_account", kms_key_service_account)
if raw_key is not None:
pulumi.set(__self__, "raw_key", raw_key)
if sha256 is not None:
pulumi.set(__self__, "sha256", sha256)
@property
@pulumi.getter(name="kmsKeySelfLink")
def kms_key_self_link(self) -> Optional[str]:
"""
The self link of the encryption key used to encrypt the disk. Also called KmsKeyName
in the cloud console. Your project's Compute Engine System service account
(`service-{{PROJECT_NUMBER}}@compute-system.iam.gserviceaccount.com`) must have
`roles/cloudkms.cryptoKeyEncrypterDecrypter` to use this feature.
See https://cloud.google.com/compute/docs/disks/customer-managed-encryption#encrypt_a_new_persistent_disk_with_your_own_keys
"""
return pulumi.get(self, "kms_key_self_link")
@property
@pulumi.getter(name="kmsKeyServiceAccount")
def kms_key_service_account(self) -> Optional[str]:
"""
The service account used for the encryption request for the given KMS key.
If absent, the Compute Engine Service Agent service account is used.
"""
return pulumi.get(self, "kms_key_service_account")
@property
@pulumi.getter(name="rawKey")
def raw_key(self) -> Optional[str]:
"""
Specifies a 256-bit customer-supplied encryption key, encoded in
RFC 4648 base64 to either encrypt or decrypt this resource.
"""
return pulumi.get(self, "raw_key")
@property
@pulumi.getter
def sha256(self) -> Optional[str]:
"""
-
The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
encryption key that protects this resource.
"""
return pulumi.get(self, "sha256")
    def _translate_property(self, prop):
        """Map a camelCase wire property name to its snake_case Python name, or return it unchanged."""
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class DiskIamBindingCondition(dict):
    """
    An IAM condition attached to a `DiskIamBinding`: a required `expression`
    and `title`, plus an optional `description`.

    Values are stored in the underlying dict via ``pulumi.set`` and surfaced
    through the ``@pulumi.getter`` properties below.
    NOTE(review): field semantics inferred from names; confirm against the
    GCP IAM Condition documentation.
    """
    def __init__(__self__, *,
                 expression: str,
                 title: str,
                 description: Optional[str] = None):
        """
        :param str expression: The condition expression.
        :param str title: A short title for the condition.
        :param str description: An optional description of the condition.
        """
        pulumi.set(__self__, "expression", expression)
        pulumi.set(__self__, "title", title)
        if description is not None:
            pulumi.set(__self__, "description", description)
    @property
    @pulumi.getter
    def expression(self) -> str:
        """
        The condition expression.
        """
        return pulumi.get(self, "expression")
    @property
    @pulumi.getter
    def title(self) -> str:
        """
        A short title for the condition.
        """
        return pulumi.get(self, "title")
    @property
    @pulumi.getter
    def description(self) -> Optional[str]:
        """
        An optional description of the condition.
        """
        return pulumi.get(self, "description")
    def _translate_property(self, prop):
        """Map a camelCase wire property name to its snake_case Python name, or return it unchanged."""
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class DiskIamMemberCondition(dict):
    """
    An IAM condition attached to a `DiskIamMember`: a required `expression`
    and `title`, plus an optional `description`.

    Values are stored in the underlying dict via ``pulumi.set`` and surfaced
    through the ``@pulumi.getter`` properties below.
    NOTE(review): field semantics inferred from names; confirm against the
    GCP IAM Condition documentation.
    """
    def __init__(__self__, *,
                 expression: str,
                 title: str,
                 description: Optional[str] = None):
        """
        :param str expression: The condition expression.
        :param str title: A short title for the condition.
        :param str description: An optional description of the condition.
        """
        pulumi.set(__self__, "expression", expression)
        pulumi.set(__self__, "title", title)
        if description is not None:
            pulumi.set(__self__, "description", description)
    @property
    @pulumi.getter
    def expression(self) -> str:
        """
        The condition expression.
        """
        return pulumi.get(self, "expression")
    @property
    @pulumi.getter
    def title(self) -> str:
        """
        A short title for the condition.
        """
        return pulumi.get(self, "title")
    @property
    @pulumi.getter
    def description(self) -> Optional[str]:
        """
        An optional description of the condition.
        """
        return pulumi.get(self, "description")
    def _translate_property(self, prop):
        """Map a camelCase wire property name to its snake_case Python name, or return it unchanged."""
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class DiskSourceImageEncryptionKey(dict):
    """
    Encryption key settings for a disk's source image.

    All fields are optional; values set here are stored under their snake_case
    keys via ``pulumi.set`` and read back through the ``@pulumi.getter``
    properties below.
    """
    def __init__(__self__, *,
                 kms_key_self_link: Optional[str] = None,
                 kms_key_service_account: Optional[str] = None,
                 raw_key: Optional[str] = None,
                 sha256: Optional[str] = None):
        """
        :param str kms_key_self_link: The self link of the encryption key used to encrypt the disk. Also called KmsKeyName
               in the cloud console. Your project's Compute Engine System service account
               (`service-{{PROJECT_NUMBER}}@compute-system.iam.gserviceaccount.com`) must have
               `roles/cloudkms.cryptoKeyEncrypterDecrypter` to use this feature.
               See https://cloud.google.com/compute/docs/disks/customer-managed-encryption#encrypt_a_new_persistent_disk_with_your_own_keys
        :param str kms_key_service_account: The service account used for the encryption request for the given KMS key.
               If absent, the Compute Engine Service Agent service account is used.
        :param str raw_key: Specifies a 256-bit customer-supplied encryption key, encoded in
               RFC 4648 base64 to either encrypt or decrypt this resource.
        :param str sha256: The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
               encryption key that protects this resource.
        """
        if kms_key_self_link is not None:
            pulumi.set(__self__, "kms_key_self_link", kms_key_self_link)
        if kms_key_service_account is not None:
            pulumi.set(__self__, "kms_key_service_account", kms_key_service_account)
        if raw_key is not None:
            pulumi.set(__self__, "raw_key", raw_key)
        if sha256 is not None:
            pulumi.set(__self__, "sha256", sha256)
    @property
    @pulumi.getter(name="kmsKeySelfLink")
    def kms_key_self_link(self) -> Optional[str]:
        """
        The self link of the encryption key used to encrypt the disk. Also called KmsKeyName
        in the cloud console. Your project's Compute Engine System service account
        (`service-{{PROJECT_NUMBER}}@compute-system.iam.gserviceaccount.com`) must have
        `roles/cloudkms.cryptoKeyEncrypterDecrypter` to use this feature.
        See https://cloud.google.com/compute/docs/disks/customer-managed-encryption#encrypt_a_new_persistent_disk_with_your_own_keys
        """
        return pulumi.get(self, "kms_key_self_link")
    @property
    @pulumi.getter(name="kmsKeyServiceAccount")
    def kms_key_service_account(self) -> Optional[str]:
        """
        The service account used for the encryption request for the given KMS key.
        If absent, the Compute Engine Service Agent service account is used.
        """
        return pulumi.get(self, "kms_key_service_account")
    @property
    @pulumi.getter(name="rawKey")
    def raw_key(self) -> Optional[str]:
        """
        Specifies a 256-bit customer-supplied encryption key, encoded in
        RFC 4648 base64 to either encrypt or decrypt this resource.
        """
        return pulumi.get(self, "raw_key")
    @property
    @pulumi.getter
    def sha256(self) -> Optional[str]:
        """
        The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
        encryption key that protects this resource.
        """
        return pulumi.get(self, "sha256")
    def _translate_property(self, prop):
        """Map a camelCase wire property name to its snake_case Python name, or return it unchanged."""
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class DiskSourceSnapshotEncryptionKey(dict):
    """
    Encryption key settings for a disk's source snapshot.

    All fields are optional; values set here are stored under their snake_case
    keys via ``pulumi.set`` and read back through the ``@pulumi.getter``
    properties below.
    """
    def __init__(__self__, *,
                 kms_key_self_link: Optional[str] = None,
                 raw_key: Optional[str] = None,
                 sha256: Optional[str] = None):
        """
        :param str kms_key_self_link: The self link of the encryption key used to encrypt the disk. Also called KmsKeyName
               in the cloud console. Your project's Compute Engine System service account
               (`service-{{PROJECT_NUMBER}}@compute-system.iam.gserviceaccount.com`) must have
               `roles/cloudkms.cryptoKeyEncrypterDecrypter` to use this feature.
               See https://cloud.google.com/compute/docs/disks/customer-managed-encryption#encrypt_a_new_persistent_disk_with_your_own_keys
        :param str raw_key: Specifies a 256-bit customer-supplied encryption key, encoded in
               RFC 4648 base64 to either encrypt or decrypt this resource.
        :param str sha256: The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
               encryption key that protects this resource.
        """
        if kms_key_self_link is not None:
            pulumi.set(__self__, "kms_key_self_link", kms_key_self_link)
        if raw_key is not None:
            pulumi.set(__self__, "raw_key", raw_key)
        if sha256 is not None:
            pulumi.set(__self__, "sha256", sha256)
    @property
    @pulumi.getter(name="kmsKeySelfLink")
    def kms_key_self_link(self) -> Optional[str]:
        """
        The self link of the encryption key used to encrypt the disk. Also called KmsKeyName
        in the cloud console. Your project's Compute Engine System service account
        (`service-{{PROJECT_NUMBER}}@compute-system.iam.gserviceaccount.com`) must have
        `roles/cloudkms.cryptoKeyEncrypterDecrypter` to use this feature.
        See https://cloud.google.com/compute/docs/disks/customer-managed-encryption#encrypt_a_new_persistent_disk_with_your_own_keys
        """
        return pulumi.get(self, "kms_key_self_link")
    @property
    @pulumi.getter(name="rawKey")
    def raw_key(self) -> Optional[str]:
        """
        Specifies a 256-bit customer-supplied encryption key, encoded in
        RFC 4648 base64 to either encrypt or decrypt this resource.
        """
        return pulumi.get(self, "raw_key")
    @property
    @pulumi.getter
    def sha256(self) -> Optional[str]:
        """
        The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
        encryption key that protects this resource.
        """
        return pulumi.get(self, "sha256")
    def _translate_property(self, prop):
        """Map a camelCase wire property name to its snake_case Python name, or return it unchanged."""
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ExternalVpnGatewayInterface(dict):
    """
    One interface of an external VPN gateway: a numeric interface `id` and the
    interface's `ip_address`.

    Values set here are stored under their snake_case keys via ``pulumi.set``
    and read back through the ``@pulumi.getter`` properties below.
    """
    def __init__(__self__, *,
                 id: Optional[float] = None,
                 ip_address: Optional[str] = None):
        """
        :param float id: The numeric ID for this interface. Allowed values are based on the redundancy type
               of this external VPN gateway
               * `0 - SINGLE_IP_INTERNALLY_REDUNDANT`
               * `0, 1 - TWO_IPS_REDUNDANCY`
               * `0, 1, 2, 3 - FOUR_IPS_REDUNDANCY`
        :param str ip_address: IP address of the interface in the external VPN gateway.
               Only IPv4 is supported. This IP address can be either from
               your on-premise gateway or another Cloud provider's VPN gateway,
               it cannot be an IP address from Google Compute Engine.
        """
        if id is not None:
            pulumi.set(__self__, "id", id)
        if ip_address is not None:
            pulumi.set(__self__, "ip_address", ip_address)
    @property
    @pulumi.getter
    def id(self) -> Optional[float]:
        """
        The numeric ID for this interface. Allowed values are based on the redundancy type
        of this external VPN gateway
        * `0 - SINGLE_IP_INTERNALLY_REDUNDANT`
        * `0, 1 - TWO_IPS_REDUNDANCY`
        * `0, 1, 2, 3 - FOUR_IPS_REDUNDANCY`
        """
        return pulumi.get(self, "id")
    @property
    @pulumi.getter(name="ipAddress")
    def ip_address(self) -> Optional[str]:
        """
        IP address of the interface in the external VPN gateway.
        Only IPv4 is supported. This IP address can be either from
        your on-premise gateway or another Cloud provider's VPN gateway,
        it cannot be an IP address from Google Compute Engine.
        """
        return pulumi.get(self, "ip_address")
    def _translate_property(self, prop):
        """Map a camelCase wire property name to its snake_case Python name, or return it unchanged."""
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class FirewallAllow(dict):
    """
    An "allow" rule entry of a firewall: a required IP `protocol` and an
    optional list of `ports` the rule applies to.

    Values are stored in the underlying dict via ``pulumi.set`` and surfaced
    through the ``@pulumi.getter`` properties below.
    """
    def __init__(__self__, *,
                 protocol: str,
                 ports: Optional[List[str]] = None):
        """
        :param str protocol: The IP protocol to which this rule applies. The protocol type is
               required when creating a firewall rule. This value can either be
               one of the following well known protocol strings (tcp, udp,
               icmp, esp, ah, sctp, ipip, all), or the IP protocol number.
        :param List[str] ports: An optional list of ports to which this rule applies. This field
               is only applicable for UDP or TCP protocol. Each entry must be
               either an integer or a range. If not specified, this rule
               applies to connections through any port.
               Example inputs include: ["22"], ["80","443"], and
               ["12345-12349"].
        """
        pulumi.set(__self__, "protocol", protocol)
        if ports is not None:
            pulumi.set(__self__, "ports", ports)
    @property
    @pulumi.getter
    def protocol(self) -> str:
        """
        The IP protocol to which this rule applies. The protocol type is
        required when creating a firewall rule. This value can either be
        one of the following well known protocol strings (tcp, udp,
        icmp, esp, ah, sctp, ipip, all), or the IP protocol number.
        """
        return pulumi.get(self, "protocol")
    @property
    @pulumi.getter
    def ports(self) -> Optional[List[str]]:
        """
        An optional list of ports to which this rule applies. This field
        is only applicable for UDP or TCP protocol. Each entry must be
        either an integer or a range. If not specified, this rule
        applies to connections through any port.
        Example inputs include: ["22"], ["80","443"], and
        ["12345-12349"].
        """
        return pulumi.get(self, "ports")
    def _translate_property(self, prop):
        """Map a camelCase wire property name to its snake_case Python name, or return it unchanged."""
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class FirewallDeny(dict):
    """
    A "deny" rule entry of a firewall: a required IP `protocol` and an
    optional list of `ports` the rule applies to.

    Values are stored in the underlying dict via ``pulumi.set`` and surfaced
    through the ``@pulumi.getter`` properties below.
    """
    def __init__(__self__, *,
                 protocol: str,
                 ports: Optional[List[str]] = None):
        """
        :param str protocol: The IP protocol to which this rule applies. The protocol type is
               required when creating a firewall rule. This value can either be
               one of the following well known protocol strings (tcp, udp,
               icmp, esp, ah, sctp, ipip, all), or the IP protocol number.
        :param List[str] ports: An optional list of ports to which this rule applies. This field
               is only applicable for UDP or TCP protocol. Each entry must be
               either an integer or a range. If not specified, this rule
               applies to connections through any port.
               Example inputs include: ["22"], ["80","443"], and
               ["12345-12349"].
        """
        pulumi.set(__self__, "protocol", protocol)
        if ports is not None:
            pulumi.set(__self__, "ports", ports)
    @property
    @pulumi.getter
    def protocol(self) -> str:
        """
        The IP protocol to which this rule applies. The protocol type is
        required when creating a firewall rule. This value can either be
        one of the following well known protocol strings (tcp, udp,
        icmp, esp, ah, sctp, ipip, all), or the IP protocol number.
        """
        return pulumi.get(self, "protocol")
    @property
    @pulumi.getter
    def ports(self) -> Optional[List[str]]:
        """
        An optional list of ports to which this rule applies. This field
        is only applicable for UDP or TCP protocol. Each entry must be
        either an integer or a range. If not specified, this rule
        applies to connections through any port.
        Example inputs include: ["22"], ["80","443"], and
        ["12345-12349"].
        """
        return pulumi.get(self, "ports")
    def _translate_property(self, prop):
        """Map a camelCase wire property name to its snake_case Python name, or return it unchanged."""
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class FirewallLogConfig(dict):
    """
    Logging configuration of a firewall rule: the single required `metadata`
    setting controlling whether metadata is included in firewall logs.
    """
    def __init__(__self__, *,
                 metadata: str):
        """
        :param str metadata: This field denotes whether to include or exclude metadata for firewall logs.
               Possible values are `EXCLUDE_ALL_METADATA` and `INCLUDE_ALL_METADATA`.
        """
        pulumi.set(__self__, "metadata", metadata)
    @property
    @pulumi.getter
    def metadata(self) -> str:
        """
        This field denotes whether to include or exclude metadata for firewall logs.
        Possible values are `EXCLUDE_ALL_METADATA` and `INCLUDE_ALL_METADATA`.
        """
        return pulumi.get(self, "metadata")
    def _translate_property(self, prop):
        """Map a camelCase wire property name to its snake_case Python name, or return it unchanged."""
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class GlobalForwardingRuleMetadataFilter(dict):
    """
    A metadata filter of a global forwarding rule: a required list of
    label/value pairs (`filter_labels`) and the match criteria
    (`filter_match_criteria`) describing how those labels combine.
    """
    def __init__(__self__, *,
                 filter_labels: List['outputs.GlobalForwardingRuleMetadataFilterFilterLabel'],
                 filter_match_criteria: str):
        """
        :param List['GlobalForwardingRuleMetadataFilterFilterLabelArgs'] filter_labels: The list of label value pairs that must match labels in the
               provided metadata based on filterMatchCriteria
               This list must not be empty and can have at the most 64 entries.
               Structure is documented below.
        :param str filter_match_criteria: Specifies how individual filterLabel matches within the list of
               filterLabels contribute towards the overall metadataFilter match.
               MATCH_ANY - At least one of the filterLabels must have a matching
               label in the provided metadata.
               MATCH_ALL - All filterLabels must have matching labels in the
               provided metadata.
               Possible values are `MATCH_ANY` and `MATCH_ALL`.
        """
        pulumi.set(__self__, "filter_labels", filter_labels)
        pulumi.set(__self__, "filter_match_criteria", filter_match_criteria)
    @property
    @pulumi.getter(name="filterLabels")
    def filter_labels(self) -> List['outputs.GlobalForwardingRuleMetadataFilterFilterLabel']:
        """
        The list of label value pairs that must match labels in the
        provided metadata based on filterMatchCriteria
        This list must not be empty and can have at the most 64 entries.
        Structure is documented below.
        """
        return pulumi.get(self, "filter_labels")
    @property
    @pulumi.getter(name="filterMatchCriteria")
    def filter_match_criteria(self) -> str:
        """
        Specifies how individual filterLabel matches within the list of
        filterLabels contribute towards the overall metadataFilter match.
        MATCH_ANY - At least one of the filterLabels must have a matching
        label in the provided metadata.
        MATCH_ALL - All filterLabels must have matching labels in the
        provided metadata.
        Possible values are `MATCH_ANY` and `MATCH_ALL`.
        """
        return pulumi.get(self, "filter_match_criteria")
    def _translate_property(self, prop):
        """Map a camelCase wire property name to its snake_case Python name, or return it unchanged."""
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class GlobalForwardingRuleMetadataFilterFilterLabel(dict):
    """
    A single label/value pair used inside a global forwarding rule metadata
    filter. Both fields are required.
    """
    def __init__(__self__, *,
                 name: str,
                 value: str):
        """
        :param str name: Name of the metadata label. The length must be between
               1 and 1024 characters, inclusive.
        :param str value: The value that the label must match. The value has a maximum
               length of 1024 characters.
        """
        pulumi.set(__self__, "name", name)
        pulumi.set(__self__, "value", value)
    @property
    @pulumi.getter
    def name(self) -> str:
        """
        Name of the metadata label. The length must be between
        1 and 1024 characters, inclusive.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def value(self) -> str:
        """
        The value that the label must match. The value has a maximum
        length of 1024 characters.
        """
        return pulumi.get(self, "value")
    def _translate_property(self, prop):
        """Map a camelCase wire property name to its snake_case Python name, or return it unchanged."""
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class HaVpnGatewayVpnInterface(dict):
    """
    One VPN interface of an HA VPN gateway, with an optional numeric `id` and
    `ip_address`.

    NOTE(review): the upstream doc for `id` describes a resource-path format,
    but the field is typed as a number — likely a generated-doc copy; confirm
    against the provider schema. `ip_address` is undocumented upstream;
    presumably the interface's assigned external IP.
    """
    def __init__(__self__, *,
                 id: Optional[float] = None,
                 ip_address: Optional[str] = None):
        """
        :param float id: an identifier for the resource with format `projects/{{project}}/regions/{{region}}/vpnGateways/{{name}}`
        """
        if id is not None:
            pulumi.set(__self__, "id", id)
        if ip_address is not None:
            pulumi.set(__self__, "ip_address", ip_address)
    @property
    @pulumi.getter
    def id(self) -> Optional[float]:
        """
        an identifier for the resource with format `projects/{{project}}/regions/{{region}}/vpnGateways/{{name}}`
        """
        return pulumi.get(self, "id")
    @property
    @pulumi.getter(name="ipAddress")
    def ip_address(self) -> Optional[str]:
        # No upstream documentation; presumably the IP assigned to this
        # gateway interface — confirm against the provider schema.
        return pulumi.get(self, "ip_address")
    def _translate_property(self, prop):
        """Map a camelCase wire property name to its snake_case Python name, or return it unchanged."""
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class HealthCheckGrpcHealthCheck(dict):
    """
    gRPC-specific settings of a health check: the target gRPC service name and
    how the probed port is chosen (`port`, `port_name`, `port_specification`).

    All fields are optional; values are stored in the underlying dict via
    ``pulumi.set`` and surfaced through the ``@pulumi.getter`` properties below.
    """
    def __init__(__self__, *,
                 grpc_service_name: Optional[str] = None,
                 port: Optional[float] = None,
                 port_name: Optional[str] = None,
                 port_specification: Optional[str] = None):
        """
        :param str grpc_service_name: The gRPC service name for the health check.
               The value of grpcServiceName has the following meanings by convention:
               - Empty serviceName means the overall status of all services at the backend.
               - Non-empty serviceName means the health of that gRPC service, as defined by the owner of the service.
               The grpcServiceName can only be ASCII.
        :param float port: The port number for the health check request.
               Must be specified if portName and portSpecification are not set
               or if port_specification is USE_FIXED_PORT. Valid values are 1 through 65535.
        :param str port_name: Port name as defined in InstanceGroup#NamedPort#name. If both port and
               port_name are defined, port takes precedence.
        :param str port_specification: Specifies how port is selected for health checking, can be one of the
               following values:
               * `USE_FIXED_PORT`: The port number in `port` is used for health checking.
               * `USE_NAMED_PORT`: The `portName` is used for health checking.
               * `USE_SERVING_PORT`: For NetworkEndpointGroup, the port specified for each
               network endpoint is used for health checking. For other backends, the
               port or named port specified in the Backend Service is used for health
               checking.
               If not specified, gRPC health check follows behavior specified in `port` and
               `portName` fields.
               Possible values are `USE_FIXED_PORT`, `USE_NAMED_PORT`, and `USE_SERVING_PORT`.
        """
        if grpc_service_name is not None:
            pulumi.set(__self__, "grpc_service_name", grpc_service_name)
        if port is not None:
            pulumi.set(__self__, "port", port)
        if port_name is not None:
            pulumi.set(__self__, "port_name", port_name)
        if port_specification is not None:
            pulumi.set(__self__, "port_specification", port_specification)
    @property
    @pulumi.getter(name="grpcServiceName")
    def grpc_service_name(self) -> Optional[str]:
        """
        The gRPC service name for the health check.
        The value of grpcServiceName has the following meanings by convention:
        - Empty serviceName means the overall status of all services at the backend.
        - Non-empty serviceName means the health of that gRPC service, as defined by the owner of the service.
        The grpcServiceName can only be ASCII.
        """
        return pulumi.get(self, "grpc_service_name")
    @property
    @pulumi.getter
    def port(self) -> Optional[float]:
        """
        The port number for the health check request.
        Must be specified if portName and portSpecification are not set
        or if port_specification is USE_FIXED_PORT. Valid values are 1 through 65535.
        """
        return pulumi.get(self, "port")
    @property
    @pulumi.getter(name="portName")
    def port_name(self) -> Optional[str]:
        """
        Port name as defined in InstanceGroup#NamedPort#name. If both port and
        port_name are defined, port takes precedence.
        """
        return pulumi.get(self, "port_name")
    @property
    @pulumi.getter(name="portSpecification")
    def port_specification(self) -> Optional[str]:
        """
        Specifies how port is selected for health checking, can be one of the
        following values:
        * `USE_FIXED_PORT`: The port number in `port` is used for health checking.
        * `USE_NAMED_PORT`: The `portName` is used for health checking.
        * `USE_SERVING_PORT`: For NetworkEndpointGroup, the port specified for each
        network endpoint is used for health checking. For other backends, the
        port or named port specified in the Backend Service is used for health
        checking.
        If not specified, gRPC health check follows behavior specified in `port` and
        `portName` fields.
        Possible values are `USE_FIXED_PORT`, `USE_NAMED_PORT`, and `USE_SERVING_PORT`.
        """
        return pulumi.get(self, "port_specification")
    def _translate_property(self, prop):
        """Map a camelCase wire property name to its snake_case Python name, or return it unchanged."""
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class HealthCheckHttp2HealthCheck(dict):
    """
    HTTP/2-specific settings of a health check: request host/path, expected
    response prefix, proxy header, and how the probed port is chosen.

    All fields are optional; values are stored in the underlying dict via
    ``pulumi.set`` and surfaced through the ``@pulumi.getter`` properties below.
    """
    def __init__(__self__, *,
                 host: Optional[str] = None,
                 port: Optional[float] = None,
                 port_name: Optional[str] = None,
                 port_specification: Optional[str] = None,
                 proxy_header: Optional[str] = None,
                 request_path: Optional[str] = None,
                 response: Optional[str] = None):
        """
        :param str host: The value of the host header in the HTTP2 health check request.
               If left empty (default value), the public IP on behalf of which this health
               check is performed will be used.
        :param float port: The port number for the health check request.
               Must be specified if portName and portSpecification are not set
               or if port_specification is USE_FIXED_PORT. Valid values are 1 through 65535.
        :param str port_name: Port name as defined in InstanceGroup#NamedPort#name. If both port and
               port_name are defined, port takes precedence.
        :param str port_specification: Specifies how port is selected for health checking, can be one of the
               following values:
               * `USE_FIXED_PORT`: The port number in `port` is used for health checking.
               * `USE_NAMED_PORT`: The `portName` is used for health checking.
               * `USE_SERVING_PORT`: For NetworkEndpointGroup, the port specified for each
               network endpoint is used for health checking. For other backends, the
               port or named port specified in the Backend Service is used for health
               checking.
               If not specified, the health check follows behavior specified in `port` and
               `portName` fields.
               Possible values are `USE_FIXED_PORT`, `USE_NAMED_PORT`, and `USE_SERVING_PORT`.
        :param str proxy_header: Specifies the type of proxy header to append before sending data to the
               backend.
               Default value is `NONE`.
               Possible values are `NONE` and `PROXY_V1`.
        :param str request_path: The request path of the HTTP2 health check request.
               The default value is /.
        :param str response: The bytes to match against the beginning of the response data. If left empty
               (the default value), any response will indicate health. The response data
               can only be ASCII.
        """
        if host is not None:
            pulumi.set(__self__, "host", host)
        if port is not None:
            pulumi.set(__self__, "port", port)
        if port_name is not None:
            pulumi.set(__self__, "port_name", port_name)
        if port_specification is not None:
            pulumi.set(__self__, "port_specification", port_specification)
        if proxy_header is not None:
            pulumi.set(__self__, "proxy_header", proxy_header)
        if request_path is not None:
            pulumi.set(__self__, "request_path", request_path)
        if response is not None:
            pulumi.set(__self__, "response", response)
    @property
    @pulumi.getter
    def host(self) -> Optional[str]:
        """
        The value of the host header in the HTTP2 health check request.
        If left empty (default value), the public IP on behalf of which this health
        check is performed will be used.
        """
        return pulumi.get(self, "host")
    @property
    @pulumi.getter
    def port(self) -> Optional[float]:
        """
        The port number for the health check request.
        Must be specified if portName and portSpecification are not set
        or if port_specification is USE_FIXED_PORT. Valid values are 1 through 65535.
        """
        return pulumi.get(self, "port")
    @property
    @pulumi.getter(name="portName")
    def port_name(self) -> Optional[str]:
        """
        Port name as defined in InstanceGroup#NamedPort#name. If both port and
        port_name are defined, port takes precedence.
        """
        return pulumi.get(self, "port_name")
    @property
    @pulumi.getter(name="portSpecification")
    def port_specification(self) -> Optional[str]:
        """
        Specifies how port is selected for health checking, can be one of the
        following values:
        * `USE_FIXED_PORT`: The port number in `port` is used for health checking.
        * `USE_NAMED_PORT`: The `portName` is used for health checking.
        * `USE_SERVING_PORT`: For NetworkEndpointGroup, the port specified for each
        network endpoint is used for health checking. For other backends, the
        port or named port specified in the Backend Service is used for health
        checking.
        If not specified, the health check follows behavior specified in `port` and
        `portName` fields.
        Possible values are `USE_FIXED_PORT`, `USE_NAMED_PORT`, and `USE_SERVING_PORT`.
        """
        return pulumi.get(self, "port_specification")
    @property
    @pulumi.getter(name="proxyHeader")
    def proxy_header(self) -> Optional[str]:
        """
        Specifies the type of proxy header to append before sending data to the
        backend.
        Default value is `NONE`.
        Possible values are `NONE` and `PROXY_V1`.
        """
        return pulumi.get(self, "proxy_header")
    @property
    @pulumi.getter(name="requestPath")
    def request_path(self) -> Optional[str]:
        """
        The request path of the HTTP2 health check request.
        The default value is /.
        """
        return pulumi.get(self, "request_path")
    @property
    @pulumi.getter
    def response(self) -> Optional[str]:
        """
        The bytes to match against the beginning of the response data. If left empty
        (the default value), any response will indicate health. The response data
        can only be ASCII.
        """
        return pulumi.get(self, "response")
    def _translate_property(self, prop):
        """Map a camelCase wire property name to its snake_case Python name, or return it unchanged."""
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class HealthCheckHttpHealthCheck(dict):
    """
    HTTP-specific settings of a health check: request host/path, expected
    response prefix, proxy header, and how the probed port is chosen.

    All fields are optional; values are stored in the underlying dict via
    ``pulumi.set`` and surfaced through the ``@pulumi.getter`` properties below.
    (Upstream docstrings said "HTTP2"/"gRPC" here — copy-paste in the
    generator; corrected to HTTP below.)
    """
    def __init__(__self__, *,
                 host: Optional[str] = None,
                 port: Optional[float] = None,
                 port_name: Optional[str] = None,
                 port_specification: Optional[str] = None,
                 proxy_header: Optional[str] = None,
                 request_path: Optional[str] = None,
                 response: Optional[str] = None):
        """
        :param str host: The value of the host header in the HTTP health check request.
               If left empty (default value), the public IP on behalf of which this health
               check is performed will be used.
        :param float port: The port number for the health check request.
               Must be specified if portName and portSpecification are not set
               or if port_specification is USE_FIXED_PORT. Valid values are 1 through 65535.
        :param str port_name: Port name as defined in InstanceGroup#NamedPort#name. If both port and
               port_name are defined, port takes precedence.
        :param str port_specification: Specifies how port is selected for health checking, can be one of the
               following values:
               * `USE_FIXED_PORT`: The port number in `port` is used for health checking.
               * `USE_NAMED_PORT`: The `portName` is used for health checking.
               * `USE_SERVING_PORT`: For NetworkEndpointGroup, the port specified for each
               network endpoint is used for health checking. For other backends, the
               port or named port specified in the Backend Service is used for health
               checking.
               If not specified, the health check follows behavior specified in `port` and
               `portName` fields.
               Possible values are `USE_FIXED_PORT`, `USE_NAMED_PORT`, and `USE_SERVING_PORT`.
        :param str proxy_header: Specifies the type of proxy header to append before sending data to the
               backend.
               Default value is `NONE`.
               Possible values are `NONE` and `PROXY_V1`.
        :param str request_path: The request path of the HTTP health check request.
               The default value is /.
        :param str response: The bytes to match against the beginning of the response data. If left empty
               (the default value), any response will indicate health. The response data
               can only be ASCII.
        """
        if host is not None:
            pulumi.set(__self__, "host", host)
        if port is not None:
            pulumi.set(__self__, "port", port)
        if port_name is not None:
            pulumi.set(__self__, "port_name", port_name)
        if port_specification is not None:
            pulumi.set(__self__, "port_specification", port_specification)
        if proxy_header is not None:
            pulumi.set(__self__, "proxy_header", proxy_header)
        if request_path is not None:
            pulumi.set(__self__, "request_path", request_path)
        if response is not None:
            pulumi.set(__self__, "response", response)
    @property
    @pulumi.getter
    def host(self) -> Optional[str]:
        """
        The value of the host header in the HTTP health check request.
        If left empty (default value), the public IP on behalf of which this health
        check is performed will be used.
        """
        return pulumi.get(self, "host")
    @property
    @pulumi.getter
    def port(self) -> Optional[float]:
        """
        The port number for the health check request.
        Must be specified if portName and portSpecification are not set
        or if port_specification is USE_FIXED_PORT. Valid values are 1 through 65535.
        """
        return pulumi.get(self, "port")
    @property
    @pulumi.getter(name="portName")
    def port_name(self) -> Optional[str]:
        """
        Port name as defined in InstanceGroup#NamedPort#name. If both port and
        port_name are defined, port takes precedence.
        """
        return pulumi.get(self, "port_name")
    @property
    @pulumi.getter(name="portSpecification")
    def port_specification(self) -> Optional[str]:
        """
        Specifies how port is selected for health checking, can be one of the
        following values:
        * `USE_FIXED_PORT`: The port number in `port` is used for health checking.
        * `USE_NAMED_PORT`: The `portName` is used for health checking.
        * `USE_SERVING_PORT`: For NetworkEndpointGroup, the port specified for each
        network endpoint is used for health checking. For other backends, the
        port or named port specified in the Backend Service is used for health
        checking.
        If not specified, the health check follows behavior specified in `port` and
        `portName` fields.
        Possible values are `USE_FIXED_PORT`, `USE_NAMED_PORT`, and `USE_SERVING_PORT`.
        """
        return pulumi.get(self, "port_specification")
    @property
    @pulumi.getter(name="proxyHeader")
    def proxy_header(self) -> Optional[str]:
        """
        Specifies the type of proxy header to append before sending data to the
        backend.
        Default value is `NONE`.
        Possible values are `NONE` and `PROXY_V1`.
        """
        return pulumi.get(self, "proxy_header")
    @property
    @pulumi.getter(name="requestPath")
    def request_path(self) -> Optional[str]:
        """
        The request path of the HTTP health check request.
        The default value is /.
        """
        return pulumi.get(self, "request_path")
    @property
    @pulumi.getter
    def response(self) -> Optional[str]:
        """
        The bytes to match against the beginning of the response data. If left empty
        (the default value), any response will indicate health. The response data
        can only be ASCII.
        """
        return pulumi.get(self, "response")
    def _translate_property(self, prop):
        """Map a camelCase wire property name to its snake_case Python name, or return it unchanged."""
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class HealthCheckHttpsHealthCheck(dict):
def __init__(__self__, *,
host: Optional[str] = None,
port: Optional[float] = None,
port_name: Optional[str] = None,
port_specification: Optional[str] = None,
proxy_header: Optional[str] = None,
request_path: Optional[str] = None,
response: Optional[str] = None):
"""
:param str host: The value of the host header in the HTTP2 health check request.
If left empty (default value), the public IP on behalf of which this health
check is performed will be used.
:param float port: The port number for the health check request.
Must be specified if portName and portSpecification are not set
or if port_specification is USE_FIXED_PORT. Valid values are 1 through 65535.
:param str port_name: Port name as defined in InstanceGroup#NamedPort#name. If both port and
port_name are defined, port takes precedence.
:param str port_specification: Specifies how port is selected for health checking, can be one of the
following values:
* `USE_FIXED_PORT`: The port number in `port` is used for health checking.
* `USE_NAMED_PORT`: The `portName` is used for health checking.
* `USE_SERVING_PORT`: For NetworkEndpointGroup, the port specified for each
network endpoint is used for health checking. For other backends, the
port or named port specified in the Backend Service is used for health
checking.
If not specified, gRPC health check follows behavior specified in `port` and
`portName` fields.
Possible values are `USE_FIXED_PORT`, `USE_NAMED_PORT`, and `USE_SERVING_PORT`.
:param str proxy_header: Specifies the type of proxy header to append before sending data to the
backend.
Default value is `NONE`.
Possible values are `NONE` and `PROXY_V1`.
:param str request_path: The request path of the HTTP2 health check request.
The default value is /.
:param str response: The bytes to match against the beginning of the response data. If left empty
(the default value), any response will indicate health. The response data
can only be ASCII.
"""
if host is not None:
pulumi.set(__self__, "host", host)
if port is not None:
pulumi.set(__self__, "port", port)
if port_name is not None:
pulumi.set(__self__, "port_name", port_name)
if port_specification is not None:
pulumi.set(__self__, "port_specification", port_specification)
if proxy_header is not None:
pulumi.set(__self__, "proxy_header", proxy_header)
if request_path is not None:
pulumi.set(__self__, "request_path", request_path)
if response is not None:
pulumi.set(__self__, "response", response)
@property
@pulumi.getter
def host(self) -> Optional[str]:
"""
The value of the host header in the HTTP2 health check request.
If left empty (default value), the public IP on behalf of which this health
check is performed will be used.
"""
return pulumi.get(self, "host")
@property
@pulumi.getter
def port(self) -> Optional[float]:
"""
The port number for the health check request.
Must be specified if portName and portSpecification are not set
or if port_specification is USE_FIXED_PORT. Valid values are 1 through 65535.
"""
return pulumi.get(self, "port")
@property
@pulumi.getter(name="portName")
def port_name(self) -> Optional[str]:
"""
Port name as defined in InstanceGroup#NamedPort#name. If both port and
port_name are defined, port takes precedence.
"""
return pulumi.get(self, "port_name")
@property
@pulumi.getter(name="portSpecification")
def port_specification(self) -> Optional[str]:
"""
Specifies how port is selected for health checking, can be one of the
following values:
* `USE_FIXED_PORT`: The port number in `port` is used for health checking.
* `USE_NAMED_PORT`: The `portName` is used for health checking.
* `USE_SERVING_PORT`: For NetworkEndpointGroup, the port specified for each
network endpoint is used for health checking. For other backends, the
port or named port specified in the Backend Service is used for health
checking.
If not specified, gRPC health check follows behavior specified in `port` and
`portName` fields.
Possible values are `USE_FIXED_PORT`, `USE_NAMED_PORT`, and `USE_SERVING_PORT`.
"""
return pulumi.get(self, "port_specification")
@property
@pulumi.getter(name="proxyHeader")
def proxy_header(self) -> Optional[str]:
"""
Specifies the type of proxy header to append before sending data to the
backend.
Default value is `NONE`.
Possible values are `NONE` and `PROXY_V1`.
"""
return pulumi.get(self, "proxy_header")
@property
@pulumi.getter(name="requestPath")
def request_path(self) -> Optional[str]:
"""
The request path of the HTTP2 health check request.
The default value is /.
"""
return pulumi.get(self, "request_path")
@property
@pulumi.getter
def response(self) -> Optional[str]:
"""
The bytes to match against the beginning of the response data. If left empty
(the default value), any response will indicate health. The response data
can only be ASCII.
"""
return pulumi.get(self, "response")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class HealthCheckLogConfig(dict):
def __init__(__self__, *,
enable: Optional[bool] = None):
"""
:param bool enable: Indicates whether or not to export logs. This is false by default,
which means no health check logging will be done.
"""
if enable is not None:
pulumi.set(__self__, "enable", enable)
@property
@pulumi.getter
def enable(self) -> Optional[bool]:
"""
Indicates whether or not to export logs. This is false by default,
which means no health check logging will be done.
"""
return pulumi.get(self, "enable")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class HealthCheckSslHealthCheck(dict):
def __init__(__self__, *,
port: Optional[float] = None,
port_name: Optional[str] = None,
port_specification: Optional[str] = None,
proxy_header: Optional[str] = None,
request: Optional[str] = None,
response: Optional[str] = None):
"""
:param float port: The port number for the health check request.
Must be specified if portName and portSpecification are not set
or if port_specification is USE_FIXED_PORT. Valid values are 1 through 65535.
:param str port_name: Port name as defined in InstanceGroup#NamedPort#name. If both port and
port_name are defined, port takes precedence.
:param str port_specification: Specifies how port is selected for health checking, can be one of the
following values:
* `USE_FIXED_PORT`: The port number in `port` is used for health checking.
* `USE_NAMED_PORT`: The `portName` is used for health checking.
* `USE_SERVING_PORT`: For NetworkEndpointGroup, the port specified for each
network endpoint is used for health checking. For other backends, the
port or named port specified in the Backend Service is used for health
checking.
If not specified, gRPC health check follows behavior specified in `port` and
`portName` fields.
Possible values are `USE_FIXED_PORT`, `USE_NAMED_PORT`, and `USE_SERVING_PORT`.
:param str proxy_header: Specifies the type of proxy header to append before sending data to the
backend.
Default value is `NONE`.
Possible values are `NONE` and `PROXY_V1`.
:param str request: The application data to send once the SSL connection has been
established (default value is empty). If both request and response are
empty, the connection establishment alone will indicate health. The request
data can only be ASCII.
:param str response: The bytes to match against the beginning of the response data. If left empty
(the default value), any response will indicate health. The response data
can only be ASCII.
"""
if port is not None:
pulumi.set(__self__, "port", port)
if port_name is not None:
pulumi.set(__self__, "port_name", port_name)
if port_specification is not None:
pulumi.set(__self__, "port_specification", port_specification)
if proxy_header is not None:
pulumi.set(__self__, "proxy_header", proxy_header)
if request is not None:
pulumi.set(__self__, "request", request)
if response is not None:
pulumi.set(__self__, "response", response)
@property
@pulumi.getter
def port(self) -> Optional[float]:
"""
The port number for the health check request.
Must be specified if portName and portSpecification are not set
or if port_specification is USE_FIXED_PORT. Valid values are 1 through 65535.
"""
return pulumi.get(self, "port")
@property
@pulumi.getter(name="portName")
def port_name(self) -> Optional[str]:
"""
Port name as defined in InstanceGroup#NamedPort#name. If both port and
port_name are defined, port takes precedence.
"""
return pulumi.get(self, "port_name")
@property
@pulumi.getter(name="portSpecification")
def port_specification(self) -> Optional[str]:
"""
Specifies how port is selected for health checking, can be one of the
following values:
* `USE_FIXED_PORT`: The port number in `port` is used for health checking.
* `USE_NAMED_PORT`: The `portName` is used for health checking.
* `USE_SERVING_PORT`: For NetworkEndpointGroup, the port specified for each
network endpoint is used for health checking. For other backends, the
port or named port specified in the Backend Service is used for health
checking.
If not specified, gRPC health check follows behavior specified in `port` and
`portName` fields.
Possible values are `USE_FIXED_PORT`, `USE_NAMED_PORT`, and `USE_SERVING_PORT`.
"""
return pulumi.get(self, "port_specification")
@property
@pulumi.getter(name="proxyHeader")
def proxy_header(self) -> Optional[str]:
"""
Specifies the type of proxy header to append before sending data to the
backend.
Default value is `NONE`.
Possible values are `NONE` and `PROXY_V1`.
"""
return pulumi.get(self, "proxy_header")
@property
@pulumi.getter
def request(self) -> Optional[str]:
"""
The application data to send once the SSL connection has been
established (default value is empty). If both request and response are
empty, the connection establishment alone will indicate health. The request
data can only be ASCII.
"""
return pulumi.get(self, "request")
@property
@pulumi.getter
def response(self) -> Optional[str]:
"""
The bytes to match against the beginning of the response data. If left empty
(the default value), any response will indicate health. The response data
can only be ASCII.
"""
return pulumi.get(self, "response")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class HealthCheckTcpHealthCheck(dict):
def __init__(__self__, *,
port: Optional[float] = None,
port_name: Optional[str] = None,
port_specification: Optional[str] = None,
proxy_header: Optional[str] = None,
request: Optional[str] = None,
response: Optional[str] = None):
"""
:param float port: The port number for the health check request.
Must be specified if portName and portSpecification are not set
or if port_specification is USE_FIXED_PORT. Valid values are 1 through 65535.
:param str port_name: Port name as defined in InstanceGroup#NamedPort#name. If both port and
port_name are defined, port takes precedence.
:param str port_specification: Specifies how port is selected for health checking, can be one of the
following values:
* `USE_FIXED_PORT`: The port number in `port` is used for health checking.
* `USE_NAMED_PORT`: The `portName` is used for health checking.
* `USE_SERVING_PORT`: For NetworkEndpointGroup, the port specified for each
network endpoint is used for health checking. For other backends, the
port or named port specified in the Backend Service is used for health
checking.
If not specified, gRPC health check follows behavior specified in `port` and
`portName` fields.
Possible values are `USE_FIXED_PORT`, `USE_NAMED_PORT`, and `USE_SERVING_PORT`.
:param str proxy_header: Specifies the type of proxy header to append before sending data to the
backend.
Default value is `NONE`.
Possible values are `NONE` and `PROXY_V1`.
:param str request: The application data to send once the SSL connection has been
established (default value is empty). If both request and response are
empty, the connection establishment alone will indicate health. The request
data can only be ASCII.
:param str response: The bytes to match against the beginning of the response data. If left empty
(the default value), any response will indicate health. The response data
can only be ASCII.
"""
if port is not None:
pulumi.set(__self__, "port", port)
if port_name is not None:
pulumi.set(__self__, "port_name", port_name)
if port_specification is not None:
pulumi.set(__self__, "port_specification", port_specification)
if proxy_header is not None:
pulumi.set(__self__, "proxy_header", proxy_header)
if request is not None:
pulumi.set(__self__, "request", request)
if response is not None:
pulumi.set(__self__, "response", response)
@property
@pulumi.getter
def port(self) -> Optional[float]:
"""
The port number for the health check request.
Must be specified if portName and portSpecification are not set
or if port_specification is USE_FIXED_PORT. Valid values are 1 through 65535.
"""
return pulumi.get(self, "port")
@property
@pulumi.getter(name="portName")
def port_name(self) -> Optional[str]:
"""
Port name as defined in InstanceGroup#NamedPort#name. If both port and
port_name are defined, port takes precedence.
"""
return pulumi.get(self, "port_name")
@property
@pulumi.getter(name="portSpecification")
def port_specification(self) -> Optional[str]:
"""
Specifies how port is selected for health checking, can be one of the
following values:
* `USE_FIXED_PORT`: The port number in `port` is used for health checking.
* `USE_NAMED_PORT`: The `portName` is used for health checking.
* `USE_SERVING_PORT`: For NetworkEndpointGroup, the port specified for each
network endpoint is used for health checking. For other backends, the
port or named port specified in the Backend Service is used for health
checking.
If not specified, gRPC health check follows behavior specified in `port` and
`portName` fields.
Possible values are `USE_FIXED_PORT`, `USE_NAMED_PORT`, and `USE_SERVING_PORT`.
"""
return pulumi.get(self, "port_specification")
@property
@pulumi.getter(name="proxyHeader")
def proxy_header(self) -> Optional[str]:
"""
Specifies the type of proxy header to append before sending data to the
backend.
Default value is `NONE`.
Possible values are `NONE` and `PROXY_V1`.
"""
return pulumi.get(self, "proxy_header")
@property
@pulumi.getter
def request(self) -> Optional[str]:
"""
The application data to send once the SSL connection has been
established (default value is empty). If both request and response are
empty, the connection establishment alone will indicate health. The request
data can only be ASCII.
"""
return pulumi.get(self, "request")
@property
@pulumi.getter
def response(self) -> Optional[str]:
"""
The bytes to match against the beginning of the response data. If left empty
(the default value), any response will indicate health. The response data
can only be ASCII.
"""
return pulumi.get(self, "response")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ImageGuestOsFeature(dict):
def __init__(__self__, *,
type: str):
"""
:param str type: The type of supported feature. Read [Enabling guest operating system features](https://cloud.google.com/compute/docs/images/create-delete-deprecate-private-images#guest-os-features) to see a list of available options.
Possible values are `MULTI_IP_SUBNET`, `SECURE_BOOT`, `UEFI_COMPATIBLE`, `VIRTIO_SCSI_MULTIQUEUE`, and `WINDOWS`.
"""
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def type(self) -> str:
"""
The type of supported feature. Read [Enabling guest operating system features](https://cloud.google.com/compute/docs/images/create-delete-deprecate-private-images#guest-os-features) to see a list of available options.
Possible values are `MULTI_IP_SUBNET`, `SECURE_BOOT`, `UEFI_COMPATIBLE`, `VIRTIO_SCSI_MULTIQUEUE`, and `WINDOWS`.
"""
return pulumi.get(self, "type")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ImageIamBindingCondition(dict):
def __init__(__self__, *,
expression: str,
title: str,
description: Optional[str] = None):
"""
:param str expression: Textual representation of an expression in Common Expression Language syntax.
:param str title: A title for the expression, i.e. a short string describing its purpose.
:param str description: An optional description of the expression. This is a longer text which describes the expression, e.g. when hovered over it in a UI.
"""
pulumi.set(__self__, "expression", expression)
pulumi.set(__self__, "title", title)
if description is not None:
pulumi.set(__self__, "description", description)
@property
@pulumi.getter
def expression(self) -> str:
"""
Textual representation of an expression in Common Expression Language syntax.
"""
return pulumi.get(self, "expression")
@property
@pulumi.getter
def title(self) -> str:
"""
A title for the expression, i.e. a short string describing its purpose.
"""
return pulumi.get(self, "title")
@property
@pulumi.getter
def description(self) -> Optional[str]:
"""
An optional description of the expression. This is a longer text which describes the expression, e.g. when hovered over it in a UI.
"""
return pulumi.get(self, "description")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ImageIamMemberCondition(dict):
def __init__(__self__, *,
expression: str,
title: str,
description: Optional[str] = None):
"""
:param str expression: Textual representation of an expression in Common Expression Language syntax.
:param str title: A title for the expression, i.e. a short string describing its purpose.
:param str description: An optional description of the expression. This is a longer text which describes the expression, e.g. when hovered over it in a UI.
"""
pulumi.set(__self__, "expression", expression)
pulumi.set(__self__, "title", title)
if description is not None:
pulumi.set(__self__, "description", description)
@property
@pulumi.getter
def expression(self) -> str:
"""
Textual representation of an expression in Common Expression Language syntax.
"""
return pulumi.get(self, "expression")
@property
@pulumi.getter
def title(self) -> str:
"""
A title for the expression, i.e. a short string describing its purpose.
"""
return pulumi.get(self, "title")
@property
@pulumi.getter
def description(self) -> Optional[str]:
"""
An optional description of the expression. This is a longer text which describes the expression, e.g. when hovered over it in a UI.
"""
return pulumi.get(self, "description")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ImageRawDisk(dict):
def __init__(__self__, *,
source: str,
container_type: Optional[str] = None,
sha1: Optional[str] = None):
"""
:param str source: The full Google Cloud Storage URL where disk storage is stored
You must provide either this property or the sourceDisk property
but not both.
:param str container_type: The format used to encode and transmit the block device, which
should be TAR. This is just a container and transmission format
and not a runtime format. Provided by the client when the disk
image is created.
Default value is `TAR`.
Possible values are `TAR`.
:param str sha1: An optional SHA1 checksum of the disk image before unpackaging.
This is provided by the client when the disk image is created.
"""
pulumi.set(__self__, "source", source)
if container_type is not None:
pulumi.set(__self__, "container_type", container_type)
if sha1 is not None:
pulumi.set(__self__, "sha1", sha1)
@property
@pulumi.getter
def source(self) -> str:
"""
The full Google Cloud Storage URL where disk storage is stored
You must provide either this property or the sourceDisk property
but not both.
"""
return pulumi.get(self, "source")
@property
@pulumi.getter(name="containerType")
def container_type(self) -> Optional[str]:
"""
The format used to encode and transmit the block device, which
should be TAR. This is just a container and transmission format
and not a runtime format. Provided by the client when the disk
image is created.
Default value is `TAR`.
Possible values are `TAR`.
"""
return pulumi.get(self, "container_type")
@property
@pulumi.getter
def sha1(self) -> Optional[str]:
"""
An optional SHA1 checksum of the disk image before unpackaging.
This is provided by the client when the disk image is created.
"""
return pulumi.get(self, "sha1")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class InstanceAttachedDisk(dict):
def __init__(__self__, *,
source: str,
device_name: Optional[str] = None,
disk_encryption_key_raw: Optional[str] = None,
disk_encryption_key_sha256: Optional[str] = None,
kms_key_self_link: Optional[str] = None,
mode: Optional[str] = None):
"""
:param str source: The name or self_link of the disk to attach to this instance.
:param str device_name: Name with which the attached disk will be accessible
under `/dev/disk/by-id/google-*`
:param str disk_encryption_key_raw: A 256-bit [customer-supplied encryption key]
(https://cloud.google.com/compute/docs/disks/customer-supplied-encryption),
encoded in [RFC 4648 base64](https://tools.ietf.org/html/rfc4648#section-4)
to encrypt this disk. Only one of `kms_key_self_link` and `disk_encryption_key_raw` may be set.
:param str kms_key_self_link: The self_link of the encryption key that is
stored in Google Cloud KMS to encrypt this disk. Only one of `kms_key_self_link`
and `disk_encryption_key_raw` may be set.
:param str mode: Either "READ_ONLY" or "READ_WRITE", defaults to "READ_WRITE"
If you have a persistent disk with data that you want to share
between multiple instances, detach it from any read-write instances and
attach it to one or more instances in read-only mode.
"""
pulumi.set(__self__, "source", source)
if device_name is not None:
pulumi.set(__self__, "device_name", device_name)
if disk_encryption_key_raw is not None:
pulumi.set(__self__, "disk_encryption_key_raw", disk_encryption_key_raw)
if disk_encryption_key_sha256 is not None:
pulumi.set(__self__, "disk_encryption_key_sha256", disk_encryption_key_sha256)
if kms_key_self_link is not None:
pulumi.set(__self__, "kms_key_self_link", kms_key_self_link)
if mode is not None:
pulumi.set(__self__, "mode", mode)
@property
@pulumi.getter
def source(self) -> str:
"""
The name or self_link of the disk to attach to this instance.
"""
return pulumi.get(self, "source")
@property
@pulumi.getter(name="deviceName")
def device_name(self) -> Optional[str]:
"""
Name with which the attached disk will be accessible
under `/dev/disk/by-id/google-*`
"""
return pulumi.get(self, "device_name")
@property
@pulumi.getter(name="diskEncryptionKeyRaw")
def disk_encryption_key_raw(self) -> Optional[str]:
"""
A 256-bit [customer-supplied encryption key]
(https://cloud.google.com/compute/docs/disks/customer-supplied-encryption),
encoded in [RFC 4648 base64](https://tools.ietf.org/html/rfc4648#section-4)
to encrypt this disk. Only one of `kms_key_self_link` and `disk_encryption_key_raw` may be set.
"""
return pulumi.get(self, "disk_encryption_key_raw")
@property
@pulumi.getter(name="diskEncryptionKeySha256")
def disk_encryption_key_sha256(self) -> Optional[str]:
return pulumi.get(self, "disk_encryption_key_sha256")
@property
@pulumi.getter(name="kmsKeySelfLink")
def kms_key_self_link(self) -> Optional[str]:
"""
The self_link of the encryption key that is
stored in Google Cloud KMS to encrypt this disk. Only one of `kms_key_self_link`
and `disk_encryption_key_raw` may be set.
"""
return pulumi.get(self, "kms_key_self_link")
@property
@pulumi.getter
def mode(self) -> Optional[str]:
"""
Either "READ_ONLY" or "READ_WRITE", defaults to "READ_WRITE"
If you have a persistent disk with data that you want to share
between multiple instances, detach it from any read-write instances and
attach it to one or more instances in read-only mode.
"""
return pulumi.get(self, "mode")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class InstanceBootDisk(dict):
def __init__(__self__, *,
auto_delete: Optional[bool] = None,
device_name: Optional[str] = None,
disk_encryption_key_raw: Optional[str] = None,
disk_encryption_key_sha256: Optional[str] = None,
initialize_params: Optional['outputs.InstanceBootDiskInitializeParams'] = None,
kms_key_self_link: Optional[str] = None,
mode: Optional[str] = None,
source: Optional[str] = None):
"""
:param bool auto_delete: Whether the disk will be auto-deleted when the instance
is deleted. Defaults to true.
:param str device_name: Name with which the attached disk will be accessible
under `/dev/disk/by-id/google-*`
:param str disk_encryption_key_raw: A 256-bit [customer-supplied encryption key]
(https://cloud.google.com/compute/docs/disks/customer-supplied-encryption),
encoded in [RFC 4648 base64](https://tools.ietf.org/html/rfc4648#section-4)
to encrypt this disk. Only one of `kms_key_self_link` and `disk_encryption_key_raw` may be set.
:param 'InstanceBootDiskInitializeParamsArgs' initialize_params: Parameters for a new disk that will be created
alongside the new instance. Either `initialize_params` or `source` must be set.
Structure is documented below.
:param str kms_key_self_link: The self_link of the encryption key that is
stored in Google Cloud KMS to encrypt this disk. Only one of `kms_key_self_link`
and `disk_encryption_key_raw` may be set.
:param str mode: Either "READ_ONLY" or "READ_WRITE", defaults to "READ_WRITE"
If you have a persistent disk with data that you want to share
between multiple instances, detach it from any read-write instances and
attach it to one or more instances in read-only mode.
:param str source: The name or self_link of the disk to attach to this instance.
"""
if auto_delete is not None:
pulumi.set(__self__, "auto_delete", auto_delete)
if device_name is not None:
pulumi.set(__self__, "device_name", device_name)
if disk_encryption_key_raw is not None:
pulumi.set(__self__, "disk_encryption_key_raw", disk_encryption_key_raw)
if disk_encryption_key_sha256 is not None:
pulumi.set(__self__, "disk_encryption_key_sha256", disk_encryption_key_sha256)
if initialize_params is not None:
pulumi.set(__self__, "initialize_params", initialize_params)
if kms_key_self_link is not None:
pulumi.set(__self__, "kms_key_self_link", kms_key_self_link)
if mode is not None:
pulumi.set(__self__, "mode", mode)
if source is not None:
pulumi.set(__self__, "source", source)
@property
@pulumi.getter(name="autoDelete")
def auto_delete(self) -> Optional[bool]:
"""
Whether the disk will be auto-deleted when the instance
is deleted. Defaults to true.
"""
return pulumi.get(self, "auto_delete")
@property
@pulumi.getter(name="deviceName")
def device_name(self) -> Optional[str]:
"""
Name with which the attached disk will be accessible
under `/dev/disk/by-id/google-*`
"""
return pulumi.get(self, "device_name")
@property
@pulumi.getter(name="diskEncryptionKeyRaw")
def disk_encryption_key_raw(self) -> Optional[str]:
"""
A 256-bit [customer-supplied encryption key]
(https://cloud.google.com/compute/docs/disks/customer-supplied-encryption),
encoded in [RFC 4648 base64](https://tools.ietf.org/html/rfc4648#section-4)
to encrypt this disk. Only one of `kms_key_self_link` and `disk_encryption_key_raw` may be set.
"""
return pulumi.get(self, "disk_encryption_key_raw")
@property
@pulumi.getter(name="diskEncryptionKeySha256")
def disk_encryption_key_sha256(self) -> Optional[str]:
return pulumi.get(self, "disk_encryption_key_sha256")
@property
@pulumi.getter(name="initializeParams")
def initialize_params(self) -> Optional['outputs.InstanceBootDiskInitializeParams']:
"""
Parameters for a new disk that will be created
alongside the new instance. Either `initialize_params` or `source` must be set.
Structure is documented below.
"""
return pulumi.get(self, "initialize_params")
@property
@pulumi.getter(name="kmsKeySelfLink")
def kms_key_self_link(self) -> Optional[str]:
"""
The self_link of the encryption key that is
stored in Google Cloud KMS to encrypt this disk. Only one of `kms_key_self_link`
and `disk_encryption_key_raw` may be set.
"""
return pulumi.get(self, "kms_key_self_link")
@property
@pulumi.getter
def mode(self) -> Optional[str]:
"""
Either "READ_ONLY" or "READ_WRITE", defaults to "READ_WRITE"
If you have a persistent disk with data that you want to share
between multiple instances, detach it from any read-write instances and
attach it to one or more instances in read-only mode.
"""
return pulumi.get(self, "mode")
@property
@pulumi.getter
def source(self) -> Optional[str]:
"""
The name or self_link of the disk to attach to this instance.
"""
return pulumi.get(self, "source")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class InstanceBootDiskInitializeParams(dict):
def __init__(__self__, *,
image: Optional[str] = None,
labels: Optional[Mapping[str, Any]] = None,
size: Optional[float] = None,
type: Optional[str] = None):
"""
:param str image: The image from which to initialize this disk. This can be
one of: the image's `self_link`, `projects/{project}/global/images/{image}`,
`projects/{project}/global/images/family/{family}`, `global/images/{image}`,
`global/images/family/{family}`, `family/{family}`, `{project}/{family}`,
`{project}/{image}`, `{family}`, or `{image}`. If referred by family, the
images names must include the family name. If they don't, use the
[compute.Image data source](https://www.terraform.io/docs/providers/google/d/compute_image.html).
For instance, the image `centos-6-v20180104` includes its family name `centos-6`.
These images can be referred by family name here.
:param Mapping[str, Any] labels: A map of key/value label pairs to assign to the instance.
:param float size: The size of the image in gigabytes. If not specified, it
will inherit the size of its base image.
:param str type: The accelerator type resource to expose to this instance. E.g. `nvidia-tesla-k80`.
"""
if image is not None:
pulumi.set(__self__, "image", image)
if labels is not None:
pulumi.set(__self__, "labels", labels)
if size is not None:
pulumi.set(__self__, "size", size)
if type is not None:
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def image(self) -> Optional[str]:
"""
The image from which to initialize this disk. This can be
one of: the image's `self_link`, `projects/{project}/global/images/{image}`,
`projects/{project}/global/images/family/{family}`, `global/images/{image}`,
`global/images/family/{family}`, `family/{family}`, `{project}/{family}`,
`{project}/{image}`, `{family}`, or `{image}`. If referred by family, the
images names must include the family name. If they don't, use the
[compute.Image data source](https://www.terraform.io/docs/providers/google/d/compute_image.html).
For instance, the image `centos-6-v20180104` includes its family name `centos-6`.
These images can be referred by family name here.
"""
return pulumi.get(self, "image")
@property
@pulumi.getter
def labels(self) -> Optional[Mapping[str, Any]]:
"""
A map of key/value label pairs to assign to the instance.
"""
return pulumi.get(self, "labels")
@property
@pulumi.getter
def size(self) -> Optional[float]:
"""
The size of the image in gigabytes. If not specified, it
will inherit the size of its base image.
"""
return pulumi.get(self, "size")
@property
@pulumi.getter
def type(self) -> Optional[str]:
"""
The accelerator type resource to expose to this instance. E.g. `nvidia-tesla-k80`.
"""
return pulumi.get(self, "type")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class InstanceConfidentialInstanceConfig(dict):
def __init__(__self__, *,
enable_confidential_compute: bool):
pulumi.set(__self__, "enable_confidential_compute", enable_confidential_compute)
@property
@pulumi.getter(name="enableConfidentialCompute")
def enable_confidential_compute(self) -> bool:
return pulumi.get(self, "enable_confidential_compute")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class InstanceFromTemplateAttachedDisk(dict):
def __init__(__self__, *,
source: str,
device_name: Optional[str] = None,
disk_encryption_key_raw: Optional[str] = None,
disk_encryption_key_sha256: Optional[str] = None,
kms_key_self_link: Optional[str] = None,
mode: Optional[str] = None):
pulumi.set(__self__, "source", source)
if device_name is not None:
pulumi.set(__self__, "device_name", device_name)
if disk_encryption_key_raw is not None:
pulumi.set(__self__, "disk_encryption_key_raw", disk_encryption_key_raw)
if disk_encryption_key_sha256 is not None:
pulumi.set(__self__, "disk_encryption_key_sha256", disk_encryption_key_sha256)
if kms_key_self_link is not None:
pulumi.set(__self__, "kms_key_self_link", kms_key_self_link)
if mode is not None:
pulumi.set(__self__, "mode", mode)
@property
@pulumi.getter
def source(self) -> str:
return pulumi.get(self, "source")
@property
@pulumi.getter(name="deviceName")
def device_name(self) -> Optional[str]:
return pulumi.get(self, "device_name")
@property
@pulumi.getter(name="diskEncryptionKeyRaw")
def disk_encryption_key_raw(self) -> Optional[str]:
return pulumi.get(self, "disk_encryption_key_raw")
@property
@pulumi.getter(name="diskEncryptionKeySha256")
def disk_encryption_key_sha256(self) -> Optional[str]:
return pulumi.get(self, "disk_encryption_key_sha256")
@property
@pulumi.getter(name="kmsKeySelfLink")
def kms_key_self_link(self) -> Optional[str]:
return pulumi.get(self, "kms_key_self_link")
@property
@pulumi.getter
def mode(self) -> Optional[str]:
return pulumi.get(self, "mode")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class InstanceFromTemplateBootDisk(dict):
    """Boot-disk output block; every field is optional and only
    explicitly-provided values are stored on the instance."""
    def __init__(__self__, *,
                 auto_delete: Optional[bool] = None,
                 device_name: Optional[str] = None,
                 disk_encryption_key_raw: Optional[str] = None,
                 disk_encryption_key_sha256: Optional[str] = None,
                 initialize_params: Optional['outputs.InstanceFromTemplateBootDiskInitializeParams'] = None,
                 kms_key_self_link: Optional[str] = None,
                 mode: Optional[str] = None,
                 source: Optional[str] = None):
        if auto_delete is not None:
            pulumi.set(__self__, "auto_delete", auto_delete)
        if device_name is not None:
            pulumi.set(__self__, "device_name", device_name)
        if disk_encryption_key_raw is not None:
            pulumi.set(__self__, "disk_encryption_key_raw", disk_encryption_key_raw)
        if disk_encryption_key_sha256 is not None:
            pulumi.set(__self__, "disk_encryption_key_sha256", disk_encryption_key_sha256)
        if initialize_params is not None:
            pulumi.set(__self__, "initialize_params", initialize_params)
        if kms_key_self_link is not None:
            pulumi.set(__self__, "kms_key_self_link", kms_key_self_link)
        if mode is not None:
            pulumi.set(__self__, "mode", mode)
        if source is not None:
            pulumi.set(__self__, "source", source)
    @property
    @pulumi.getter(name="autoDelete")
    def auto_delete(self) -> Optional[bool]:
        return pulumi.get(self, "auto_delete")
    @property
    @pulumi.getter(name="deviceName")
    def device_name(self) -> Optional[str]:
        return pulumi.get(self, "device_name")
    @property
    @pulumi.getter(name="diskEncryptionKeyRaw")
    def disk_encryption_key_raw(self) -> Optional[str]:
        return pulumi.get(self, "disk_encryption_key_raw")
    @property
    @pulumi.getter(name="diskEncryptionKeySha256")
    def disk_encryption_key_sha256(self) -> Optional[str]:
        return pulumi.get(self, "disk_encryption_key_sha256")
    @property
    @pulumi.getter(name="initializeParams")
    def initialize_params(self) -> Optional['outputs.InstanceFromTemplateBootDiskInitializeParams']:
        return pulumi.get(self, "initialize_params")
    @property
    @pulumi.getter(name="kmsKeySelfLink")
    def kms_key_self_link(self) -> Optional[str]:
        return pulumi.get(self, "kms_key_self_link")
    @property
    @pulumi.getter
    def mode(self) -> Optional[str]:
        return pulumi.get(self, "mode")
    @property
    @pulumi.getter
    def source(self) -> Optional[str]:
        return pulumi.get(self, "source")
    def _translate_property(self, prop):
        # camelCase wire key -> snake_case attribute; unknown keys pass through.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class InstanceFromTemplateBootDiskInitializeParams(dict):
    """Boot-disk initialize-params output block (``image``, ``labels``,
    ``size``, ``type``); all fields optional."""
    def __init__(__self__, *,
                 image: Optional[str] = None,
                 labels: Optional[Mapping[str, Any]] = None,
                 size: Optional[float] = None,
                 type: Optional[str] = None):
        if image is not None:
            pulumi.set(__self__, "image", image)
        if labels is not None:
            pulumi.set(__self__, "labels", labels)
        if size is not None:
            pulumi.set(__self__, "size", size)
        if type is not None:
            pulumi.set(__self__, "type", type)
    @property
    @pulumi.getter
    def image(self) -> Optional[str]:
        return pulumi.get(self, "image")
    @property
    @pulumi.getter
    def labels(self) -> Optional[Mapping[str, Any]]:
        return pulumi.get(self, "labels")
    @property
    @pulumi.getter
    def size(self) -> Optional[float]:
        return pulumi.get(self, "size")
    @property
    @pulumi.getter
    def type(self) -> Optional[str]:
        return pulumi.get(self, "type")
    def _translate_property(self, prop):
        # camelCase wire key -> snake_case attribute; unknown keys pass through.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class InstanceFromTemplateConfidentialInstanceConfig(dict):
    """Confidential-instance output block; the single required field is
    ``enable_confidential_compute``."""
    def __init__(__self__, *,
                 enable_confidential_compute: bool):
        pulumi.set(__self__, "enable_confidential_compute", enable_confidential_compute)
    @property
    @pulumi.getter(name="enableConfidentialCompute")
    def enable_confidential_compute(self) -> bool:
        return pulumi.get(self, "enable_confidential_compute")
    def _translate_property(self, prop):
        # camelCase wire key -> snake_case attribute; unknown keys pass through.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class InstanceFromTemplateGuestAccelerator(dict):
    """Guest-accelerator output block; both ``count`` and ``type`` are
    required."""
    def __init__(__self__, *,
                 count: float,
                 type: str):
        pulumi.set(__self__, "count", count)
        pulumi.set(__self__, "type", type)
    @property
    @pulumi.getter
    def count(self) -> float:
        return pulumi.get(self, "count")
    @property
    @pulumi.getter
    def type(self) -> str:
        return pulumi.get(self, "type")
    def _translate_property(self, prop):
        # camelCase wire key -> snake_case attribute; unknown keys pass through.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class InstanceFromTemplateNetworkInterface(dict):
    """Network-interface output block; all fields are optional and only
    explicitly-provided values are stored."""
    def __init__(__self__, *,
                 access_configs: Optional[List['outputs.InstanceFromTemplateNetworkInterfaceAccessConfig']] = None,
                 alias_ip_ranges: Optional[List['outputs.InstanceFromTemplateNetworkInterfaceAliasIpRange']] = None,
                 name: Optional[str] = None,
                 network: Optional[str] = None,
                 network_ip: Optional[str] = None,
                 subnetwork: Optional[str] = None,
                 subnetwork_project: Optional[str] = None):
        """
        :param str name: A unique name for the resource, required by GCE.
               Changing this forces a new resource to be created.
        """
        if access_configs is not None:
            pulumi.set(__self__, "access_configs", access_configs)
        if alias_ip_ranges is not None:
            pulumi.set(__self__, "alias_ip_ranges", alias_ip_ranges)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if network is not None:
            pulumi.set(__self__, "network", network)
        if network_ip is not None:
            pulumi.set(__self__, "network_ip", network_ip)
        if subnetwork is not None:
            pulumi.set(__self__, "subnetwork", subnetwork)
        if subnetwork_project is not None:
            pulumi.set(__self__, "subnetwork_project", subnetwork_project)
    @property
    @pulumi.getter(name="accessConfigs")
    def access_configs(self) -> Optional[List['outputs.InstanceFromTemplateNetworkInterfaceAccessConfig']]:
        return pulumi.get(self, "access_configs")
    @property
    @pulumi.getter(name="aliasIpRanges")
    def alias_ip_ranges(self) -> Optional[List['outputs.InstanceFromTemplateNetworkInterfaceAliasIpRange']]:
        return pulumi.get(self, "alias_ip_ranges")
    @property
    @pulumi.getter
    def name(self) -> Optional[str]:
        """
        A unique name for the resource, required by GCE.
        Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def network(self) -> Optional[str]:
        return pulumi.get(self, "network")
    @property
    @pulumi.getter(name="networkIp")
    def network_ip(self) -> Optional[str]:
        return pulumi.get(self, "network_ip")
    @property
    @pulumi.getter
    def subnetwork(self) -> Optional[str]:
        return pulumi.get(self, "subnetwork")
    @property
    @pulumi.getter(name="subnetworkProject")
    def subnetwork_project(self) -> Optional[str]:
        return pulumi.get(self, "subnetwork_project")
    def _translate_property(self, prop):
        # camelCase wire key -> snake_case attribute; unknown keys pass through.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class InstanceFromTemplateNetworkInterfaceAccessConfig(dict):
    """Access-config output block (``nat_ip``, ``network_tier``,
    ``public_ptr_domain_name``); all fields optional."""
    def __init__(__self__, *,
                 nat_ip: Optional[str] = None,
                 network_tier: Optional[str] = None,
                 public_ptr_domain_name: Optional[str] = None):
        if nat_ip is not None:
            pulumi.set(__self__, "nat_ip", nat_ip)
        if network_tier is not None:
            pulumi.set(__self__, "network_tier", network_tier)
        if public_ptr_domain_name is not None:
            pulumi.set(__self__, "public_ptr_domain_name", public_ptr_domain_name)
    @property
    @pulumi.getter(name="natIp")
    def nat_ip(self) -> Optional[str]:
        return pulumi.get(self, "nat_ip")
    @property
    @pulumi.getter(name="networkTier")
    def network_tier(self) -> Optional[str]:
        return pulumi.get(self, "network_tier")
    @property
    @pulumi.getter(name="publicPtrDomainName")
    def public_ptr_domain_name(self) -> Optional[str]:
        return pulumi.get(self, "public_ptr_domain_name")
    def _translate_property(self, prop):
        # camelCase wire key -> snake_case attribute; unknown keys pass through.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class InstanceFromTemplateNetworkInterfaceAliasIpRange(dict):
    """Alias-IP-range output block; ``ip_cidr_range`` is required,
    ``subnetwork_range_name`` optional."""
    def __init__(__self__, *,
                 ip_cidr_range: str,
                 subnetwork_range_name: Optional[str] = None):
        pulumi.set(__self__, "ip_cidr_range", ip_cidr_range)
        if subnetwork_range_name is not None:
            pulumi.set(__self__, "subnetwork_range_name", subnetwork_range_name)
    @property
    @pulumi.getter(name="ipCidrRange")
    def ip_cidr_range(self) -> str:
        return pulumi.get(self, "ip_cidr_range")
    @property
    @pulumi.getter(name="subnetworkRangeName")
    def subnetwork_range_name(self) -> Optional[str]:
        return pulumi.get(self, "subnetwork_range_name")
    def _translate_property(self, prop):
        # camelCase wire key -> snake_case attribute; unknown keys pass through.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class InstanceFromTemplateScheduling(dict):
    """Scheduling output block; all fields are optional and only
    explicitly-provided values are stored."""
    def __init__(__self__, *,
                 automatic_restart: Optional[bool] = None,
                 min_node_cpus: Optional[float] = None,
                 node_affinities: Optional[List['outputs.InstanceFromTemplateSchedulingNodeAffinity']] = None,
                 on_host_maintenance: Optional[str] = None,
                 preemptible: Optional[bool] = None):
        if automatic_restart is not None:
            pulumi.set(__self__, "automatic_restart", automatic_restart)
        if min_node_cpus is not None:
            pulumi.set(__self__, "min_node_cpus", min_node_cpus)
        if node_affinities is not None:
            pulumi.set(__self__, "node_affinities", node_affinities)
        if on_host_maintenance is not None:
            pulumi.set(__self__, "on_host_maintenance", on_host_maintenance)
        if preemptible is not None:
            pulumi.set(__self__, "preemptible", preemptible)
    @property
    @pulumi.getter(name="automaticRestart")
    def automatic_restart(self) -> Optional[bool]:
        return pulumi.get(self, "automatic_restart")
    @property
    @pulumi.getter(name="minNodeCpus")
    def min_node_cpus(self) -> Optional[float]:
        return pulumi.get(self, "min_node_cpus")
    @property
    @pulumi.getter(name="nodeAffinities")
    def node_affinities(self) -> Optional[List['outputs.InstanceFromTemplateSchedulingNodeAffinity']]:
        return pulumi.get(self, "node_affinities")
    @property
    @pulumi.getter(name="onHostMaintenance")
    def on_host_maintenance(self) -> Optional[str]:
        return pulumi.get(self, "on_host_maintenance")
    @property
    @pulumi.getter
    def preemptible(self) -> Optional[bool]:
        return pulumi.get(self, "preemptible")
    def _translate_property(self, prop):
        # camelCase wire key -> snake_case attribute; unknown keys pass through.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class InstanceFromTemplateSchedulingNodeAffinity(dict):
    """Node-affinity output block; ``key``, ``operator`` and ``values``
    are all required."""
    def __init__(__self__, *,
                 key: str,
                 operator: str,
                 values: List[str]):
        pulumi.set(__self__, "key", key)
        pulumi.set(__self__, "operator", operator)
        pulumi.set(__self__, "values", values)
    @property
    @pulumi.getter
    def key(self) -> str:
        return pulumi.get(self, "key")
    @property
    @pulumi.getter
    def operator(self) -> str:
        return pulumi.get(self, "operator")
    @property
    @pulumi.getter
    def values(self) -> List[str]:
        return pulumi.get(self, "values")
    def _translate_property(self, prop):
        # camelCase wire key -> snake_case attribute; unknown keys pass through.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class InstanceFromTemplateScratchDisk(dict):
    """Scratch-disk output block; the single required field is
    ``interface``."""
    def __init__(__self__, *,
                 interface: str):
        pulumi.set(__self__, "interface", interface)
    @property
    @pulumi.getter
    def interface(self) -> str:
        return pulumi.get(self, "interface")
    def _translate_property(self, prop):
        # camelCase wire key -> snake_case attribute; unknown keys pass through.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class InstanceFromTemplateServiceAccount(dict):
    """Service-account output block; ``scopes`` is required, ``email``
    optional."""
    def __init__(__self__, *,
                 scopes: List[str],
                 email: Optional[str] = None):
        pulumi.set(__self__, "scopes", scopes)
        if email is not None:
            pulumi.set(__self__, "email", email)
    @property
    @pulumi.getter
    def scopes(self) -> List[str]:
        return pulumi.get(self, "scopes")
    @property
    @pulumi.getter
    def email(self) -> Optional[str]:
        return pulumi.get(self, "email")
    def _translate_property(self, prop):
        # camelCase wire key -> snake_case attribute; unknown keys pass through.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class InstanceFromTemplateShieldedInstanceConfig(dict):
    """Shielded-instance output block (integrity monitoring, secure boot,
    vTPM flags); all fields optional."""
    def __init__(__self__, *,
                 enable_integrity_monitoring: Optional[bool] = None,
                 enable_secure_boot: Optional[bool] = None,
                 enable_vtpm: Optional[bool] = None):
        if enable_integrity_monitoring is not None:
            pulumi.set(__self__, "enable_integrity_monitoring", enable_integrity_monitoring)
        if enable_secure_boot is not None:
            pulumi.set(__self__, "enable_secure_boot", enable_secure_boot)
        if enable_vtpm is not None:
            pulumi.set(__self__, "enable_vtpm", enable_vtpm)
    @property
    @pulumi.getter(name="enableIntegrityMonitoring")
    def enable_integrity_monitoring(self) -> Optional[bool]:
        return pulumi.get(self, "enable_integrity_monitoring")
    @property
    @pulumi.getter(name="enableSecureBoot")
    def enable_secure_boot(self) -> Optional[bool]:
        return pulumi.get(self, "enable_secure_boot")
    @property
    @pulumi.getter(name="enableVtpm")
    def enable_vtpm(self) -> Optional[bool]:
        return pulumi.get(self, "enable_vtpm")
    def _translate_property(self, prop):
        # camelCase wire key -> snake_case attribute; unknown keys pass through.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class InstanceGroupManagerAutoHealingPolicies(dict):
    """Auto-healing-policies output block; both fields are required."""
    def __init__(__self__, *,
                 health_check: str,
                 initial_delay_sec: float):
        """
        :param str health_check: The health check resource that signals autohealing.
        :param float initial_delay_sec: The number of seconds that the managed instance group waits before
               it applies autohealing policies to new instances or recently recreated instances. Between 0 and 3600.
        """
        pulumi.set(__self__, "health_check", health_check)
        pulumi.set(__self__, "initial_delay_sec", initial_delay_sec)
    @property
    @pulumi.getter(name="healthCheck")
    def health_check(self) -> str:
        """
        The health check resource that signals autohealing.
        """
        return pulumi.get(self, "health_check")
    @property
    @pulumi.getter(name="initialDelaySec")
    def initial_delay_sec(self) -> float:
        """
        The number of seconds that the managed instance group waits before
        it applies autohealing policies to new instances or recently recreated instances. Between 0 and 3600.
        """
        return pulumi.get(self, "initial_delay_sec")
    def _translate_property(self, prop):
        # camelCase wire key -> snake_case attribute; unknown keys pass through.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class InstanceGroupManagerNamedPort(dict):
    """Named-port output block; both fields are required."""
    def __init__(__self__, *,
                 name: str,
                 port: float):
        """
        :param str name: The name of the port. (The upstream generated text read
               "- Version name.", which looks like a copy/paste artifact — verify against provider docs.)
        :param float port: The port number.
               - - -
        """
        pulumi.set(__self__, "name", name)
        pulumi.set(__self__, "port", port)
    @property
    @pulumi.getter
    def name(self) -> str:
        """
        The name of the port. (The upstream generated text read "- Version name.",
        which looks like a copy/paste artifact — verify against provider docs.)
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def port(self) -> float:
        """
        The port number.
        - - -
        """
        return pulumi.get(self, "port")
    def _translate_property(self, prop):
        # camelCase wire key -> snake_case attribute; unknown keys pass through.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class InstanceGroupManagerStatefulDisk(dict):
    """Stateful-disk output block; ``device_name`` is required,
    ``delete_rule`` optional."""
    def __init__(__self__, *,
                 device_name: str,
                 delete_rule: Optional[str] = None):
        """
        :param str device_name: , The device name of the disk to be attached.
        :param str delete_rule: , A value that prescribes what should happen to the stateful disk when the VM instance is deleted. The available options are `NEVER` and `ON_PERMANENT_INSTANCE_DELETION`. `NEVER` detaches the disk when the VM is deleted, but does not delete the disk. `ON_PERMANENT_INSTANCE_DELETION` will delete the stateful disk when the VM is permanently deleted from the instance group. The default is `NEVER`.
        """
        pulumi.set(__self__, "device_name", device_name)
        if delete_rule is not None:
            pulumi.set(__self__, "delete_rule", delete_rule)
    @property
    @pulumi.getter(name="deviceName")
    def device_name(self) -> str:
        """
        , The device name of the disk to be attached.
        """
        return pulumi.get(self, "device_name")
    @property
    @pulumi.getter(name="deleteRule")
    def delete_rule(self) -> Optional[str]:
        """
        , A value that prescribes what should happen to the stateful disk when the VM instance is deleted. The available options are `NEVER` and `ON_PERMANENT_INSTANCE_DELETION`. `NEVER` detaches the disk when the VM is deleted, but does not delete the disk. `ON_PERMANENT_INSTANCE_DELETION` will delete the stateful disk when the VM is permanently deleted from the instance group. The default is `NEVER`.
        """
        return pulumi.get(self, "delete_rule")
    def _translate_property(self, prop):
        # camelCase wire key -> snake_case attribute; unknown keys pass through.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class InstanceGroupManagerUpdatePolicy(dict):
    """Update-policy output block; ``minimal_action`` and ``type`` are
    required, the surge/unavailable/ready settings optional."""
    def __init__(__self__, *,
                 minimal_action: str,
                 type: str,
                 max_surge_fixed: Optional[float] = None,
                 max_surge_percent: Optional[float] = None,
                 max_unavailable_fixed: Optional[float] = None,
                 max_unavailable_percent: Optional[float] = None,
                 min_ready_sec: Optional[float] = None):
        """
        :param str minimal_action: - Minimal action to be taken on an instance. You can specify either `RESTART` to restart existing instances or `REPLACE` to delete and create new instances from the target template. If you specify a `RESTART`, the Updater will attempt to perform that action only. However, if the Updater determines that the minimal action you specify is not enough to perform the update, it might perform a more disruptive action.
        :param str type: - The type of update process. You can specify either `PROACTIVE` so that the instance group manager proactively executes actions in order to bring instances to their target versions or `OPPORTUNISTIC` so that no action is proactively executed but the update will be performed as part of other actions (for example, resizes or recreateInstances calls).
        :param float max_surge_fixed: , The maximum number of instances that can be created above the specified targetSize during the update process. Conflicts with `max_surge_percent`. If neither is set, defaults to 1
        :param float max_surge_percent: , The maximum number of instances(calculated as percentage) that can be created above the specified targetSize during the update process. Conflicts with `max_surge_fixed`.
        :param float max_unavailable_fixed: , The maximum number of instances that can be unavailable during the update process. Conflicts with `max_unavailable_percent`. If neither is set, defaults to 1
        :param float max_unavailable_percent: , The maximum number of instances(calculated as percentage) that can be unavailable during the update process. Conflicts with `max_unavailable_fixed`.
        :param float min_ready_sec: , Minimum number of seconds to wait for after a newly created instance becomes available. This value must be from range [0, 3600]
               - - -
        """
        pulumi.set(__self__, "minimal_action", minimal_action)
        pulumi.set(__self__, "type", type)
        if max_surge_fixed is not None:
            pulumi.set(__self__, "max_surge_fixed", max_surge_fixed)
        if max_surge_percent is not None:
            pulumi.set(__self__, "max_surge_percent", max_surge_percent)
        if max_unavailable_fixed is not None:
            pulumi.set(__self__, "max_unavailable_fixed", max_unavailable_fixed)
        if max_unavailable_percent is not None:
            pulumi.set(__self__, "max_unavailable_percent", max_unavailable_percent)
        if min_ready_sec is not None:
            pulumi.set(__self__, "min_ready_sec", min_ready_sec)
    @property
    @pulumi.getter(name="minimalAction")
    def minimal_action(self) -> str:
        """
        - Minimal action to be taken on an instance. You can specify either `RESTART` to restart existing instances or `REPLACE` to delete and create new instances from the target template. If you specify a `RESTART`, the Updater will attempt to perform that action only. However, if the Updater determines that the minimal action you specify is not enough to perform the update, it might perform a more disruptive action.
        """
        return pulumi.get(self, "minimal_action")
    @property
    @pulumi.getter
    def type(self) -> str:
        """
        - The type of update process. You can specify either `PROACTIVE` so that the instance group manager proactively executes actions in order to bring instances to their target versions or `OPPORTUNISTIC` so that no action is proactively executed but the update will be performed as part of other actions (for example, resizes or recreateInstances calls).
        """
        return pulumi.get(self, "type")
    @property
    @pulumi.getter(name="maxSurgeFixed")
    def max_surge_fixed(self) -> Optional[float]:
        """
        , The maximum number of instances that can be created above the specified targetSize during the update process. Conflicts with `max_surge_percent`. If neither is set, defaults to 1
        """
        return pulumi.get(self, "max_surge_fixed")
    @property
    @pulumi.getter(name="maxSurgePercent")
    def max_surge_percent(self) -> Optional[float]:
        """
        , The maximum number of instances(calculated as percentage) that can be created above the specified targetSize during the update process. Conflicts with `max_surge_fixed`.
        """
        return pulumi.get(self, "max_surge_percent")
    @property
    @pulumi.getter(name="maxUnavailableFixed")
    def max_unavailable_fixed(self) -> Optional[float]:
        """
        , The maximum number of instances that can be unavailable during the update process. Conflicts with `max_unavailable_percent`. If neither is set, defaults to 1
        """
        return pulumi.get(self, "max_unavailable_fixed")
    @property
    @pulumi.getter(name="maxUnavailablePercent")
    def max_unavailable_percent(self) -> Optional[float]:
        """
        , The maximum number of instances(calculated as percentage) that can be unavailable during the update process. Conflicts with `max_unavailable_fixed`.
        """
        return pulumi.get(self, "max_unavailable_percent")
    @property
    @pulumi.getter(name="minReadySec")
    def min_ready_sec(self) -> Optional[float]:
        """
        , Minimum number of seconds to wait for after a newly created instance becomes available. This value must be from range [0, 3600]
        - - -
        """
        return pulumi.get(self, "min_ready_sec")
    def _translate_property(self, prop):
        # camelCase wire key -> snake_case attribute; unknown keys pass through.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class InstanceGroupManagerVersion(dict):
    """Version output block; ``instance_template`` is required, ``name``
    and ``target_size`` optional."""
    def __init__(__self__, *,
                 instance_template: str,
                 name: Optional[str] = None,
                 target_size: Optional['outputs.InstanceGroupManagerVersionTargetSize'] = None):
        """
        :param str instance_template: - The full URL to an instance template from which all new instances of this version will be created.
        :param str name: - Version name.
        :param 'InstanceGroupManagerVersionTargetSizeArgs' target_size: - The number of instances calculated as a fixed number or a percentage depending on the settings. Structure is documented below.
        """
        pulumi.set(__self__, "instance_template", instance_template)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if target_size is not None:
            pulumi.set(__self__, "target_size", target_size)
    @property
    @pulumi.getter(name="instanceTemplate")
    def instance_template(self) -> str:
        """
        - The full URL to an instance template from which all new instances of this version will be created.
        """
        return pulumi.get(self, "instance_template")
    @property
    @pulumi.getter
    def name(self) -> Optional[str]:
        """
        - Version name.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="targetSize")
    def target_size(self) -> Optional['outputs.InstanceGroupManagerVersionTargetSize']:
        """
        - The number of instances calculated as a fixed number or a percentage depending on the settings. Structure is documented below.
        """
        return pulumi.get(self, "target_size")
    def _translate_property(self, prop):
        # camelCase wire key -> snake_case attribute; unknown keys pass through.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class InstanceGroupManagerVersionTargetSize(dict):
    """Target-size output block; ``fixed`` and ``percent`` are both
    optional (and documented as mutually exclusive)."""
    def __init__(__self__, *,
                 fixed: Optional[float] = None,
                 percent: Optional[float] = None):
        """
        :param float fixed: , The number of instances which are managed for this version. Conflicts with `percent`.
        :param float percent: , The number of instances (calculated as percentage) which are managed for this version. Conflicts with `fixed`.
               Note that when using `percent`, rounding will be in favor of explicitly set `target_size` values; a managed instance group with 2 instances and 2 `version`s,
               one of which has a `target_size.percent` of `60` will create 2 instances of that `version`.
        """
        if fixed is not None:
            pulumi.set(__self__, "fixed", fixed)
        if percent is not None:
            pulumi.set(__self__, "percent", percent)
    @property
    @pulumi.getter
    def fixed(self) -> Optional[float]:
        """
        , The number of instances which are managed for this version. Conflicts with `percent`.
        """
        return pulumi.get(self, "fixed")
    @property
    @pulumi.getter
    def percent(self) -> Optional[float]:
        """
        , The number of instances (calculated as percentage) which are managed for this version. Conflicts with `fixed`.
        Note that when using `percent`, rounding will be in favor of explicitly set `target_size` values; a managed instance group with 2 instances and 2 `version`s,
        one of which has a `target_size.percent` of `60` will create 2 instances of that `version`.
        """
        return pulumi.get(self, "percent")
    def _translate_property(self, prop):
        # camelCase wire key -> snake_case attribute; unknown keys pass through.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class InstanceGroupNamedPort(dict):
    """Named-port output block; both fields are required."""
    def __init__(__self__, *,
                 name: str,
                 port: float):
        """
        :param str name: The name which the port will be mapped to.
        :param float port: The port number to map the name to.
        """
        pulumi.set(__self__, "name", name)
        pulumi.set(__self__, "port", port)
    @property
    @pulumi.getter
    def name(self) -> str:
        """
        The name which the port will be mapped to.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def port(self) -> float:
        """
        The port number to map the name to.
        """
        return pulumi.get(self, "port")
    def _translate_property(self, prop):
        # camelCase wire key -> snake_case attribute; unknown keys pass through.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class InstanceGuestAccelerator(dict):
    """Guest-accelerator output block; both fields are required."""
    def __init__(__self__, *,
                 count: float,
                 type: str):
        """
        :param float count: The number of the guest accelerator cards exposed to this instance.
        :param str type: The accelerator type resource to expose to this instance. E.g. `nvidia-tesla-k80`.
        """
        pulumi.set(__self__, "count", count)
        pulumi.set(__self__, "type", type)
    @property
    @pulumi.getter
    def count(self) -> float:
        """
        The number of the guest accelerator cards exposed to this instance.
        """
        return pulumi.get(self, "count")
    @property
    @pulumi.getter
    def type(self) -> str:
        """
        The accelerator type resource to expose to this instance. E.g. `nvidia-tesla-k80`.
        """
        return pulumi.get(self, "type")
    def _translate_property(self, prop):
        # camelCase wire key -> snake_case attribute; unknown keys pass through.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class InstanceIAMBindingCondition(dict):
    """IAM-binding condition output block; ``expression`` and ``title``
    are required, ``description`` optional."""
    def __init__(__self__, *,
                 expression: str,
                 title: str,
                 description: Optional[str] = None):
        """
        :param str expression: Textual representation of an expression in Common Expression Language syntax.
        :param str title: A title for the expression, i.e. a short string describing its purpose.
        :param str description: An optional description of the expression. This is a longer text which describes the expression, e.g. when hovered over it in a UI.
        """
        pulumi.set(__self__, "expression", expression)
        pulumi.set(__self__, "title", title)
        if description is not None:
            pulumi.set(__self__, "description", description)
    @property
    @pulumi.getter
    def expression(self) -> str:
        """
        Textual representation of an expression in Common Expression Language syntax.
        """
        return pulumi.get(self, "expression")
    @property
    @pulumi.getter
    def title(self) -> str:
        """
        A title for the expression, i.e. a short string describing its purpose.
        """
        return pulumi.get(self, "title")
    @property
    @pulumi.getter
    def description(self) -> Optional[str]:
        """
        An optional description of the expression. This is a longer text which describes the expression, e.g. when hovered over it in a UI.
        """
        return pulumi.get(self, "description")
    def _translate_property(self, prop):
        # camelCase wire key -> snake_case attribute; unknown keys pass through.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class InstanceIAMMemberCondition(dict):
    """IAM-member condition output block; ``expression`` and ``title``
    are required, ``description`` optional."""
    def __init__(__self__, *,
                 expression: str,
                 title: str,
                 description: Optional[str] = None):
        """
        :param str expression: Textual representation of an expression in Common Expression Language syntax.
        :param str title: A title for the expression, i.e. a short string describing its purpose.
        :param str description: An optional description of the expression. This is a longer text which describes the expression, e.g. when hovered over it in a UI.
        """
        pulumi.set(__self__, "expression", expression)
        pulumi.set(__self__, "title", title)
        if description is not None:
            pulumi.set(__self__, "description", description)
    @property
    @pulumi.getter
    def expression(self) -> str:
        """
        Textual representation of an expression in Common Expression Language syntax.
        """
        return pulumi.get(self, "expression")
    @property
    @pulumi.getter
    def title(self) -> str:
        """
        A title for the expression, i.e. a short string describing its purpose.
        """
        return pulumi.get(self, "title")
    @property
    @pulumi.getter
    def description(self) -> Optional[str]:
        """
        An optional description of the expression. This is a longer text which describes the expression, e.g. when hovered over it in a UI.
        """
        return pulumi.get(self, "description")
    def _translate_property(self, prop):
        # camelCase wire key -> snake_case attribute; unknown keys pass through.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class InstanceNetworkInterface(dict):
    """Network-interface output block of an instance; all fields are
    optional and only explicitly-provided values are stored."""
    def __init__(__self__, *,
                 access_configs: Optional[List['outputs.InstanceNetworkInterfaceAccessConfig']] = None,
                 alias_ip_ranges: Optional[List['outputs.InstanceNetworkInterfaceAliasIpRange']] = None,
                 name: Optional[str] = None,
                 network: Optional[str] = None,
                 network_ip: Optional[str] = None,
                 subnetwork: Optional[str] = None,
                 subnetwork_project: Optional[str] = None):
        """
        :param List['InstanceNetworkInterfaceAccessConfigArgs'] access_configs: Access configurations, i.e. IPs via which this
               instance can be accessed via the Internet. Omit to ensure that the instance
               is not accessible from the Internet. If omitted, ssh will not
               work unless this provider can send traffic to the instance's network (e.g. via
               tunnel or because it is running on another cloud instance on that network).
               This block can be repeated multiple times. Structure documented below.
        :param List['InstanceNetworkInterfaceAliasIpRangeArgs'] alias_ip_ranges: An
               array of alias IP ranges for this network interface. Can only be specified for network
               interfaces on subnet-mode networks. Structure documented below.
        :param str name: A unique name for the resource, required by GCE.
               Changing this forces a new resource to be created.
        :param str network: The name or self_link of the network to attach this interface to.
               Either `network` or `subnetwork` must be provided.
        :param str network_ip: The private IP address to assign to the instance. If
               empty, the address will be automatically assigned.
        :param str subnetwork: The name or self_link of the subnetwork to attach this
               interface to. The subnetwork must exist in the same region this instance will be
               created in. Either `network` or `subnetwork` must be provided.
        :param str subnetwork_project: The project in which the subnetwork belongs.
               If the `subnetwork` is a self_link, this field is ignored in favor of the project
               defined in the subnetwork self_link. If the `subnetwork` is a name and this
               field is not provided, the provider project is used.
        """
        if access_configs is not None:
            pulumi.set(__self__, "access_configs", access_configs)
        if alias_ip_ranges is not None:
            pulumi.set(__self__, "alias_ip_ranges", alias_ip_ranges)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if network is not None:
            pulumi.set(__self__, "network", network)
        if network_ip is not None:
            pulumi.set(__self__, "network_ip", network_ip)
        if subnetwork is not None:
            pulumi.set(__self__, "subnetwork", subnetwork)
        if subnetwork_project is not None:
            pulumi.set(__self__, "subnetwork_project", subnetwork_project)
    @property
    @pulumi.getter(name="accessConfigs")
    def access_configs(self) -> Optional[List['outputs.InstanceNetworkInterfaceAccessConfig']]:
        """
        Access configurations, i.e. IPs via which this
        instance can be accessed via the Internet. Omit to ensure that the instance
        is not accessible from the Internet. If omitted, ssh will not
        work unless this provider can send traffic to the instance's network (e.g. via
        tunnel or because it is running on another cloud instance on that network).
        This block can be repeated multiple times. Structure documented below.
        """
        return pulumi.get(self, "access_configs")
    @property
    @pulumi.getter(name="aliasIpRanges")
    def alias_ip_ranges(self) -> Optional[List['outputs.InstanceNetworkInterfaceAliasIpRange']]:
        """
        An
        array of alias IP ranges for this network interface. Can only be specified for network
        interfaces on subnet-mode networks. Structure documented below.
        """
        return pulumi.get(self, "alias_ip_ranges")
    @property
    @pulumi.getter
    def name(self) -> Optional[str]:
        """
        A unique name for the resource, required by GCE.
        Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def network(self) -> Optional[str]:
        """
        The name or self_link of the network to attach this interface to.
        Either `network` or `subnetwork` must be provided.
        """
        return pulumi.get(self, "network")
    @property
    @pulumi.getter(name="networkIp")
    def network_ip(self) -> Optional[str]:
        """
        The private IP address to assign to the instance. If
        empty, the address will be automatically assigned.
        """
        return pulumi.get(self, "network_ip")
    @property
    @pulumi.getter
    def subnetwork(self) -> Optional[str]:
        """
        The name or self_link of the subnetwork to attach this
        interface to. The subnetwork must exist in the same region this instance will be
        created in. Either `network` or `subnetwork` must be provided.
        """
        return pulumi.get(self, "subnetwork")
    @property
    @pulumi.getter(name="subnetworkProject")
    def subnetwork_project(self) -> Optional[str]:
        """
        The project in which the subnetwork belongs.
        If the `subnetwork` is a self_link, this field is ignored in favor of the project
        defined in the subnetwork self_link. If the `subnetwork` is a name and this
        field is not provided, the provider project is used.
        """
        return pulumi.get(self, "subnetwork_project")
    def _translate_property(self, prop):
        # camelCase wire key -> snake_case attribute; unknown keys pass through.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class InstanceNetworkInterfaceAccessConfig(dict):
    def __init__(__self__, *,
                 nat_ip: Optional[str] = None,
                 network_tier: Optional[str] = None,
                 public_ptr_domain_name: Optional[str] = None):
        """
        :param str nat_ip: The IP address that will be 1:1 mapped to the instance's
               network ip. If not given, one will be generated.
        :param str network_tier: The [networking tier][network-tier] used for configuring this
               instance. This field can take the following values: PREMIUM or STANDARD.
               If this field is not specified, it is assumed to be PREMIUM.
        :param str public_ptr_domain_name: The DNS domain name for the public PTR record.
               To set this field on an instance, you must be verified as the owner of the domain.
               See [the docs](https://cloud.google.com/compute/docs/instances/create-ptr-record)
               for how to become verified as a domain owner.
        """
        # Record only the fields that were actually supplied.
        for field_name, field_value in (
                ("nat_ip", nat_ip),
                ("network_tier", network_tier),
                ("public_ptr_domain_name", public_ptr_domain_name)):
            if field_value is not None:
                pulumi.set(__self__, field_name, field_value)

    @property
    @pulumi.getter(name="natIp")
    def nat_ip(self) -> Optional[str]:
        """
        The IP address that will be 1:1 mapped to the instance's network
        ip. If not given, one will be generated.
        """
        return pulumi.get(self, "nat_ip")

    @property
    @pulumi.getter(name="networkTier")
    def network_tier(self) -> Optional[str]:
        """
        The [networking tier][network-tier] used for configuring this
        instance: PREMIUM or STANDARD. Assumed to be PREMIUM when not
        specified.
        """
        return pulumi.get(self, "network_tier")

    @property
    @pulumi.getter(name="publicPtrDomainName")
    def public_ptr_domain_name(self) -> Optional[str]:
        """
        The DNS domain name for the public PTR record. To set this field
        on an instance, you must be verified as the owner of the domain.
        See [the docs](https://cloud.google.com/compute/docs/instances/create-ptr-record)
        for how to become verified as a domain owner.
        """
        return pulumi.get(self, "public_ptr_domain_name")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case Python attribute name.
        snake_name = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_name or prop
@pulumi.output_type
class InstanceNetworkInterfaceAliasIpRange(dict):
    def __init__(__self__, *,
                 ip_cidr_range: str,
                 subnetwork_range_name: Optional[str] = None):
        """
        :param str ip_cidr_range: The IP CIDR range represented by this alias IP range.
               This IP CIDR range must belong to the specified subnetwork and cannot
               contain IP addresses reserved by system or used by other network
               interfaces. This range may be a single IP address (e.g. 10.2.3.4),
               a netmask (e.g. /24) or a CIDR format string (e.g. 10.1.2.0/24).
        :param str subnetwork_range_name: The subnetwork secondary range name specifying
               the secondary range from which to allocate the IP CIDR range for this
               alias IP range. If left unspecified, the primary range of the
               subnetwork will be used.
        """
        pulumi.set(__self__, "ip_cidr_range", ip_cidr_range)
        # Optional secondary-range name is only recorded when supplied.
        if subnetwork_range_name is not None:
            pulumi.set(__self__, "subnetwork_range_name", subnetwork_range_name)

    @property
    @pulumi.getter(name="ipCidrRange")
    def ip_cidr_range(self) -> str:
        """
        The IP CIDR range represented by this alias IP range. This IP CIDR
        range must belong to the specified subnetwork and cannot contain IP
        addresses reserved by system or used by other network interfaces.
        This range may be a single IP address (e.g. 10.2.3.4), a netmask
        (e.g. /24) or a CIDR format string (e.g. 10.1.2.0/24).
        """
        return pulumi.get(self, "ip_cidr_range")

    @property
    @pulumi.getter(name="subnetworkRangeName")
    def subnetwork_range_name(self) -> Optional[str]:
        """
        The subnetwork secondary range name specifying the secondary range
        from which to allocate the IP CIDR range for this alias IP range.
        If left unspecified, the primary range of the subnetwork will be
        used.
        """
        return pulumi.get(self, "subnetwork_range_name")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case Python attribute name.
        snake_name = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_name or prop
@pulumi.output_type
class InstanceScheduling(dict):
    def __init__(__self__, *,
                 automatic_restart: Optional[bool] = None,
                 min_node_cpus: Optional[float] = None,
                 node_affinities: Optional[List['outputs.InstanceSchedulingNodeAffinity']] = None,
                 on_host_maintenance: Optional[str] = None,
                 preemptible: Optional[bool] = None):
        """
        :param bool automatic_restart: Specifies if the instance should be restarted
               if it was terminated by Compute Engine (not a user). Defaults to true.
        :param List['InstanceSchedulingNodeAffinityArgs'] node_affinities: Specifies node
               affinities or anti-affinities to determine which sole-tenant nodes your
               instances and managed instance groups will use as host systems. Read more
               on sole-tenant node creation
               [here](https://cloud.google.com/compute/docs/nodes/create-nodes).
               Structure documented below.
        :param str on_host_maintenance: Describes maintenance behavior for the instance.
               Can be MIGRATE or TERMINATE, for more info, read
               [here](https://cloud.google.com/compute/docs/instances/setting-instance-scheduling-options).
        :param bool preemptible: Specifies if the instance is preemptible. If this field
               is set to true, then `automatic_restart` must be set to false. Defaults
               to false.
        """
        # Record only the fields that were actually supplied.
        for field_name, field_value in (
                ("automatic_restart", automatic_restart),
                ("min_node_cpus", min_node_cpus),
                ("node_affinities", node_affinities),
                ("on_host_maintenance", on_host_maintenance),
                ("preemptible", preemptible)):
            if field_value is not None:
                pulumi.set(__self__, field_name, field_value)

    @property
    @pulumi.getter(name="automaticRestart")
    def automatic_restart(self) -> Optional[bool]:
        """
        Specifies if the instance should be restarted if it was terminated
        by Compute Engine (not a user). Defaults to true.
        """
        return pulumi.get(self, "automatic_restart")

    @property
    @pulumi.getter(name="minNodeCpus")
    def min_node_cpus(self) -> Optional[float]:
        # NOTE(review): undocumented in the original schema; presumably
        # maps to GCE scheduling.minNodeCpus — confirm against the API.
        return pulumi.get(self, "min_node_cpus")

    @property
    @pulumi.getter(name="nodeAffinities")
    def node_affinities(self) -> Optional[List['outputs.InstanceSchedulingNodeAffinity']]:
        """
        Specifies node affinities or anti-affinities to determine which
        sole-tenant nodes your instances and managed instance groups will
        use as host systems. Read more on sole-tenant node creation
        [here](https://cloud.google.com/compute/docs/nodes/create-nodes).
        Structure documented below.
        """
        return pulumi.get(self, "node_affinities")

    @property
    @pulumi.getter(name="onHostMaintenance")
    def on_host_maintenance(self) -> Optional[str]:
        """
        Describes maintenance behavior for the instance. Can be MIGRATE or
        TERMINATE, for more info, read
        [here](https://cloud.google.com/compute/docs/instances/setting-instance-scheduling-options).
        """
        return pulumi.get(self, "on_host_maintenance")

    @property
    @pulumi.getter
    def preemptible(self) -> Optional[bool]:
        """
        Specifies if the instance is preemptible. If this field is set to
        true, then `automatic_restart` must be set to false. Defaults to
        false.
        """
        return pulumi.get(self, "preemptible")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case Python attribute name.
        snake_name = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_name or prop
@pulumi.output_type
class InstanceSchedulingNodeAffinity(dict):
    def __init__(__self__, *,
                 key: str,
                 operator: str,
                 values: List[str]):
        """
        :param str key: The key for the node affinity label.
        :param str operator: The operator. Can be `IN` for node-affinities
               or `NOT_IN` for anti-affinities.
        :param List[str] values: The values for the node affinity label.
        """
        # All three fields are required, so they are set unconditionally.
        for field_name, field_value in (
                ("key", key), ("operator", operator), ("values", values)):
            pulumi.set(__self__, field_name, field_value)

    @property
    @pulumi.getter
    def key(self) -> str:
        """
        The key for the node affinity label.
        """
        return pulumi.get(self, "key")

    @property
    @pulumi.getter
    def operator(self) -> str:
        """
        The operator. Can be `IN` for node-affinities or `NOT_IN` for
        anti-affinities.
        """
        return pulumi.get(self, "operator")

    @property
    @pulumi.getter
    def values(self) -> List[str]:
        """
        The values for the node affinity label.
        """
        return pulumi.get(self, "values")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case Python attribute name.
        snake_name = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_name or prop
@pulumi.output_type
class InstanceScratchDisk(dict):
    def __init__(__self__, *,
                 interface: str):
        """
        :param str interface: The disk interface to use for attaching this
               disk; either SCSI or NVME.
        """
        # `interface` is required, so it is always recorded.
        pulumi.set(__self__, "interface", interface)

    @property
    @pulumi.getter
    def interface(self) -> str:
        """
        The disk interface to use for attaching this disk; either SCSI or
        NVME.
        """
        return pulumi.get(self, "interface")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case Python attribute name.
        snake_name = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_name or prop
@pulumi.output_type
class InstanceServiceAccount(dict):
    def __init__(__self__, *,
                 scopes: List[str],
                 email: Optional[str] = None):
        """
        :param List[str] scopes: A list of service scopes. Both OAuth2 URLs and gcloud
               short names are supported. To allow full access to all Cloud APIs, use the
               `cloud-platform` scope. See a complete list of scopes
               [here](https://cloud.google.com/sdk/gcloud/reference/alpha/compute/instances/set-scopes#--scopes).
               **Note**: `allow_stopping_for_update` must be set to true or your instance must have a `desired_status` of `TERMINATED` in order to update this field.
        :param str email: The service account e-mail address. If not given, the
               default Google Compute Engine service account is used.
               **Note**: `allow_stopping_for_update` must be set to true or your instance must have a `desired_status` of `TERMINATED` in order to update this field.
        """
        pulumi.set(__self__, "scopes", scopes)
        # Optional e-mail is only recorded when supplied.
        if email is not None:
            pulumi.set(__self__, "email", email)

    @property
    @pulumi.getter
    def scopes(self) -> List[str]:
        """
        A list of service scopes. Both OAuth2 URLs and gcloud short names
        are supported. To allow full access to all Cloud APIs, use the
        `cloud-platform` scope. See a complete list of scopes
        [here](https://cloud.google.com/sdk/gcloud/reference/alpha/compute/instances/set-scopes#--scopes).
        **Note**: `allow_stopping_for_update` must be set to true or your instance must have a `desired_status` of `TERMINATED` in order to update this field.
        """
        return pulumi.get(self, "scopes")

    @property
    @pulumi.getter
    def email(self) -> Optional[str]:
        """
        The service account e-mail address. If not given, the default
        Google Compute Engine service account is used.
        **Note**: `allow_stopping_for_update` must be set to true or your instance must have a `desired_status` of `TERMINATED` in order to update this field.
        """
        return pulumi.get(self, "email")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case Python attribute name.
        snake_name = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_name or prop
@pulumi.output_type
class InstanceShieldedInstanceConfig(dict):
    def __init__(__self__, *,
                 enable_integrity_monitoring: Optional[bool] = None,
                 enable_secure_boot: Optional[bool] = None,
                 enable_vtpm: Optional[bool] = None):
        """
        :param bool enable_integrity_monitoring: -- Compare the most recent boot
               measurements to the integrity policy baseline and return a pair of
               pass/fail results depending on whether they match or not. Defaults to true.
               **Note**: `allow_stopping_for_update` must be set to true or your instance must have a `desired_status` of `TERMINATED` in order to update this field.
        :param bool enable_secure_boot: -- Verify the digital signature of all boot
               components, and halt the boot process if signature verification fails.
               Defaults to false.
               **Note**: `allow_stopping_for_update` must be set to true or your instance must have a `desired_status` of `TERMINATED` in order to update this field.
        :param bool enable_vtpm: -- Use a virtualized trusted platform module, which is
               a specialized computer chip you can use to encrypt objects like keys and
               certificates. Defaults to true.
               **Note**: `allow_stopping_for_update` must be set to true or your instance must have a `desired_status` of `TERMINATED` in order to update this field.
        """
        # Record only the flags that were actually supplied.
        for field_name, field_value in (
                ("enable_integrity_monitoring", enable_integrity_monitoring),
                ("enable_secure_boot", enable_secure_boot),
                ("enable_vtpm", enable_vtpm)):
            if field_value is not None:
                pulumi.set(__self__, field_name, field_value)

    @property
    @pulumi.getter(name="enableIntegrityMonitoring")
    def enable_integrity_monitoring(self) -> Optional[bool]:
        """
        -- Compare the most recent boot measurements to the integrity
        policy baseline and return a pair of pass/fail results depending
        on whether they match or not. Defaults to true.
        **Note**: `allow_stopping_for_update` must be set to true or your instance must have a `desired_status` of `TERMINATED` in order to update this field.
        """
        return pulumi.get(self, "enable_integrity_monitoring")

    @property
    @pulumi.getter(name="enableSecureBoot")
    def enable_secure_boot(self) -> Optional[bool]:
        """
        -- Verify the digital signature of all boot components, and halt
        the boot process if signature verification fails. Defaults to
        false.
        **Note**: `allow_stopping_for_update` must be set to true or your instance must have a `desired_status` of `TERMINATED` in order to update this field.
        """
        return pulumi.get(self, "enable_secure_boot")

    @property
    @pulumi.getter(name="enableVtpm")
    def enable_vtpm(self) -> Optional[bool]:
        """
        -- Use a virtualized trusted platform module, which is a
        specialized computer chip you can use to encrypt objects like keys
        and certificates. Defaults to true.
        **Note**: `allow_stopping_for_update` must be set to true or your instance must have a `desired_status` of `TERMINATED` in order to update this field.
        """
        return pulumi.get(self, "enable_vtpm")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case Python attribute name.
        snake_name = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_name or prop
@pulumi.output_type
class InstanceTemplateConfidentialInstanceConfig(dict):
    def __init__(__self__, *,
                 enable_confidential_compute: bool):
        # Required flag; always recorded.
        pulumi.set(__self__, "enable_confidential_compute", enable_confidential_compute)

    @property
    @pulumi.getter(name="enableConfidentialCompute")
    def enable_confidential_compute(self) -> bool:
        # NOTE(review): undocumented in the original schema; presumably
        # toggles GCE Confidential VM for the template — confirm.
        return pulumi.get(self, "enable_confidential_compute")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case Python attribute name.
        snake_name = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_name or prop
@pulumi.output_type
class InstanceTemplateDisk(dict):
    def __init__(__self__, *,
                 auto_delete: Optional[bool] = None,
                 boot: Optional[bool] = None,
                 device_name: Optional[str] = None,
                 disk_encryption_key: Optional['outputs.InstanceTemplateDiskDiskEncryptionKey'] = None,
                 disk_name: Optional[str] = None,
                 disk_size_gb: Optional[float] = None,
                 disk_type: Optional[str] = None,
                 interface: Optional[str] = None,
                 labels: Optional[Mapping[str, str]] = None,
                 mode: Optional[str] = None,
                 source: Optional[str] = None,
                 source_image: Optional[str] = None,
                 type: Optional[str] = None):
        """
        :param bool auto_delete: Whether or not the disk should be auto-deleted.
               This defaults to true.
        :param bool boot: Indicates that this is a boot disk.
        :param str device_name: A unique device name that is reflected into the
               /dev/ tree of a Linux operating system running within the instance. If not
               specified, the server chooses a default device name to apply to this disk.
        :param 'InstanceTemplateDiskDiskEncryptionKeyArgs' disk_encryption_key: Encrypts
               or decrypts a disk using a customer-supplied encryption key.
        :param str disk_name: Name of the disk. When not provided, this defaults
               to the name of the instance.
        :param float disk_size_gb: The size of the image in gigabytes. If not
               specified, it will inherit the size of its base image. For SCRATCH disks,
               the size must be exactly 375GB.
        :param str disk_type: The GCE disk type. Can be either `"pd-ssd"`,
               `"local-ssd"`, `"pd-balanced"` or `"pd-standard"`.
        :param str interface: Specifies the disk interface to use for attaching this disk,
               which is either SCSI or NVME. The default is SCSI. Persistent disks must
               always use SCSI and the request will fail if you attempt to attach a
               persistent disk in any other format than SCSI. Local SSDs can use either
               NVME or SCSI.
        :param Mapping[str, str] labels: A set of key/value label pairs to assign to
               instances created from this template.
        :param str mode: The mode in which to attach this disk, either READ_WRITE
               or READ_ONLY. If you are attaching or creating a boot disk, this must
               be read-write mode.
        :param str source: The name (**not self_link**)
               of the disk (such as those managed by `compute.Disk`) to attach.
               > **Note:** Either `source` or `source_image` is **required** in a disk block unless the disk type is `local-ssd`. Check the API [docs](https://cloud.google.com/compute/docs/reference/rest/v1/instanceTemplates/insert) for details.
        :param str source_image: The image from which to
               initialize this disk. This can be one of: the image's `self_link`,
               `projects/{project}/global/images/{image}`,
               `projects/{project}/global/images/family/{family}`, `global/images/{image}`,
               `global/images/family/{family}`, `family/{family}`, `{project}/{family}`,
               `{project}/{image}`, `{family}`, or `{image}`.
               > **Note:** Either `source` or `source_image` is **required** in a disk block unless the disk type is `local-ssd`. Check the API [docs](https://cloud.google.com/compute/docs/reference/rest/v1/instanceTemplates/insert) for details.
        :param str type: The type of GCE disk, can be either `"SCRATCH"` or
               `"PERSISTENT"`. (The upstream docstring here previously described guest
               accelerators by mistake.)
        """
        # Every field is optional; record only those actually supplied.
        for field_name, field_value in (
                ("auto_delete", auto_delete),
                ("boot", boot),
                ("device_name", device_name),
                ("disk_encryption_key", disk_encryption_key),
                ("disk_name", disk_name),
                ("disk_size_gb", disk_size_gb),
                ("disk_type", disk_type),
                ("interface", interface),
                ("labels", labels),
                ("mode", mode),
                ("source", source),
                ("source_image", source_image),
                ("type", type)):
            if field_value is not None:
                pulumi.set(__self__, field_name, field_value)

    @property
    @pulumi.getter(name="autoDelete")
    def auto_delete(self) -> Optional[bool]:
        """
        Whether or not the disk should be auto-deleted. This defaults to
        true.
        """
        return pulumi.get(self, "auto_delete")

    @property
    @pulumi.getter
    def boot(self) -> Optional[bool]:
        """
        Indicates that this is a boot disk.
        """
        return pulumi.get(self, "boot")

    @property
    @pulumi.getter(name="deviceName")
    def device_name(self) -> Optional[str]:
        """
        A unique device name that is reflected into the /dev/ tree of a
        Linux operating system running within the instance. If not
        specified, the server chooses a default device name to apply to
        this disk.
        """
        return pulumi.get(self, "device_name")

    @property
    @pulumi.getter(name="diskEncryptionKey")
    def disk_encryption_key(self) -> Optional['outputs.InstanceTemplateDiskDiskEncryptionKey']:
        """
        Encrypts or decrypts a disk using a customer-supplied encryption
        key.
        """
        return pulumi.get(self, "disk_encryption_key")

    @property
    @pulumi.getter(name="diskName")
    def disk_name(self) -> Optional[str]:
        """
        Name of the disk. When not provided, this defaults to the name of
        the instance.
        """
        return pulumi.get(self, "disk_name")

    @property
    @pulumi.getter(name="diskSizeGb")
    def disk_size_gb(self) -> Optional[float]:
        """
        The size of the image in gigabytes. If not specified, it will
        inherit the size of its base image. For SCRATCH disks, the size
        must be exactly 375GB.
        """
        return pulumi.get(self, "disk_size_gb")

    @property
    @pulumi.getter(name="diskType")
    def disk_type(self) -> Optional[str]:
        """
        The GCE disk type. Can be either `"pd-ssd"`, `"local-ssd"`,
        `"pd-balanced"` or `"pd-standard"`.
        """
        return pulumi.get(self, "disk_type")

    @property
    @pulumi.getter
    def interface(self) -> Optional[str]:
        """
        Specifies the disk interface to use for attaching this disk, which
        is either SCSI or NVME. The default is SCSI. Persistent disks must
        always use SCSI and the request will fail if you attempt to attach
        a persistent disk in any other format than SCSI. Local SSDs can
        use either NVME or SCSI.
        """
        return pulumi.get(self, "interface")

    @property
    @pulumi.getter
    def labels(self) -> Optional[Mapping[str, str]]:
        """
        A set of key/value label pairs to assign to instances created from
        this template.
        """
        return pulumi.get(self, "labels")

    @property
    @pulumi.getter
    def mode(self) -> Optional[str]:
        """
        The mode in which to attach this disk, either READ_WRITE or
        READ_ONLY. If you are attaching or creating a boot disk, this must
        be read-write mode.
        """
        return pulumi.get(self, "mode")

    @property
    @pulumi.getter
    def source(self) -> Optional[str]:
        """
        The name (**not self_link**) of the disk (such as those managed by
        `compute.Disk`) to attach.
        > **Note:** Either `source` or `source_image` is **required** in a disk block unless the disk type is `local-ssd`. Check the API [docs](https://cloud.google.com/compute/docs/reference/rest/v1/instanceTemplates/insert) for details.
        """
        return pulumi.get(self, "source")

    @property
    @pulumi.getter(name="sourceImage")
    def source_image(self) -> Optional[str]:
        """
        The image from which to initialize this disk. This can be one of:
        the image's `self_link`, `projects/{project}/global/images/{image}`,
        `projects/{project}/global/images/family/{family}`, `global/images/{image}`,
        `global/images/family/{family}`, `family/{family}`, `{project}/{family}`,
        `{project}/{image}`, `{family}`, or `{image}`.
        > **Note:** Either `source` or `source_image` is **required** in a disk block unless the disk type is `local-ssd`. Check the API [docs](https://cloud.google.com/compute/docs/reference/rest/v1/instanceTemplates/insert) for details.
        """
        return pulumi.get(self, "source_image")

    @property
    @pulumi.getter
    def type(self) -> Optional[str]:
        """
        The type of GCE disk, can be either `"SCRATCH"` or `"PERSISTENT"`.
        """
        return pulumi.get(self, "type")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case Python attribute name.
        snake_name = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_name or prop
@pulumi.output_type
class InstanceTemplateDiskDiskEncryptionKey(dict):
    def __init__(__self__, *,
                 kms_key_self_link: str):
        """
        :param str kms_key_self_link: The self link of the encryption key
               that is stored in Google Cloud KMS.
        """
        # Required field; always recorded.
        pulumi.set(__self__, "kms_key_self_link", kms_key_self_link)

    @property
    @pulumi.getter(name="kmsKeySelfLink")
    def kms_key_self_link(self) -> str:
        """
        The self link of the encryption key that is stored in Google Cloud
        KMS.
        """
        return pulumi.get(self, "kms_key_self_link")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case Python attribute name.
        snake_name = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_name or prop
@pulumi.output_type
class InstanceTemplateGuestAccelerator(dict):
    def __init__(__self__, *,
                 count: float,
                 type: str):
        """
        :param float count: The number of the guest accelerator cards
               exposed to this instance.
        :param str type: The accelerator type resource to expose to this
               instance. E.g. `nvidia-tesla-k80`.
        """
        # Both fields are required, so they are set unconditionally.
        for field_name, field_value in (("count", count), ("type", type)):
            pulumi.set(__self__, field_name, field_value)

    @property
    @pulumi.getter
    def count(self) -> float:
        """
        The number of the guest accelerator cards exposed to this
        instance.
        """
        return pulumi.get(self, "count")

    @property
    @pulumi.getter
    def type(self) -> str:
        """
        The accelerator type resource to expose to this instance. E.g.
        `nvidia-tesla-k80`.
        """
        return pulumi.get(self, "type")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case Python attribute name.
        snake_name = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_name or prop
@pulumi.output_type
class InstanceTemplateNetworkInterface(dict):
    def __init__(__self__, *,
                 access_configs: Optional[List['outputs.InstanceTemplateNetworkInterfaceAccessConfig']] = None,
                 alias_ip_ranges: Optional[List['outputs.InstanceTemplateNetworkInterfaceAliasIpRange']] = None,
                 name: Optional[str] = None,
                 network: Optional[str] = None,
                 network_ip: Optional[str] = None,
                 subnetwork: Optional[str] = None,
                 subnetwork_project: Optional[str] = None):
        """
        :param List['InstanceTemplateNetworkInterfaceAccessConfigArgs'] access_configs: Access
               configurations, i.e. IPs via which this instance can be accessed via the
               Internet. Omit to ensure that the instance is not accessible from the
               Internet (this means that ssh provisioners will not work unless you can
               send traffic to the instance's network (e.g. via tunnel or because it is
               running on another cloud instance on that network). This block can be
               repeated multiple times. Structure documented below.
        :param List['InstanceTemplateNetworkInterfaceAliasIpRangeArgs'] alias_ip_ranges: An
               array of alias IP ranges for this network interface. Can only be specified
               for network interfaces on subnet-mode networks. Structure documented below.
        :param str name: The name of the instance template. If you leave this blank, the
               provider will auto-generate a unique name.
        :param str network: The name or self_link of the network to attach this interface
               to. Use `network` attribute for Legacy or Auto subnetted networks and
               `subnetwork` for custom subnetted networks.
        :param str network_ip: The private IP address to assign to the instance. If empty,
               the address will be automatically assigned.
        :param str subnetwork: the name of the subnetwork to attach this interface to. The
               subnetwork must exist in the same `region` this instance will be created
               in. Either `network` or `subnetwork` must be provided.
        :param str subnetwork_project: The ID of the project in which the subnetwork
               belongs. If it is not provided, the provider project is used.
        """
        # Record only the fields that were actually supplied.
        for field_name, field_value in (
                ("access_configs", access_configs),
                ("alias_ip_ranges", alias_ip_ranges),
                ("name", name),
                ("network", network),
                ("network_ip", network_ip),
                ("subnetwork", subnetwork),
                ("subnetwork_project", subnetwork_project)):
            if field_value is not None:
                pulumi.set(__self__, field_name, field_value)

    @property
    @pulumi.getter(name="accessConfigs")
    def access_configs(self) -> Optional[List['outputs.InstanceTemplateNetworkInterfaceAccessConfig']]:
        """
        Access configurations, i.e. IPs via which this instance can be
        accessed via the Internet. Omit to ensure that the instance is not
        accessible from the Internet (this means that ssh provisioners
        will not work unless you can send traffic to the instance's
        network (e.g. via tunnel or because it is running on another cloud
        instance on that network). This block can be repeated multiple
        times. Structure documented below.
        """
        return pulumi.get(self, "access_configs")

    @property
    @pulumi.getter(name="aliasIpRanges")
    def alias_ip_ranges(self) -> Optional[List['outputs.InstanceTemplateNetworkInterfaceAliasIpRange']]:
        """
        An array of alias IP ranges for this network interface. Can only
        be specified for network interfaces on subnet-mode networks.
        Structure documented below.
        """
        return pulumi.get(self, "alias_ip_ranges")

    @property
    @pulumi.getter
    def name(self) -> Optional[str]:
        """
        The name of the instance template. If you leave this blank, the
        provider will auto-generate a unique name.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def network(self) -> Optional[str]:
        """
        The name or self_link of the network to attach this interface to.
        Use `network` attribute for Legacy or Auto subnetted networks and
        `subnetwork` for custom subnetted networks.
        """
        return pulumi.get(self, "network")

    @property
    @pulumi.getter(name="networkIp")
    def network_ip(self) -> Optional[str]:
        """
        The private IP address to assign to the instance. If empty, the
        address will be automatically assigned.
        """
        return pulumi.get(self, "network_ip")

    @property
    @pulumi.getter
    def subnetwork(self) -> Optional[str]:
        """
        the name of the subnetwork to attach this interface to. The
        subnetwork must exist in the same `region` this instance will be
        created in. Either `network` or `subnetwork` must be provided.
        """
        return pulumi.get(self, "subnetwork")

    @property
    @pulumi.getter(name="subnetworkProject")
    def subnetwork_project(self) -> Optional[str]:
        """
        The ID of the project in which the subnetwork belongs. If it is
        not provided, the provider project is used.
        """
        return pulumi.get(self, "subnetwork_project")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case Python attribute name.
        snake_name = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_name or prop
@pulumi.output_type
class InstanceTemplateNetworkInterfaceAccessConfig(dict):
    def __init__(__self__, *,
                 nat_ip: Optional[str] = None,
                 network_tier: Optional[str] = None,
                 public_ptr_domain_name: Optional[str] = None):
        """
        :param str nat_ip: The IP address that will be 1:1 mapped to the instance's
               network ip. If not given, one will be generated.
        :param str network_tier: The [networking tier][network-tier] used for configuring
               this instance template. This field can take the following values: PREMIUM or
               STANDARD. If this field is not specified, it is assumed to be PREMIUM.
        :param str public_ptr_domain_name: The DNS domain name for the public PTR record.
               To set this field on an instance, you must be verified as the owner of the domain.
               See [the docs](https://cloud.google.com/compute/docs/instances/create-ptr-record) for how
               to become verified as a domain owner.
        """
        if nat_ip is not None:
            pulumi.set(__self__, "nat_ip", nat_ip)
        if network_tier is not None:
            pulumi.set(__self__, "network_tier", network_tier)
        if public_ptr_domain_name is not None:
            pulumi.set(__self__, "public_ptr_domain_name", public_ptr_domain_name)

    @property
    @pulumi.getter(name="natIp")
    def nat_ip(self) -> Optional[str]:
        """
        The IP address that will be 1:1 mapped to the instance's
        network ip. If not given, one will be generated.
        """
        return pulumi.get(self, "nat_ip")

    @property
    @pulumi.getter(name="networkTier")
    def network_tier(self) -> Optional[str]:
        """
        The [networking tier][network-tier] used for configuring
        this instance template. This field can take the following values: PREMIUM or
        STANDARD. If this field is not specified, it is assumed to be PREMIUM.
        """
        return pulumi.get(self, "network_tier")

    @property
    @pulumi.getter(name="publicPtrDomainName")
    def public_ptr_domain_name(self) -> Optional[str]:
        """
        The DNS domain name for the public PTR record. To set this field
        on an instance, you must be verified as the owner of the domain.
        See [the docs](https://cloud.google.com/compute/docs/instances/create-ptr-record)
        for how to become verified as a domain owner.
        """
        return pulumi.get(self, "public_ptr_domain_name")

    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class InstanceTemplateNetworkInterfaceAliasIpRange(dict):
    def __init__(__self__, *,
                 ip_cidr_range: str,
                 subnetwork_range_name: Optional[str] = None):
        """
        :param str ip_cidr_range: The IP CIDR range represented by this alias IP range.
               This IP CIDR range must belong to the specified subnetwork and cannot
               contain IP addresses reserved by system or used by other network
               interfaces. At the time of writing only a netmask (e.g. /24) may be
               supplied, with a CIDR format resulting in an API error.
        :param str subnetwork_range_name: The subnetwork secondary range name specifying
               the secondary range from which to allocate the IP CIDR range for this
               alias IP range. If left unspecified, the primary range of the subnetwork
               will be used.
        """
        pulumi.set(__self__, "ip_cidr_range", ip_cidr_range)
        # Optional secondary-range name is only recorded when supplied.
        if subnetwork_range_name is not None:
            pulumi.set(__self__, "subnetwork_range_name", subnetwork_range_name)

    @property
    @pulumi.getter(name="ipCidrRange")
    def ip_cidr_range(self) -> str:
        """
        The IP CIDR range represented by this alias IP range. This IP CIDR
        range must belong to the specified subnetwork and cannot contain
        IP addresses reserved by system or used by other network
        interfaces. At the time of writing only a netmask (e.g. /24) may
        be supplied, with a CIDR format resulting in an API error.
        """
        return pulumi.get(self, "ip_cidr_range")

    @property
    @pulumi.getter(name="subnetworkRangeName")
    def subnetwork_range_name(self) -> Optional[str]:
        """
        The subnetwork secondary range name specifying the secondary range
        from which to allocate the IP CIDR range for this alias IP range.
        If left unspecified, the primary range of the subnetwork will be
        used.
        """
        return pulumi.get(self, "subnetwork_range_name")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case Python attribute name.
        snake_name = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_name or prop
@pulumi.output_type
class InstanceTemplateScheduling(dict):
    """Output type: scheduling options (restart, maintenance, preemptibility) for an instance template."""
    def __init__(__self__, *,
                 automatic_restart: Optional[bool] = None,
                 min_node_cpus: Optional[float] = None,
                 node_affinities: Optional[List['outputs.InstanceTemplateSchedulingNodeAffinity']] = None,
                 on_host_maintenance: Optional[str] = None,
                 preemptible: Optional[bool] = None):
        """
        :param bool automatic_restart: Specifies whether the instance should be
               automatically restarted if it is terminated by Compute Engine (not
               terminated by a user). This defaults to true.
        :param float min_node_cpus: Minimum number of virtual CPUs this scheduling
               applies to — presumably for sole-tenant node over-commit; not documented
               upstream, so confirm against the Compute Engine API before relying on it.
        :param List['InstanceTemplateSchedulingNodeAffinityArgs'] node_affinities: Specifies node affinities or anti-affinities
               to determine which sole-tenant nodes your instances and managed instance
               groups will use as host systems. Read more on sole-tenant node creation
               [here](https://cloud.google.com/compute/docs/nodes/create-nodes).
               Structure documented below.
        :param str on_host_maintenance: Defines the maintenance behavior for this
               instance.
        :param bool preemptible: Allows instance to be preempted. This defaults to
               false. Read more on this
               [here](https://cloud.google.com/compute/docs/instances/preemptible).
        """
        # All fields are optional; only explicitly-provided values are stored.
        if automatic_restart is not None:
            pulumi.set(__self__, "automatic_restart", automatic_restart)
        if min_node_cpus is not None:
            pulumi.set(__self__, "min_node_cpus", min_node_cpus)
        if node_affinities is not None:
            pulumi.set(__self__, "node_affinities", node_affinities)
        if on_host_maintenance is not None:
            pulumi.set(__self__, "on_host_maintenance", on_host_maintenance)
        if preemptible is not None:
            pulumi.set(__self__, "preemptible", preemptible)

    @property
    @pulumi.getter(name="automaticRestart")
    def automatic_restart(self) -> Optional[bool]:
        """
        Specifies whether the instance should be
        automatically restarted if it is terminated by Compute Engine (not
        terminated by a user). This defaults to true.
        """
        return pulumi.get(self, "automatic_restart")

    @property
    @pulumi.getter(name="minNodeCpus")
    def min_node_cpus(self) -> Optional[float]:
        # NOTE(review): undocumented upstream; see __init__ docstring for the assumed meaning.
        return pulumi.get(self, "min_node_cpus")

    @property
    @pulumi.getter(name="nodeAffinities")
    def node_affinities(self) -> Optional[List['outputs.InstanceTemplateSchedulingNodeAffinity']]:
        """
        Specifies node affinities or anti-affinities
        to determine which sole-tenant nodes your instances and managed instance
        groups will use as host systems. Read more on sole-tenant node creation
        [here](https://cloud.google.com/compute/docs/nodes/create-nodes).
        Structure documented below.
        """
        return pulumi.get(self, "node_affinities")

    @property
    @pulumi.getter(name="onHostMaintenance")
    def on_host_maintenance(self) -> Optional[str]:
        """
        Defines the maintenance behavior for this
        instance.
        """
        return pulumi.get(self, "on_host_maintenance")

    @property
    @pulumi.getter
    def preemptible(self) -> Optional[bool]:
        """
        Allows instance to be preempted. This defaults to
        false. Read more on this
        [here](https://cloud.google.com/compute/docs/instances/preemptible).
        """
        return pulumi.get(self, "preemptible")

    def _translate_property(self, prop):
        # Map camelCase API property names to snake_case Python names; pass unknown names through.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class InstanceTemplateSchedulingNodeAffinity(dict):
    """Output type: one sole-tenant node affinity/anti-affinity rule (key, operator, values)."""
    def __init__(__self__, *,
                 key: str,
                 operator: str,
                 values: List[str]):
        """
        :param str key: The key for the node affinity label.
        :param str operator: The operator. Can be `IN` for node-affinities
               or `NOT_IN` for anti-affinities.
        :param List[str] values: Label values to match the key against — presumably
               the node affinity label values; confirm against the Compute Engine API.
        """
        pulumi.set(__self__, "key", key)
        pulumi.set(__self__, "operator", operator)
        pulumi.set(__self__, "values", values)

    @property
    @pulumi.getter
    def key(self) -> str:
        """
        The key for the node affinity label.
        """
        return pulumi.get(self, "key")

    @property
    @pulumi.getter
    def operator(self) -> str:
        """
        The operator. Can be `IN` for node-affinities
        or `NOT_IN` for anti-affinities.
        """
        return pulumi.get(self, "operator")

    @property
    @pulumi.getter
    def values(self) -> List[str]:
        # NOTE(review): undocumented upstream; see __init__ docstring for the assumed meaning.
        return pulumi.get(self, "values")

    def _translate_property(self, prop):
        # Map camelCase API property names to snake_case Python names; pass unknown names through.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class InstanceTemplateServiceAccount(dict):
    """Output type: the service account (email + OAuth scopes) attached to an instance template."""
    def __init__(__self__, *,
                 scopes: List[str],
                 email: Optional[str] = None):
        """
        :param List[str] scopes: A list of service scopes. Both OAuth2 URLs and gcloud
               short names are supported. To allow full access to all Cloud APIs, use the
               `cloud-platform` scope. See a complete list of scopes [here](https://cloud.google.com/sdk/gcloud/reference/alpha/compute/instances/set-scopes#--scopes).
        :param str email: The service account e-mail address. If not given, the
               default Google Compute Engine service account is used.
        """
        pulumi.set(__self__, "scopes", scopes)
        # Optional field: only stored when explicitly provided.
        if email is not None:
            pulumi.set(__self__, "email", email)

    @property
    @pulumi.getter
    def scopes(self) -> List[str]:
        """
        A list of service scopes. Both OAuth2 URLs and gcloud
        short names are supported. To allow full access to all Cloud APIs, use the
        `cloud-platform` scope. See a complete list of scopes [here](https://cloud.google.com/sdk/gcloud/reference/alpha/compute/instances/set-scopes#--scopes).
        """
        return pulumi.get(self, "scopes")

    @property
    @pulumi.getter
    def email(self) -> Optional[str]:
        """
        The service account e-mail address. If not given, the
        default Google Compute Engine service account is used.
        """
        return pulumi.get(self, "email")

    def _translate_property(self, prop):
        # Map camelCase API property names to snake_case Python names; pass unknown names through.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class InstanceTemplateShieldedInstanceConfig(dict):
    """Output type: Shielded VM options (integrity monitoring, secure boot, vTPM) for an instance template."""
    def __init__(__self__, *,
                 enable_integrity_monitoring: Optional[bool] = None,
                 enable_secure_boot: Optional[bool] = None,
                 enable_vtpm: Optional[bool] = None):
        """
        :param bool enable_integrity_monitoring: -- Compare the most recent boot measurements to the integrity policy baseline and return a pair of pass/fail results depending on whether they match or not. Defaults to true.
        :param bool enable_secure_boot: -- Verify the digital signature of all boot components, and halt the boot process if signature verification fails. Defaults to false.
        :param bool enable_vtpm: -- Use a virtualized trusted platform module, which is a specialized computer chip you can use to encrypt objects like keys and certificates. Defaults to true.
        """
        # All fields are optional; only explicitly-provided values are stored.
        if enable_integrity_monitoring is not None:
            pulumi.set(__self__, "enable_integrity_monitoring", enable_integrity_monitoring)
        if enable_secure_boot is not None:
            pulumi.set(__self__, "enable_secure_boot", enable_secure_boot)
        if enable_vtpm is not None:
            pulumi.set(__self__, "enable_vtpm", enable_vtpm)

    @property
    @pulumi.getter(name="enableIntegrityMonitoring")
    def enable_integrity_monitoring(self) -> Optional[bool]:
        """
        -- Compare the most recent boot measurements to the integrity policy baseline and return a pair of pass/fail results depending on whether they match or not. Defaults to true.
        """
        return pulumi.get(self, "enable_integrity_monitoring")

    @property
    @pulumi.getter(name="enableSecureBoot")
    def enable_secure_boot(self) -> Optional[bool]:
        """
        -- Verify the digital signature of all boot components, and halt the boot process if signature verification fails. Defaults to false.
        """
        return pulumi.get(self, "enable_secure_boot")

    @property
    @pulumi.getter(name="enableVtpm")
    def enable_vtpm(self) -> Optional[bool]:
        """
        -- Use a virtualized trusted platform module, which is a specialized computer chip you can use to encrypt objects like keys and certificates. Defaults to true.
        """
        return pulumi.get(self, "enable_vtpm")

    def _translate_property(self, prop):
        # Map camelCase API property names to snake_case Python names; pass unknown names through.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class InterconnectAttachmentPrivateInterconnectInfo(dict):
    """Output type: private-interconnect information for an interconnect attachment."""
    def __init__(__self__, *,
                 tag8021q: Optional[float] = None):
        """
        :param float tag8021q: presumably the IEEE 802.1Q VLAN tag allocated to this
               attachment (the name suggests it); undocumented upstream — confirm
               against the Compute Engine interconnectAttachments API.
        """
        # Optional field: only stored when explicitly provided.
        if tag8021q is not None:
            pulumi.set(__self__, "tag8021q", tag8021q)

    @property
    @pulumi.getter
    def tag8021q(self) -> Optional[float]:
        # NOTE(review): undocumented upstream; see __init__ docstring for the assumed meaning.
        return pulumi.get(self, "tag8021q")

    def _translate_property(self, prop):
        # Map camelCase API property names to snake_case Python names; pass unknown names through.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ManagedSslCertificateManaged(dict):
    """Output type: the `managed` block of a Google-managed SSL certificate (list of domains)."""
    def __init__(__self__, *,
                 domains: List[str]):
        """
        :param List[str] domains: Domains for which a managed SSL certificate will be valid. Currently,
               there can be up to 100 domains in this list.
        """
        pulumi.set(__self__, "domains", domains)

    @property
    @pulumi.getter
    def domains(self) -> List[str]:
        """
        Domains for which a managed SSL certificate will be valid. Currently,
        there can be up to 100 domains in this list.
        """
        return pulumi.get(self, "domains")

    def _translate_property(self, prop):
        # Map camelCase API property names to snake_case Python names; pass unknown names through.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class MangedSslCertificateManaged(dict):
    """Output type mirroring ManagedSslCertificateManaged.

    NOTE(review): the class name is misspelled ("Manged"); it appears to be a
    legacy duplicate of ManagedSslCertificateManaged kept — presumably for
    backward compatibility with an older resource name — so it must not be
    renamed here. Confirm against the provider's changelog before removing.
    """
    def __init__(__self__, *,
                 domains: List[str]):
        """
        :param List[str] domains: Domains for which the managed SSL certificate will be valid
               (same field as ManagedSslCertificateManaged.domains).
        """
        pulumi.set(__self__, "domains", domains)

    @property
    @pulumi.getter
    def domains(self) -> List[str]:
        # Same field as ManagedSslCertificateManaged.domains.
        return pulumi.get(self, "domains")

    def _translate_property(self, prop):
        # Map camelCase API property names to snake_case Python names; pass unknown names through.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class NodeGroupAutoscalingPolicy(dict):
    """Output type: autoscaling policy (size bounds and mode) of a sole-tenant node group."""
    def __init__(__self__, *,
                 max_nodes: Optional[float] = None,
                 min_nodes: Optional[float] = None,
                 mode: Optional[str] = None):
        """
        :param float max_nodes: Maximum size of the node group. Set to a value less than or equal
               to 100 and greater than or equal to min-nodes.
        :param float min_nodes: Minimum size of the node group. Must be less
               than or equal to max-nodes. The default value is 0.
        :param str mode: The autoscaling mode. Set to one of the following:
               - OFF: Disables the autoscaler.
               - ON: Enables scaling in and scaling out.
               - ONLY_SCALE_OUT: Enables only scaling out.
               You must use this mode if your node groups are configured to
               restart their hosted VMs on minimal servers.
               Possible values are `OFF`, `ON`, and `ONLY_SCALE_OUT`.
        """
        # All fields are optional; only explicitly-provided values are stored.
        if max_nodes is not None:
            pulumi.set(__self__, "max_nodes", max_nodes)
        if min_nodes is not None:
            pulumi.set(__self__, "min_nodes", min_nodes)
        if mode is not None:
            pulumi.set(__self__, "mode", mode)

    @property
    @pulumi.getter(name="maxNodes")
    def max_nodes(self) -> Optional[float]:
        """
        Maximum size of the node group. Set to a value less than or equal
        to 100 and greater than or equal to min-nodes.
        """
        return pulumi.get(self, "max_nodes")

    @property
    @pulumi.getter(name="minNodes")
    def min_nodes(self) -> Optional[float]:
        """
        Minimum size of the node group. Must be less
        than or equal to max-nodes. The default value is 0.
        """
        return pulumi.get(self, "min_nodes")

    @property
    @pulumi.getter
    def mode(self) -> Optional[str]:
        """
        The autoscaling mode. Set to one of the following:
        - OFF: Disables the autoscaler.
        - ON: Enables scaling in and scaling out.
        - ONLY_SCALE_OUT: Enables only scaling out.
        You must use this mode if your node groups are configured to
        restart their hosted VMs on minimal servers.
        Possible values are `OFF`, `ON`, and `ONLY_SCALE_OUT`.
        """
        return pulumi.get(self, "mode")

    def _translate_property(self, prop):
        # Map camelCase API property names to snake_case Python names; pass unknown names through.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class NodeTemplateNodeTypeFlexibility(dict):
    """Output type: flexible node-type requirements (CPUs, local SSD, memory) for a node template."""
    def __init__(__self__, *,
                 cpus: Optional[str] = None,
                 local_ssd: Optional[str] = None,
                 memory: Optional[str] = None):
        """
        :param str cpus: Number of virtual CPUs to use.
        :param str local_ssd: -
               Use local SSD
        :param str memory: Physical memory available to the node, defined in MB.
        """
        # All fields are optional; only explicitly-provided values are stored.
        if cpus is not None:
            pulumi.set(__self__, "cpus", cpus)
        if local_ssd is not None:
            pulumi.set(__self__, "local_ssd", local_ssd)
        if memory is not None:
            pulumi.set(__self__, "memory", memory)

    @property
    @pulumi.getter
    def cpus(self) -> Optional[str]:
        """
        Number of virtual CPUs to use.
        """
        return pulumi.get(self, "cpus")

    @property
    @pulumi.getter(name="localSsd")
    def local_ssd(self) -> Optional[str]:
        """
        -
        Use local SSD
        """
        return pulumi.get(self, "local_ssd")

    @property
    @pulumi.getter
    def memory(self) -> Optional[str]:
        """
        Physical memory available to the node, defined in MB.
        """
        return pulumi.get(self, "memory")

    def _translate_property(self, prop):
        # Map camelCase API property names to snake_case Python names; pass unknown names through.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class NodeTemplateServerBinding(dict):
    """Output type: server binding policy of a node template."""
    def __init__(__self__, *,
                 type: str):
        """
        :param str type: Type of server binding policy. If `RESTART_NODE_ON_ANY_SERVER`,
               nodes using this template will restart on any physical server
               following a maintenance event.
               If `RESTART_NODE_ON_MINIMAL_SERVER`, nodes using this template
               will restart on the same physical server following a maintenance
               event, instead of being live migrated to or restarted on a new
               physical server. This option may be useful if you are using
               software licenses tied to the underlying server characteristics
               such as physical sockets or cores, to avoid the need for
               additional licenses when maintenance occurs. However, VMs on such
               nodes will experience outages while maintenance is applied.
               Possible values are `RESTART_NODE_ON_ANY_SERVER` and `RESTART_NODE_ON_MINIMAL_SERVERS`.
        """
        pulumi.set(__self__, "type", type)

    @property
    @pulumi.getter
    def type(self) -> str:
        """
        Type of server binding policy. If `RESTART_NODE_ON_ANY_SERVER`,
        nodes using this template will restart on any physical server
        following a maintenance event.
        If `RESTART_NODE_ON_MINIMAL_SERVER`, nodes using this template
        will restart on the same physical server following a maintenance
        event, instead of being live migrated to or restarted on a new
        physical server. This option may be useful if you are using
        software licenses tied to the underlying server characteristics
        such as physical sockets or cores, to avoid the need for
        additional licenses when maintenance occurs. However, VMs on such
        nodes will experience outages while maintenance is applied.
        Possible values are `RESTART_NODE_ON_ANY_SERVER` and `RESTART_NODE_ON_MINIMAL_SERVERS`.
        """
        return pulumi.get(self, "type")

    def _translate_property(self, prop):
        # Map camelCase API property names to snake_case Python names; pass unknown names through.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class OrganizationSecurityPolicyRuleMatch(dict):
    """Output type: match criteria of an organization security policy rule."""
    def __init__(__self__, *,
                 config: 'outputs.OrganizationSecurityPolicyRuleMatchConfig',
                 description: Optional[str] = None,
                 versioned_expr: Optional[str] = None):
        """
        :param 'OrganizationSecurityPolicyRuleMatchConfigArgs' config: The configuration options for matching the rule.
               Structure is documented below.
        :param str description: A description of the rule.
        :param str versioned_expr: Preconfigured versioned expression. For organization security policy rules,
               the only supported type is "FIREWALL".
               Default value is `FIREWALL`.
               Possible values are `FIREWALL`.
        """
        pulumi.set(__self__, "config", config)
        # Optional fields: only stored when explicitly provided.
        if description is not None:
            pulumi.set(__self__, "description", description)
        if versioned_expr is not None:
            pulumi.set(__self__, "versioned_expr", versioned_expr)

    @property
    @pulumi.getter
    def config(self) -> 'outputs.OrganizationSecurityPolicyRuleMatchConfig':
        """
        The configuration options for matching the rule.
        Structure is documented below.
        """
        return pulumi.get(self, "config")

    @property
    @pulumi.getter
    def description(self) -> Optional[str]:
        """
        A description of the rule.
        """
        return pulumi.get(self, "description")

    @property
    @pulumi.getter(name="versionedExpr")
    def versioned_expr(self) -> Optional[str]:
        """
        Preconfigured versioned expression. For organization security policy rules,
        the only supported type is "FIREWALL".
        Default value is `FIREWALL`.
        Possible values are `FIREWALL`.
        """
        return pulumi.get(self, "versioned_expr")

    def _translate_property(self, prop):
        # Map camelCase API property names to snake_case Python names; pass unknown names through.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class OrganizationSecurityPolicyRuleMatchConfig(dict):
    """Output type: protocol/port and IP-range match configuration of an organization security policy rule."""
    def __init__(__self__, *,
                 layer4_configs: List['outputs.OrganizationSecurityPolicyRuleMatchConfigLayer4Config'],
                 dest_ip_ranges: Optional[List[str]] = None,
                 src_ip_ranges: Optional[List[str]] = None):
        """
        :param List['OrganizationSecurityPolicyRuleMatchConfigLayer4ConfigArgs'] layer4_configs: Pairs of IP protocols and ports that the rule should match.
               Structure is documented below.
        :param List[str] dest_ip_ranges: Destination IP address range in CIDR format. Required for
               EGRESS rules.
        :param List[str] src_ip_ranges: Source IP address range in CIDR format. Required for
               INGRESS rules.
        """
        pulumi.set(__self__, "layer4_configs", layer4_configs)
        # Optional fields: only stored when explicitly provided.
        if dest_ip_ranges is not None:
            pulumi.set(__self__, "dest_ip_ranges", dest_ip_ranges)
        if src_ip_ranges is not None:
            pulumi.set(__self__, "src_ip_ranges", src_ip_ranges)

    @property
    @pulumi.getter(name="layer4Configs")
    def layer4_configs(self) -> List['outputs.OrganizationSecurityPolicyRuleMatchConfigLayer4Config']:
        """
        Pairs of IP protocols and ports that the rule should match.
        Structure is documented below.
        """
        return pulumi.get(self, "layer4_configs")

    @property
    @pulumi.getter(name="destIpRanges")
    def dest_ip_ranges(self) -> Optional[List[str]]:
        """
        Destination IP address range in CIDR format. Required for
        EGRESS rules.
        """
        return pulumi.get(self, "dest_ip_ranges")

    @property
    @pulumi.getter(name="srcIpRanges")
    def src_ip_ranges(self) -> Optional[List[str]]:
        """
        Source IP address range in CIDR format. Required for
        INGRESS rules.
        """
        return pulumi.get(self, "src_ip_ranges")

    def _translate_property(self, prop):
        # Map camelCase API property names to snake_case Python names; pass unknown names through.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class OrganizationSecurityPolicyRuleMatchConfigLayer4Config(dict):
    """Output type: one IP-protocol/ports pair matched by an organization security policy rule."""
    def __init__(__self__, *,
                 ip_protocol: str,
                 ports: Optional[List[str]] = None):
        """
        :param str ip_protocol: The IP protocol to which this rule applies. The protocol
               type is required when creating a firewall rule.
               This value can either be one of the following well
               known protocol strings (tcp, udp, icmp, esp, ah, ipip, sctp),
               or the IP protocol number.
        :param List[str] ports: An optional list of ports to which this rule applies. This field
               is only applicable for UDP or TCP protocol. Each entry must be
               either an integer or a range. If not specified, this rule
               applies to connections through any port.
               Example inputs include: ["22"], ["80","443"], and
               ["12345-12349"].
        """
        pulumi.set(__self__, "ip_protocol", ip_protocol)
        # Optional field: only stored when explicitly provided.
        if ports is not None:
            pulumi.set(__self__, "ports", ports)

    @property
    @pulumi.getter(name="ipProtocol")
    def ip_protocol(self) -> str:
        """
        The IP protocol to which this rule applies. The protocol
        type is required when creating a firewall rule.
        This value can either be one of the following well
        known protocol strings (tcp, udp, icmp, esp, ah, ipip, sctp),
        or the IP protocol number.
        """
        return pulumi.get(self, "ip_protocol")

    @property
    @pulumi.getter
    def ports(self) -> Optional[List[str]]:
        """
        An optional list of ports to which this rule applies. This field
        is only applicable for UDP or TCP protocol. Each entry must be
        either an integer or a range. If not specified, this rule
        applies to connections through any port.
        Example inputs include: ["22"], ["80","443"], and
        ["12345-12349"].
        """
        return pulumi.get(self, "ports")

    def _translate_property(self, prop):
        # Map camelCase API property names to snake_case Python names; pass unknown names through.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class PacketMirroringCollectorIlb(dict):
    """Output type: the collector internal load balancer (by URL) of a packet mirroring policy."""
    def __init__(__self__, *,
                 url: str):
        """
        :param str url: The URL of the instances where this rule should be active.
        """
        pulumi.set(__self__, "url", url)

    @property
    @pulumi.getter
    def url(self) -> str:
        """
        The URL of the instances where this rule should be active.
        """
        return pulumi.get(self, "url")

    def _translate_property(self, prop):
        # Map camelCase API property names to snake_case Python names; pass unknown names through.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class PacketMirroringFilter(dict):
    """Output type: traffic filter (CIDR ranges and protocols) of a packet mirroring policy."""
    def __init__(__self__, *,
                 cidr_ranges: Optional[List[str]] = None,
                 ip_protocols: Optional[List[str]] = None):
        """
        :param List[str] cidr_ranges: IP CIDR ranges that apply as a filter on the source (ingress) or
               destination (egress) IP in the IP header. Only IPv4 is supported.
        :param List[str] ip_protocols: Protocols that apply as a filter on mirrored traffic.
               Each value may be one of `tcp`, `udp`, and `icmp`.
        """
        # All fields are optional; only explicitly-provided values are stored.
        if cidr_ranges is not None:
            pulumi.set(__self__, "cidr_ranges", cidr_ranges)
        if ip_protocols is not None:
            pulumi.set(__self__, "ip_protocols", ip_protocols)

    @property
    @pulumi.getter(name="cidrRanges")
    def cidr_ranges(self) -> Optional[List[str]]:
        """
        IP CIDR ranges that apply as a filter on the source (ingress) or
        destination (egress) IP in the IP header. Only IPv4 is supported.
        """
        return pulumi.get(self, "cidr_ranges")

    @property
    @pulumi.getter(name="ipProtocols")
    def ip_protocols(self) -> Optional[List[str]]:
        """
        Protocols that apply as a filter on mirrored traffic.
        Each value may be one of `tcp`, `udp`, and `icmp`.
        """
        return pulumi.get(self, "ip_protocols")

    def _translate_property(self, prop):
        # Map camelCase API property names to snake_case Python names; pass unknown names through.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class PacketMirroringMirroredResources(dict):
    """Output type: the set of resources (instances, subnetworks, tags) mirrored by a packet mirroring policy."""
    def __init__(__self__, *,
                 instances: Optional[List['outputs.PacketMirroringMirroredResourcesInstance']] = None,
                 subnetworks: Optional[List['outputs.PacketMirroringMirroredResourcesSubnetwork']] = None,
                 tags: Optional[List[str]] = None):
        """
        :param List['PacketMirroringMirroredResourcesInstanceArgs'] instances: All the listed instances will be mirrored. Specify at most 50.
               Structure is documented below.
        :param List['PacketMirroringMirroredResourcesSubnetworkArgs'] subnetworks: All instances in one of these subnetworks will be mirrored.
               Structure is documented below.
        :param List[str] tags: All instances with these tags will be mirrored.
        """
        # All fields are optional; only explicitly-provided values are stored.
        if instances is not None:
            pulumi.set(__self__, "instances", instances)
        if subnetworks is not None:
            pulumi.set(__self__, "subnetworks", subnetworks)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)

    @property
    @pulumi.getter
    def instances(self) -> Optional[List['outputs.PacketMirroringMirroredResourcesInstance']]:
        """
        All the listed instances will be mirrored. Specify at most 50.
        Structure is documented below.
        """
        return pulumi.get(self, "instances")

    @property
    @pulumi.getter
    def subnetworks(self) -> Optional[List['outputs.PacketMirroringMirroredResourcesSubnetwork']]:
        """
        All instances in one of these subnetworks will be mirrored.
        Structure is documented below.
        """
        return pulumi.get(self, "subnetworks")

    @property
    @pulumi.getter
    def tags(self) -> Optional[List[str]]:
        """
        All instances with these tags will be mirrored.
        """
        return pulumi.get(self, "tags")

    def _translate_property(self, prop):
        # Map camelCase API property names to snake_case Python names; pass unknown names through.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class PacketMirroringMirroredResourcesInstance(dict):
    """Output type: a single mirrored instance, referenced by URL."""
    def __init__(__self__, *,
                 url: str):
        """
        :param str url: The URL of the instances where this rule should be active.
        """
        pulumi.set(__self__, "url", url)

    @property
    @pulumi.getter
    def url(self) -> str:
        """
        The URL of the instances where this rule should be active.
        """
        return pulumi.get(self, "url")

    def _translate_property(self, prop):
        # Map camelCase API property names to snake_case Python names; pass unknown names through.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class PacketMirroringMirroredResourcesSubnetwork(dict):
    """Output type: a single mirrored subnetwork, referenced by URL."""
    def __init__(__self__, *,
                 url: str):
        """
        :param str url: The URL of the instances where this rule should be active.
        """
        pulumi.set(__self__, "url", url)

    @property
    @pulumi.getter
    def url(self) -> str:
        """
        The URL of the instances where this rule should be active.
        """
        return pulumi.get(self, "url")

    def _translate_property(self, prop):
        # Map camelCase API property names to snake_case Python names; pass unknown names through.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class PacketMirroringNetwork(dict):
    """Output type: the network a packet mirroring policy applies to, referenced by URL."""
    def __init__(__self__, *,
                 url: str):
        """
        :param str url: The URL of the instances where this rule should be active.
        """
        pulumi.set(__self__, "url", url)

    @property
    @pulumi.getter
    def url(self) -> str:
        """
        The URL of the instances where this rule should be active.
        """
        return pulumi.get(self, "url")

    def _translate_property(self, prop):
        # Map camelCase API property names to snake_case Python names; pass unknown names through.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class PerInstanceConfigPreservedState(dict):
    """Output type: preserved state (stateful disks and metadata) of a per-instance config."""
    def __init__(__self__, *,
                 disks: Optional[List['outputs.PerInstanceConfigPreservedStateDisk']] = None,
                 metadata: Optional[Mapping[str, str]] = None):
        """
        :param List['PerInstanceConfigPreservedStateDiskArgs'] disks: Stateful disks for the instance.
               Structure is documented below.
        :param Mapping[str, str] metadata: Preserved metadata defined for this instance. This is a list of key->value pairs.
        """
        # All fields are optional; only explicitly-provided values are stored.
        if disks is not None:
            pulumi.set(__self__, "disks", disks)
        if metadata is not None:
            pulumi.set(__self__, "metadata", metadata)

    @property
    @pulumi.getter
    def disks(self) -> Optional[List['outputs.PerInstanceConfigPreservedStateDisk']]:
        """
        Stateful disks for the instance.
        Structure is documented below.
        """
        return pulumi.get(self, "disks")

    @property
    @pulumi.getter
    def metadata(self) -> Optional[Mapping[str, str]]:
        """
        Preserved metadata defined for this instance. This is a list of key->value pairs.
        """
        return pulumi.get(self, "metadata")

    def _translate_property(self, prop):
        # Map camelCase API property names to snake_case Python names; pass unknown names through.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class PerInstanceConfigPreservedStateDisk(dict):
    """Output type: one stateful disk entry in a per-instance config's preserved state."""
    def __init__(__self__, *,
                 device_name: str,
                 source: str,
                 delete_rule: Optional[str] = None,
                 mode: Optional[str] = None):
        """
        :param str device_name: A unique device name that is reflected into the /dev/ tree of a Linux operating system running within the instance.
        :param str source: The URI of an existing persistent disk to attach under the specified device-name in the format
               `projects/project-id/zones/zone/disks/disk-name`.
        :param str delete_rule: A value that prescribes what should happen to the stateful disk when the VM instance is deleted.
               The available options are `NEVER` and `ON_PERMANENT_INSTANCE_DELETION`.
               `NEVER` detach the disk when the VM is deleted, but not delete the disk.
               `ON_PERMANENT_INSTANCE_DELETION` will delete the stateful disk when the VM is permanently
               deleted from the instance group.
               Default value is `NEVER`.
               Possible values are `NEVER` and `ON_PERMANENT_INSTANCE_DELETION`.
        :param str mode: The mode of the disk.
               Default value is `READ_WRITE`.
               Possible values are `READ_ONLY` and `READ_WRITE`.
        """
        pulumi.set(__self__, "device_name", device_name)
        pulumi.set(__self__, "source", source)
        # Optional fields: only stored when explicitly provided.
        if delete_rule is not None:
            pulumi.set(__self__, "delete_rule", delete_rule)
        if mode is not None:
            pulumi.set(__self__, "mode", mode)

    @property
    @pulumi.getter(name="deviceName")
    def device_name(self) -> str:
        """
        A unique device name that is reflected into the /dev/ tree of a Linux operating system running within the instance.
        """
        return pulumi.get(self, "device_name")

    @property
    @pulumi.getter
    def source(self) -> str:
        """
        The URI of an existing persistent disk to attach under the specified device-name in the format
        `projects/project-id/zones/zone/disks/disk-name`.
        """
        return pulumi.get(self, "source")

    @property
    @pulumi.getter(name="deleteRule")
    def delete_rule(self) -> Optional[str]:
        """
        A value that prescribes what should happen to the stateful disk when the VM instance is deleted.
        The available options are `NEVER` and `ON_PERMANENT_INSTANCE_DELETION`.
        `NEVER` detach the disk when the VM is deleted, but not delete the disk.
        `ON_PERMANENT_INSTANCE_DELETION` will delete the stateful disk when the VM is permanently
        deleted from the instance group.
        Default value is `NEVER`.
        Possible values are `NEVER` and `ON_PERMANENT_INSTANCE_DELETION`.
        """
        return pulumi.get(self, "delete_rule")

    @property
    @pulumi.getter
    def mode(self) -> Optional[str]:
        """
        The mode of the disk.
        Default value is `READ_WRITE`.
        Possible values are `READ_ONLY` and `READ_WRITE`.
        """
        return pulumi.get(self, "mode")

    def _translate_property(self, prop):
        # Map camelCase API property names to snake_case Python names; pass unknown names through.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class RegionAutoscalerAutoscalingPolicy(dict):
    """Output type: the autoscaling policy of a regional autoscaler (replica bounds, signals, mode)."""
    def __init__(__self__, *,
                 max_replicas: float,
                 min_replicas: float,
                 cooldown_period: Optional[float] = None,
                 cpu_utilization: Optional['outputs.RegionAutoscalerAutoscalingPolicyCpuUtilization'] = None,
                 load_balancing_utilization: Optional['outputs.RegionAutoscalerAutoscalingPolicyLoadBalancingUtilization'] = None,
                 metrics: Optional[List['outputs.RegionAutoscalerAutoscalingPolicyMetric']] = None,
                 mode: Optional[str] = None,
                 scale_down_control: Optional['outputs.RegionAutoscalerAutoscalingPolicyScaleDownControl'] = None):
        """
        :param float max_replicas: The maximum number of instances that the autoscaler can scale up
               to. This is required when creating or updating an autoscaler. The
               maximum number of replicas should not be lower than minimal number
               of replicas.
        :param float min_replicas: The minimum number of replicas that the autoscaler can scale down
               to. This cannot be less than 0. If not provided, autoscaler will
               choose a default value depending on maximum number of instances
               allowed.
        :param float cooldown_period: The number of seconds that the autoscaler should wait before it
               starts collecting information from a new instance. This prevents
               the autoscaler from collecting information when the instance is
               initializing, during which the collected usage would not be
               reliable. The default time autoscaler waits is 60 seconds.
               Virtual machine initialization times might vary because of
               numerous factors. We recommend that you test how long an
               instance may take to initialize. To do this, create an instance
               and time the startup process.
        :param 'RegionAutoscalerAutoscalingPolicyCpuUtilizationArgs' cpu_utilization: Defines the CPU utilization policy that allows the autoscaler to
               scale based on the average CPU utilization of a managed instance
               group.
               Structure is documented below.
        :param 'RegionAutoscalerAutoscalingPolicyLoadBalancingUtilizationArgs' load_balancing_utilization: Configuration parameters of autoscaling based on a load balancer.
               Structure is documented below.
        :param List['RegionAutoscalerAutoscalingPolicyMetricArgs'] metrics: Configuration parameters of autoscaling based on a custom metric.
               Structure is documented below.
        :param str mode: Defines operating mode for this policy.
               Default value is `ON`.
               Possible values are `OFF`, `ONLY_UP`, and `ON`.
        :param 'RegionAutoscalerAutoscalingPolicyScaleDownControlArgs' scale_down_control: Defines scale down controls to reduce the risk of response latency
               and outages due to abrupt scale-in events
               Structure is documented below.
        """
        # Required fields.
        pulumi.set(__self__, "max_replicas", max_replicas)
        pulumi.set(__self__, "min_replicas", min_replicas)
        # Optional fields: only stored when explicitly provided.
        if cooldown_period is not None:
            pulumi.set(__self__, "cooldown_period", cooldown_period)
        if cpu_utilization is not None:
            pulumi.set(__self__, "cpu_utilization", cpu_utilization)
        if load_balancing_utilization is not None:
            pulumi.set(__self__, "load_balancing_utilization", load_balancing_utilization)
        if metrics is not None:
            pulumi.set(__self__, "metrics", metrics)
        if mode is not None:
            pulumi.set(__self__, "mode", mode)
        if scale_down_control is not None:
            pulumi.set(__self__, "scale_down_control", scale_down_control)

    @property
    @pulumi.getter(name="maxReplicas")
    def max_replicas(self) -> float:
        """
        The maximum number of instances that the autoscaler can scale up
        to. This is required when creating or updating an autoscaler. The
        maximum number of replicas should not be lower than minimal number
        of replicas.
        """
        return pulumi.get(self, "max_replicas")

    @property
    @pulumi.getter(name="minReplicas")
    def min_replicas(self) -> float:
        """
        The minimum number of replicas that the autoscaler can scale down
        to. This cannot be less than 0. If not provided, autoscaler will
        choose a default value depending on maximum number of instances
        allowed.
        """
        return pulumi.get(self, "min_replicas")

    @property
    @pulumi.getter(name="cooldownPeriod")
    def cooldown_period(self) -> Optional[float]:
        """
        The number of seconds that the autoscaler should wait before it
        starts collecting information from a new instance. This prevents
        the autoscaler from collecting information when the instance is
        initializing, during which the collected usage would not be
        reliable. The default time autoscaler waits is 60 seconds.
        Virtual machine initialization times might vary because of
        numerous factors. We recommend that you test how long an
        instance may take to initialize. To do this, create an instance
        and time the startup process.
        """
        return pulumi.get(self, "cooldown_period")

    @property
    @pulumi.getter(name="cpuUtilization")
    def cpu_utilization(self) -> Optional['outputs.RegionAutoscalerAutoscalingPolicyCpuUtilization']:
        """
        Defines the CPU utilization policy that allows the autoscaler to
        scale based on the average CPU utilization of a managed instance
        group.
        Structure is documented below.
        """
        return pulumi.get(self, "cpu_utilization")

    @property
    @pulumi.getter(name="loadBalancingUtilization")
    def load_balancing_utilization(self) -> Optional['outputs.RegionAutoscalerAutoscalingPolicyLoadBalancingUtilization']:
        """
        Configuration parameters of autoscaling based on a load balancer.
        Structure is documented below.
        """
        return pulumi.get(self, "load_balancing_utilization")

    @property
    @pulumi.getter
    def metrics(self) -> Optional[List['outputs.RegionAutoscalerAutoscalingPolicyMetric']]:
        """
        Configuration parameters of autoscaling based on a custom metric.
        Structure is documented below.
        """
        return pulumi.get(self, "metrics")

    @property
    @pulumi.getter
    def mode(self) -> Optional[str]:
        """
        Defines operating mode for this policy.
        Default value is `ON`.
        Possible values are `OFF`, `ONLY_UP`, and `ON`.
        """
        return pulumi.get(self, "mode")

    @property
    @pulumi.getter(name="scaleDownControl")
    def scale_down_control(self) -> Optional['outputs.RegionAutoscalerAutoscalingPolicyScaleDownControl']:
        """
        Defines scale down controls to reduce the risk of response latency
        and outages due to abrupt scale-in events
        Structure is documented below.
        """
        return pulumi.get(self, "scale_down_control")

    def _translate_property(self, prop):
        # Map camelCase API property names to snake_case Python names; pass unknown names through.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class RegionAutoscalerAutoscalingPolicyCpuUtilization(dict):
def __init__(__self__, *,
target: float):
"""
:param float target: Fraction of backend capacity utilization (set in HTTP(s) load
balancing configuration) that autoscaler should maintain. Must
be a positive float value. If not defined, the default is 0.8.
"""
pulumi.set(__self__, "target", target)
@property
@pulumi.getter
def target(self) -> float:
"""
Fraction of backend capacity utilization (set in HTTP(s) load
balancing configuration) that autoscaler should maintain. Must
be a positive float value. If not defined, the default is 0.8.
"""
return pulumi.get(self, "target")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class RegionAutoscalerAutoscalingPolicyLoadBalancingUtilization(dict):
def __init__(__self__, *,
target: float):
"""
:param float target: Fraction of backend capacity utilization (set in HTTP(s) load
balancing configuration) that autoscaler should maintain. Must
be a positive float value. If not defined, the default is 0.8.
"""
pulumi.set(__self__, "target", target)
@property
@pulumi.getter
def target(self) -> float:
"""
Fraction of backend capacity utilization (set in HTTP(s) load
balancing configuration) that autoscaler should maintain. Must
be a positive float value. If not defined, the default is 0.8.
"""
return pulumi.get(self, "target")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class RegionAutoscalerAutoscalingPolicyMetric(dict):
def __init__(__self__, *,
name: str,
filter: Optional[str] = None,
single_instance_assignment: Optional[float] = None,
target: Optional[float] = None,
type: Optional[str] = None):
"""
:param str name: The identifier (type) of the Stackdriver Monitoring metric.
The metric cannot have negative values.
The metric must have a value type of INT64 or DOUBLE.
:param str filter: A filter string to be used as the filter string for
a Stackdriver Monitoring TimeSeries.list API call.
This filter is used to select a specific TimeSeries for
the purpose of autoscaling and to determine whether the metric
is exporting per-instance or per-group data.
You can only use the AND operator for joining selectors.
You can only use direct equality comparison operator (=) without
any functions for each selector.
You can specify the metric in both the filter string and in the
metric field. However, if specified in both places, the metric must
be identical.
The monitored resource type determines what kind of values are
expected for the metric. If it is a gce_instance, the autoscaler
expects the metric to include a separate TimeSeries for each
instance in a group. In such a case, you cannot filter on resource
labels.
If the resource type is any other value, the autoscaler expects
this metric to contain values that apply to the entire autoscaled
instance group and resource label filtering can be performed to
point autoscaler at the correct TimeSeries to scale upon.
This is called a per-group metric for the purpose of autoscaling.
If not specified, the type defaults to gce_instance.
You should provide a filter that is selective enough to pick just
one TimeSeries for the autoscaled group or for each of the instances
(if you are using gce_instance resource type). If multiple
TimeSeries are returned upon the query execution, the autoscaler
will sum their respective values to obtain its scaling value.
:param float single_instance_assignment: If scaling is based on a per-group metric value that represents the
total amount of work to be done or resource usage, set this value to
an amount assigned for a single instance of the scaled group.
The autoscaler will keep the number of instances proportional to the
value of this metric, the metric itself should not change value due
to group resizing.
For example, a good metric to use with the target is
`pubsub.googleapis.com/subscription/num_undelivered_messages`
or a custom metric exporting the total number of requests coming to
your instances.
A bad example would be a metric exporting an average or median
latency, since this value can't include a chunk assignable to a
single instance, it could be better used with utilization_target
instead.
:param float target: Fraction of backend capacity utilization (set in HTTP(s) load
balancing configuration) that autoscaler should maintain. Must
be a positive float value. If not defined, the default is 0.8.
:param str type: Defines how target utilization value is expressed for a
Stackdriver Monitoring metric.
Possible values are `GAUGE`, `DELTA_PER_SECOND`, and `DELTA_PER_MINUTE`.
"""
pulumi.set(__self__, "name", name)
if filter is not None:
pulumi.set(__self__, "filter", filter)
if single_instance_assignment is not None:
pulumi.set(__self__, "single_instance_assignment", single_instance_assignment)
if target is not None:
pulumi.set(__self__, "target", target)
if type is not None:
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def name(self) -> str:
"""
The identifier (type) of the Stackdriver Monitoring metric.
The metric cannot have negative values.
The metric must have a value type of INT64 or DOUBLE.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def filter(self) -> Optional[str]:
"""
A filter string to be used as the filter string for
a Stackdriver Monitoring TimeSeries.list API call.
This filter is used to select a specific TimeSeries for
the purpose of autoscaling and to determine whether the metric
is exporting per-instance or per-group data.
You can only use the AND operator for joining selectors.
You can only use direct equality comparison operator (=) without
any functions for each selector.
You can specify the metric in both the filter string and in the
metric field. However, if specified in both places, the metric must
be identical.
The monitored resource type determines what kind of values are
expected for the metric. If it is a gce_instance, the autoscaler
expects the metric to include a separate TimeSeries for each
instance in a group. In such a case, you cannot filter on resource
labels.
If the resource type is any other value, the autoscaler expects
this metric to contain values that apply to the entire autoscaled
instance group and resource label filtering can be performed to
point autoscaler at the correct TimeSeries to scale upon.
This is called a per-group metric for the purpose of autoscaling.
If not specified, the type defaults to gce_instance.
You should provide a filter that is selective enough to pick just
one TimeSeries for the autoscaled group or for each of the instances
(if you are using gce_instance resource type). If multiple
TimeSeries are returned upon the query execution, the autoscaler
will sum their respective values to obtain its scaling value.
"""
return pulumi.get(self, "filter")
@property
@pulumi.getter(name="singleInstanceAssignment")
def single_instance_assignment(self) -> Optional[float]:
"""
If scaling is based on a per-group metric value that represents the
total amount of work to be done or resource usage, set this value to
an amount assigned for a single instance of the scaled group.
The autoscaler will keep the number of instances proportional to the
value of this metric, the metric itself should not change value due
to group resizing.
For example, a good metric to use with the target is
`pubsub.googleapis.com/subscription/num_undelivered_messages`
or a custom metric exporting the total number of requests coming to
your instances.
A bad example would be a metric exporting an average or median
latency, since this value can't include a chunk assignable to a
single instance, it could be better used with utilization_target
instead.
"""
return pulumi.get(self, "single_instance_assignment")
@property
@pulumi.getter
def target(self) -> Optional[float]:
"""
Fraction of backend capacity utilization (set in HTTP(s) load
balancing configuration) that autoscaler should maintain. Must
be a positive float value. If not defined, the default is 0.8.
"""
return pulumi.get(self, "target")
@property
@pulumi.getter
def type(self) -> Optional[str]:
"""
Defines how target utilization value is expressed for a
Stackdriver Monitoring metric.
Possible values are `GAUGE`, `DELTA_PER_SECOND`, and `DELTA_PER_MINUTE`.
"""
return pulumi.get(self, "type")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class RegionAutoscalerAutoscalingPolicyScaleDownControl(dict):
def __init__(__self__, *,
max_scaled_down_replicas: Optional['outputs.RegionAutoscalerAutoscalingPolicyScaleDownControlMaxScaledDownReplicas'] = None,
time_window_sec: Optional[float] = None):
"""
:param 'RegionAutoscalerAutoscalingPolicyScaleDownControlMaxScaledDownReplicasArgs' max_scaled_down_replicas: A nested object resource
Structure is documented below.
:param float time_window_sec: How long back autoscaling should look when computing recommendations
to include directives regarding slower scale down, as described above.
"""
if max_scaled_down_replicas is not None:
pulumi.set(__self__, "max_scaled_down_replicas", max_scaled_down_replicas)
if time_window_sec is not None:
pulumi.set(__self__, "time_window_sec", time_window_sec)
@property
@pulumi.getter(name="maxScaledDownReplicas")
def max_scaled_down_replicas(self) -> Optional['outputs.RegionAutoscalerAutoscalingPolicyScaleDownControlMaxScaledDownReplicas']:
"""
A nested object resource
Structure is documented below.
"""
return pulumi.get(self, "max_scaled_down_replicas")
@property
@pulumi.getter(name="timeWindowSec")
def time_window_sec(self) -> Optional[float]:
"""
How long back autoscaling should look when computing recommendations
to include directives regarding slower scale down, as described above.
"""
return pulumi.get(self, "time_window_sec")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class RegionAutoscalerAutoscalingPolicyScaleDownControlMaxScaledDownReplicas(dict):
def __init__(__self__, *,
fixed: Optional[float] = None,
percent: Optional[float] = None):
"""
:param float fixed: Specifies a fixed number of VM instances. This must be a positive
integer.
:param float percent: Specifies a percentage of instances between 0 to 100%, inclusive.
For example, specify 80 for 80%.
"""
if fixed is not None:
pulumi.set(__self__, "fixed", fixed)
if percent is not None:
pulumi.set(__self__, "percent", percent)
@property
@pulumi.getter
def fixed(self) -> Optional[float]:
"""
Specifies a fixed number of VM instances. This must be a positive
integer.
"""
return pulumi.get(self, "fixed")
@property
@pulumi.getter
def percent(self) -> Optional[float]:
"""
Specifies a percentage of instances between 0 to 100%, inclusive.
For example, specify 80 for 80%.
"""
return pulumi.get(self, "percent")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class RegionBackendServiceBackend(dict):
def __init__(__self__, *,
group: str,
balancing_mode: Optional[str] = None,
capacity_scaler: Optional[float] = None,
description: Optional[str] = None,
failover: Optional[bool] = None,
max_connections: Optional[float] = None,
max_connections_per_endpoint: Optional[float] = None,
max_connections_per_instance: Optional[float] = None,
max_rate: Optional[float] = None,
max_rate_per_endpoint: Optional[float] = None,
max_rate_per_instance: Optional[float] = None,
max_utilization: Optional[float] = None):
"""
:param str group: The fully-qualified URL of an Instance Group or Network Endpoint
Group resource. In case of instance group this defines the list
of instances that serve traffic. Member virtual machine
instances from each instance group must live in the same zone as
the instance group itself. No two backends in a backend service
are allowed to use same Instance Group resource.
For Network Endpoint Groups this defines list of endpoints. All
endpoints of Network Endpoint Group must be hosted on instances
located in the same zone as the Network Endpoint Group.
Backend services cannot mix Instance Group and
Network Endpoint Group backends.
When the `load_balancing_scheme` is INTERNAL, only instance groups
are supported.
Note that you must specify an Instance Group or Network Endpoint
Group resource using the fully-qualified URL, rather than a
partial URL.
:param str balancing_mode: Specifies the balancing mode for this backend.
Default value is `CONNECTION`.
Possible values are `UTILIZATION`, `RATE`, and `CONNECTION`.
:param float capacity_scaler: A multiplier applied to the group's maximum servicing capacity
(based on UTILIZATION, RATE or CONNECTION).
~>**NOTE**: This field cannot be set for
INTERNAL region backend services (default loadBalancingScheme),
but is required for non-INTERNAL backend service. The total
capacity_scaler for all backends must be non-zero.
A setting of 0 means the group is completely drained, offering
0% of its available Capacity. Valid range is [0.0,1.0].
:param str description: An optional description of this resource.
Provide this property when you create the resource.
:param bool failover: This field designates whether this is a failover backend. More
than one failover backend can be configured for a given RegionBackendService.
:param float max_connections: The maximum number of connections to the backend cluster.
Defaults to 1024.
:param float max_connections_per_endpoint: The max number of simultaneous connections that a single backend
network endpoint can handle. Cannot be set
for INTERNAL backend services.
This is used to calculate the capacity of the group. Can be
used in either CONNECTION or UTILIZATION balancing modes. For
CONNECTION mode, either maxConnections or
maxConnectionsPerEndpoint must be set.
:param float max_connections_per_instance: The max number of simultaneous connections that a single
backend instance can handle. Cannot be set for INTERNAL backend
services.
This is used to calculate the capacity of the group.
Can be used in either CONNECTION or UTILIZATION balancing modes.
For CONNECTION mode, either maxConnections or
maxConnectionsPerInstance must be set.
:param float max_rate: The max requests per second (RPS) of the group. Cannot be set
for INTERNAL backend services.
Can be used with either RATE or UTILIZATION balancing modes,
but required if RATE mode. Either maxRate or one
of maxRatePerInstance or maxRatePerEndpoint, as appropriate for
group type, must be set.
:param float max_rate_per_endpoint: The max requests per second (RPS) that a single backend network
endpoint can handle. This is used to calculate the capacity of
the group. Can be used in either balancing mode. For RATE mode,
either maxRate or maxRatePerEndpoint must be set. Cannot be set
for INTERNAL backend services.
:param float max_rate_per_instance: The max requests per second (RPS) that a single backend
instance can handle. This is used to calculate the capacity of
the group. Can be used in either balancing mode. For RATE mode,
either maxRate or maxRatePerInstance must be set. Cannot be set
for INTERNAL backend services.
:param float max_utilization: Used when balancingMode is UTILIZATION. This ratio defines the
CPU utilization target for the group. Valid range is [0.0, 1.0].
Cannot be set for INTERNAL backend services.
"""
pulumi.set(__self__, "group", group)
if balancing_mode is not None:
pulumi.set(__self__, "balancing_mode", balancing_mode)
if capacity_scaler is not None:
pulumi.set(__self__, "capacity_scaler", capacity_scaler)
if description is not None:
pulumi.set(__self__, "description", description)
if failover is not None:
pulumi.set(__self__, "failover", failover)
if max_connections is not None:
pulumi.set(__self__, "max_connections", max_connections)
if max_connections_per_endpoint is not None:
pulumi.set(__self__, "max_connections_per_endpoint", max_connections_per_endpoint)
if max_connections_per_instance is not None:
pulumi.set(__self__, "max_connections_per_instance", max_connections_per_instance)
if max_rate is not None:
pulumi.set(__self__, "max_rate", max_rate)
if max_rate_per_endpoint is not None:
pulumi.set(__self__, "max_rate_per_endpoint", max_rate_per_endpoint)
if max_rate_per_instance is not None:
pulumi.set(__self__, "max_rate_per_instance", max_rate_per_instance)
if max_utilization is not None:
pulumi.set(__self__, "max_utilization", max_utilization)
@property
@pulumi.getter
def group(self) -> str:
"""
The fully-qualified URL of an Instance Group or Network Endpoint
Group resource. In case of instance group this defines the list
of instances that serve traffic. Member virtual machine
instances from each instance group must live in the same zone as
the instance group itself. No two backends in a backend service
are allowed to use same Instance Group resource.
For Network Endpoint Groups this defines list of endpoints. All
endpoints of Network Endpoint Group must be hosted on instances
located in the same zone as the Network Endpoint Group.
Backend services cannot mix Instance Group and
Network Endpoint Group backends.
When the `load_balancing_scheme` is INTERNAL, only instance groups
are supported.
Note that you must specify an Instance Group or Network Endpoint
Group resource using the fully-qualified URL, rather than a
partial URL.
"""
return pulumi.get(self, "group")
@property
@pulumi.getter(name="balancingMode")
def balancing_mode(self) -> Optional[str]:
"""
Specifies the balancing mode for this backend.
Default value is `CONNECTION`.
Possible values are `UTILIZATION`, `RATE`, and `CONNECTION`.
"""
return pulumi.get(self, "balancing_mode")
@property
@pulumi.getter(name="capacityScaler")
def capacity_scaler(self) -> Optional[float]:
"""
A multiplier applied to the group's maximum servicing capacity
(based on UTILIZATION, RATE or CONNECTION).
~>**NOTE**: This field cannot be set for
INTERNAL region backend services (default loadBalancingScheme),
but is required for non-INTERNAL backend service. The total
capacity_scaler for all backends must be non-zero.
A setting of 0 means the group is completely drained, offering
0% of its available Capacity. Valid range is [0.0,1.0].
"""
return pulumi.get(self, "capacity_scaler")
@property
@pulumi.getter
def description(self) -> Optional[str]:
"""
An optional description of this resource.
Provide this property when you create the resource.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter
def failover(self) -> Optional[bool]:
"""
This field designates whether this is a failover backend. More
than one failover backend can be configured for a given RegionBackendService.
"""
return pulumi.get(self, "failover")
@property
@pulumi.getter(name="maxConnections")
def max_connections(self) -> Optional[float]:
"""
The maximum number of connections to the backend cluster.
Defaults to 1024.
"""
return pulumi.get(self, "max_connections")
@property
@pulumi.getter(name="maxConnectionsPerEndpoint")
def max_connections_per_endpoint(self) -> Optional[float]:
"""
The max number of simultaneous connections that a single backend
network endpoint can handle. Cannot be set
for INTERNAL backend services.
This is used to calculate the capacity of the group. Can be
used in either CONNECTION or UTILIZATION balancing modes. For
CONNECTION mode, either maxConnections or
maxConnectionsPerEndpoint must be set.
"""
return pulumi.get(self, "max_connections_per_endpoint")
@property
@pulumi.getter(name="maxConnectionsPerInstance")
def max_connections_per_instance(self) -> Optional[float]:
"""
The max number of simultaneous connections that a single
backend instance can handle. Cannot be set for INTERNAL backend
services.
This is used to calculate the capacity of the group.
Can be used in either CONNECTION or UTILIZATION balancing modes.
For CONNECTION mode, either maxConnections or
maxConnectionsPerInstance must be set.
"""
return pulumi.get(self, "max_connections_per_instance")
@property
@pulumi.getter(name="maxRate")
def max_rate(self) -> Optional[float]:
"""
The max requests per second (RPS) of the group. Cannot be set
for INTERNAL backend services.
Can be used with either RATE or UTILIZATION balancing modes,
but required if RATE mode. Either maxRate or one
of maxRatePerInstance or maxRatePerEndpoint, as appropriate for
group type, must be set.
"""
return pulumi.get(self, "max_rate")
@property
@pulumi.getter(name="maxRatePerEndpoint")
def max_rate_per_endpoint(self) -> Optional[float]:
"""
The max requests per second (RPS) that a single backend network
endpoint can handle. This is used to calculate the capacity of
the group. Can be used in either balancing mode. For RATE mode,
either maxRate or maxRatePerEndpoint must be set. Cannot be set
for INTERNAL backend services.
"""
return pulumi.get(self, "max_rate_per_endpoint")
@property
@pulumi.getter(name="maxRatePerInstance")
def max_rate_per_instance(self) -> Optional[float]:
"""
The max requests per second (RPS) that a single backend
instance can handle. This is used to calculate the capacity of
the group. Can be used in either balancing mode. For RATE mode,
either maxRate or maxRatePerInstance must be set. Cannot be set
for INTERNAL backend services.
"""
return pulumi.get(self, "max_rate_per_instance")
@property
@pulumi.getter(name="maxUtilization")
def max_utilization(self) -> Optional[float]:
"""
Used when balancingMode is UTILIZATION. This ratio defines the
CPU utilization target for the group. Valid range is [0.0, 1.0].
Cannot be set for INTERNAL backend services.
"""
return pulumi.get(self, "max_utilization")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class RegionBackendServiceCircuitBreakers(dict):
def __init__(__self__, *,
connect_timeout: Optional['outputs.RegionBackendServiceCircuitBreakersConnectTimeout'] = None,
max_connections: Optional[float] = None,
max_pending_requests: Optional[float] = None,
max_requests: Optional[float] = None,
max_requests_per_connection: Optional[float] = None,
max_retries: Optional[float] = None):
"""
:param 'RegionBackendServiceCircuitBreakersConnectTimeoutArgs' connect_timeout: The timeout for new network connections to hosts. Structure is documented below.
:param float max_connections: The maximum number of connections to the backend cluster.
Defaults to 1024.
:param float max_pending_requests: The maximum number of pending requests to the backend cluster.
Defaults to 1024.
:param float max_requests: The maximum number of parallel requests to the backend cluster.
Defaults to 1024.
:param float max_requests_per_connection: Maximum requests for a single backend connection. This parameter
is respected by both the HTTP/1.1 and HTTP/2 implementations. If
not specified, there is no limit. Setting this parameter to 1
will effectively disable keep alive.
:param float max_retries: The maximum number of parallel retries to the backend cluster.
Defaults to 3.
"""
if connect_timeout is not None:
pulumi.set(__self__, "connect_timeout", connect_timeout)
if max_connections is not None:
pulumi.set(__self__, "max_connections", max_connections)
if max_pending_requests is not None:
pulumi.set(__self__, "max_pending_requests", max_pending_requests)
if max_requests is not None:
pulumi.set(__self__, "max_requests", max_requests)
if max_requests_per_connection is not None:
pulumi.set(__self__, "max_requests_per_connection", max_requests_per_connection)
if max_retries is not None:
pulumi.set(__self__, "max_retries", max_retries)
@property
@pulumi.getter(name="connectTimeout")
def connect_timeout(self) -> Optional['outputs.RegionBackendServiceCircuitBreakersConnectTimeout']:
"""
The timeout for new network connections to hosts. Structure is documented below.
"""
return pulumi.get(self, "connect_timeout")
@property
@pulumi.getter(name="maxConnections")
def max_connections(self) -> Optional[float]:
"""
The maximum number of connections to the backend cluster.
Defaults to 1024.
"""
return pulumi.get(self, "max_connections")
@property
@pulumi.getter(name="maxPendingRequests")
def max_pending_requests(self) -> Optional[float]:
"""
The maximum number of pending requests to the backend cluster.
Defaults to 1024.
"""
return pulumi.get(self, "max_pending_requests")
@property
@pulumi.getter(name="maxRequests")
def max_requests(self) -> Optional[float]:
"""
The maximum number of parallel requests to the backend cluster.
Defaults to 1024.
"""
return pulumi.get(self, "max_requests")
@property
@pulumi.getter(name="maxRequestsPerConnection")
def max_requests_per_connection(self) -> Optional[float]:
"""
Maximum requests for a single backend connection. This parameter
is respected by both the HTTP/1.1 and HTTP/2 implementations. If
not specified, there is no limit. Setting this parameter to 1
will effectively disable keep alive.
"""
return pulumi.get(self, "max_requests_per_connection")
@property
@pulumi.getter(name="maxRetries")
def max_retries(self) -> Optional[float]:
"""
The maximum number of parallel retries to the backend cluster.
Defaults to 3.
"""
return pulumi.get(self, "max_retries")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class RegionBackendServiceCircuitBreakersConnectTimeout(dict):
def __init__(__self__, *,
seconds: float,
nanos: Optional[float] = None):
"""
:param float seconds: Span of time at a resolution of a second. Must be from 0 to 315,576,000,000
inclusive.
:param float nanos: Span of time that's a fraction of a second at nanosecond resolution. Durations
less than one second are represented with a 0 `seconds` field and a positive
`nanos` field. Must be from 0 to 999,999,999 inclusive.
"""
pulumi.set(__self__, "seconds", seconds)
if nanos is not None:
pulumi.set(__self__, "nanos", nanos)
@property
@pulumi.getter
def seconds(self) -> float:
"""
Span of time at a resolution of a second. Must be from 0 to 315,576,000,000
inclusive.
"""
return pulumi.get(self, "seconds")
@property
@pulumi.getter
def nanos(self) -> Optional[float]:
"""
Span of time that's a fraction of a second at nanosecond resolution. Durations
less than one second are represented with a 0 `seconds` field and a positive
`nanos` field. Must be from 0 to 999,999,999 inclusive.
"""
return pulumi.get(self, "nanos")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class RegionBackendServiceConsistentHash(dict):
def __init__(__self__, *,
http_cookie: Optional['outputs.RegionBackendServiceConsistentHashHttpCookie'] = None,
http_header_name: Optional[str] = None,
minimum_ring_size: Optional[float] = None):
"""
:param 'RegionBackendServiceConsistentHashHttpCookieArgs' http_cookie: Hash is based on HTTP Cookie. This field describes a HTTP cookie
that will be used as the hash key for the consistent hash load
balancer. If the cookie is not present, it will be generated.
This field is applicable if the sessionAffinity is set to HTTP_COOKIE.
Structure is documented below.
:param str http_header_name: The hash based on the value of the specified header field.
This field is applicable if the sessionAffinity is set to HEADER_FIELD.
:param float minimum_ring_size: The minimum number of virtual nodes to use for the hash ring.
Larger ring sizes result in more granular load
distributions. If the number of hosts in the load balancing pool
is larger than the ring size, each host will be assigned a single
virtual node.
Defaults to 1024.
"""
if http_cookie is not None:
pulumi.set(__self__, "http_cookie", http_cookie)
if http_header_name is not None:
pulumi.set(__self__, "http_header_name", http_header_name)
if minimum_ring_size is not None:
pulumi.set(__self__, "minimum_ring_size", minimum_ring_size)
@property
@pulumi.getter(name="httpCookie")
def http_cookie(self) -> Optional['outputs.RegionBackendServiceConsistentHashHttpCookie']:
"""
Hash is based on HTTP Cookie. This field describes a HTTP cookie
that will be used as the hash key for the consistent hash load
balancer. If the cookie is not present, it will be generated.
This field is applicable if the sessionAffinity is set to HTTP_COOKIE.
Structure is documented below.
"""
return pulumi.get(self, "http_cookie")
@property
@pulumi.getter(name="httpHeaderName")
def http_header_name(self) -> Optional[str]:
"""
The hash based on the value of the specified header field.
This field is applicable if the sessionAffinity is set to HEADER_FIELD.
"""
return pulumi.get(self, "http_header_name")
@property
@pulumi.getter(name="minimumRingSize")
def minimum_ring_size(self) -> Optional[float]:
"""
The minimum number of virtual nodes to use for the hash ring.
Larger ring sizes result in more granular load
distributions. If the number of hosts in the load balancing pool
is larger than the ring size, each host will be assigned a single
virtual node.
Defaults to 1024.
"""
return pulumi.get(self, "minimum_ring_size")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class RegionBackendServiceConsistentHashHttpCookie(dict):
def __init__(__self__, *,
name: Optional[str] = None,
path: Optional[str] = None,
ttl: Optional['outputs.RegionBackendServiceConsistentHashHttpCookieTtl'] = None):
"""
:param str name: Name of the cookie.
:param str path: Path to set for the cookie.
:param 'RegionBackendServiceConsistentHashHttpCookieTtlArgs' ttl: Lifetime of the cookie.
Structure is documented below.
"""
if name is not None:
pulumi.set(__self__, "name", name)
if path is not None:
pulumi.set(__self__, "path", path)
if ttl is not None:
pulumi.set(__self__, "ttl", ttl)
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Name of the cookie.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def path(self) -> Optional[str]:
"""
Path to set for the cookie.
"""
return pulumi.get(self, "path")
@property
@pulumi.getter
def ttl(self) -> Optional['outputs.RegionBackendServiceConsistentHashHttpCookieTtl']:
"""
Lifetime of the cookie.
Structure is documented below.
"""
return pulumi.get(self, "ttl")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class RegionBackendServiceConsistentHashHttpCookieTtl(dict):
def __init__(__self__, *,
seconds: float,
nanos: Optional[float] = None):
"""
:param float seconds: Span of time at a resolution of a second. Must be from 0 to 315,576,000,000
inclusive.
:param float nanos: Span of time that's a fraction of a second at nanosecond resolution. Durations
less than one second are represented with a 0 `seconds` field and a positive
`nanos` field. Must be from 0 to 999,999,999 inclusive.
"""
pulumi.set(__self__, "seconds", seconds)
if nanos is not None:
pulumi.set(__self__, "nanos", nanos)
@property
@pulumi.getter
def seconds(self) -> float:
"""
Span of time at a resolution of a second. Must be from 0 to 315,576,000,000
inclusive.
"""
return pulumi.get(self, "seconds")
@property
@pulumi.getter
def nanos(self) -> Optional[float]:
"""
Span of time that's a fraction of a second at nanosecond resolution. Durations
less than one second are represented with a 0 `seconds` field and a positive
`nanos` field. Must be from 0 to 999,999,999 inclusive.
"""
return pulumi.get(self, "nanos")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class RegionBackendServiceFailoverPolicy(dict):
    """
    Failover behavior for a regional backend service: connection draining
    on failover, handling of traffic when no VM is healthy, and the
    healthy-VM ratio that triggers failover.
    """
    def __init__(__self__, *,
                 disable_connection_drain_on_failover: Optional[bool] = None,
                 drop_traffic_if_unhealthy: Optional[bool] = None,
                 failover_ratio: Optional[float] = None):
        """
        :param bool disable_connection_drain_on_failover: On failover or failback, this field indicates whether connection drain
               will be honored. Setting this to true has the following effect: connections
               to the old active pool are not drained. Connections to the new active pool
               use the timeout of 10 min (currently fixed). Setting to false has the
               following effect: both old and new connections will have a drain timeout
               of 10 min.
               This can be set to true only if the protocol is TCP.
               The default is false.
        :param bool drop_traffic_if_unhealthy: This option is used only when no healthy VMs are detected in the primary
               and backup instance groups. When set to true, traffic is dropped. When
               set to false, new connections are sent across all VMs in the primary group.
               The default is false.
        :param float failover_ratio: The value of the field must be in [0, 1]. If the ratio of the healthy
               VMs in the primary backend is at or below this number, traffic arriving
               at the load-balanced IP will be directed to the failover backend.
               In case where 'failoverRatio' is not set or all the VMs in the backup
               backend are unhealthy, the traffic will be directed back to the primary
               backend in the "force" mode, where traffic will be spread to the healthy
               VMs with the best effort, or to all VMs when no VM is healthy.
               This field is only used with l4 load balancing.
        """
        if disable_connection_drain_on_failover is not None:
            pulumi.set(__self__, "disable_connection_drain_on_failover", disable_connection_drain_on_failover)
        if drop_traffic_if_unhealthy is not None:
            pulumi.set(__self__, "drop_traffic_if_unhealthy", drop_traffic_if_unhealthy)
        if failover_ratio is not None:
            pulumi.set(__self__, "failover_ratio", failover_ratio)
    @property
    @pulumi.getter(name="disableConnectionDrainOnFailover")
    def disable_connection_drain_on_failover(self) -> Optional[bool]:
        """
        On failover or failback, this field indicates whether connection drain
        will be honored. Setting this to true has the following effect: connections
        to the old active pool are not drained. Connections to the new active pool
        use the timeout of 10 min (currently fixed). Setting to false has the
        following effect: both old and new connections will have a drain timeout
        of 10 min.
        This can be set to true only if the protocol is TCP.
        The default is false.
        """
        return pulumi.get(self, "disable_connection_drain_on_failover")
    @property
    @pulumi.getter(name="dropTrafficIfUnhealthy")
    def drop_traffic_if_unhealthy(self) -> Optional[bool]:
        """
        This option is used only when no healthy VMs are detected in the primary
        and backup instance groups. When set to true, traffic is dropped. When
        set to false, new connections are sent across all VMs in the primary group.
        The default is false.
        """
        return pulumi.get(self, "drop_traffic_if_unhealthy")
    @property
    @pulumi.getter(name="failoverRatio")
    def failover_ratio(self) -> Optional[float]:
        """
        The value of the field must be in [0, 1]. If the ratio of the healthy
        VMs in the primary backend is at or below this number, traffic arriving
        at the load-balanced IP will be directed to the failover backend.
        In case where 'failoverRatio' is not set or all the VMs in the backup
        backend are unhealthy, the traffic will be directed back to the primary
        backend in the "force" mode, where traffic will be spread to the healthy
        VMs with the best effort, or to all VMs when no VM is healthy.
        This field is only used with l4 load balancing.
        """
        return pulumi.get(self, "failover_ratio")
    def _translate_property(self, prop):
        """Translate a camelCase property name to snake_case via the shared table, falling back to the name itself."""
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class RegionBackendServiceLogConfig(dict):
    """
    Logging configuration for traffic served by this backend service:
    an enable flag and a request sampling rate.
    """
    def __init__(__self__, *,
                 enable: Optional[bool] = None,
                 sample_rate: Optional[float] = None):
        """
        :param bool enable: Whether to enable logging for the load balancer traffic served by this backend service.
        :param float sample_rate: This field can only be specified if logging is enabled for this backend service. The value of
               the field must be in [0, 1]. This configures the sampling rate of requests to the load balancer
               where 1.0 means all logged requests are reported and 0.0 means no logged requests are reported.
               The default value is 1.0.
        """
        if enable is not None:
            pulumi.set(__self__, "enable", enable)
        if sample_rate is not None:
            pulumi.set(__self__, "sample_rate", sample_rate)
    @property
    @pulumi.getter
    def enable(self) -> Optional[bool]:
        """
        Whether to enable logging for the load balancer traffic served by this backend service.
        """
        return pulumi.get(self, "enable")
    @property
    @pulumi.getter(name="sampleRate")
    def sample_rate(self) -> Optional[float]:
        """
        This field can only be specified if logging is enabled for this backend service. The value of
        the field must be in [0, 1]. This configures the sampling rate of requests to the load balancer
        where 1.0 means all logged requests are reported and 0.0 means no logged requests are reported.
        The default value is 1.0.
        """
        return pulumi.get(self, "sample_rate")
    def _translate_property(self, prop):
        """Translate a camelCase property name to snake_case via the shared table, falling back to the name itself."""
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class RegionBackendServiceOutlierDetection(dict):
    """
    Settings controlling the ejection of unhealthy hosts from the load
    balancing pool (thresholds, ejection timing, and success-rate based
    outlier detection).
    """
    def __init__(__self__, *,
                 base_ejection_time: Optional['outputs.RegionBackendServiceOutlierDetectionBaseEjectionTime'] = None,
                 consecutive_errors: Optional[float] = None,
                 consecutive_gateway_failure: Optional[float] = None,
                 enforcing_consecutive_errors: Optional[float] = None,
                 enforcing_consecutive_gateway_failure: Optional[float] = None,
                 enforcing_success_rate: Optional[float] = None,
                 interval: Optional['outputs.RegionBackendServiceOutlierDetectionInterval'] = None,
                 max_ejection_percent: Optional[float] = None,
                 success_rate_minimum_hosts: Optional[float] = None,
                 success_rate_request_volume: Optional[float] = None,
                 success_rate_stdev_factor: Optional[float] = None):
        """
        :param 'RegionBackendServiceOutlierDetectionBaseEjectionTimeArgs' base_ejection_time: The base time that a host is ejected for. The real time is equal to the base
               time multiplied by the number of times the host has been ejected. Defaults to
               30000ms or 30s.
               Structure is documented below.
        :param float consecutive_errors: Number of errors before a host is ejected from the connection pool. When the
               backend host is accessed over HTTP, a 5xx return code qualifies as an error.
               Defaults to 5.
        :param float consecutive_gateway_failure: The number of consecutive gateway failures (502, 503, 504 status or connection
               errors that are mapped to one of those status codes) before a consecutive
               gateway failure ejection occurs. Defaults to 5.
        :param float enforcing_consecutive_errors: The percentage chance that a host will be actually ejected when an outlier
               status is detected through consecutive 5xx. This setting can be used to disable
               ejection or to ramp it up slowly. Defaults to 100.
        :param float enforcing_consecutive_gateway_failure: The percentage chance that a host will be actually ejected when an outlier
               status is detected through consecutive gateway failures. This setting can be
               used to disable ejection or to ramp it up slowly. Defaults to 0.
        :param float enforcing_success_rate: The percentage chance that a host will be actually ejected when an outlier
               status is detected through success rate statistics. This setting can be used to
               disable ejection or to ramp it up slowly. Defaults to 100.
        :param 'RegionBackendServiceOutlierDetectionIntervalArgs' interval: Time interval between ejection sweep analysis. This can result in both new
               ejections as well as hosts being returned to service. Defaults to 10 seconds.
               Structure is documented below.
        :param float max_ejection_percent: Maximum percentage of hosts in the load balancing pool for the backend service
               that can be ejected. Defaults to 10%.
        :param float success_rate_minimum_hosts: The number of hosts in a cluster that must have enough request volume to detect
               success rate outliers. If the number of hosts is less than this setting, outlier
               detection via success rate statistics is not performed for any host in the
               cluster. Defaults to 5.
        :param float success_rate_request_volume: The minimum number of total requests that must be collected in one interval (as
               defined by the interval duration above) to include this host in success rate
               based outlier detection. If the volume is lower than this setting, outlier
               detection via success rate statistics is not performed for that host. Defaults
               to 100.
        :param float success_rate_stdev_factor: This factor is used to determine the ejection threshold for success rate outlier
               ejection. The ejection threshold is the difference between the mean success
               rate, and the product of this factor and the standard deviation of the mean
               success rate: mean - (stdev * success_rate_stdev_factor). This factor is divided
               by a thousand to get a double. That is, if the desired factor is 1.9, the
               runtime value should be 1900. Defaults to 1900.
        """
        if base_ejection_time is not None:
            pulumi.set(__self__, "base_ejection_time", base_ejection_time)
        if consecutive_errors is not None:
            pulumi.set(__self__, "consecutive_errors", consecutive_errors)
        if consecutive_gateway_failure is not None:
            pulumi.set(__self__, "consecutive_gateway_failure", consecutive_gateway_failure)
        if enforcing_consecutive_errors is not None:
            pulumi.set(__self__, "enforcing_consecutive_errors", enforcing_consecutive_errors)
        if enforcing_consecutive_gateway_failure is not None:
            pulumi.set(__self__, "enforcing_consecutive_gateway_failure", enforcing_consecutive_gateway_failure)
        if enforcing_success_rate is not None:
            pulumi.set(__self__, "enforcing_success_rate", enforcing_success_rate)
        if interval is not None:
            pulumi.set(__self__, "interval", interval)
        if max_ejection_percent is not None:
            pulumi.set(__self__, "max_ejection_percent", max_ejection_percent)
        if success_rate_minimum_hosts is not None:
            pulumi.set(__self__, "success_rate_minimum_hosts", success_rate_minimum_hosts)
        if success_rate_request_volume is not None:
            pulumi.set(__self__, "success_rate_request_volume", success_rate_request_volume)
        if success_rate_stdev_factor is not None:
            pulumi.set(__self__, "success_rate_stdev_factor", success_rate_stdev_factor)
    @property
    @pulumi.getter(name="baseEjectionTime")
    def base_ejection_time(self) -> Optional['outputs.RegionBackendServiceOutlierDetectionBaseEjectionTime']:
        """
        The base time that a host is ejected for. The real time is equal to the base
        time multiplied by the number of times the host has been ejected. Defaults to
        30000ms or 30s.
        Structure is documented below.
        """
        return pulumi.get(self, "base_ejection_time")
    @property
    @pulumi.getter(name="consecutiveErrors")
    def consecutive_errors(self) -> Optional[float]:
        """
        Number of errors before a host is ejected from the connection pool. When the
        backend host is accessed over HTTP, a 5xx return code qualifies as an error.
        Defaults to 5.
        """
        return pulumi.get(self, "consecutive_errors")
    @property
    @pulumi.getter(name="consecutiveGatewayFailure")
    def consecutive_gateway_failure(self) -> Optional[float]:
        """
        The number of consecutive gateway failures (502, 503, 504 status or connection
        errors that are mapped to one of those status codes) before a consecutive
        gateway failure ejection occurs. Defaults to 5.
        """
        return pulumi.get(self, "consecutive_gateway_failure")
    @property
    @pulumi.getter(name="enforcingConsecutiveErrors")
    def enforcing_consecutive_errors(self) -> Optional[float]:
        """
        The percentage chance that a host will be actually ejected when an outlier
        status is detected through consecutive 5xx. This setting can be used to disable
        ejection or to ramp it up slowly. Defaults to 100.
        """
        return pulumi.get(self, "enforcing_consecutive_errors")
    @property
    @pulumi.getter(name="enforcingConsecutiveGatewayFailure")
    def enforcing_consecutive_gateway_failure(self) -> Optional[float]:
        """
        The percentage chance that a host will be actually ejected when an outlier
        status is detected through consecutive gateway failures. This setting can be
        used to disable ejection or to ramp it up slowly. Defaults to 0.
        """
        return pulumi.get(self, "enforcing_consecutive_gateway_failure")
    @property
    @pulumi.getter(name="enforcingSuccessRate")
    def enforcing_success_rate(self) -> Optional[float]:
        """
        The percentage chance that a host will be actually ejected when an outlier
        status is detected through success rate statistics. This setting can be used to
        disable ejection or to ramp it up slowly. Defaults to 100.
        """
        return pulumi.get(self, "enforcing_success_rate")
    @property
    @pulumi.getter
    def interval(self) -> Optional['outputs.RegionBackendServiceOutlierDetectionInterval']:
        """
        Time interval between ejection sweep analysis. This can result in both new
        ejections as well as hosts being returned to service. Defaults to 10 seconds.
        Structure is documented below.
        """
        return pulumi.get(self, "interval")
    @property
    @pulumi.getter(name="maxEjectionPercent")
    def max_ejection_percent(self) -> Optional[float]:
        """
        Maximum percentage of hosts in the load balancing pool for the backend service
        that can be ejected. Defaults to 10%.
        """
        return pulumi.get(self, "max_ejection_percent")
    @property
    @pulumi.getter(name="successRateMinimumHosts")
    def success_rate_minimum_hosts(self) -> Optional[float]:
        """
        The number of hosts in a cluster that must have enough request volume to detect
        success rate outliers. If the number of hosts is less than this setting, outlier
        detection via success rate statistics is not performed for any host in the
        cluster. Defaults to 5.
        """
        return pulumi.get(self, "success_rate_minimum_hosts")
    @property
    @pulumi.getter(name="successRateRequestVolume")
    def success_rate_request_volume(self) -> Optional[float]:
        """
        The minimum number of total requests that must be collected in one interval (as
        defined by the interval duration above) to include this host in success rate
        based outlier detection. If the volume is lower than this setting, outlier
        detection via success rate statistics is not performed for that host. Defaults
        to 100.
        """
        return pulumi.get(self, "success_rate_request_volume")
    @property
    @pulumi.getter(name="successRateStdevFactor")
    def success_rate_stdev_factor(self) -> Optional[float]:
        """
        This factor is used to determine the ejection threshold for success rate outlier
        ejection. The ejection threshold is the difference between the mean success
        rate, and the product of this factor and the standard deviation of the mean
        success rate: mean - (stdev * success_rate_stdev_factor). This factor is divided
        by a thousand to get a double. That is, if the desired factor is 1.9, the
        runtime value should be 1900. Defaults to 1900.
        """
        return pulumi.get(self, "success_rate_stdev_factor")
    def _translate_property(self, prop):
        """Translate a camelCase property name to snake_case via the shared table, falling back to the name itself."""
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class RegionBackendServiceOutlierDetectionBaseEjectionTime(dict):
    """
    Base ejection duration for outlier detection, expressed as whole
    seconds plus a fractional nanoseconds component.
    """
    def __init__(__self__, *,
                 seconds: float,
                 nanos: Optional[float] = None):
        """
        :param float seconds: Span of time at a resolution of a second. Must be from 0 to 315,576,000,000
               inclusive.
        :param float nanos: Span of time that's a fraction of a second at nanosecond resolution. Durations
               less than one second are represented with a 0 `seconds` field and a positive
               `nanos` field. Must be from 0 to 999,999,999 inclusive.
        """
        pulumi.set(__self__, "seconds", seconds)
        if nanos is not None:
            pulumi.set(__self__, "nanos", nanos)
    @property
    @pulumi.getter
    def seconds(self) -> float:
        """
        Span of time at a resolution of a second. Must be from 0 to 315,576,000,000
        inclusive.
        """
        return pulumi.get(self, "seconds")
    @property
    @pulumi.getter
    def nanos(self) -> Optional[float]:
        """
        Span of time that's a fraction of a second at nanosecond resolution. Durations
        less than one second are represented with a 0 `seconds` field and a positive
        `nanos` field. Must be from 0 to 999,999,999 inclusive.
        """
        return pulumi.get(self, "nanos")
    def _translate_property(self, prop):
        """Translate a camelCase property name to snake_case via the shared table, falling back to the name itself."""
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class RegionBackendServiceOutlierDetectionInterval(dict):
    """
    Interval between ejection sweep analyses, expressed as whole seconds
    plus a fractional nanoseconds component.
    """
    def __init__(__self__, *,
                 seconds: float,
                 nanos: Optional[float] = None):
        """
        :param float seconds: Span of time at a resolution of a second. Must be from 0 to 315,576,000,000
               inclusive.
        :param float nanos: Span of time that's a fraction of a second at nanosecond resolution. Durations
               less than one second are represented with a 0 `seconds` field and a positive
               `nanos` field. Must be from 0 to 999,999,999 inclusive.
        """
        pulumi.set(__self__, "seconds", seconds)
        if nanos is not None:
            pulumi.set(__self__, "nanos", nanos)
    @property
    @pulumi.getter
    def seconds(self) -> float:
        """
        Span of time at a resolution of a second. Must be from 0 to 315,576,000,000
        inclusive.
        """
        return pulumi.get(self, "seconds")
    @property
    @pulumi.getter
    def nanos(self) -> Optional[float]:
        """
        Span of time that's a fraction of a second at nanosecond resolution. Durations
        less than one second are represented with a 0 `seconds` field and a positive
        `nanos` field. Must be from 0 to 999,999,999 inclusive.
        """
        return pulumi.get(self, "nanos")
    def _translate_property(self, prop):
        """Translate a camelCase property name to snake_case via the shared table, falling back to the name itself."""
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class RegionDiskDiskEncryptionKey(dict):
    """
    Encryption key material for a regional disk: either a Cloud KMS key
    name or a raw customer-supplied key, plus the key's SHA-256 hash.
    """
    def __init__(__self__, *,
                 kms_key_name: Optional[str] = None,
                 raw_key: Optional[str] = None,
                 sha256: Optional[str] = None):
        """
        :param str kms_key_name: The name of the encryption key that is stored in Google Cloud KMS.
        :param str raw_key: Specifies a 256-bit customer-supplied encryption key, encoded in
               RFC 4648 base64 to either encrypt or decrypt this resource.
        :param str sha256: -
               The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
               encryption key that protects this resource.
        """
        if kms_key_name is not None:
            pulumi.set(__self__, "kms_key_name", kms_key_name)
        if raw_key is not None:
            pulumi.set(__self__, "raw_key", raw_key)
        if sha256 is not None:
            pulumi.set(__self__, "sha256", sha256)
    @property
    @pulumi.getter(name="kmsKeyName")
    def kms_key_name(self) -> Optional[str]:
        """
        The name of the encryption key that is stored in Google Cloud KMS.
        """
        return pulumi.get(self, "kms_key_name")
    @property
    @pulumi.getter(name="rawKey")
    def raw_key(self) -> Optional[str]:
        """
        Specifies a 256-bit customer-supplied encryption key, encoded in
        RFC 4648 base64 to either encrypt or decrypt this resource.
        """
        return pulumi.get(self, "raw_key")
    @property
    @pulumi.getter
    def sha256(self) -> Optional[str]:
        """
        -
        The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
        encryption key that protects this resource.
        """
        return pulumi.get(self, "sha256")
    def _translate_property(self, prop):
        """Translate a camelCase property name to snake_case via the shared table, falling back to the name itself."""
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class RegionDiskIamBindingCondition(dict):
    """
    Condition attached to a regional disk IAM binding: an expression with
    a title and an optional description.
    """
    def __init__(__self__, *,
                 expression: str,
                 title: str,
                 description: Optional[str] = None):
        """
        :param str expression: The condition expression.
        :param str title: Title for the condition.
        :param str description: Optional description of the condition.
        """
        # Required fields are always stored; the optional one only when provided.
        for key, value in (("expression", expression), ("title", title)):
            pulumi.set(__self__, key, value)
        if description is not None:
            pulumi.set(__self__, "description", description)
    @property
    @pulumi.getter
    def expression(self) -> str:
        """The condition expression."""
        result = pulumi.get(self, "expression")
        return result
    @property
    @pulumi.getter
    def title(self) -> str:
        """Title for the condition."""
        result = pulumi.get(self, "title")
        return result
    @property
    @pulumi.getter
    def description(self) -> Optional[str]:
        """Optional description of the condition."""
        result = pulumi.get(self, "description")
        return result
    def _translate_property(self, prop):
        """Map a camelCase property name to snake_case, defaulting to the name itself."""
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated if translated else prop
@pulumi.output_type
class RegionDiskIamMemberCondition(dict):
    """
    Condition attached to a regional disk IAM member: an expression with
    a title and an optional description.
    """
    def __init__(__self__, *,
                 expression: str,
                 title: str,
                 description: Optional[str] = None):
        """
        :param str expression: The condition expression.
        :param str title: Title for the condition.
        :param str description: Optional description of the condition.
        """
        # Required fields are always stored; the optional one only when provided.
        for key, value in (("expression", expression), ("title", title)):
            pulumi.set(__self__, key, value)
        if description is not None:
            pulumi.set(__self__, "description", description)
    @property
    @pulumi.getter
    def expression(self) -> str:
        """The condition expression."""
        result = pulumi.get(self, "expression")
        return result
    @property
    @pulumi.getter
    def title(self) -> str:
        """Title for the condition."""
        result = pulumi.get(self, "title")
        return result
    @property
    @pulumi.getter
    def description(self) -> Optional[str]:
        """Optional description of the condition."""
        result = pulumi.get(self, "description")
        return result
    def _translate_property(self, prop):
        """Map a camelCase property name to snake_case, defaulting to the name itself."""
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated if translated else prop
@pulumi.output_type
class RegionDiskSourceSnapshotEncryptionKey(dict):
    """
    Encryption key for the source snapshot of a regional disk: either a
    Cloud KMS key name or a raw customer-supplied key, plus the key's
    SHA-256 hash.
    """
    def __init__(__self__, *,
                 kms_key_name: Optional[str] = None,
                 raw_key: Optional[str] = None,
                 sha256: Optional[str] = None):
        """
        :param str kms_key_name: The name of the encryption key that is stored in Google Cloud KMS.
        :param str raw_key: Specifies a 256-bit customer-supplied encryption key, encoded in
               RFC 4648 base64 to either encrypt or decrypt this resource.
        :param str sha256: -
               The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
               encryption key that protects this resource.
        """
        if kms_key_name is not None:
            pulumi.set(__self__, "kms_key_name", kms_key_name)
        if raw_key is not None:
            pulumi.set(__self__, "raw_key", raw_key)
        if sha256 is not None:
            pulumi.set(__self__, "sha256", sha256)
    @property
    @pulumi.getter(name="kmsKeyName")
    def kms_key_name(self) -> Optional[str]:
        """
        The name of the encryption key that is stored in Google Cloud KMS.
        """
        return pulumi.get(self, "kms_key_name")
    @property
    @pulumi.getter(name="rawKey")
    def raw_key(self) -> Optional[str]:
        """
        Specifies a 256-bit customer-supplied encryption key, encoded in
        RFC 4648 base64 to either encrypt or decrypt this resource.
        """
        return pulumi.get(self, "raw_key")
    @property
    @pulumi.getter
    def sha256(self) -> Optional[str]:
        """
        -
        The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
        encryption key that protects this resource.
        """
        return pulumi.get(self, "sha256")
    def _translate_property(self, prop):
        """Translate a camelCase property name to snake_case via the shared table, falling back to the name itself."""
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class RegionHealthCheckGrpcHealthCheck(dict):
    """
    Configuration for a gRPC health check: the gRPC service name and how
    the target port is selected.
    """
    def __init__(__self__, *,
                 grpc_service_name: Optional[str] = None,
                 port: Optional[float] = None,
                 port_name: Optional[str] = None,
                 port_specification: Optional[str] = None):
        """
        :param str grpc_service_name: The gRPC service name for the health check.
               The value of grpcServiceName has the following meanings by convention:
               - Empty serviceName means the overall status of all services at the backend.
               - Non-empty serviceName means the health of that gRPC service, as defined by the owner of the service.
               The grpcServiceName can only be ASCII.
        :param float port: The port number for the health check request.
               Must be specified if portName and portSpecification are not set
               or if port_specification is USE_FIXED_PORT. Valid values are 1 through 65535.
        :param str port_name: Port name as defined in InstanceGroup#NamedPort#name. If both port and
               port_name are defined, port takes precedence.
        :param str port_specification: Specifies how port is selected for health checking, can be one of the
               following values:
               * `USE_FIXED_PORT`: The port number in `port` is used for health checking.
               * `USE_NAMED_PORT`: The `portName` is used for health checking.
               * `USE_SERVING_PORT`: For NetworkEndpointGroup, the port specified for each
               network endpoint is used for health checking. For other backends, the
               port or named port specified in the Backend Service is used for health
               checking.
               If not specified, gRPC health check follows behavior specified in `port` and
               `portName` fields.
               Possible values are `USE_FIXED_PORT`, `USE_NAMED_PORT`, and `USE_SERVING_PORT`.
        """
        if grpc_service_name is not None:
            pulumi.set(__self__, "grpc_service_name", grpc_service_name)
        if port is not None:
            pulumi.set(__self__, "port", port)
        if port_name is not None:
            pulumi.set(__self__, "port_name", port_name)
        if port_specification is not None:
            pulumi.set(__self__, "port_specification", port_specification)
    @property
    @pulumi.getter(name="grpcServiceName")
    def grpc_service_name(self) -> Optional[str]:
        """
        The gRPC service name for the health check.
        The value of grpcServiceName has the following meanings by convention:
        - Empty serviceName means the overall status of all services at the backend.
        - Non-empty serviceName means the health of that gRPC service, as defined by the owner of the service.
        The grpcServiceName can only be ASCII.
        """
        return pulumi.get(self, "grpc_service_name")
    @property
    @pulumi.getter
    def port(self) -> Optional[float]:
        """
        The port number for the health check request.
        Must be specified if portName and portSpecification are not set
        or if port_specification is USE_FIXED_PORT. Valid values are 1 through 65535.
        """
        return pulumi.get(self, "port")
    @property
    @pulumi.getter(name="portName")
    def port_name(self) -> Optional[str]:
        """
        Port name as defined in InstanceGroup#NamedPort#name. If both port and
        port_name are defined, port takes precedence.
        """
        return pulumi.get(self, "port_name")
    @property
    @pulumi.getter(name="portSpecification")
    def port_specification(self) -> Optional[str]:
        """
        Specifies how port is selected for health checking, can be one of the
        following values:
        * `USE_FIXED_PORT`: The port number in `port` is used for health checking.
        * `USE_NAMED_PORT`: The `portName` is used for health checking.
        * `USE_SERVING_PORT`: For NetworkEndpointGroup, the port specified for each
        network endpoint is used for health checking. For other backends, the
        port or named port specified in the Backend Service is used for health
        checking.
        If not specified, gRPC health check follows behavior specified in `port` and
        `portName` fields.
        Possible values are `USE_FIXED_PORT`, `USE_NAMED_PORT`, and `USE_SERVING_PORT`.
        """
        return pulumi.get(self, "port_specification")
    def _translate_property(self, prop):
        """Translate a camelCase property name to snake_case via the shared table, falling back to the name itself."""
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class RegionHealthCheckHttp2HealthCheck(dict):
    """
    Configuration for an HTTP/2 health check: host header, port
    selection, proxy header, request path and expected response prefix.
    """
    def __init__(__self__, *,
                 host: Optional[str] = None,
                 port: Optional[float] = None,
                 port_name: Optional[str] = None,
                 port_specification: Optional[str] = None,
                 proxy_header: Optional[str] = None,
                 request_path: Optional[str] = None,
                 response: Optional[str] = None):
        """
        :param str host: The value of the host header in the HTTP2 health check request.
               If left empty (default value), the public IP on behalf of which this health
               check is performed will be used.
        :param float port: The port number for the health check request.
               Must be specified if portName and portSpecification are not set
               or if port_specification is USE_FIXED_PORT. Valid values are 1 through 65535.
        :param str port_name: Port name as defined in InstanceGroup#NamedPort#name. If both port and
               port_name are defined, port takes precedence.
        :param str port_specification: Specifies how port is selected for health checking, can be one of the
               following values:
               * `USE_FIXED_PORT`: The port number in `port` is used for health checking.
               * `USE_NAMED_PORT`: The `portName` is used for health checking.
               * `USE_SERVING_PORT`: For NetworkEndpointGroup, the port specified for each
               network endpoint is used for health checking. For other backends, the
               port or named port specified in the Backend Service is used for health
               checking.
               If not specified, HTTP2 health check follows behavior specified in `port` and
               `portName` fields.
               Possible values are `USE_FIXED_PORT`, `USE_NAMED_PORT`, and `USE_SERVING_PORT`.
        :param str proxy_header: Specifies the type of proxy header to append before sending data to the
               backend.
               Default value is `NONE`.
               Possible values are `NONE` and `PROXY_V1`.
        :param str request_path: The request path of the HTTP2 health check request.
               The default value is /.
        :param str response: The bytes to match against the beginning of the response data. If left empty
               (the default value), any response will indicate health. The response data
               can only be ASCII.
        """
        if host is not None:
            pulumi.set(__self__, "host", host)
        if port is not None:
            pulumi.set(__self__, "port", port)
        if port_name is not None:
            pulumi.set(__self__, "port_name", port_name)
        if port_specification is not None:
            pulumi.set(__self__, "port_specification", port_specification)
        if proxy_header is not None:
            pulumi.set(__self__, "proxy_header", proxy_header)
        if request_path is not None:
            pulumi.set(__self__, "request_path", request_path)
        if response is not None:
            pulumi.set(__self__, "response", response)
    @property
    @pulumi.getter
    def host(self) -> Optional[str]:
        """
        The value of the host header in the HTTP2 health check request.
        If left empty (default value), the public IP on behalf of which this health
        check is performed will be used.
        """
        return pulumi.get(self, "host")
    @property
    @pulumi.getter
    def port(self) -> Optional[float]:
        """
        The port number for the health check request.
        Must be specified if portName and portSpecification are not set
        or if port_specification is USE_FIXED_PORT. Valid values are 1 through 65535.
        """
        return pulumi.get(self, "port")
    @property
    @pulumi.getter(name="portName")
    def port_name(self) -> Optional[str]:
        """
        Port name as defined in InstanceGroup#NamedPort#name. If both port and
        port_name are defined, port takes precedence.
        """
        return pulumi.get(self, "port_name")
    @property
    @pulumi.getter(name="portSpecification")
    def port_specification(self) -> Optional[str]:
        """
        Specifies how port is selected for health checking, can be one of the
        following values:
        * `USE_FIXED_PORT`: The port number in `port` is used for health checking.
        * `USE_NAMED_PORT`: The `portName` is used for health checking.
        * `USE_SERVING_PORT`: For NetworkEndpointGroup, the port specified for each
        network endpoint is used for health checking. For other backends, the
        port or named port specified in the Backend Service is used for health
        checking.
        If not specified, HTTP2 health check follows behavior specified in `port` and
        `portName` fields.
        Possible values are `USE_FIXED_PORT`, `USE_NAMED_PORT`, and `USE_SERVING_PORT`.
        """
        return pulumi.get(self, "port_specification")
    @property
    @pulumi.getter(name="proxyHeader")
    def proxy_header(self) -> Optional[str]:
        """
        Specifies the type of proxy header to append before sending data to the
        backend.
        Default value is `NONE`.
        Possible values are `NONE` and `PROXY_V1`.
        """
        return pulumi.get(self, "proxy_header")
    @property
    @pulumi.getter(name="requestPath")
    def request_path(self) -> Optional[str]:
        """
        The request path of the HTTP2 health check request.
        The default value is /.
        """
        return pulumi.get(self, "request_path")
    @property
    @pulumi.getter
    def response(self) -> Optional[str]:
        """
        The bytes to match against the beginning of the response data. If left empty
        (the default value), any response will indicate health. The response data
        can only be ASCII.
        """
        return pulumi.get(self, "response")
    def _translate_property(self, prop):
        """Translate a camelCase property name to snake_case via the shared table, falling back to the name itself."""
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class RegionHealthCheckHttpHealthCheck(dict):
    """Settings for a regional health check that probes backends over HTTP."""
    def __init__(__self__, *,
                 host: Optional[str] = None,
                 port: Optional[float] = None,
                 port_name: Optional[str] = None,
                 port_specification: Optional[str] = None,
                 proxy_header: Optional[str] = None,
                 request_path: Optional[str] = None,
                 response: Optional[str] = None):
        """
        :param str host: The value of the host header in the HTTP health check request.
               If left empty (default value), the public IP on behalf of which this health
               check is performed will be used.
        :param float port: The port number for the health check request.
               Must be specified if portName and portSpecification are not set
               or if port_specification is USE_FIXED_PORT. Valid values are 1 through 65535.
        :param str port_name: Port name as defined in InstanceGroup#NamedPort#name. If both port and
               port_name are defined, port takes precedence.
        :param str port_specification: Specifies how port is selected for health checking, can be one of the
               following values:
               * `USE_FIXED_PORT`: The port number in `port` is used for health checking.
               * `USE_NAMED_PORT`: The `portName` is used for health checking.
               * `USE_SERVING_PORT`: For NetworkEndpointGroup, the port specified for each
               network endpoint is used for health checking. For other backends, the
               port or named port specified in the Backend Service is used for health
               checking.
               If not specified, HTTP health check follows behavior specified in `port` and
               `portName` fields.
               Possible values are `USE_FIXED_PORT`, `USE_NAMED_PORT`, and `USE_SERVING_PORT`.
        :param str proxy_header: Specifies the type of proxy header to append before sending data to the
               backend.
               Default value is `NONE`.
               Possible values are `NONE` and `PROXY_V1`.
        :param str request_path: The request path of the HTTP health check request.
               The default value is /.
        :param str response: The bytes to match against the beginning of the response data. If left empty
               (the default value), any response will indicate health. The response data
               can only be ASCII.
        """
        if host is not None:
            pulumi.set(__self__, "host", host)
        if port is not None:
            pulumi.set(__self__, "port", port)
        if port_name is not None:
            pulumi.set(__self__, "port_name", port_name)
        if port_specification is not None:
            pulumi.set(__self__, "port_specification", port_specification)
        if proxy_header is not None:
            pulumi.set(__self__, "proxy_header", proxy_header)
        if request_path is not None:
            pulumi.set(__self__, "request_path", request_path)
        if response is not None:
            pulumi.set(__self__, "response", response)
    @property
    @pulumi.getter
    def host(self) -> Optional[str]:
        """
        The value of the host header in the HTTP health check request.
        If left empty (default value), the public IP on behalf of which this health
        check is performed will be used.
        """
        return pulumi.get(self, "host")
    @property
    @pulumi.getter
    def port(self) -> Optional[float]:
        """
        The port number for the health check request.
        Must be specified if portName and portSpecification are not set
        or if port_specification is USE_FIXED_PORT. Valid values are 1 through 65535.
        """
        return pulumi.get(self, "port")
    @property
    @pulumi.getter(name="portName")
    def port_name(self) -> Optional[str]:
        """
        Port name as defined in InstanceGroup#NamedPort#name. If both port and
        port_name are defined, port takes precedence.
        """
        return pulumi.get(self, "port_name")
    @property
    @pulumi.getter(name="portSpecification")
    def port_specification(self) -> Optional[str]:
        """
        Specifies how port is selected for health checking, can be one of the
        following values:
        * `USE_FIXED_PORT`: The port number in `port` is used for health checking.
        * `USE_NAMED_PORT`: The `portName` is used for health checking.
        * `USE_SERVING_PORT`: For NetworkEndpointGroup, the port specified for each
        network endpoint is used for health checking. For other backends, the
        port or named port specified in the Backend Service is used for health
        checking.
        If not specified, HTTP health check follows behavior specified in `port` and
        `portName` fields.
        Possible values are `USE_FIXED_PORT`, `USE_NAMED_PORT`, and `USE_SERVING_PORT`.
        """
        return pulumi.get(self, "port_specification")
    @property
    @pulumi.getter(name="proxyHeader")
    def proxy_header(self) -> Optional[str]:
        """
        Specifies the type of proxy header to append before sending data to the
        backend.
        Default value is `NONE`.
        Possible values are `NONE` and `PROXY_V1`.
        """
        return pulumi.get(self, "proxy_header")
    @property
    @pulumi.getter(name="requestPath")
    def request_path(self) -> Optional[str]:
        """
        The request path of the HTTP health check request.
        The default value is /.
        """
        return pulumi.get(self, "request_path")
    @property
    @pulumi.getter
    def response(self) -> Optional[str]:
        """
        The bytes to match against the beginning of the response data. If left empty
        (the default value), any response will indicate health. The response data
        can only be ASCII.
        """
        return pulumi.get(self, "response")
    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class RegionHealthCheckHttpsHealthCheck(dict):
    """Settings for a regional health check that probes backends over HTTPS."""
    def __init__(__self__, *,
                 host: Optional[str] = None,
                 port: Optional[float] = None,
                 port_name: Optional[str] = None,
                 port_specification: Optional[str] = None,
                 proxy_header: Optional[str] = None,
                 request_path: Optional[str] = None,
                 response: Optional[str] = None):
        """
        :param str host: The value of the host header in the HTTPS health check request.
               If left empty (default value), the public IP on behalf of which this health
               check is performed will be used.
        :param float port: The port number for the health check request.
               Must be specified if portName and portSpecification are not set
               or if port_specification is USE_FIXED_PORT. Valid values are 1 through 65535.
        :param str port_name: Port name as defined in InstanceGroup#NamedPort#name. If both port and
               port_name are defined, port takes precedence.
        :param str port_specification: Specifies how port is selected for health checking, can be one of the
               following values:
               * `USE_FIXED_PORT`: The port number in `port` is used for health checking.
               * `USE_NAMED_PORT`: The `portName` is used for health checking.
               * `USE_SERVING_PORT`: For NetworkEndpointGroup, the port specified for each
               network endpoint is used for health checking. For other backends, the
               port or named port specified in the Backend Service is used for health
               checking.
               If not specified, HTTPS health check follows behavior specified in `port` and
               `portName` fields.
               Possible values are `USE_FIXED_PORT`, `USE_NAMED_PORT`, and `USE_SERVING_PORT`.
        :param str proxy_header: Specifies the type of proxy header to append before sending data to the
               backend.
               Default value is `NONE`.
               Possible values are `NONE` and `PROXY_V1`.
        :param str request_path: The request path of the HTTPS health check request.
               The default value is /.
        :param str response: The bytes to match against the beginning of the response data. If left empty
               (the default value), any response will indicate health. The response data
               can only be ASCII.
        """
        if host is not None:
            pulumi.set(__self__, "host", host)
        if port is not None:
            pulumi.set(__self__, "port", port)
        if port_name is not None:
            pulumi.set(__self__, "port_name", port_name)
        if port_specification is not None:
            pulumi.set(__self__, "port_specification", port_specification)
        if proxy_header is not None:
            pulumi.set(__self__, "proxy_header", proxy_header)
        if request_path is not None:
            pulumi.set(__self__, "request_path", request_path)
        if response is not None:
            pulumi.set(__self__, "response", response)
    @property
    @pulumi.getter
    def host(self) -> Optional[str]:
        """
        The value of the host header in the HTTPS health check request.
        If left empty (default value), the public IP on behalf of which this health
        check is performed will be used.
        """
        return pulumi.get(self, "host")
    @property
    @pulumi.getter
    def port(self) -> Optional[float]:
        """
        The port number for the health check request.
        Must be specified if portName and portSpecification are not set
        or if port_specification is USE_FIXED_PORT. Valid values are 1 through 65535.
        """
        return pulumi.get(self, "port")
    @property
    @pulumi.getter(name="portName")
    def port_name(self) -> Optional[str]:
        """
        Port name as defined in InstanceGroup#NamedPort#name. If both port and
        port_name are defined, port takes precedence.
        """
        return pulumi.get(self, "port_name")
    @property
    @pulumi.getter(name="portSpecification")
    def port_specification(self) -> Optional[str]:
        """
        Specifies how port is selected for health checking, can be one of the
        following values:
        * `USE_FIXED_PORT`: The port number in `port` is used for health checking.
        * `USE_NAMED_PORT`: The `portName` is used for health checking.
        * `USE_SERVING_PORT`: For NetworkEndpointGroup, the port specified for each
        network endpoint is used for health checking. For other backends, the
        port or named port specified in the Backend Service is used for health
        checking.
        If not specified, HTTPS health check follows behavior specified in `port` and
        `portName` fields.
        Possible values are `USE_FIXED_PORT`, `USE_NAMED_PORT`, and `USE_SERVING_PORT`.
        """
        return pulumi.get(self, "port_specification")
    @property
    @pulumi.getter(name="proxyHeader")
    def proxy_header(self) -> Optional[str]:
        """
        Specifies the type of proxy header to append before sending data to the
        backend.
        Default value is `NONE`.
        Possible values are `NONE` and `PROXY_V1`.
        """
        return pulumi.get(self, "proxy_header")
    @property
    @pulumi.getter(name="requestPath")
    def request_path(self) -> Optional[str]:
        """
        The request path of the HTTPS health check request.
        The default value is /.
        """
        return pulumi.get(self, "request_path")
    @property
    @pulumi.getter
    def response(self) -> Optional[str]:
        """
        The bytes to match against the beginning of the response data. If left empty
        (the default value), any response will indicate health. The response data
        can only be ASCII.
        """
        return pulumi.get(self, "response")
    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class RegionHealthCheckLogConfig(dict):
    """Logging configuration attached to a regional health check."""
    def __init__(__self__, *,
                 enable: Optional[bool] = None):
        """
        :param bool enable: Indicates whether or not to export logs. This is false by default,
               which means no health check logging will be done.
        """
        if enable is None:
            # Leave the key unset so the output dict only carries provided fields.
            return
        pulumi.set(__self__, "enable", enable)
    @property
    @pulumi.getter
    def enable(self) -> Optional[bool]:
        """
        Indicates whether or not to export logs. This is false by default,
        which means no health check logging will be done.
        """
        return pulumi.get(self, "enable")
    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class RegionHealthCheckSslHealthCheck(dict):
    """Settings for a regional health check that probes backends over SSL."""
    def __init__(__self__, *,
                 port: Optional[float] = None,
                 port_name: Optional[str] = None,
                 port_specification: Optional[str] = None,
                 proxy_header: Optional[str] = None,
                 request: Optional[str] = None,
                 response: Optional[str] = None):
        """
        :param float port: The port number for the health check request.
               Must be specified if portName and portSpecification are not set
               or if port_specification is USE_FIXED_PORT. Valid values are 1 through 65535.
        :param str port_name: Port name as defined in InstanceGroup#NamedPort#name. If both port and
               port_name are defined, port takes precedence.
        :param str port_specification: Specifies how port is selected for health checking, can be one of the
               following values:
               * `USE_FIXED_PORT`: The port number in `port` is used for health checking.
               * `USE_NAMED_PORT`: The `portName` is used for health checking.
               * `USE_SERVING_PORT`: For NetworkEndpointGroup, the port specified for each
               network endpoint is used for health checking. For other backends, the
               port or named port specified in the Backend Service is used for health
               checking.
               If not specified, SSL health check follows behavior specified in `port` and
               `portName` fields.
               Possible values are `USE_FIXED_PORT`, `USE_NAMED_PORT`, and `USE_SERVING_PORT`.
        :param str proxy_header: Specifies the type of proxy header to append before sending data to the
               backend.
               Default value is `NONE`.
               Possible values are `NONE` and `PROXY_V1`.
        :param str request: The application data to send once the SSL connection has been
               established (default value is empty). If both request and response are
               empty, the connection establishment alone will indicate health. The request
               data can only be ASCII.
        :param str response: The bytes to match against the beginning of the response data. If left empty
               (the default value), any response will indicate health. The response data
               can only be ASCII.
        """
        if port is not None:
            pulumi.set(__self__, "port", port)
        if port_name is not None:
            pulumi.set(__self__, "port_name", port_name)
        if port_specification is not None:
            pulumi.set(__self__, "port_specification", port_specification)
        if proxy_header is not None:
            pulumi.set(__self__, "proxy_header", proxy_header)
        if request is not None:
            pulumi.set(__self__, "request", request)
        if response is not None:
            pulumi.set(__self__, "response", response)
    @property
    @pulumi.getter
    def port(self) -> Optional[float]:
        """
        The port number for the health check request.
        Must be specified if portName and portSpecification are not set
        or if port_specification is USE_FIXED_PORT. Valid values are 1 through 65535.
        """
        return pulumi.get(self, "port")
    @property
    @pulumi.getter(name="portName")
    def port_name(self) -> Optional[str]:
        """
        Port name as defined in InstanceGroup#NamedPort#name. If both port and
        port_name are defined, port takes precedence.
        """
        return pulumi.get(self, "port_name")
    @property
    @pulumi.getter(name="portSpecification")
    def port_specification(self) -> Optional[str]:
        """
        Specifies how port is selected for health checking, can be one of the
        following values:
        * `USE_FIXED_PORT`: The port number in `port` is used for health checking.
        * `USE_NAMED_PORT`: The `portName` is used for health checking.
        * `USE_SERVING_PORT`: For NetworkEndpointGroup, the port specified for each
        network endpoint is used for health checking. For other backends, the
        port or named port specified in the Backend Service is used for health
        checking.
        If not specified, SSL health check follows behavior specified in `port` and
        `portName` fields.
        Possible values are `USE_FIXED_PORT`, `USE_NAMED_PORT`, and `USE_SERVING_PORT`.
        """
        return pulumi.get(self, "port_specification")
    @property
    @pulumi.getter(name="proxyHeader")
    def proxy_header(self) -> Optional[str]:
        """
        Specifies the type of proxy header to append before sending data to the
        backend.
        Default value is `NONE`.
        Possible values are `NONE` and `PROXY_V1`.
        """
        return pulumi.get(self, "proxy_header")
    @property
    @pulumi.getter
    def request(self) -> Optional[str]:
        """
        The application data to send once the SSL connection has been
        established (default value is empty). If both request and response are
        empty, the connection establishment alone will indicate health. The request
        data can only be ASCII.
        """
        return pulumi.get(self, "request")
    @property
    @pulumi.getter
    def response(self) -> Optional[str]:
        """
        The bytes to match against the beginning of the response data. If left empty
        (the default value), any response will indicate health. The response data
        can only be ASCII.
        """
        return pulumi.get(self, "response")
    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class RegionHealthCheckTcpHealthCheck(dict):
    """Settings for a regional health check that probes backends over TCP."""
    def __init__(__self__, *,
                 port: Optional[float] = None,
                 port_name: Optional[str] = None,
                 port_specification: Optional[str] = None,
                 proxy_header: Optional[str] = None,
                 request: Optional[str] = None,
                 response: Optional[str] = None):
        """
        :param float port: The port number for the health check request.
               Must be specified if portName and portSpecification are not set
               or if port_specification is USE_FIXED_PORT. Valid values are 1 through 65535.
        :param str port_name: Port name as defined in InstanceGroup#NamedPort#name. If both port and
               port_name are defined, port takes precedence.
        :param str port_specification: Specifies how port is selected for health checking, can be one of the
               following values:
               * `USE_FIXED_PORT`: The port number in `port` is used for health checking.
               * `USE_NAMED_PORT`: The `portName` is used for health checking.
               * `USE_SERVING_PORT`: For NetworkEndpointGroup, the port specified for each
               network endpoint is used for health checking. For other backends, the
               port or named port specified in the Backend Service is used for health
               checking.
               If not specified, TCP health check follows behavior specified in `port` and
               `portName` fields.
               Possible values are `USE_FIXED_PORT`, `USE_NAMED_PORT`, and `USE_SERVING_PORT`.
        :param str proxy_header: Specifies the type of proxy header to append before sending data to the
               backend.
               Default value is `NONE`.
               Possible values are `NONE` and `PROXY_V1`.
        :param str request: The application data to send once the TCP connection has been
               established (default value is empty). If both request and response are
               empty, the connection establishment alone will indicate health. The request
               data can only be ASCII.
        :param str response: The bytes to match against the beginning of the response data. If left empty
               (the default value), any response will indicate health. The response data
               can only be ASCII.
        """
        if port is not None:
            pulumi.set(__self__, "port", port)
        if port_name is not None:
            pulumi.set(__self__, "port_name", port_name)
        if port_specification is not None:
            pulumi.set(__self__, "port_specification", port_specification)
        if proxy_header is not None:
            pulumi.set(__self__, "proxy_header", proxy_header)
        if request is not None:
            pulumi.set(__self__, "request", request)
        if response is not None:
            pulumi.set(__self__, "response", response)
    @property
    @pulumi.getter
    def port(self) -> Optional[float]:
        """
        The port number for the health check request.
        Must be specified if portName and portSpecification are not set
        or if port_specification is USE_FIXED_PORT. Valid values are 1 through 65535.
        """
        return pulumi.get(self, "port")
    @property
    @pulumi.getter(name="portName")
    def port_name(self) -> Optional[str]:
        """
        Port name as defined in InstanceGroup#NamedPort#name. If both port and
        port_name are defined, port takes precedence.
        """
        return pulumi.get(self, "port_name")
    @property
    @pulumi.getter(name="portSpecification")
    def port_specification(self) -> Optional[str]:
        """
        Specifies how port is selected for health checking, can be one of the
        following values:
        * `USE_FIXED_PORT`: The port number in `port` is used for health checking.
        * `USE_NAMED_PORT`: The `portName` is used for health checking.
        * `USE_SERVING_PORT`: For NetworkEndpointGroup, the port specified for each
        network endpoint is used for health checking. For other backends, the
        port or named port specified in the Backend Service is used for health
        checking.
        If not specified, TCP health check follows behavior specified in `port` and
        `portName` fields.
        Possible values are `USE_FIXED_PORT`, `USE_NAMED_PORT`, and `USE_SERVING_PORT`.
        """
        return pulumi.get(self, "port_specification")
    @property
    @pulumi.getter(name="proxyHeader")
    def proxy_header(self) -> Optional[str]:
        """
        Specifies the type of proxy header to append before sending data to the
        backend.
        Default value is `NONE`.
        Possible values are `NONE` and `PROXY_V1`.
        """
        return pulumi.get(self, "proxy_header")
    @property
    @pulumi.getter
    def request(self) -> Optional[str]:
        """
        The application data to send once the TCP connection has been
        established (default value is empty). If both request and response are
        empty, the connection establishment alone will indicate health. The request
        data can only be ASCII.
        """
        return pulumi.get(self, "request")
    @property
    @pulumi.getter
    def response(self) -> Optional[str]:
        """
        The bytes to match against the beginning of the response data. If left empty
        (the default value), any response will indicate health. The response data
        can only be ASCII.
        """
        return pulumi.get(self, "response")
    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class RegionInstanceGroupManagerAutoHealingPolicies(dict):
    """Auto-healing settings attached to a regional managed instance group."""
    def __init__(__self__, *,
                 health_check: str,
                 initial_delay_sec: float):
        """
        :param str health_check: The health check resource that signals autohealing.
        :param float initial_delay_sec: The number of seconds the managed instance group waits
               before it applies autohealing policies to new instances or recently
               recreated instances. Between 0 and 3600.
        """
        # Both fields are required, so they are always stored.
        pulumi.set(__self__, "initial_delay_sec", initial_delay_sec)
        pulumi.set(__self__, "health_check", health_check)
    @property
    @pulumi.getter(name="healthCheck")
    def health_check(self) -> str:
        """
        The health check resource that signals autohealing.
        """
        return pulumi.get(self, "health_check")
    @property
    @pulumi.getter(name="initialDelaySec")
    def initial_delay_sec(self) -> float:
        """
        The number of seconds the managed instance group waits before it applies
        autohealing policies to new instances or recently recreated instances.
        Between 0 and 3600.
        """
        return pulumi.get(self, "initial_delay_sec")
    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class RegionInstanceGroupManagerNamedPort(dict):
    """A named port exposed by a regional managed instance group."""
    def __init__(__self__, *,
                 name: str,
                 port: float):
        """
        :param str name: The name of the port.
        :param float port: The port number.
        """
        pulumi.set(__self__, "name", name)
        pulumi.set(__self__, "port", port)
    @property
    @pulumi.getter
    def name(self) -> str:
        """
        The name of the port.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def port(self) -> float:
        """
        The port number.
        """
        return pulumi.get(self, "port")
    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class RegionInstanceGroupManagerStatefulDisk(dict):
    """Stateful-disk configuration of a regional managed instance group."""
    def __init__(__self__, *,
                 device_name: str,
                 delete_rule: Optional[str] = None):
        """
        :param str device_name: , The device name of the disk to be attached.
        :param str delete_rule: , What should happen to the stateful disk when the VM
               instance is deleted. The available options are `NEVER` (detach the disk
               when the VM is deleted, but do not delete the disk) and
               `ON_PERMANENT_INSTANCE_DELETION` (delete the stateful disk when the VM is
               permanently deleted from the instance group). The default is `NEVER`.
        """
        pulumi.set(__self__, "device_name", device_name)
        if delete_rule is None:
            return
        pulumi.set(__self__, "delete_rule", delete_rule)
    @property
    @pulumi.getter(name="deviceName")
    def device_name(self) -> str:
        """
        , The device name of the disk to be attached.
        """
        return pulumi.get(self, "device_name")
    @property
    @pulumi.getter(name="deleteRule")
    def delete_rule(self) -> Optional[str]:
        """
        , What should happen to the stateful disk when the VM instance is deleted:
        `NEVER` (detach the disk but do not delete it) or
        `ON_PERMANENT_INSTANCE_DELETION` (delete the stateful disk when the VM is
        permanently deleted from the instance group). The default is `NEVER`.
        """
        return pulumi.get(self, "delete_rule")
    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class RegionInstanceGroupManagerUpdatePolicy(dict):
    """Rolling-update policy of a regional managed instance group."""
    def __init__(__self__, *,
                 minimal_action: str,
                 type: str,
                 instance_redistribution_type: Optional[str] = None,
                 max_surge_fixed: Optional[float] = None,
                 max_surge_percent: Optional[float] = None,
                 max_unavailable_fixed: Optional[float] = None,
                 max_unavailable_percent: Optional[float] = None,
                 min_ready_sec: Optional[float] = None):
        """
        :param str minimal_action: - Minimal action to be taken on an instance. You can specify either `RESTART` to restart existing instances or `REPLACE` to delete and create new instances from the target template. If you specify a `RESTART`, the Updater will attempt to perform that action only. However, if the Updater determines that the minimal action you specify is not enough to perform the update, it might perform a more disruptive action.
        :param str type: - The type of update process. You can specify either `PROACTIVE` so that the instance group manager proactively executes actions in order to bring instances to their target versions or `OPPORTUNISTIC` so that no action is proactively executed but the update will be performed as part of other actions (for example, resizes or recreateInstances calls).
        :param str instance_redistribution_type: - The instance redistribution policy for regional managed instance groups. Valid values are: `"PROACTIVE"`, `"NONE"`. If `PROACTIVE` (default), the group attempts to maintain an even distribution of VM instances across zones in the region. If `NONE`, proactive redistribution is disabled.
        :param float max_surge_fixed: , The maximum number of instances that can be created above the specified targetSize during the update process. Conflicts with `max_surge_percent`. It has to be either 0 or at least equal to the number of zones. If fixed values are used, at least one of `max_unavailable_fixed` or `max_surge_fixed` must be greater than 0.
        :param float max_surge_percent: , The maximum number of instances(calculated as percentage) that can be created above the specified targetSize during the update process. Conflicts with `max_surge_fixed`. Percent value is only allowed for regional managed instance groups with size at least 10.
        :param float max_unavailable_fixed: , The maximum number of instances that can be unavailable during the update process. Conflicts with `max_unavailable_percent`. It has to be either 0 or at least equal to the number of zones. If fixed values are used, at least one of `max_unavailable_fixed` or `max_surge_fixed` must be greater than 0.
        :param float max_unavailable_percent: , The maximum number of instances(calculated as percentage) that can be unavailable during the update process. Conflicts with `max_unavailable_fixed`. Percent value is only allowed for regional managed instance groups with size at least 10.
        :param float min_ready_sec: , Minimum number of seconds to wait for after a newly created instance becomes available. This value must be from range [0, 3600]
        - - -
        """
        # Required fields are stored unconditionally.
        pulumi.set(__self__, "minimal_action", minimal_action)
        pulumi.set(__self__, "type", type)
        # Optional fields are only stored when a value was supplied, preserving
        # the original per-field insertion order.
        optional_fields = {
            "instance_redistribution_type": instance_redistribution_type,
            "max_surge_fixed": max_surge_fixed,
            "max_surge_percent": max_surge_percent,
            "max_unavailable_fixed": max_unavailable_fixed,
            "max_unavailable_percent": max_unavailable_percent,
            "min_ready_sec": min_ready_sec,
        }
        for field_name, field_value in optional_fields.items():
            if field_value is not None:
                pulumi.set(__self__, field_name, field_value)
    @property
    @pulumi.getter(name="minimalAction")
    def minimal_action(self) -> str:
        """
        - Minimal action to be taken on an instance. You can specify either `RESTART` to restart existing instances or `REPLACE` to delete and create new instances from the target template. If you specify a `RESTART`, the Updater will attempt to perform that action only. However, if the Updater determines that the minimal action you specify is not enough to perform the update, it might perform a more disruptive action.
        """
        return pulumi.get(self, "minimal_action")
    @property
    @pulumi.getter
    def type(self) -> str:
        """
        - The type of update process. You can specify either `PROACTIVE` so that the instance group manager proactively executes actions in order to bring instances to their target versions or `OPPORTUNISTIC` so that no action is proactively executed but the update will be performed as part of other actions (for example, resizes or recreateInstances calls).
        """
        return pulumi.get(self, "type")
    @property
    @pulumi.getter(name="instanceRedistributionType")
    def instance_redistribution_type(self) -> Optional[str]:
        """
        - The instance redistribution policy for regional managed instance groups. Valid values are: `"PROACTIVE"`, `"NONE"`. If `PROACTIVE` (default), the group attempts to maintain an even distribution of VM instances across zones in the region. If `NONE`, proactive redistribution is disabled.
        """
        return pulumi.get(self, "instance_redistribution_type")
    @property
    @pulumi.getter(name="maxSurgeFixed")
    def max_surge_fixed(self) -> Optional[float]:
        """
        , The maximum number of instances that can be created above the specified targetSize during the update process. Conflicts with `max_surge_percent`. It has to be either 0 or at least equal to the number of zones. If fixed values are used, at least one of `max_unavailable_fixed` or `max_surge_fixed` must be greater than 0.
        """
        return pulumi.get(self, "max_surge_fixed")
    @property
    @pulumi.getter(name="maxSurgePercent")
    def max_surge_percent(self) -> Optional[float]:
        """
        , The maximum number of instances(calculated as percentage) that can be created above the specified targetSize during the update process. Conflicts with `max_surge_fixed`. Percent value is only allowed for regional managed instance groups with size at least 10.
        """
        return pulumi.get(self, "max_surge_percent")
    @property
    @pulumi.getter(name="maxUnavailableFixed")
    def max_unavailable_fixed(self) -> Optional[float]:
        """
        , The maximum number of instances that can be unavailable during the update process. Conflicts with `max_unavailable_percent`. It has to be either 0 or at least equal to the number of zones. If fixed values are used, at least one of `max_unavailable_fixed` or `max_surge_fixed` must be greater than 0.
        """
        return pulumi.get(self, "max_unavailable_fixed")
    @property
    @pulumi.getter(name="maxUnavailablePercent")
    def max_unavailable_percent(self) -> Optional[float]:
        """
        , The maximum number of instances(calculated as percentage) that can be unavailable during the update process. Conflicts with `max_unavailable_fixed`. Percent value is only allowed for regional managed instance groups with size at least 10.
        """
        return pulumi.get(self, "max_unavailable_percent")
    @property
    @pulumi.getter(name="minReadySec")
    def min_ready_sec(self) -> Optional[float]:
        """
        , Minimum number of seconds to wait for after a newly created instance becomes available. This value must be from range [0, 3600]
        - - -
        """
        return pulumi.get(self, "min_ready_sec")
    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class RegionInstanceGroupManagerVersion(dict):
    """One instance-template version managed by a regional instance group manager."""
    def __init__(__self__, *,
                 instance_template: str,
                 name: Optional[str] = None,
                 target_size: Optional['outputs.RegionInstanceGroupManagerVersionTargetSize'] = None):
        """
        :param str instance_template: - The full URL to an instance template from which all new instances of this version will be created.
        :param str name: - Version name.
        :param 'RegionInstanceGroupManagerVersionTargetSizeArgs' target_size: - The number of instances calculated as a fixed number or a percentage depending on the settings. Structure is documented below.
        """
        pulumi.set(__self__, "instance_template", instance_template)
        # Optional fields are only recorded when supplied.
        for key, value in (("name", name), ("target_size", target_size)):
            if value is not None:
                pulumi.set(__self__, key, value)
    @property
    @pulumi.getter(name="instanceTemplate")
    def instance_template(self) -> str:
        """
        - The full URL to an instance template from which all new instances of this version will be created.
        """
        return pulumi.get(self, "instance_template")
    @property
    @pulumi.getter
    def name(self) -> Optional[str]:
        """
        - Version name.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="targetSize")
    def target_size(self) -> Optional['outputs.RegionInstanceGroupManagerVersionTargetSize']:
        """
        - The number of instances calculated as a fixed number or a percentage depending on the settings. Structure is documented below.
        """
        return pulumi.get(self, "target_size")
    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class RegionInstanceGroupManagerVersionTargetSize(dict):
    """Target size of one version of a regional managed instance group."""
    def __init__(__self__, *,
                 fixed: Optional[float] = None,
                 percent: Optional[float] = None):
        """
        :param float fixed: , The number of instances which are managed for this version. Conflicts with `percent`.
        :param float percent: , The number of instances (calculated as percentage) which are managed for this version. Conflicts with `fixed`.
               Note that when using `percent`, rounding will be in favor of explicitly set `target_size` values; a managed instance group with 2 instances and 2 `version`s,
               one of which has a `target_size.percent` of `60` will create 2 instances of that `version`.
        """
        # At most one of the two fields is expected; store whichever was given.
        for key, value in (("fixed", fixed), ("percent", percent)):
            if value is not None:
                pulumi.set(__self__, key, value)
    @property
    @pulumi.getter
    def fixed(self) -> Optional[float]:
        """
        , The number of instances which are managed for this version. Conflicts with `percent`.
        """
        return pulumi.get(self, "fixed")
    @property
    @pulumi.getter
    def percent(self) -> Optional[float]:
        """
        , The number of instances (calculated as percentage) which are managed for this version. Conflicts with `fixed`.
        Note that when using `percent`, rounding will be in favor of explicitly set `target_size` values; a managed instance group with 2 instances and 2 `version`s,
        one of which has a `target_size.percent` of `60` will create 2 instances of that `version`.
        """
        return pulumi.get(self, "percent")
    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class RegionNetworkEndpointGroupAppEngine(dict):
    def __init__(__self__, *,
                 service: Optional[str] = None,
                 url_mask: Optional[str] = None,
                 version: Optional[str] = None):
        """
        :param str service: Optional serving service. The service name must be 1-63 characters
               long, and comply with RFC1035. Example value: "default", "my-service".
        :param str url_mask: A template to parse function field from a request URL. URL mask
               allows for routing to multiple Cloud Functions without having to create multiple
               Network Endpoint Groups and backend services. For example, request URLs
               "mydomain.com/function1" and "mydomain.com/function2" can be backed by the same
               Serverless NEG with URL mask "/". The URL mask will parse them to
               { function = "function1" } and { function = "function2" } respectively.
        :param str version: Optional serving version. The version must be 1-63 characters long,
               and comply with RFC1035. Example value: "v1", "v2".
        """
        # Every field is optional; only store those explicitly provided.
        for key, value in (("service", service), ("url_mask", url_mask), ("version", version)):
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter
    def service(self) -> Optional[str]:
        """Optional serving service, 1-63 characters, RFC1035-compliant (e.g. "default", "my-service")."""
        return pulumi.get(self, "service")

    @property
    @pulumi.getter(name="urlMask")
    def url_mask(self) -> Optional[str]:
        """
        A template to parse function field from a request URL, allowing one Serverless NEG to
        route to multiple Cloud Functions. E.g. with mask "/", "mydomain.com/function1" and
        "mydomain.com/function2" parse to { function = "function1" } and { function = "function2" }.
        """
        return pulumi.get(self, "url_mask")

    @property
    @pulumi.getter
    def version(self) -> Optional[str]:
        """Optional serving version, 1-63 characters, RFC1035-compliant (e.g. "v1", "v2")."""
        return pulumi.get(self, "version")

    def _translate_property(self, prop):
        # Fall back to the camelCase name when there is no snake_case mapping.
        snake_name = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_name or prop
@pulumi.output_type
class RegionNetworkEndpointGroupCloudFunction(dict):
    def __init__(__self__, *,
                 function: Optional[str] = None,
                 url_mask: Optional[str] = None):
        """
        :param str function: A user-defined name of the Cloud Function. The function name is
               case-sensitive and must be 1-63 characters long. Example value: "func1".
        :param str url_mask: A template to parse function field from a request URL. URL mask
               allows for routing to multiple Cloud Functions without having to create multiple
               Network Endpoint Groups and backend services. For example, request URLs
               "mydomain.com/function1" and "mydomain.com/function2" can be backed by the same
               Serverless NEG with URL mask "/". The URL mask will parse them to
               { function = "function1" } and { function = "function2" } respectively.
        """
        # Both fields are optional; only store those explicitly provided.
        for key, value in (("function", function), ("url_mask", url_mask)):
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter
    def function(self) -> Optional[str]:
        """A user-defined Cloud Function name, case-sensitive, 1-63 characters (e.g. "func1")."""
        return pulumi.get(self, "function")

    @property
    @pulumi.getter(name="urlMask")
    def url_mask(self) -> Optional[str]:
        """
        A template to parse function field from a request URL, allowing one Serverless NEG to
        route to multiple Cloud Functions. E.g. with mask "/", "mydomain.com/function1" and
        "mydomain.com/function2" parse to { function = "function1" } and { function = "function2" }.
        """
        return pulumi.get(self, "url_mask")

    def _translate_property(self, prop):
        # Fall back to the camelCase name when there is no snake_case mapping.
        snake_name = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_name or prop
@pulumi.output_type
class RegionNetworkEndpointGroupCloudRun(dict):
    def __init__(__self__, *,
                 service: Optional[str] = None,
                 tag: Optional[str] = None,
                 url_mask: Optional[str] = None):
        """
        :param str service: Optional serving service. The service name must be 1-63 characters
               long, and comply with RFC1035. Example value: "default", "my-service".
        :param str tag: Cloud Run tag represents the "named-revision" to provide additional
               fine-grained traffic routing information. The tag must be 1-63 characters long,
               and comply with RFC1035. Example value: "revision-0010".
        :param str url_mask: A template to parse function field from a request URL. URL mask
               allows for routing to multiple Cloud Functions without having to create multiple
               Network Endpoint Groups and backend services. For example, request URLs
               "mydomain.com/function1" and "mydomain.com/function2" can be backed by the same
               Serverless NEG with URL mask "/". The URL mask will parse them to
               { function = "function1" } and { function = "function2" } respectively.
        """
        # Every field is optional; only store those explicitly provided.
        for key, value in (("service", service), ("tag", tag), ("url_mask", url_mask)):
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter
    def service(self) -> Optional[str]:
        """Optional serving service, 1-63 characters, RFC1035-compliant (e.g. "default", "my-service")."""
        return pulumi.get(self, "service")

    @property
    @pulumi.getter
    def tag(self) -> Optional[str]:
        """
        Cloud Run tag ("named-revision") carrying fine-grained traffic routing information,
        1-63 characters, RFC1035-compliant (e.g. "revision-0010").
        """
        return pulumi.get(self, "tag")

    @property
    @pulumi.getter(name="urlMask")
    def url_mask(self) -> Optional[str]:
        """
        A template to parse function field from a request URL, allowing one Serverless NEG to
        route to multiple Cloud Functions. E.g. with mask "/", "mydomain.com/function1" and
        "mydomain.com/function2" parse to { function = "function1" } and { function = "function2" }.
        """
        return pulumi.get(self, "url_mask")

    def _translate_property(self, prop):
        # Fall back to the camelCase name when there is no snake_case mapping.
        snake_name = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_name or prop
@pulumi.output_type
class RegionPerInstanceConfigPreservedState(dict):
    def __init__(__self__, *,
                 disks: Optional[List['outputs.RegionPerInstanceConfigPreservedStateDisk']] = None,
                 metadata: Optional[Mapping[str, str]] = None):
        """
        :param List['RegionPerInstanceConfigPreservedStateDiskArgs'] disks: Stateful disks for
               the instance. Structure is documented below.
        :param Mapping[str, str] metadata: Preserved metadata defined for this instance. This is
               a list of key->value pairs.
        """
        # Both fields are optional; only store those explicitly provided.
        for key, value in (("disks", disks), ("metadata", metadata)):
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter
    def disks(self) -> Optional[List['outputs.RegionPerInstanceConfigPreservedStateDisk']]:
        """Stateful disks for the instance. Structure is documented below."""
        return pulumi.get(self, "disks")

    @property
    @pulumi.getter
    def metadata(self) -> Optional[Mapping[str, str]]:
        """Preserved metadata defined for this instance, as key->value pairs."""
        return pulumi.get(self, "metadata")

    def _translate_property(self, prop):
        # Fall back to the camelCase name when there is no snake_case mapping.
        snake_name = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_name or prop
@pulumi.output_type
class RegionPerInstanceConfigPreservedStateDisk(dict):
    def __init__(__self__, *,
                 device_name: str,
                 source: str,
                 delete_rule: Optional[str] = None,
                 mode: Optional[str] = None):
        """
        :param str device_name: A unique device name that is reflected into the /dev/ tree of a
               Linux operating system running within the instance.
        :param str source: The URI of an existing persistent disk to attach under the specified
               device-name in the format `projects/project-id/zones/zone/disks/disk-name`.
        :param str delete_rule: What happens to the stateful disk when the VM instance is
               deleted. `NEVER` detaches the disk when the VM is deleted but does not delete the
               disk; `ON_PERMANENT_INSTANCE_DELETION` deletes the stateful disk when the VM is
               permanently deleted from the instance group. Default value is `NEVER`. Possible
               values are `NEVER` and `ON_PERMANENT_INSTANCE_DELETION`.
        :param str mode: The mode of the disk. Default value is `READ_WRITE`. Possible values
               are `READ_ONLY` and `READ_WRITE`.
        """
        pulumi.set(__self__, "device_name", device_name)
        pulumi.set(__self__, "source", source)
        # Optional fields are only recorded when explicitly provided.
        for key, value in (("delete_rule", delete_rule), ("mode", mode)):
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="deviceName")
    def device_name(self) -> str:
        """Unique device name reflected into the /dev/ tree of a Linux OS running in the instance."""
        return pulumi.get(self, "device_name")

    @property
    @pulumi.getter
    def source(self) -> str:
        """
        URI of an existing persistent disk to attach under the given device-name, in the format
        `projects/project-id/zones/zone/disks/disk-name`.
        """
        return pulumi.get(self, "source")

    @property
    @pulumi.getter(name="deleteRule")
    def delete_rule(self) -> Optional[str]:
        """
        What happens to the stateful disk on VM deletion: `NEVER` (default, detach but keep the
        disk) or `ON_PERMANENT_INSTANCE_DELETION` (delete the disk when the VM is permanently
        deleted from the instance group).
        """
        return pulumi.get(self, "delete_rule")

    @property
    @pulumi.getter
    def mode(self) -> Optional[str]:
        """Mode of the disk: `READ_ONLY` or `READ_WRITE` (default `READ_WRITE`)."""
        return pulumi.get(self, "mode")

    def _translate_property(self, prop):
        # Fall back to the camelCase name when there is no snake_case mapping.
        snake_name = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_name or prop
@pulumi.output_type
class RegionUrlMapDefaultUrlRedirect(dict):
    def __init__(__self__, *,
                 strip_query: bool,
                 host_redirect: Optional[str] = None,
                 https_redirect: Optional[bool] = None,
                 path_redirect: Optional[str] = None,
                 prefix_redirect: Optional[str] = None,
                 redirect_response_code: Optional[str] = None):
        """
        :param bool strip_query: If true, any accompanying query portion of the original URL is
               removed prior to redirecting the request; if false it is retained. This field is
               required to ensure an empty block is not set; the normal default value is false.
        :param str host_redirect: The host used in the redirect response instead of the one
               supplied in the request. Must be between 1 and 255 characters.
        :param bool https_redirect: If true, the URL scheme in the redirected request is set to
               https; if false it remains that of the request. Must only be set for UrlMaps used
               in TargetHttpProxys; setting this true for TargetHttpsProxy is not permitted.
               Default is false.
        :param str path_redirect: The path used in the redirect response instead of the one
               supplied in the request. Cannot be supplied together with prefixRedirect; supply
               one alone or neither (then the original path is used). Must be between 1 and 1024
               characters.
        :param str prefix_redirect: The prefix that replaces the prefixMatch specified in the
               HttpRouteRuleMatch, retaining the remaining portion of the URL. Cannot be supplied
               together with pathRedirect; supply one alone or neither (then the original path is
               used). Must be between 1 and 1024 characters.
        :param str redirect_response_code: The HTTP status code for this RedirectAction:
               MOVED_PERMANENTLY_DEFAULT (301, the default), FOUND (302), SEE_OTHER (303),
               TEMPORARY_REDIRECT (307, request method retained) or PERMANENT_REDIRECT
               (308, request method retained).
        """
        pulumi.set(__self__, "strip_query", strip_query)
        # Optional fields are only recorded when explicitly provided.
        optional_fields = (
            ("host_redirect", host_redirect),
            ("https_redirect", https_redirect),
            ("path_redirect", path_redirect),
            ("prefix_redirect", prefix_redirect),
            ("redirect_response_code", redirect_response_code),
        )
        for key, value in optional_fields:
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="stripQuery")
    def strip_query(self) -> bool:
        """
        Whether the query portion of the original URL is removed prior to redirecting.
        Required to ensure an empty block is not set; the normal default value is false.
        """
        return pulumi.get(self, "strip_query")

    @property
    @pulumi.getter(name="hostRedirect")
    def host_redirect(self) -> Optional[str]:
        """Host used in the redirect response instead of the request's host (1-255 characters)."""
        return pulumi.get(self, "host_redirect")

    @property
    @pulumi.getter(name="httpsRedirect")
    def https_redirect(self) -> Optional[bool]:
        """
        Whether the redirected request uses the https scheme instead of the request's scheme.
        Only for UrlMaps used in TargetHttpProxys; not permitted for TargetHttpsProxy. Default false.
        """
        return pulumi.get(self, "https_redirect")

    @property
    @pulumi.getter(name="pathRedirect")
    def path_redirect(self) -> Optional[str]:
        """
        Path used in the redirect response instead of the request's path (1-1024 characters).
        Mutually exclusive with prefixRedirect; if neither is supplied the original path is kept.
        """
        return pulumi.get(self, "path_redirect")

    @property
    @pulumi.getter(name="prefixRedirect")
    def prefix_redirect(self) -> Optional[str]:
        """
        Prefix replacing the matched prefixMatch while retaining the remaining URL
        (1-1024 characters). Mutually exclusive with pathRedirect.
        """
        return pulumi.get(self, "prefix_redirect")

    @property
    @pulumi.getter(name="redirectResponseCode")
    def redirect_response_code(self) -> Optional[str]:
        """
        HTTP status code for the redirect: MOVED_PERMANENTLY_DEFAULT (301, default), FOUND (302),
        SEE_OTHER (303), TEMPORARY_REDIRECT (307) or PERMANENT_REDIRECT (308); 307/308 retain the
        request method.
        """
        return pulumi.get(self, "redirect_response_code")

    def _translate_property(self, prop):
        # Fall back to the camelCase name when there is no snake_case mapping.
        snake_name = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_name or prop
@pulumi.output_type
class RegionUrlMapHostRule(dict):
    def __init__(__self__, *,
                 hosts: List[str],
                 path_matcher: str,
                 description: Optional[str] = None):
        """
        :param List[str] hosts: The list of host patterns to match. They must be valid
               hostnames, except * will match any string of ([a-z0-9-.]*). In that case, * must
               be the first character and must be followed in the pattern by either - or ..
        :param str path_matcher: The name of the PathMatcher to use to match the path portion of
               the URL if the hostRule matches the URL's host portion.
        :param str description: Description of this test case.
        """
        pulumi.set(__self__, "hosts", hosts)
        pulumi.set(__self__, "path_matcher", path_matcher)
        # The description is optional and only stored when provided.
        if description is not None:
            pulumi.set(__self__, "description", description)

    @property
    @pulumi.getter
    def hosts(self) -> List[str]:
        """
        Host patterns to match: valid hostnames, except * matches any string of ([a-z0-9-.]*);
        in that case * must be the first character and must be followed by either - or ..
        """
        return pulumi.get(self, "hosts")

    @property
    @pulumi.getter(name="pathMatcher")
    def path_matcher(self) -> str:
        """Name of the PathMatcher used for the URL's path when this hostRule matches the host."""
        return pulumi.get(self, "path_matcher")

    @property
    @pulumi.getter
    def description(self) -> Optional[str]:
        """Description of this test case."""
        return pulumi.get(self, "description")

    def _translate_property(self, prop):
        # Fall back to the camelCase name when there is no snake_case mapping.
        snake_name = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_name or prop
@pulumi.output_type
class RegionUrlMapPathMatcher(dict):
    def __init__(__self__, *,
                 default_service: str,
                 name: str,
                 default_url_redirect: Optional['outputs.RegionUrlMapPathMatcherDefaultUrlRedirect'] = None,
                 description: Optional[str] = None,
                 path_rules: Optional[List['outputs.RegionUrlMapPathMatcherPathRule']] = None,
                 route_rules: Optional[List['outputs.RegionUrlMapPathMatcherRouteRule']] = None):
        """
        :param str default_service: A reference to a RegionBackendService resource, used if none
               of the pathRules defined by this PathMatcher is matched by the URL's path portion.
        :param str name: The name of the query parameter to match. The query parameter must
               exist in the request, in the absence of which the request match fails.
        :param 'RegionUrlMapPathMatcherDefaultUrlRedirectArgs' default_url_redirect: When none of
               the specified hostRules match, the request is redirected to a URL specified by
               defaultUrlRedirect. If set, defaultService or defaultRouteAction must not be set.
               Structure is documented below.
        :param str description: Description of this test case.
        :param List['RegionUrlMapPathMatcherPathRuleArgs'] path_rules: The list of path rules.
               Use instead of routeRules when simple path matching is all that's required. Order
               does not matter; matches are done on the longest-path-first basis (e.g. /a/b/c/*
               matches before /a/b/* regardless of order). Within a pathMatcher, only one of
               pathRules or routeRules must be set. Structure is documented below.
        :param List['RegionUrlMapPathMatcherRouteRuleArgs'] route_rules: The list of ordered HTTP
               route rules. Use instead of pathRules when advanced route matching and routing
               actions are desired. Order matters: the first matching rule takes effect. Within a
               pathMatcher, only one of pathRules or routeRules must be set. routeRules are not
               supported in UrlMaps intended for External load balancers. Structure is documented
               below.
        """
        pulumi.set(__self__, "default_service", default_service)
        pulumi.set(__self__, "name", name)
        # Optional fields are only recorded when explicitly provided.
        optional_fields = (
            ("default_url_redirect", default_url_redirect),
            ("description", description),
            ("path_rules", path_rules),
            ("route_rules", route_rules),
        )
        for key, value in optional_fields:
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="defaultService")
    def default_service(self) -> str:
        """
        A reference to a RegionBackendService resource, used if none of this PathMatcher's
        pathRules is matched by the URL's path portion.
        """
        return pulumi.get(self, "default_service")

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        The name of the query parameter to match. The query parameter must exist in the
        request, in the absence of which the request match fails.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="defaultUrlRedirect")
    def default_url_redirect(self) -> Optional['outputs.RegionUrlMapPathMatcherDefaultUrlRedirect']:
        """
        Redirect target when none of the specified hostRules match. If set, defaultService or
        defaultRouteAction must not be set. Structure is documented below.
        """
        return pulumi.get(self, "default_url_redirect")

    @property
    @pulumi.getter
    def description(self) -> Optional[str]:
        """Description of this test case."""
        return pulumi.get(self, "description")

    @property
    @pulumi.getter(name="pathRules")
    def path_rules(self) -> Optional[List['outputs.RegionUrlMapPathMatcherPathRule']]:
        """
        The list of path rules (alternative to routeRules for simple path matching). Order is
        irrelevant; matching is longest-path-first. Only one of pathRules or routeRules must be
        set within a given pathMatcher. Structure is documented below.
        """
        return pulumi.get(self, "path_rules")

    @property
    @pulumi.getter(name="routeRules")
    def route_rules(self) -> Optional[List['outputs.RegionUrlMapPathMatcherRouteRule']]:
        """
        The ordered HTTP route rules (alternative to pathRules for advanced matching); the first
        matching rule wins. Only one of pathRules or routeRules must be set within a given
        pathMatcher; not supported in UrlMaps for External load balancers. Structure is
        documented below.
        """
        return pulumi.get(self, "route_rules")

    def _translate_property(self, prop):
        # Fall back to the camelCase name when there is no snake_case mapping.
        snake_name = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_name or prop
@pulumi.output_type
class RegionUrlMapPathMatcherDefaultUrlRedirect(dict):
    def __init__(__self__, *,
                 strip_query: bool,
                 host_redirect: Optional[str] = None,
                 https_redirect: Optional[bool] = None,
                 path_redirect: Optional[str] = None,
                 prefix_redirect: Optional[str] = None,
                 redirect_response_code: Optional[str] = None):
        """
        :param bool strip_query: If true, any accompanying query portion of the original URL is
               removed prior to redirecting the request; if false it is retained. This field is
               required to ensure an empty block is not set; the normal default value is false.
        :param str host_redirect: The host used in the redirect response instead of the one
               supplied in the request. Must be between 1 and 255 characters.
        :param bool https_redirect: If true, the URL scheme in the redirected request is set to
               https; if false it remains that of the request. Must only be set for UrlMaps used
               in TargetHttpProxys; setting this true for TargetHttpsProxy is not permitted.
               Default is false.
        :param str path_redirect: The path used in the redirect response instead of the one
               supplied in the request. Cannot be supplied together with prefixRedirect; supply
               one alone or neither (then the original path is used). Must be between 1 and 1024
               characters.
        :param str prefix_redirect: The prefix that replaces the prefixMatch specified in the
               HttpRouteRuleMatch, retaining the remaining portion of the URL. Cannot be supplied
               together with pathRedirect; supply one alone or neither (then the original path is
               used). Must be between 1 and 1024 characters.
        :param str redirect_response_code: The HTTP status code for this RedirectAction:
               MOVED_PERMANENTLY_DEFAULT (301, the default), FOUND (302), SEE_OTHER (303),
               TEMPORARY_REDIRECT (307, request method retained) or PERMANENT_REDIRECT
               (308, request method retained).
        """
        pulumi.set(__self__, "strip_query", strip_query)
        # Optional fields are only recorded when explicitly provided.
        optional_fields = (
            ("host_redirect", host_redirect),
            ("https_redirect", https_redirect),
            ("path_redirect", path_redirect),
            ("prefix_redirect", prefix_redirect),
            ("redirect_response_code", redirect_response_code),
        )
        for key, value in optional_fields:
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="stripQuery")
    def strip_query(self) -> bool:
        """
        Whether the query portion of the original URL is removed prior to redirecting.
        Required to ensure an empty block is not set; the normal default value is false.
        """
        return pulumi.get(self, "strip_query")

    @property
    @pulumi.getter(name="hostRedirect")
    def host_redirect(self) -> Optional[str]:
        """Host used in the redirect response instead of the request's host (1-255 characters)."""
        return pulumi.get(self, "host_redirect")

    @property
    @pulumi.getter(name="httpsRedirect")
    def https_redirect(self) -> Optional[bool]:
        """
        Whether the redirected request uses the https scheme instead of the request's scheme.
        Only for UrlMaps used in TargetHttpProxys; not permitted for TargetHttpsProxy. Default false.
        """
        return pulumi.get(self, "https_redirect")

    @property
    @pulumi.getter(name="pathRedirect")
    def path_redirect(self) -> Optional[str]:
        """
        Path used in the redirect response instead of the request's path (1-1024 characters).
        Mutually exclusive with prefixRedirect; if neither is supplied the original path is kept.
        """
        return pulumi.get(self, "path_redirect")

    @property
    @pulumi.getter(name="prefixRedirect")
    def prefix_redirect(self) -> Optional[str]:
        """
        Prefix replacing the matched prefixMatch while retaining the remaining URL
        (1-1024 characters). Mutually exclusive with pathRedirect.
        """
        return pulumi.get(self, "prefix_redirect")

    @property
    @pulumi.getter(name="redirectResponseCode")
    def redirect_response_code(self) -> Optional[str]:
        """
        HTTP status code for the redirect: MOVED_PERMANENTLY_DEFAULT (301, default), FOUND (302),
        SEE_OTHER (303), TEMPORARY_REDIRECT (307) or PERMANENT_REDIRECT (308); 307/308 retain the
        request method.
        """
        return pulumi.get(self, "redirect_response_code")

    def _translate_property(self, prop):
        # Fall back to the camelCase name when there is no snake_case mapping.
        snake_name = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_name or prop
@pulumi.output_type
class RegionUrlMapPathMatcherPathRule(dict):
    def __init__(__self__, *,
                 paths: List[str],
                 route_action: Optional['outputs.RegionUrlMapPathMatcherPathRuleRouteAction'] = None,
                 service: Optional[str] = None,
                 url_redirect: Optional['outputs.RegionUrlMapPathMatcherPathRuleUrlRedirect'] = None):
        """
        :param List[str] paths: The list of path patterns to match. Each must start with / and
               the only place a * is allowed is at the end following a /. The string fed to the
               path matcher does not include any text after the first ? or #, and those chars
               are not allowed here.
        :param 'RegionUrlMapPathMatcherPathRuleRouteActionArgs' route_action: In response to a
               matching path, the load balancer performs advanced routing actions like URL
               rewrites, header transformations, etc. prior to forwarding the request to the
               selected backend. If routeAction specifies any weightedBackendServices, service
               must not be set; conversely if service is set, routeAction cannot contain any
               weightedBackendServices. Only one of routeAction or urlRedirect must be set.
               Structure is documented below.
        :param str service: A reference to expected RegionBackendService resource the given URL
               should be mapped to.
        :param 'RegionUrlMapPathMatcherPathRuleUrlRedirectArgs' url_redirect: When a path
               pattern is matched, the request is redirected to a URL specified by urlRedirect.
               If urlRedirect is specified, service or routeAction must not be set. Structure is
               documented below.
        """
        pulumi.set(__self__, "paths", paths)
        # Optional fields are only recorded when explicitly provided.
        optional_fields = (
            ("route_action", route_action),
            ("service", service),
            ("url_redirect", url_redirect),
        )
        for key, value in optional_fields:
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter
    def paths(self) -> List[str]:
        """
        Path patterns to match. Each must start with /; a * is only allowed at the end after a /.
        The matched string excludes anything after the first ? or #, and those chars are not
        allowed here.
        """
        return pulumi.get(self, "paths")

    @property
    @pulumi.getter(name="routeAction")
    def route_action(self) -> Optional['outputs.RegionUrlMapPathMatcherPathRuleRouteAction']:
        """
        Advanced routing actions (URL rewrites, header transformations, etc.) applied before
        forwarding to the selected backend. weightedBackendServices and service are mutually
        exclusive; only one of routeAction or urlRedirect must be set. Structure is documented
        below.
        """
        return pulumi.get(self, "route_action")

    @property
    @pulumi.getter
    def service(self) -> Optional[str]:
        """A reference to expected RegionBackendService resource the given URL should be mapped to."""
        return pulumi.get(self, "service")

    @property
    @pulumi.getter(name="urlRedirect")
    def url_redirect(self) -> Optional['outputs.RegionUrlMapPathMatcherPathRuleUrlRedirect']:
        """
        Redirect target used when a path pattern matches. If specified, service or routeAction
        must not be set. Structure is documented below.
        """
        return pulumi.get(self, "url_redirect")

    def _translate_property(self, prop):
        # Fall back to the camelCase name when there is no snake_case mapping.
        snake_name = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_name or prop
@pulumi.output_type
class RegionUrlMapPathMatcherPathRuleRouteAction(dict):
    """Advanced routing behavior (CORS, fault injection, mirroring, retries,
    timeouts, URL rewrites, weighted backends) applied before a matched
    request is forwarded to its backend service."""

    def __init__(__self__, *,
                 cors_policy: Optional['outputs.RegionUrlMapPathMatcherPathRuleRouteActionCorsPolicy'] = None,
                 fault_injection_policy: Optional['outputs.RegionUrlMapPathMatcherPathRuleRouteActionFaultInjectionPolicy'] = None,
                 request_mirror_policy: Optional['outputs.RegionUrlMapPathMatcherPathRuleRouteActionRequestMirrorPolicy'] = None,
                 retry_policy: Optional['outputs.RegionUrlMapPathMatcherPathRuleRouteActionRetryPolicy'] = None,
                 timeout: Optional['outputs.RegionUrlMapPathMatcherPathRuleRouteActionTimeout'] = None,
                 url_rewrite: Optional['outputs.RegionUrlMapPathMatcherPathRuleRouteActionUrlRewrite'] = None,
                 weighted_backend_services: Optional[List['outputs.RegionUrlMapPathMatcherPathRuleRouteActionWeightedBackendService']] = None):
        """
        :param 'RegionUrlMapPathMatcherPathRuleRouteActionCorsPolicyArgs' cors_policy: Policy allowing client-side
               cross-origin requests (see the W3C CORS Recommendation).
               Structure is documented below.
        :param 'RegionUrlMapPathMatcherPathRuleRouteActionFaultInjectionPolicyArgs' fault_injection_policy: Fault
               injection applied to traffic to test client resiliency against backend
               failure: the load balancer can delay or abort a percentage of requests.
               ``timeout`` and ``retry_policy`` are ignored by clients configured with
               a fault_injection_policy. Structure is documented below.
        :param 'RegionUrlMapPathMatcherPathRuleRouteActionRequestMirrorPolicyArgs' request_mirror_policy: Policy for
               shadowing requests to a separate mirrored backend service. The load
               balancer does not wait for shadow responses; the host/authority header
               is suffixed with ``-shadow`` before mirroring. Structure is documented below.
        :param 'RegionUrlMapPathMatcherPathRuleRouteActionRetryPolicyArgs' retry_policy: Retry policy associated
               with this route. Structure is documented below.
        :param 'RegionUrlMapPathMatcherPathRuleRouteActionTimeoutArgs' timeout: Timeout for the selected route,
               measured from end-of-stream of the request until the response is fully
               processed, including all retries. Defaults to 15 seconds when unset.
               Structure is documented below.
        :param 'RegionUrlMapPathMatcherPathRuleRouteActionUrlRewriteArgs' url_rewrite: Spec for modifying the
               request URL before forwarding to the matched service. Structure is
               documented below.
        :param List['RegionUrlMapPathMatcherPathRuleRouteActionWeightedBackendServiceArgs'] weighted_backend_services: Weighted
               backend services that receive traffic on a route match; weights determine
               the traffic fraction per backend. A single backend still requires one
               weightedBackendService with a non-zero weight. Header actions and URL
               rewrites configured here apply after backend selection. Structure is
               documented below.
        """
        # Only record the fields the caller actually supplied; absent fields
        # stay unset on the output object.
        supplied = {
            "cors_policy": cors_policy,
            "fault_injection_policy": fault_injection_policy,
            "request_mirror_policy": request_mirror_policy,
            "retry_policy": retry_policy,
            "timeout": timeout,
            "url_rewrite": url_rewrite,
            "weighted_backend_services": weighted_backend_services,
        }
        for key, value in supplied.items():
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="corsPolicy")
    def cors_policy(self) -> Optional['outputs.RegionUrlMapPathMatcherPathRuleRouteActionCorsPolicy']:
        """
        Policy allowing client-side cross-origin requests (see the W3C CORS
        Recommendation). Structure is documented below.
        """
        return pulumi.get(self, "cors_policy")

    @property
    @pulumi.getter(name="faultInjectionPolicy")
    def fault_injection_policy(self) -> Optional['outputs.RegionUrlMapPathMatcherPathRuleRouteActionFaultInjectionPolicy']:
        """
        Fault injection applied to traffic to test client resiliency against
        backend failure: the load balancer can delay or abort a percentage of
        requests before they reach the backend service. ``timeout`` and
        ``retry_policy`` are ignored by clients configured with a
        fault_injection_policy. Structure is documented below.
        """
        return pulumi.get(self, "fault_injection_policy")

    @property
    @pulumi.getter(name="requestMirrorPolicy")
    def request_mirror_policy(self) -> Optional['outputs.RegionUrlMapPathMatcherPathRuleRouteActionRequestMirrorPolicy']:
        """
        Policy for shadowing this route's requests to a separate mirrored
        backend service. The load balancer does not wait for shadow responses;
        the host/authority header is suffixed with ``-shadow`` before
        mirroring. Structure is documented below.
        """
        return pulumi.get(self, "request_mirror_policy")

    @property
    @pulumi.getter(name="retryPolicy")
    def retry_policy(self) -> Optional['outputs.RegionUrlMapPathMatcherPathRuleRouteActionRetryPolicy']:
        """
        Retry policy associated with this route.
        Structure is documented below.
        """
        return pulumi.get(self, "retry_policy")

    @property
    @pulumi.getter
    def timeout(self) -> Optional['outputs.RegionUrlMapPathMatcherPathRuleRouteActionTimeout']:
        """
        Timeout for the selected route, measured from end-of-stream of the
        request until the response is fully processed, including all retries.
        Defaults to 15 seconds when unset. Structure is documented below.
        """
        return pulumi.get(self, "timeout")

    @property
    @pulumi.getter(name="urlRewrite")
    def url_rewrite(self) -> Optional['outputs.RegionUrlMapPathMatcherPathRuleRouteActionUrlRewrite']:
        """
        Spec for modifying the request URL before forwarding to the matched
        service. Structure is documented below.
        """
        return pulumi.get(self, "url_rewrite")

    @property
    @pulumi.getter(name="weightedBackendServices")
    def weighted_backend_services(self) -> Optional[List['outputs.RegionUrlMapPathMatcherPathRuleRouteActionWeightedBackendService']]:
        """
        Weighted backend services that receive traffic on a route match;
        weights determine the fraction of traffic per backend. Backend
        selection happens only for new traffic — once a user's request is
        directed to a backend, session affinity keeps subsequent requests on
        it. A single backend still requires one weightedBackendService with a
        non-zero weight. Structure is documented below.
        """
        return pulumi.get(self, "weighted_backend_services")

    def _translate_property(self, prop):
        # Translate a camelCase wire key to snake_case, or echo it back.
        snake_name = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_name if snake_name else prop
@pulumi.output_type
class RegionUrlMapPathMatcherPathRuleRouteActionCorsPolicy(dict):
    """CORS policy for a path rule's route action, mapping onto the standard
    ``Access-Control-*`` response headers."""

    def __init__(__self__, *,
                 disabled: bool,
                 allow_credentials: Optional[bool] = None,
                 allow_headers: Optional[List[str]] = None,
                 allow_methods: Optional[List[str]] = None,
                 allow_origin_regexes: Optional[List[str]] = None,
                 allow_origins: Optional[List[str]] = None,
                 expose_headers: Optional[List[str]] = None,
                 max_age: Optional[float] = None):
        """
        :param bool disabled: When true, this CORS policy is disabled.
        :param bool allow_credentials: Whether a preflight response indicates
               the actual request may carry user credentials (the
               ``Access-Control-Allow-Credentials`` header). Defaults to false.
        :param List[str] allow_headers: Content of the
               ``Access-Control-Allow-Headers`` header.
        :param List[str] allow_methods: Content of the
               ``Access-Control-Allow-Methods`` header.
        :param List[str] allow_origin_regexes: Regular-expression patterns for
               allowed origins (ECMAScript grammar, see
               en.cppreference.com/w/cpp/regex/ecmascript). An origin is allowed
               if it matches either allow_origins or allow_origin_regex.
        :param List[str] allow_origins: Origins permitted to make CORS
               requests. An origin is allowed if it matches either
               allow_origins or allow_origin_regex.
        :param List[str] expose_headers: Content of the
               ``Access-Control-Expose-Headers`` header.
        :param float max_age: How long preflight results may be cached (the
               ``Access-Control-Max-Age`` header).
        """
        # "disabled" is required; the remaining fields are stored only when
        # the caller supplied them.
        pulumi.set(__self__, "disabled", disabled)
        supplied = {
            "allow_credentials": allow_credentials,
            "allow_headers": allow_headers,
            "allow_methods": allow_methods,
            "allow_origin_regexes": allow_origin_regexes,
            "allow_origins": allow_origins,
            "expose_headers": expose_headers,
            "max_age": max_age,
        }
        for key, value in supplied.items():
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter
    def disabled(self) -> bool:
        """
        When true, this CORS policy is disabled.
        """
        return pulumi.get(self, "disabled")

    @property
    @pulumi.getter(name="allowCredentials")
    def allow_credentials(self) -> Optional[bool]:
        """
        Whether a preflight response indicates that the actual request may
        carry user credentials (the ``Access-Control-Allow-Credentials``
        header). Defaults to false.
        """
        return pulumi.get(self, "allow_credentials")

    @property
    @pulumi.getter(name="allowHeaders")
    def allow_headers(self) -> Optional[List[str]]:
        """
        Content of the ``Access-Control-Allow-Headers`` header.
        """
        return pulumi.get(self, "allow_headers")

    @property
    @pulumi.getter(name="allowMethods")
    def allow_methods(self) -> Optional[List[str]]:
        """
        Content of the ``Access-Control-Allow-Methods`` header.
        """
        return pulumi.get(self, "allow_methods")

    @property
    @pulumi.getter(name="allowOriginRegexes")
    def allow_origin_regexes(self) -> Optional[List[str]]:
        """
        Regular-expression patterns for allowed origins (ECMAScript grammar,
        see en.cppreference.com/w/cpp/regex/ecmascript). An origin is allowed
        if it matches either allow_origins or allow_origin_regex.
        """
        return pulumi.get(self, "allow_origin_regexes")

    @property
    @pulumi.getter(name="allowOrigins")
    def allow_origins(self) -> Optional[List[str]]:
        """
        Origins permitted to make CORS requests. An origin is allowed if it
        matches either allow_origins or allow_origin_regex.
        """
        return pulumi.get(self, "allow_origins")

    @property
    @pulumi.getter(name="exposeHeaders")
    def expose_headers(self) -> Optional[List[str]]:
        """
        Content of the ``Access-Control-Expose-Headers`` header.
        """
        return pulumi.get(self, "expose_headers")

    @property
    @pulumi.getter(name="maxAge")
    def max_age(self) -> Optional[float]:
        """
        How long the results of a preflight request may be cached (the
        ``Access-Control-Max-Age`` header).
        """
        return pulumi.get(self, "max_age")

    def _translate_property(self, prop):
        # Translate a camelCase wire key to snake_case, or echo it back.
        snake_name = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_name if snake_name else prop
@pulumi.output_type
class RegionUrlMapPathMatcherPathRuleRouteActionFaultInjectionPolicy(dict):
    """Fault-injection settings for a route action: optional request aborts
    and optional request delays."""

    def __init__(__self__, *,
                 abort: Optional['outputs.RegionUrlMapPathMatcherPathRuleRouteActionFaultInjectionPolicyAbort'] = None,
                 delay: Optional['outputs.RegionUrlMapPathMatcherPathRuleRouteActionFaultInjectionPolicyDelay'] = None):
        """
        :param 'RegionUrlMapPathMatcherPathRuleRouteActionFaultInjectionPolicyAbortArgs' abort: How client
               requests are aborted as part of fault injection.
               Structure is documented below.
        :param 'RegionUrlMapPathMatcherPathRuleRouteActionFaultInjectionPolicyDelayArgs' delay: How client
               requests are delayed as part of fault injection before being sent
               to a backend service. Structure is documented below.
        """
        # Both fields are optional; store only what was provided.
        for key, value in (("abort", abort), ("delay", delay)):
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter
    def abort(self) -> Optional['outputs.RegionUrlMapPathMatcherPathRuleRouteActionFaultInjectionPolicyAbort']:
        """
        How client requests are aborted as part of fault injection.
        Structure is documented below.
        """
        return pulumi.get(self, "abort")

    @property
    @pulumi.getter
    def delay(self) -> Optional['outputs.RegionUrlMapPathMatcherPathRuleRouteActionFaultInjectionPolicyDelay']:
        """
        How client requests are delayed as part of fault injection before
        being sent to a backend service. Structure is documented below.
        """
        return pulumi.get(self, "delay")

    def _translate_property(self, prop):
        # Translate a camelCase wire key to snake_case, or echo it back.
        snake_name = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_name if snake_name else prop
@pulumi.output_type
class RegionUrlMapPathMatcherPathRuleRouteActionFaultInjectionPolicyAbort(dict):
    """Abort spec for fault injection: status code plus the percentage of
    traffic to abort."""

    def __init__(__self__, *,
                 http_status: float,
                 percentage: float):
        """
        :param float http_status: HTTP status code used to abort the request;
               must be between 200 and 599 inclusive.
        :param float percentage: Percentage of traffic
               (connections/operations/requests) subject to fault injection;
               must be between 0.0 and 100.0 inclusive.
        """
        # Both fields are required on the wire.
        pulumi.set(__self__, "http_status", http_status)
        pulumi.set(__self__, "percentage", percentage)

    @property
    @pulumi.getter(name="httpStatus")
    def http_status(self) -> float:
        """
        HTTP status code used to abort the request; must be between 200 and
        599 inclusive.
        """
        return pulumi.get(self, "http_status")

    @property
    @pulumi.getter
    def percentage(self) -> float:
        """
        Percentage of traffic (connections/operations/requests) subject to
        fault injection; must be between 0.0 and 100.0 inclusive.
        """
        return pulumi.get(self, "percentage")

    def _translate_property(self, prop):
        # Translate a camelCase wire key to snake_case, or echo it back.
        snake_name = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_name if snake_name else prop
@pulumi.output_type
class RegionUrlMapPathMatcherPathRuleRouteActionFaultInjectionPolicyDelay(dict):
    """Delay spec for fault injection: a fixed delay interval plus the
    percentage of traffic to delay."""

    def __init__(__self__, *,
                 fixed_delay: 'outputs.RegionUrlMapPathMatcherPathRuleRouteActionFaultInjectionPolicyDelayFixedDelay',
                 percentage: float):
        """
        :param 'RegionUrlMapPathMatcherPathRuleRouteActionFaultInjectionPolicyDelayFixedDelayArgs' fixed_delay: Value
               of the fixed delay interval. Structure is documented below.
        :param float percentage: Percentage of traffic
               (connections/operations/requests) subject to fault injection;
               must be between 0.0 and 100.0 inclusive.
        """
        # Both fields are required on the wire.
        pulumi.set(__self__, "fixed_delay", fixed_delay)
        pulumi.set(__self__, "percentage", percentage)

    @property
    @pulumi.getter(name="fixedDelay")
    def fixed_delay(self) -> 'outputs.RegionUrlMapPathMatcherPathRuleRouteActionFaultInjectionPolicyDelayFixedDelay':
        """
        Value of the fixed delay interval.
        Structure is documented below.
        """
        return pulumi.get(self, "fixed_delay")

    @property
    @pulumi.getter
    def percentage(self) -> float:
        """
        Percentage of traffic (connections/operations/requests) subject to
        fault injection; must be between 0.0 and 100.0 inclusive.
        """
        return pulumi.get(self, "percentage")

    def _translate_property(self, prop):
        # Translate a camelCase wire key to snake_case, or echo it back.
        snake_name = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_name if snake_name else prop
@pulumi.output_type
class RegionUrlMapPathMatcherPathRuleRouteActionFaultInjectionPolicyDelayFixedDelay(dict):
    """A duration expressed as whole seconds plus an optional nanosecond
    remainder."""

    def __init__(__self__, *,
                 seconds: str,
                 nanos: Optional[float] = None):
        """
        :param str seconds: Whole-second component of the span; must be from 0
               to 315,576,000,000 inclusive.
        :param float nanos: Sub-second component at nanosecond resolution;
               must be from 0 to 999,999,999 inclusive. Durations under one
               second use a 0 ``seconds`` field and a positive ``nanos`` field.
        """
        pulumi.set(__self__, "seconds", seconds)
        # "nanos" is optional; keep it unset when not provided.
        if nanos is not None:
            pulumi.set(__self__, "nanos", nanos)

    @property
    @pulumi.getter
    def seconds(self) -> str:
        """
        Whole-second component of the span; must be from 0 to
        315,576,000,000 inclusive.
        """
        return pulumi.get(self, "seconds")

    @property
    @pulumi.getter
    def nanos(self) -> Optional[float]:
        """
        Sub-second component at nanosecond resolution; must be from 0 to
        999,999,999 inclusive. Durations under one second use a 0 ``seconds``
        field and a positive ``nanos`` field.
        """
        return pulumi.get(self, "nanos")

    def _translate_property(self, prop):
        # Translate a camelCase wire key to snake_case, or echo it back.
        snake_name = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_name if snake_name else prop
@pulumi.output_type
class RegionUrlMapPathMatcherPathRuleRouteActionRequestMirrorPolicy(dict):
    """Request-mirroring policy: names the backend service that receives the
    shadowed traffic."""

    def __init__(__self__, *,
                 backend_service: str):
        """
        :param str backend_service: The default RegionBackendService resource.
               Before forwarding the request to backendService, the load
               balancer applies any relevant headerActions specified as part
               of this backendServiceWeight.
        """
        pulumi.set(__self__, "backend_service", backend_service)

    @property
    @pulumi.getter(name="backendService")
    def backend_service(self) -> str:
        """
        The default RegionBackendService resource. Before forwarding the
        request to backendService, the load balancer applies any relevant
        headerActions specified as part of this backendServiceWeight.
        """
        return pulumi.get(self, "backend_service")

    def _translate_property(self, prop):
        # Translate a camelCase wire key to snake_case, or echo it back.
        snake_name = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_name if snake_name else prop
@pulumi.output_type
class RegionUrlMapPathMatcherPathRuleRouteActionRetryPolicy(dict):
    """Retry policy for a route: how many retries, the per-try timeout, and
    the conditions that trigger a retry."""

    def __init__(__self__, *,
                 num_retries: Optional[float] = None,
                 per_try_timeout: Optional['outputs.RegionUrlMapPathMatcherPathRuleRouteActionRetryPolicyPerTryTimeout'] = None,
                 retry_conditions: Optional[List[str]] = None):
        """
        :param float num_retries: Allowed number of retries; must be > 0.
        :param 'RegionUrlMapPathMatcherPathRuleRouteActionRetryPolicyPerTryTimeoutArgs' per_try_timeout: Non-zero
               timeout applied to each retry attempt. Structure is documented below.
        :param List[str] retry_conditions: Conditions under which this retry
               rule applies. Valid values:
               - 5xx: retry on any 5xx response, or when the backend does not
               respond at all (disconnects, resets, read timeouts, connection
               failures, refused streams).
               - gateway-error: like 5xx but limited to 502, 503 and 504.
               - connect-failure: retry on failures connecting to backends,
               e.g. connection timeouts.
               - retriable-4xx: retry on retriable 4xx codes (currently only 409).
               - refused-stream: retry when the backend resets the stream with
               a REFUSED_STREAM error code (safe to retry).
               - cancelled: retry when the response gRPC status is cancelled.
               - deadline-exceeded: retry when the response gRPC status is
               deadline-exceeded.
               - resource-exhausted: retry when the response gRPC status is
               resource-exhausted.
               - unavailable: retry when the response gRPC status is unavailable.
        """
        # All fields are optional; store only what was provided.
        supplied = {
            "num_retries": num_retries,
            "per_try_timeout": per_try_timeout,
            "retry_conditions": retry_conditions,
        }
        for key, value in supplied.items():
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="numRetries")
    def num_retries(self) -> Optional[float]:
        """
        Allowed number of retries; must be > 0.
        """
        return pulumi.get(self, "num_retries")

    @property
    @pulumi.getter(name="perTryTimeout")
    def per_try_timeout(self) -> Optional['outputs.RegionUrlMapPathMatcherPathRuleRouteActionRetryPolicyPerTryTimeout']:
        """
        Non-zero timeout applied to each retry attempt.
        Structure is documented below.
        """
        return pulumi.get(self, "per_try_timeout")

    @property
    @pulumi.getter(name="retryConditions")
    def retry_conditions(self) -> Optional[List[str]]:
        """
        Conditions under which this retry rule applies. Valid values:
        - 5xx: retry on any 5xx response, or when the backend does not
        respond at all (disconnects, resets, read timeouts, connection
        failures, refused streams).
        - gateway-error: like 5xx but limited to 502, 503 and 504.
        - connect-failure: retry on failures connecting to backends, e.g.
        connection timeouts.
        - retriable-4xx: retry on retriable 4xx codes (currently only 409).
        - refused-stream: retry when the backend resets the stream with a
        REFUSED_STREAM error code (safe to retry).
        - cancelled: retry when the response gRPC status is cancelled.
        - deadline-exceeded: retry when the response gRPC status is
        deadline-exceeded.
        - resource-exhausted: retry when the response gRPC status is
        resource-exhausted.
        - unavailable: retry when the response gRPC status is unavailable.
        """
        return pulumi.get(self, "retry_conditions")

    def _translate_property(self, prop):
        # Translate a camelCase wire key to snake_case, or echo it back.
        snake_name = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_name if snake_name else prop
@pulumi.output_type
class RegionUrlMapPathMatcherPathRuleRouteActionRetryPolicyPerTryTimeout(dict):
    """A duration expressed as whole seconds plus an optional nanosecond
    remainder."""

    def __init__(__self__, *,
                 seconds: str,
                 nanos: Optional[float] = None):
        """
        :param str seconds: Whole-second component of the span; must be from 0
               to 315,576,000,000 inclusive.
        :param float nanos: Sub-second component at nanosecond resolution;
               must be from 0 to 999,999,999 inclusive. Durations under one
               second use a 0 ``seconds`` field and a positive ``nanos`` field.
        """
        pulumi.set(__self__, "seconds", seconds)
        # "nanos" is optional; keep it unset when not provided.
        if nanos is not None:
            pulumi.set(__self__, "nanos", nanos)

    @property
    @pulumi.getter
    def seconds(self) -> str:
        """
        Whole-second component of the span; must be from 0 to
        315,576,000,000 inclusive.
        """
        return pulumi.get(self, "seconds")

    @property
    @pulumi.getter
    def nanos(self) -> Optional[float]:
        """
        Sub-second component at nanosecond resolution; must be from 0 to
        999,999,999 inclusive. Durations under one second use a 0 ``seconds``
        field and a positive ``nanos`` field.
        """
        return pulumi.get(self, "nanos")

    def _translate_property(self, prop):
        # Translate a camelCase wire key to snake_case, or echo it back.
        snake_name = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_name if snake_name else prop
@pulumi.output_type
class RegionUrlMapPathMatcherPathRuleRouteActionTimeout(dict):
    """A duration expressed as whole seconds plus an optional nanosecond
    remainder."""

    def __init__(__self__, *,
                 seconds: str,
                 nanos: Optional[float] = None):
        """
        :param str seconds: Whole-second component of the span; must be from 0
               to 315,576,000,000 inclusive.
        :param float nanos: Sub-second component at nanosecond resolution;
               must be from 0 to 999,999,999 inclusive. Durations under one
               second use a 0 ``seconds`` field and a positive ``nanos`` field.
        """
        pulumi.set(__self__, "seconds", seconds)
        # "nanos" is optional; keep it unset when not provided.
        if nanos is not None:
            pulumi.set(__self__, "nanos", nanos)

    @property
    @pulumi.getter
    def seconds(self) -> str:
        """
        Whole-second component of the span; must be from 0 to
        315,576,000,000 inclusive.
        """
        return pulumi.get(self, "seconds")

    @property
    @pulumi.getter
    def nanos(self) -> Optional[float]:
        """
        Sub-second component at nanosecond resolution; must be from 0 to
        999,999,999 inclusive. Durations under one second use a 0 ``seconds``
        field and a positive ``nanos`` field.
        """
        return pulumi.get(self, "nanos")

    def _translate_property(self, prop):
        # Translate a camelCase wire key to snake_case, or echo it back.
        snake_name = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_name if snake_name else prop
@pulumi.output_type
class RegionUrlMapPathMatcherPathRuleRouteActionUrlRewrite(dict):
    """URL-rewrite spec applied to a request before it is forwarded to the
    selected backend service."""

    def __init__(__self__, *,
                 host_rewrite: Optional[str] = None,
                 path_prefix_rewrite: Optional[str] = None):
        """
        :param str host_rewrite: Replacement for the request's host header,
               applied before forwarding to the selected service. Must be
               between 1 and 255 characters.
        :param str path_prefix_rewrite: Replacement for the matched portion of
               the request's path, applied before forwarding to the selected
               backend service. Must be between 1 and 1024 characters.
        """
        # Both fields are optional; store only what was provided.
        for key, value in (("host_rewrite", host_rewrite),
                           ("path_prefix_rewrite", path_prefix_rewrite)):
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="hostRewrite")
    def host_rewrite(self) -> Optional[str]:
        """
        Replacement for the request's host header, applied before forwarding
        to the selected service. Must be between 1 and 255 characters.
        """
        return pulumi.get(self, "host_rewrite")

    @property
    @pulumi.getter(name="pathPrefixRewrite")
    def path_prefix_rewrite(self) -> Optional[str]:
        """
        Replacement for the matched portion of the request's path, applied
        before forwarding to the selected backend service. Must be between 1
        and 1024 characters.
        """
        return pulumi.get(self, "path_prefix_rewrite")

    def _translate_property(self, prop):
        # Translate a camelCase wire key to snake_case, or echo it back.
        snake_name = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_name if snake_name else prop
@pulumi.output_type
class RegionUrlMapPathMatcherPathRuleRouteActionWeightedBackendService(dict):
    """One weighted backend service in a route action: the backend, its
    traffic weight, and optional header actions."""

    def __init__(__self__, *,
                 backend_service: str,
                 weight: float,
                 header_action: Optional['outputs.RegionUrlMapPathMatcherPathRuleRouteActionWeightedBackendServiceHeaderAction'] = None):
        """
        :param str backend_service: The default RegionBackendService resource.
               Before forwarding the request to backendService, the load
               balancer applies any relevant headerActions specified as part
               of this backendServiceWeight.
        :param float weight: Fraction of traffic sent to backendService,
               computed as weight / (sum of all weightedBackendService weights
               in routeAction). Backend selection happens only for new traffic;
               once a user's request is directed to a backend, session affinity
               keeps subsequent requests on it. Must be between 0 and 1000.
        :param 'RegionUrlMapPathMatcherPathRuleRouteActionWeightedBackendServiceHeaderActionArgs' header_action: Request/response
               header changes for the selected backendService; these take
               effect before headerAction in the enclosing HttpRouteRule,
               PathMatcher and UrlMap. Structure is documented below.
        """
        # backend_service and weight are required; header_action is optional.
        pulumi.set(__self__, "backend_service", backend_service)
        pulumi.set(__self__, "weight", weight)
        if header_action is not None:
            pulumi.set(__self__, "header_action", header_action)

    @property
    @pulumi.getter(name="backendService")
    def backend_service(self) -> str:
        """
        The default RegionBackendService resource. Before forwarding the
        request to backendService, the load balancer applies any relevant
        headerActions specified as part of this backendServiceWeight.
        """
        return pulumi.get(self, "backend_service")

    @property
    @pulumi.getter
    def weight(self) -> float:
        """
        Fraction of traffic sent to backendService, computed as weight /
        (sum of all weightedBackendService weights in routeAction). Backend
        selection happens only for new traffic; once a user's request is
        directed to a backend, session affinity keeps subsequent requests on
        it. Must be between 0 and 1000.
        """
        return pulumi.get(self, "weight")

    @property
    @pulumi.getter(name="headerAction")
    def header_action(self) -> Optional['outputs.RegionUrlMapPathMatcherPathRuleRouteActionWeightedBackendServiceHeaderAction']:
        """
        Request/response header changes for the selected backendService;
        these take effect before headerAction in the enclosing HttpRouteRule,
        PathMatcher and UrlMap. Structure is documented below.
        """
        return pulumi.get(self, "header_action")

    def _translate_property(self, prop):
        # Translate a camelCase wire key to snake_case, or echo it back.
        snake_name = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake_name if snake_name else prop
@pulumi.output_type
class RegionUrlMapPathMatcherPathRuleRouteActionWeightedBackendServiceHeaderAction(dict):
    def __init__(__self__, *,
                 request_headers_to_adds: Optional[List['outputs.RegionUrlMapPathMatcherPathRuleRouteActionWeightedBackendServiceHeaderActionRequestHeadersToAdd']] = None,
                 request_headers_to_removes: Optional[List[str]] = None,
                 response_headers_to_adds: Optional[List['outputs.RegionUrlMapPathMatcherPathRuleRouteActionWeightedBackendServiceHeaderActionResponseHeadersToAdd']] = None,
                 response_headers_to_removes: Optional[List[str]] = None):
        """
        :param List['RegionUrlMapPathMatcherPathRuleRouteActionWeightedBackendServiceHeaderActionRequestHeadersToAddArgs'] request_headers_to_adds: Headers added to a matching request before it is forwarded to the
               backendService.
               Structure is documented below.
        :param List[str] request_headers_to_removes: Names of headers to strip from the request before it is forwarded
               to the backendService.
        :param List['RegionUrlMapPathMatcherPathRuleRouteActionWeightedBackendServiceHeaderActionResponseHeadersToAddArgs'] response_headers_to_adds: Headers added to the response before it is sent back to the client.
               Structure is documented below.
        :param List[str] response_headers_to_removes: Names of headers to strip from the response before it is sent back
               to the client.
        """
        # Record only the fields the caller actually supplied.
        supplied = {
            "request_headers_to_adds": request_headers_to_adds,
            "request_headers_to_removes": request_headers_to_removes,
            "response_headers_to_adds": response_headers_to_adds,
            "response_headers_to_removes": response_headers_to_removes,
        }
        for field_name, field_value in supplied.items():
            if field_value is not None:
                pulumi.set(__self__, field_name, field_value)
    @property
    @pulumi.getter(name="requestHeadersToAdds")
    def request_headers_to_adds(self) -> Optional[List['outputs.RegionUrlMapPathMatcherPathRuleRouteActionWeightedBackendServiceHeaderActionRequestHeadersToAdd']]:
        """
        Headers added to a matching request before it is forwarded to the
        backendService.
        Structure is documented below.
        """
        return pulumi.get(self, "request_headers_to_adds")
    @property
    @pulumi.getter(name="requestHeadersToRemoves")
    def request_headers_to_removes(self) -> Optional[List[str]]:
        """
        Names of headers to strip from the request before it is forwarded to the
        backendService.
        """
        return pulumi.get(self, "request_headers_to_removes")
    @property
    @pulumi.getter(name="responseHeadersToAdds")
    def response_headers_to_adds(self) -> Optional[List['outputs.RegionUrlMapPathMatcherPathRuleRouteActionWeightedBackendServiceHeaderActionResponseHeadersToAdd']]:
        """
        Headers added to the response before it is sent back to the client.
        Structure is documented below.
        """
        return pulumi.get(self, "response_headers_to_adds")
    @property
    @pulumi.getter(name="responseHeadersToRemoves")
    def response_headers_to_removes(self) -> Optional[List[str]]:
        """
        Names of headers to strip from the response before it is sent back to
        the client.
        """
        return pulumi.get(self, "response_headers_to_removes")
    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute name, unchanged if unknown.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated if translated else prop
@pulumi.output_type
class RegionUrlMapPathMatcherPathRuleRouteActionWeightedBackendServiceHeaderActionRequestHeadersToAdd(dict):
    def __init__(__self__, *,
                 header_name: str,
                 header_value: str,
                 replace: bool):
        """
        :param str header_name: The name of the header to add.
        :param str header_value: The value assigned to the header.
        :param bool replace: When true, headerValue is set for the header and any previously
               existing values are discarded. When false, headerValue is appended to
               the values that already exist for the header.
        """
        # All three fields are required; store them on the output object.
        pulumi.set(__self__, "replace", replace)
        pulumi.set(__self__, "header_value", header_value)
        pulumi.set(__self__, "header_name", header_name)
    @property
    @pulumi.getter(name="headerName")
    def header_name(self) -> str:
        """
        The name of the header to add.
        """
        return pulumi.get(self, "header_name")
    @property
    @pulumi.getter(name="headerValue")
    def header_value(self) -> str:
        """
        The value assigned to the header.
        """
        return pulumi.get(self, "header_value")
    @property
    @pulumi.getter
    def replace(self) -> bool:
        """
        When true, headerValue is set for the header and any previously existing
        values are discarded. When false, headerValue is appended to the values
        that already exist for the header.
        """
        return pulumi.get(self, "replace")
    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute name, unchanged if unknown.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated if translated else prop
@pulumi.output_type
class RegionUrlMapPathMatcherPathRuleRouteActionWeightedBackendServiceHeaderActionResponseHeadersToAdd(dict):
    def __init__(__self__, *,
                 header_name: str,
                 header_value: str,
                 replace: bool):
        """
        :param str header_name: The name of the header to add.
        :param str header_value: The value assigned to the header.
        :param bool replace: When true, headerValue is set for the header and any previously
               existing values are discarded. When false, headerValue is appended to
               the values that already exist for the header.
        """
        # All three fields are required; store them on the output object.
        pulumi.set(__self__, "replace", replace)
        pulumi.set(__self__, "header_value", header_value)
        pulumi.set(__self__, "header_name", header_name)
    @property
    @pulumi.getter(name="headerName")
    def header_name(self) -> str:
        """
        The name of the header to add.
        """
        return pulumi.get(self, "header_name")
    @property
    @pulumi.getter(name="headerValue")
    def header_value(self) -> str:
        """
        The value assigned to the header.
        """
        return pulumi.get(self, "header_value")
    @property
    @pulumi.getter
    def replace(self) -> bool:
        """
        When true, headerValue is set for the header and any previously existing
        values are discarded. When false, headerValue is appended to the values
        that already exist for the header.
        """
        return pulumi.get(self, "replace")
    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute name, unchanged if unknown.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated if translated else prop
@pulumi.output_type
class RegionUrlMapPathMatcherPathRuleUrlRedirect(dict):
    def __init__(__self__, *,
                 strip_query: bool,
                 host_redirect: Optional[str] = None,
                 https_redirect: Optional[bool] = None,
                 path_redirect: Optional[str] = None,
                 prefix_redirect: Optional[str] = None,
                 redirect_response_code: Optional[str] = None):
        """
        :param bool strip_query: When true, any accompanying query portion of the original URL is
               dropped before redirecting the request; when false the query portion is
               kept. This field is required so that an empty block is never set; the
               normal default value is false.
        :param str host_redirect: Host used in the redirect response in place of the one supplied in
               the request. Must be between 1 and 255 characters.
        :param bool https_redirect: When true, the URL scheme of the redirected request is set to
               https; when false the redirected request keeps the scheme of the original
               request. Must only be set for UrlMaps used in TargetHttpProxys; setting
               this true for a TargetHttpsProxy is not permitted. Defaults to false.
        :param str path_redirect: Path used in the redirect response instead of the one supplied in
               the request. Cannot be supplied together with prefixRedirect; supply one
               alone or neither. When neither is supplied, the original request path is
               used for the redirect. Must be between 1 and 1024 characters.
        :param str prefix_redirect: Prefix that replaces the prefixMatch specified in the
               HttpRouteRuleMatch, retaining the remaining portion of the URL before
               redirecting. Cannot be supplied together with pathRedirect; supply one
               alone or neither. When neither is supplied, the original request path is
               used for the redirect. Must be between 1 and 1024 characters.
        :param str redirect_response_code: HTTP status code used for this RedirectAction. Supported
               values are:
               * MOVED_PERMANENTLY_DEFAULT, the default value, corresponding to 301.
               * FOUND, corresponding to 302.
               * SEE_OTHER, corresponding to 303.
               * TEMPORARY_REDIRECT, corresponding to 307; the request method is retained.
               * PERMANENT_REDIRECT, corresponding to 308; the request method is retained.
        """
        # strip_query is the only required field.
        pulumi.set(__self__, "strip_query", strip_query)
        # Record only the optional fields the caller actually supplied.
        supplied = {
            "host_redirect": host_redirect,
            "https_redirect": https_redirect,
            "path_redirect": path_redirect,
            "prefix_redirect": prefix_redirect,
            "redirect_response_code": redirect_response_code,
        }
        for field_name, field_value in supplied.items():
            if field_value is not None:
                pulumi.set(__self__, field_name, field_value)
    @property
    @pulumi.getter(name="stripQuery")
    def strip_query(self) -> bool:
        """
        When true, any accompanying query portion of the original URL is dropped
        before redirecting the request; when false the query portion is kept.
        This field is required so that an empty block is never set; the normal
        default value is false.
        """
        return pulumi.get(self, "strip_query")
    @property
    @pulumi.getter(name="hostRedirect")
    def host_redirect(self) -> Optional[str]:
        """
        Host used in the redirect response in place of the one supplied in the
        request. Must be between 1 and 255 characters.
        """
        return pulumi.get(self, "host_redirect")
    @property
    @pulumi.getter(name="httpsRedirect")
    def https_redirect(self) -> Optional[bool]:
        """
        When true, the URL scheme of the redirected request is set to https;
        when false the redirected request keeps the scheme of the original
        request. Must only be set for UrlMaps used in TargetHttpProxys; setting
        this true for a TargetHttpsProxy is not permitted. Defaults to false.
        """
        return pulumi.get(self, "https_redirect")
    @property
    @pulumi.getter(name="pathRedirect")
    def path_redirect(self) -> Optional[str]:
        """
        Path used in the redirect response instead of the one supplied in the
        request. Cannot be supplied together with prefixRedirect; supply one
        alone or neither. When neither is supplied, the original request path is
        used for the redirect. Must be between 1 and 1024 characters.
        """
        return pulumi.get(self, "path_redirect")
    @property
    @pulumi.getter(name="prefixRedirect")
    def prefix_redirect(self) -> Optional[str]:
        """
        Prefix that replaces the prefixMatch specified in the
        HttpRouteRuleMatch, retaining the remaining portion of the URL before
        redirecting. Cannot be supplied together with pathRedirect; supply one
        alone or neither. When neither is supplied, the original request path is
        used for the redirect. Must be between 1 and 1024 characters.
        """
        return pulumi.get(self, "prefix_redirect")
    @property
    @pulumi.getter(name="redirectResponseCode")
    def redirect_response_code(self) -> Optional[str]:
        """
        HTTP status code used for this RedirectAction. Supported values are:
        * MOVED_PERMANENTLY_DEFAULT, the default value, corresponding to 301.
        * FOUND, corresponding to 302.
        * SEE_OTHER, corresponding to 303.
        * TEMPORARY_REDIRECT, corresponding to 307; the request method is retained.
        * PERMANENT_REDIRECT, corresponding to 308; the request method is retained.
        """
        return pulumi.get(self, "redirect_response_code")
    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute name, unchanged if unknown.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated if translated else prop
@pulumi.output_type
class RegionUrlMapPathMatcherRouteRule(dict):
    def __init__(__self__, *,
                 priority: float,
                 header_action: Optional['outputs.RegionUrlMapPathMatcherRouteRuleHeaderAction'] = None,
                 match_rules: Optional[List['outputs.RegionUrlMapPathMatcherRouteRuleMatchRule']] = None,
                 route_action: Optional['outputs.RegionUrlMapPathMatcherRouteRuleRouteAction'] = None,
                 service: Optional[str] = None,
                 url_redirect: Optional['outputs.RegionUrlMapPathMatcherRouteRuleUrlRedirect'] = None):
        """
        :param float priority: For routeRules within a given pathMatcher, priority determines the
               order in which the load balancer interprets routeRules. RouteRules are
               evaluated in order of priority, from the lowest to highest number; the
               priority of a rule decreases as its number increases (1, 2, 3, N+1). The
               first rule that matches the request is applied.
               Two or more routeRules cannot share the same priority. Each rule's
               priority must be a number between 0 and 2147483647 inclusive.
               Priority numbers may have gaps, letting you add or remove rules later
               without affecting the rest. For example, 1, 2, 3, 4, 5, 9, 12, 16 is a
               valid series to which rules numbered 6 to 8, 10 to 11, and 13 to 15
               could be added in the future without any impact on existing rules.
        :param 'RegionUrlMapPathMatcherRouteRuleHeaderActionArgs' header_action: Request/response header changes applied for the selected
               backendService. A headerAction specified here takes effect before any
               headerAction in the enclosing HttpRouteRule, PathMatcher or UrlMap.
               Structure is documented below.
        :param List['RegionUrlMapPathMatcherRouteRuleMatchRuleArgs'] match_rules: The rules for determining a match.
               Structure is documented below.
        :param 'RegionUrlMapPathMatcherRouteRuleRouteActionArgs' route_action: In response to a matching path, the load balancer performs
               advanced routing actions such as URL rewrites and header transformations
               before forwarding the request to the selected backend. If routeAction
               specifies any weightedBackendServices, service must not be set;
               conversely, if service is set, routeAction cannot contain any
               weightedBackendServices. Only one of routeAction or urlRedirect must be
               set.
               Structure is documented below.
        :param str service: A reference to the expected RegionBackendService resource the given
               URL should be mapped to.
        :param 'RegionUrlMapPathMatcherRouteRuleUrlRedirectArgs' url_redirect: When a path pattern is matched, the request is redirected to the
               URL specified by urlRedirect. If urlRedirect is specified, service or
               routeAction must not be set.
               Structure is documented below.
        """
        # priority is the only required field.
        pulumi.set(__self__, "priority", priority)
        # Record only the optional fields the caller actually supplied.
        supplied = {
            "header_action": header_action,
            "match_rules": match_rules,
            "route_action": route_action,
            "service": service,
            "url_redirect": url_redirect,
        }
        for field_name, field_value in supplied.items():
            if field_value is not None:
                pulumi.set(__self__, field_name, field_value)
    @property
    @pulumi.getter
    def priority(self) -> float:
        """
        For routeRules within a given pathMatcher, priority determines the order
        in which the load balancer interprets routeRules. RouteRules are
        evaluated in order of priority, from the lowest to highest number; the
        priority of a rule decreases as its number increases (1, 2, 3, N+1). The
        first rule that matches the request is applied.
        Two or more routeRules cannot share the same priority. Each rule's
        priority must be a number between 0 and 2147483647 inclusive.
        Priority numbers may have gaps, letting you add or remove rules later
        without affecting the rest. For example, 1, 2, 3, 4, 5, 9, 12, 16 is a
        valid series to which rules numbered 6 to 8, 10 to 11, and 13 to 15
        could be added in the future without any impact on existing rules.
        """
        return pulumi.get(self, "priority")
    @property
    @pulumi.getter(name="headerAction")
    def header_action(self) -> Optional['outputs.RegionUrlMapPathMatcherRouteRuleHeaderAction']:
        """
        Request/response header changes applied for the selected backendService.
        A headerAction specified here takes effect before any headerAction in
        the enclosing HttpRouteRule, PathMatcher or UrlMap.
        Structure is documented below.
        """
        return pulumi.get(self, "header_action")
    @property
    @pulumi.getter(name="matchRules")
    def match_rules(self) -> Optional[List['outputs.RegionUrlMapPathMatcherRouteRuleMatchRule']]:
        """
        The rules for determining a match.
        Structure is documented below.
        """
        return pulumi.get(self, "match_rules")
    @property
    @pulumi.getter(name="routeAction")
    def route_action(self) -> Optional['outputs.RegionUrlMapPathMatcherRouteRuleRouteAction']:
        """
        In response to a matching path, the load balancer performs advanced
        routing actions such as URL rewrites and header transformations before
        forwarding the request to the selected backend. If routeAction specifies
        any weightedBackendServices, service must not be set; conversely, if
        service is set, routeAction cannot contain any weightedBackendServices.
        Only one of routeAction or urlRedirect must be set.
        Structure is documented below.
        """
        return pulumi.get(self, "route_action")
    @property
    @pulumi.getter
    def service(self) -> Optional[str]:
        """
        A reference to the expected RegionBackendService resource the given URL
        should be mapped to.
        """
        return pulumi.get(self, "service")
    @property
    @pulumi.getter(name="urlRedirect")
    def url_redirect(self) -> Optional['outputs.RegionUrlMapPathMatcherRouteRuleUrlRedirect']:
        """
        When a path pattern is matched, the request is redirected to the URL
        specified by urlRedirect. If urlRedirect is specified, service or
        routeAction must not be set.
        Structure is documented below.
        """
        return pulumi.get(self, "url_redirect")
    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute name, unchanged if unknown.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated if translated else prop
@pulumi.output_type
class RegionUrlMapPathMatcherRouteRuleHeaderAction(dict):
    def __init__(__self__, *,
                 request_headers_to_adds: Optional[List['outputs.RegionUrlMapPathMatcherRouteRuleHeaderActionRequestHeadersToAdd']] = None,
                 request_headers_to_removes: Optional[List[str]] = None,
                 response_headers_to_adds: Optional[List['outputs.RegionUrlMapPathMatcherRouteRuleHeaderActionResponseHeadersToAdd']] = None,
                 response_headers_to_removes: Optional[List[str]] = None):
        """
        :param List['RegionUrlMapPathMatcherRouteRuleHeaderActionRequestHeadersToAddArgs'] request_headers_to_adds: Headers added to a matching request before it is forwarded to the
               backendService.
               Structure is documented below.
        :param List[str] request_headers_to_removes: Names of headers to strip from the request before it is forwarded
               to the backendService.
        :param List['RegionUrlMapPathMatcherRouteRuleHeaderActionResponseHeadersToAddArgs'] response_headers_to_adds: Headers added to the response before it is sent back to the client.
               Structure is documented below.
        :param List[str] response_headers_to_removes: Names of headers to strip from the response before it is sent back
               to the client.
        """
        # Record only the fields the caller actually supplied.
        supplied = {
            "request_headers_to_adds": request_headers_to_adds,
            "request_headers_to_removes": request_headers_to_removes,
            "response_headers_to_adds": response_headers_to_adds,
            "response_headers_to_removes": response_headers_to_removes,
        }
        for field_name, field_value in supplied.items():
            if field_value is not None:
                pulumi.set(__self__, field_name, field_value)
    @property
    @pulumi.getter(name="requestHeadersToAdds")
    def request_headers_to_adds(self) -> Optional[List['outputs.RegionUrlMapPathMatcherRouteRuleHeaderActionRequestHeadersToAdd']]:
        """
        Headers added to a matching request before it is forwarded to the
        backendService.
        Structure is documented below.
        """
        return pulumi.get(self, "request_headers_to_adds")
    @property
    @pulumi.getter(name="requestHeadersToRemoves")
    def request_headers_to_removes(self) -> Optional[List[str]]:
        """
        Names of headers to strip from the request before it is forwarded to the
        backendService.
        """
        return pulumi.get(self, "request_headers_to_removes")
    @property
    @pulumi.getter(name="responseHeadersToAdds")
    def response_headers_to_adds(self) -> Optional[List['outputs.RegionUrlMapPathMatcherRouteRuleHeaderActionResponseHeadersToAdd']]:
        """
        Headers added to the response before it is sent back to the client.
        Structure is documented below.
        """
        return pulumi.get(self, "response_headers_to_adds")
    @property
    @pulumi.getter(name="responseHeadersToRemoves")
    def response_headers_to_removes(self) -> Optional[List[str]]:
        """
        Names of headers to strip from the response before it is sent back to
        the client.
        """
        return pulumi.get(self, "response_headers_to_removes")
    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute name, unchanged if unknown.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated if translated else prop
@pulumi.output_type
class RegionUrlMapPathMatcherRouteRuleHeaderActionRequestHeadersToAdd(dict):
    def __init__(__self__, *,
                 header_name: str,
                 header_value: str,
                 replace: bool):
        """
        :param str header_name: The name of the header to add.
        :param str header_value: The value assigned to the header.
        :param bool replace: When true, headerValue is set for the header and any previously
               existing values are discarded. When false, headerValue is appended to
               the values that already exist for the header.
        """
        # All three fields are required; store them on the output object.
        pulumi.set(__self__, "replace", replace)
        pulumi.set(__self__, "header_value", header_value)
        pulumi.set(__self__, "header_name", header_name)
    @property
    @pulumi.getter(name="headerName")
    def header_name(self) -> str:
        """
        The name of the header to add.
        """
        return pulumi.get(self, "header_name")
    @property
    @pulumi.getter(name="headerValue")
    def header_value(self) -> str:
        """
        The value assigned to the header.
        """
        return pulumi.get(self, "header_value")
    @property
    @pulumi.getter
    def replace(self) -> bool:
        """
        When true, headerValue is set for the header and any previously existing
        values are discarded. When false, headerValue is appended to the values
        that already exist for the header.
        """
        return pulumi.get(self, "replace")
    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute name, unchanged if unknown.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated if translated else prop
@pulumi.output_type
class RegionUrlMapPathMatcherRouteRuleHeaderActionResponseHeadersToAdd(dict):
    def __init__(__self__, *,
                 header_name: str,
                 header_value: str,
                 replace: bool):
        """
        :param str header_name: The name of the header to add.
        :param str header_value: The value assigned to the header.
        :param bool replace: When true, headerValue is set for the header and any previously
               existing values are discarded. When false, headerValue is appended to
               the values that already exist for the header.
        """
        # All three fields are required; store them on the output object.
        pulumi.set(__self__, "replace", replace)
        pulumi.set(__self__, "header_value", header_value)
        pulumi.set(__self__, "header_name", header_name)
    @property
    @pulumi.getter(name="headerName")
    def header_name(self) -> str:
        """
        The name of the header to add.
        """
        return pulumi.get(self, "header_name")
    @property
    @pulumi.getter(name="headerValue")
    def header_value(self) -> str:
        """
        The value assigned to the header.
        """
        return pulumi.get(self, "header_value")
    @property
    @pulumi.getter
    def replace(self) -> bool:
        """
        When true, headerValue is set for the header and any previously existing
        values are discarded. When false, headerValue is appended to the values
        that already exist for the header.
        """
        return pulumi.get(self, "replace")
    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute name, unchanged if unknown.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated if translated else prop
@pulumi.output_type
class RegionUrlMapPathMatcherRouteRuleMatchRule(dict):
    def __init__(__self__, *,
                 full_path_match: Optional[str] = None,
                 header_matches: Optional[List['outputs.RegionUrlMapPathMatcherRouteRuleMatchRuleHeaderMatch']] = None,
                 ignore_case: Optional[bool] = None,
                 metadata_filters: Optional[List['outputs.RegionUrlMapPathMatcherRouteRuleMatchRuleMetadataFilter']] = None,
                 prefix_match: Optional[str] = None,
                 query_parameter_matches: Optional[List['outputs.RegionUrlMapPathMatcherRouteRuleMatchRuleQueryParameterMatch']] = None,
                 regex_match: Optional[str] = None):
        """
        :param str full_path_match: To satisfy the matchRule condition, the request path must exactly
               match the value given in fullPathMatch after removing any query
               parameters and anchor that may be part of the original URL.
               fullPathMatch must be between 1 and 1024 characters. Only one of
               prefixMatch, fullPathMatch or regexMatch must be specified.
        :param List['RegionUrlMapPathMatcherRouteRuleMatchRuleHeaderMatchArgs'] header_matches: A list of header match criteria, all of which must match
               corresponding headers in the request.
               Structure is documented below.
        :param bool ignore_case: Specifies that prefixMatch and fullPathMatch matches are case
               sensitive. Defaults to false.
        :param List['RegionUrlMapPathMatcherRouteRuleMatchRuleMetadataFilterArgs'] metadata_filters: Opaque filter criteria used by the load balancer to restrict
               routing configuration to a limited set of xDS compliant clients. In
               their xDS requests to the load balancer, xDS clients present node
               metadata; if a match takes place, the relevant routing configuration is
               made available to those proxies. For each metadataFilter in this list,
               if its filterMatchCriteria is MATCH_ANY, at least one of its
               filterLabels must match the corresponding label in the metadata; if its
               filterMatchCriteria is MATCH_ALL, all of its filterLabels must match.
               metadataFilters specified here can override those specified in the
               ForwardingRule that refers to this UrlMap. metadataFilters only applies
               to load balancers whose loadBalancingScheme is INTERNAL_SELF_MANAGED.
               Structure is documented below.
        :param str prefix_match: The header value must start with the contents of prefixMatch. Only
               one of exactMatch, prefixMatch, suffixMatch, regexMatch, presentMatch or
               rangeMatch must be set.
        :param List['RegionUrlMapPathMatcherRouteRuleMatchRuleQueryParameterMatchArgs'] query_parameter_matches: A list of query parameter match criteria, all of which must match
               corresponding query parameters in the request.
               Structure is documented below.
        :param str regex_match: The queryParameterMatch matches if the parameter value matches the
               regular expression given by regexMatch. For the regular expression
               grammar, see en.cppreference.com/w/cpp/regex/ecmascript. Only one of
               presentMatch, exactMatch and regexMatch must be set.
        """
        # Every field is optional; record only those the caller supplied.
        supplied = {
            "full_path_match": full_path_match,
            "header_matches": header_matches,
            "ignore_case": ignore_case,
            "metadata_filters": metadata_filters,
            "prefix_match": prefix_match,
            "query_parameter_matches": query_parameter_matches,
            "regex_match": regex_match,
        }
        for field_name, field_value in supplied.items():
            if field_value is not None:
                pulumi.set(__self__, field_name, field_value)
    @property
    @pulumi.getter(name="fullPathMatch")
    def full_path_match(self) -> Optional[str]:
        """
        To satisfy the matchRule condition, the request path must exactly match
        the value given in fullPathMatch after removing any query parameters and
        anchor that may be part of the original URL. fullPathMatch must be
        between 1 and 1024 characters. Only one of prefixMatch, fullPathMatch or
        regexMatch must be specified.
        """
        return pulumi.get(self, "full_path_match")
    @property
    @pulumi.getter(name="headerMatches")
    def header_matches(self) -> Optional[List['outputs.RegionUrlMapPathMatcherRouteRuleMatchRuleHeaderMatch']]:
        """
        A list of header match criteria, all of which must match corresponding
        headers in the request.
        Structure is documented below.
        """
        return pulumi.get(self, "header_matches")
    @property
    @pulumi.getter(name="ignoreCase")
    def ignore_case(self) -> Optional[bool]:
        """
        Specifies that prefixMatch and fullPathMatch matches are case sensitive.
        Defaults to false.
        """
        return pulumi.get(self, "ignore_case")
    @property
    @pulumi.getter(name="metadataFilters")
    def metadata_filters(self) -> Optional[List['outputs.RegionUrlMapPathMatcherRouteRuleMatchRuleMetadataFilter']]:
        """
        Opaque filter criteria used by the load balancer to restrict routing
        configuration to a limited set of xDS compliant clients. In their xDS
        requests to the load balancer, xDS clients present node metadata; if a
        match takes place, the relevant routing configuration is made available
        to those proxies. For each metadataFilter in this list, if its
        filterMatchCriteria is MATCH_ANY, at least one of its filterLabels must
        match the corresponding label in the metadata; if its
        filterMatchCriteria is MATCH_ALL, all of its filterLabels must match.
        metadataFilters specified here can override those specified in the
        ForwardingRule that refers to this UrlMap. metadataFilters only applies
        to load balancers whose loadBalancingScheme is INTERNAL_SELF_MANAGED.
        Structure is documented below.
        """
        return pulumi.get(self, "metadata_filters")
    @property
    @pulumi.getter(name="prefixMatch")
    def prefix_match(self) -> Optional[str]:
        """
        The header value must start with the contents of prefixMatch. Only one
        of exactMatch, prefixMatch, suffixMatch, regexMatch, presentMatch or
        rangeMatch must be set.
        """
        return pulumi.get(self, "prefix_match")
    @property
    @pulumi.getter(name="queryParameterMatches")
    def query_parameter_matches(self) -> Optional[List['outputs.RegionUrlMapPathMatcherRouteRuleMatchRuleQueryParameterMatch']]:
        """
        A list of query parameter match criteria, all of which must match
        corresponding query parameters in the request.
        Structure is documented below.
        """
        return pulumi.get(self, "query_parameter_matches")
    @property
    @pulumi.getter(name="regexMatch")
    def regex_match(self) -> Optional[str]:
        """
        The queryParameterMatch matches if the parameter value matches the
        regular expression given by regexMatch. For the regular expression
        grammar, see en.cppreference.com/w/cpp/regex/ecmascript. Only one of
        presentMatch, exactMatch and regexMatch must be set.
        """
        return pulumi.get(self, "regex_match")
    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute name, unchanged if unknown.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated if translated else prop
@pulumi.output_type
class RegionUrlMapPathMatcherRouteRuleMatchRuleHeaderMatch(dict):
    """Criteria for matching a single HTTP request header."""

    def __init__(__self__, *,
                 header_name: str,
                 exact_match: Optional[str] = None,
                 invert_match: Optional[bool] = None,
                 prefix_match: Optional[str] = None,
                 present_match: Optional[bool] = None,
                 range_match: Optional['outputs.RegionUrlMapPathMatcherRouteRuleMatchRuleHeaderMatchRangeMatch'] = None,
                 regex_match: Optional[str] = None,
                 suffix_match: Optional[str] = None):
        """
        :param str header_name: The name of the header.
        :param str exact_match: The header value must exactly equal the contents of
               exactMatch. Only one of exactMatch, prefixMatch, suffixMatch,
               regexMatch, presentMatch or rangeMatch may be set.
        :param bool invert_match: When false (the default) the headerMatch matches
               if the criteria above are met; when true it matches if they are
               NOT met.
        :param str prefix_match: The header value must start with the contents of
               prefixMatch. Only one of exactMatch, prefixMatch, suffixMatch,
               regexMatch, presentMatch or rangeMatch may be set.
        :param bool present_match: Matches if the header is present, irrespective
               of its value. Only one of the match criteria may be set.
        :param 'RegionUrlMapPathMatcherRouteRuleMatchRuleHeaderMatchRangeMatchArgs' range_match: The header value must be an
               integer within the range specified in rangeMatch; if the header
               does not contain an integer, or is empty, the match fails. For
               example, for the range [-5, 0]: -3 matches, while 0, 0.25 and
               -3someString do not. Only one of exactMatch, prefixMatch,
               suffixMatch, regexMatch, presentMatch or rangeMatch may be set.
               Structure is documented below.
        :param str regex_match: The header value must match the regular expression
               in regexMatch (grammar:
               en.cppreference.com/w/cpp/regex/ecmascript). Only one of
               presentMatch, exactMatch and regexMatch may be set.
        :param str suffix_match: The header value must end with the contents of
               suffixMatch. Only one of exactMatch, prefixMatch, suffixMatch,
               regexMatch, presentMatch or rangeMatch may be set.
        """
        pulumi.set(__self__, "header_name", header_name)
        # Persist only the optional criteria that were actually supplied.
        optional_fields = {
            "exact_match": exact_match,
            "invert_match": invert_match,
            "prefix_match": prefix_match,
            "present_match": present_match,
            "range_match": range_match,
            "regex_match": regex_match,
            "suffix_match": suffix_match,
        }
        for key, value in optional_fields.items():
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="headerName")
    def header_name(self) -> str:
        """The name of the header."""
        return pulumi.get(self, "header_name")

    @property
    @pulumi.getter(name="exactMatch")
    def exact_match(self) -> Optional[str]:
        """
        The header value must exactly equal the contents of exactMatch. Only
        one of presentMatch, exactMatch and regexMatch may be set.
        """
        return pulumi.get(self, "exact_match")

    @property
    @pulumi.getter(name="invertMatch")
    def invert_match(self) -> Optional[bool]:
        """
        When false (the default) the headerMatch matches if the criteria
        above are met; when true it matches if they are NOT met.
        """
        return pulumi.get(self, "invert_match")

    @property
    @pulumi.getter(name="prefixMatch")
    def prefix_match(self) -> Optional[str]:
        """
        The header value must start with the contents of prefixMatch. Only
        one of exactMatch, prefixMatch, suffixMatch, regexMatch, presentMatch
        or rangeMatch may be set.
        """
        return pulumi.get(self, "prefix_match")

    @property
    @pulumi.getter(name="presentMatch")
    def present_match(self) -> Optional[bool]:
        """
        Matches if the header is present, irrespective of its value. Only
        one of presentMatch, exactMatch and regexMatch may be set.
        """
        return pulumi.get(self, "present_match")

    @property
    @pulumi.getter(name="rangeMatch")
    def range_match(self) -> Optional['outputs.RegionUrlMapPathMatcherRouteRuleMatchRuleHeaderMatchRangeMatch']:
        """
        The header value must be an integer within the range specified in
        rangeMatch; if the header does not contain an integer, or is empty,
        the match fails. For example, for the range [-5, 0]: -3 matches,
        while 0, 0.25 and -3someString do not. Only one of exactMatch,
        prefixMatch, suffixMatch, regexMatch, presentMatch or rangeMatch may
        be set.
        Structure is documented below.
        """
        return pulumi.get(self, "range_match")

    @property
    @pulumi.getter(name="regexMatch")
    def regex_match(self) -> Optional[str]:
        """
        The header value must match the regular expression in regexMatch
        (grammar: en.cppreference.com/w/cpp/regex/ecmascript). Only one of
        presentMatch, exactMatch and regexMatch may be set.
        """
        return pulumi.get(self, "regex_match")

    @property
    @pulumi.getter(name="suffixMatch")
    def suffix_match(self) -> Optional[str]:
        """
        The header value must end with the contents of suffixMatch. Only one
        of exactMatch, prefixMatch, suffixMatch, regexMatch, presentMatch or
        rangeMatch may be set.
        """
        return pulumi.get(self, "suffix_match")

    def _translate_property(self, prop):
        # Map camelCase wire names to snake_case attribute names.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class RegionUrlMapPathMatcherRouteRuleMatchRuleHeaderMatchRangeMatch(dict):
    """An integer range used by a header match's rangeMatch criterion."""

    def __init__(__self__, *,
                 range_end: float,
                 range_start: float):
        """
        :param float range_end: End of the range (exclusive).
        :param float range_start: Start of the range (inclusive).
        """
        pulumi.set(__self__, "range_end", range_end)
        pulumi.set(__self__, "range_start", range_start)

    @property
    @pulumi.getter(name="rangeEnd")
    def range_end(self) -> float:
        """End of the range (exclusive)."""
        return pulumi.get(self, "range_end")

    @property
    @pulumi.getter(name="rangeStart")
    def range_start(self) -> float:
        """Start of the range (inclusive)."""
        return pulumi.get(self, "range_start")

    def _translate_property(self, prop):
        # Map camelCase wire names to snake_case attribute names.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class RegionUrlMapPathMatcherRouteRuleMatchRuleMetadataFilter(dict):
    """A filter evaluated against the metadata presented by clients."""

    def __init__(__self__, *,
                 filter_labels: List['outputs.RegionUrlMapPathMatcherRouteRuleMatchRuleMetadataFilterFilterLabel'],
                 filter_match_criteria: str):
        """
        :param List['RegionUrlMapPathMatcherRouteRuleMatchRuleMetadataFilterFilterLabelArgs'] filter_labels: Label value pairs
               that must match labels in the provided metadata according to
               filterMatchCriteria. The list must not be empty and can have at
               most 64 entries.
               Structure is documented below.
        :param str filter_match_criteria: How individual filterLabel matches
               contribute to the overall metadataFilter match. Supported
               values:
               - MATCH_ANY: at least one filterLabel must have a matching
                 label in the provided metadata.
               - MATCH_ALL: all filterLabels must have matching labels in the
                 provided metadata.
               Possible values are `MATCH_ALL` and `MATCH_ANY`.
        """
        pulumi.set(__self__, "filter_labels", filter_labels)
        pulumi.set(__self__, "filter_match_criteria", filter_match_criteria)

    @property
    @pulumi.getter(name="filterLabels")
    def filter_labels(self) -> List['outputs.RegionUrlMapPathMatcherRouteRuleMatchRuleMetadataFilterFilterLabel']:
        """
        Label value pairs that must match labels in the provided metadata
        according to filterMatchCriteria. The list must not be empty and can
        have at most 64 entries.
        Structure is documented below.
        """
        return pulumi.get(self, "filter_labels")

    @property
    @pulumi.getter(name="filterMatchCriteria")
    def filter_match_criteria(self) -> str:
        """
        How individual filterLabel matches contribute to the overall
        metadataFilter match: MATCH_ANY requires at least one filterLabel to
        have a matching label in the provided metadata; MATCH_ALL requires
        all of them to match.
        Possible values are `MATCH_ALL` and `MATCH_ANY`.
        """
        return pulumi.get(self, "filter_match_criteria")

    def _translate_property(self, prop):
        # Map camelCase wire names to snake_case attribute names.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class RegionUrlMapPathMatcherRouteRuleMatchRuleMetadataFilterFilterLabel(dict):
    """A single name/value label entry inside a metadata filter."""

    def __init__(__self__, *,
                 name: str,
                 value: str):
        """
        :param str name: Name of the metadata label to match.
               NOTE(review): the upstream generated doc described a query
               parameter here; per the class context this is a label name —
               confirm against the API reference.
        :param str value: The label value must match this value; it can have a
               maximum length of 1024 characters.
        """
        pulumi.set(__self__, "name", name)
        pulumi.set(__self__, "value", value)

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        Name of the metadata label to match.
        NOTE(review): upstream doc described a query parameter here; per the
        class context this is a label name — confirm against the API
        reference.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def value(self) -> str:
        """
        The label value must match this value; it can have a maximum length
        of 1024 characters.
        """
        return pulumi.get(self, "value")

    def _translate_property(self, prop):
        # Map camelCase wire names to snake_case attribute names.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class RegionUrlMapPathMatcherRouteRuleMatchRuleQueryParameterMatch(dict):
    """Criteria for matching a single query parameter of the request URL."""

    def __init__(__self__, *,
                 name: str,
                 exact_match: Optional[str] = None,
                 present_match: Optional[bool] = None,
                 regex_match: Optional[str] = None):
        """
        :param str name: The name of the query parameter to match; the parameter
               must exist in the request, otherwise the match fails.
        :param str exact_match: Matches if the parameter value exactly equals the
               contents of exactMatch. Only one of presentMatch, exactMatch
               and regexMatch may be set.
        :param bool present_match: Matches if the request contains the query
               parameter, irrespective of whether it has a value. Only one of
               presentMatch, exactMatch and regexMatch may be set.
        :param str regex_match: Matches if the parameter value matches the regular
               expression in regexMatch (grammar:
               en.cppreference.com/w/cpp/regex/ecmascript). Only one of
               presentMatch, exactMatch and regexMatch may be set.
        """
        pulumi.set(__self__, "name", name)
        # Persist only the optional criteria that were actually supplied.
        optional_fields = {
            "exact_match": exact_match,
            "present_match": present_match,
            "regex_match": regex_match,
        }
        for key, value in optional_fields.items():
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        The name of the query parameter to match; the parameter must exist
        in the request, otherwise the match fails.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="exactMatch")
    def exact_match(self) -> Optional[str]:
        """
        Matches if the parameter value exactly equals the contents of
        exactMatch. Only one of presentMatch, exactMatch and regexMatch may
        be set.
        """
        return pulumi.get(self, "exact_match")

    @property
    @pulumi.getter(name="presentMatch")
    def present_match(self) -> Optional[bool]:
        """
        Matches if the request contains the query parameter, irrespective of
        whether it has a value. Only one of presentMatch, exactMatch and
        regexMatch may be set.
        """
        return pulumi.get(self, "present_match")

    @property
    @pulumi.getter(name="regexMatch")
    def regex_match(self) -> Optional[str]:
        """
        Matches if the parameter value matches the regular expression in
        regexMatch (grammar: en.cppreference.com/w/cpp/regex/ecmascript).
        Only one of presentMatch, exactMatch and regexMatch may be set.
        """
        return pulumi.get(self, "regex_match")

    def _translate_property(self, prop):
        # Map camelCase wire names to snake_case attribute names.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class RegionUrlMapPathMatcherRouteRuleRouteAction(dict):
    """Advanced routing actions applied when a route rule matches."""

    def __init__(__self__, *,
                 cors_policy: Optional['outputs.RegionUrlMapPathMatcherRouteRuleRouteActionCorsPolicy'] = None,
                 fault_injection_policy: Optional['outputs.RegionUrlMapPathMatcherRouteRuleRouteActionFaultInjectionPolicy'] = None,
                 request_mirror_policy: Optional['outputs.RegionUrlMapPathMatcherRouteRuleRouteActionRequestMirrorPolicy'] = None,
                 retry_policy: Optional['outputs.RegionUrlMapPathMatcherRouteRuleRouteActionRetryPolicy'] = None,
                 timeout: Optional['outputs.RegionUrlMapPathMatcherRouteRuleRouteActionTimeout'] = None,
                 url_rewrite: Optional['outputs.RegionUrlMapPathMatcherRouteRuleRouteActionUrlRewrite'] = None,
                 weighted_backend_services: Optional[List['outputs.RegionUrlMapPathMatcherRouteRuleRouteActionWeightedBackendService']] = None):
        """
        :param 'RegionUrlMapPathMatcherRouteRuleRouteActionCorsPolicyArgs' cors_policy: Specification for allowing client-side
               cross-origin requests; see the W3C Recommendation for Cross
               Origin Resource Sharing.
               Structure is documented below.
        :param 'RegionUrlMapPathMatcherRouteRuleRouteActionFaultInjectionPolicyArgs' fault_injection_policy: Specification for
               fault injection, used to test client resiliency to backend
               service failure: the Loadbalancer can delay a percentage of
               requests before sending them to the backend service, and can
               abort a percentage of requests. timeout and retry_policy are
               ignored by clients configured with a fault_injection_policy.
               Structure is documented below.
        :param 'RegionUrlMapPathMatcherRouteRuleRouteActionRequestMirrorPolicyArgs' request_mirror_policy: Policy for shadowing
               requests intended for the route's backends to a separate
               mirrored backend service. The Loadbalancer does not wait for
               responses from the shadow service; before traffic is sent to
               it, the host / authority header is suffixed with -shadow.
               Structure is documented below.
        :param 'RegionUrlMapPathMatcherRouteRuleRouteActionRetryPolicyArgs' retry_policy: The retry policy associated with this
               route.
               Structure is documented below.
        :param 'RegionUrlMapPathMatcherRouteRuleRouteActionTimeoutArgs' timeout: Timeout for the selected route, measured from
               the time the request has been fully processed (i.e.
               end-of-stream) until the response has been completely
               processed, including all retries. Defaults to 15 seconds when
               unspecified.
               Structure is documented below.
        :param 'RegionUrlMapPathMatcherRouteRuleRouteActionUrlRewriteArgs' url_rewrite: Spec to modify the URL of the request
               before forwarding it to the matched service.
               Structure is documented below.
        :param List['RegionUrlMapPathMatcherRouteRuleRouteActionWeightedBackendServiceArgs'] weighted_backend_services: Weighted
               backend services that receive traffic on a route match; the
               weights determine the fraction of traffic that flows to each
               backend service. To send all traffic to a single service,
               supply one weightedBackendService with a non-zero weight. Once
               a backendService is identified, and before forwarding, advanced
               routing actions such as URL rewrites and header transformations
               are applied per additional settings in this HttpRouteAction.
               Structure is documented below.
        """
        # All sub-policies are optional; persist only those supplied.
        optional_fields = {
            "cors_policy": cors_policy,
            "fault_injection_policy": fault_injection_policy,
            "request_mirror_policy": request_mirror_policy,
            "retry_policy": retry_policy,
            "timeout": timeout,
            "url_rewrite": url_rewrite,
            "weighted_backend_services": weighted_backend_services,
        }
        for key, value in optional_fields.items():
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="corsPolicy")
    def cors_policy(self) -> Optional['outputs.RegionUrlMapPathMatcherRouteRuleRouteActionCorsPolicy']:
        """
        Specification for allowing client-side cross-origin requests; see
        the W3C Recommendation for Cross Origin Resource Sharing.
        Structure is documented below.
        """
        return pulumi.get(self, "cors_policy")

    @property
    @pulumi.getter(name="faultInjectionPolicy")
    def fault_injection_policy(self) -> Optional['outputs.RegionUrlMapPathMatcherRouteRuleRouteActionFaultInjectionPolicy']:
        """
        Specification for fault injection, used to test client resiliency to
        backend service failure: the Loadbalancer can delay a percentage of
        requests before sending them to the backend service, and can abort a
        percentage of requests. timeout and retry_policy are ignored by
        clients configured with a fault_injection_policy.
        Structure is documented below.
        """
        return pulumi.get(self, "fault_injection_policy")

    @property
    @pulumi.getter(name="requestMirrorPolicy")
    def request_mirror_policy(self) -> Optional['outputs.RegionUrlMapPathMatcherRouteRuleRouteActionRequestMirrorPolicy']:
        """
        Policy for shadowing requests intended for the route's backends to a
        separate mirrored backend service. The Loadbalancer does not wait
        for responses from the shadow service; before traffic is sent to it,
        the host / authority header is suffixed with -shadow.
        Structure is documented below.
        """
        return pulumi.get(self, "request_mirror_policy")

    @property
    @pulumi.getter(name="retryPolicy")
    def retry_policy(self) -> Optional['outputs.RegionUrlMapPathMatcherRouteRuleRouteActionRetryPolicy']:
        """
        The retry policy associated with this route.
        Structure is documented below.
        """
        return pulumi.get(self, "retry_policy")

    @property
    @pulumi.getter
    def timeout(self) -> Optional['outputs.RegionUrlMapPathMatcherRouteRuleRouteActionTimeout']:
        """
        Timeout for the selected route, measured from the time the request
        has been fully processed (i.e. end-of-stream) until the response has
        been completely processed, including all retries. Defaults to 15
        seconds when unspecified.
        Structure is documented below.
        """
        return pulumi.get(self, "timeout")

    @property
    @pulumi.getter(name="urlRewrite")
    def url_rewrite(self) -> Optional['outputs.RegionUrlMapPathMatcherRouteRuleRouteActionUrlRewrite']:
        """
        Spec to modify the URL of the request before forwarding it to the
        matched service.
        Structure is documented below.
        """
        return pulumi.get(self, "url_rewrite")

    @property
    @pulumi.getter(name="weightedBackendServices")
    def weighted_backend_services(self) -> Optional[List['outputs.RegionUrlMapPathMatcherRouteRuleRouteActionWeightedBackendService']]:
        """
        Weighted backend services that receive traffic on a route match; the
        weights determine the fraction of traffic that flows to each backend
        service. To send all traffic to a single service, supply one
        weightedBackendService with a non-zero weight. Once a backendService
        is identified, and before forwarding, advanced routing actions such
        as URL rewrites and header transformations are applied per
        additional settings in this HttpRouteAction.
        Structure is documented below.
        """
        return pulumi.get(self, "weighted_backend_services")

    def _translate_property(self, prop):
        # Map camelCase wire names to snake_case attribute names.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class RegionUrlMapPathMatcherRouteRuleRouteActionCorsPolicy(dict):
    """CORS policy attached to a route action."""

    def __init__(__self__, *,
                 allow_credentials: Optional[bool] = None,
                 allow_headers: Optional[List[str]] = None,
                 allow_methods: Optional[List[str]] = None,
                 allow_origin_regexes: Optional[List[str]] = None,
                 allow_origins: Optional[List[str]] = None,
                 disabled: Optional[bool] = None,
                 expose_headers: Optional[List[str]] = None,
                 max_age: Optional[float] = None):
        """
        :param bool allow_credentials: In response to a preflight request, setting
               this to true indicates that the actual request can include
               user credentials; translates to the
               Access-Control-Allow-Credentials header. Defaults to false.
        :param List[str] allow_headers: Content for the
               Access-Control-Allow-Headers header.
        :param List[str] allow_methods: Content for the
               Access-Control-Allow-Methods header.
        :param List[str] allow_origin_regexes: Regular expression patterns that
               match allowed origins (grammar:
               en.cppreference.com/w/cpp/regex/ecmascript). An origin is
               allowed if it matches either allow_origins or
               allow_origin_regex.
        :param List[str] allow_origins: Origins that will be allowed to make CORS
               requests. An origin is allowed if it matches either
               allow_origins or allow_origin_regex.
        :param bool disabled: If true, the CORS policy is disabled.
        :param List[str] expose_headers: Content for the
               Access-Control-Expose-Headers header.
        :param float max_age: How long the results of a preflight request can be
               cached; translates to the content of the
               Access-Control-Max-Age header.
        """
        # Every field is optional; persist only those supplied.
        optional_fields = {
            "allow_credentials": allow_credentials,
            "allow_headers": allow_headers,
            "allow_methods": allow_methods,
            "allow_origin_regexes": allow_origin_regexes,
            "allow_origins": allow_origins,
            "disabled": disabled,
            "expose_headers": expose_headers,
            "max_age": max_age,
        }
        for key, value in optional_fields.items():
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="allowCredentials")
    def allow_credentials(self) -> Optional[bool]:
        """
        In response to a preflight request, setting this to true indicates
        that the actual request can include user credentials; translates to
        the Access-Control-Allow-Credentials header. Defaults to false.
        """
        return pulumi.get(self, "allow_credentials")

    @property
    @pulumi.getter(name="allowHeaders")
    def allow_headers(self) -> Optional[List[str]]:
        """Content for the Access-Control-Allow-Headers header."""
        return pulumi.get(self, "allow_headers")

    @property
    @pulumi.getter(name="allowMethods")
    def allow_methods(self) -> Optional[List[str]]:
        """Content for the Access-Control-Allow-Methods header."""
        return pulumi.get(self, "allow_methods")

    @property
    @pulumi.getter(name="allowOriginRegexes")
    def allow_origin_regexes(self) -> Optional[List[str]]:
        """
        Regular expression patterns that match allowed origins (grammar:
        en.cppreference.com/w/cpp/regex/ecmascript). An origin is allowed if
        it matches either allow_origins or allow_origin_regex.
        """
        return pulumi.get(self, "allow_origin_regexes")

    @property
    @pulumi.getter(name="allowOrigins")
    def allow_origins(self) -> Optional[List[str]]:
        """
        Origins that will be allowed to make CORS requests. An origin is
        allowed if it matches either allow_origins or allow_origin_regex.
        """
        return pulumi.get(self, "allow_origins")

    @property
    @pulumi.getter
    def disabled(self) -> Optional[bool]:
        """If true, the CORS policy is disabled."""
        return pulumi.get(self, "disabled")

    @property
    @pulumi.getter(name="exposeHeaders")
    def expose_headers(self) -> Optional[List[str]]:
        """Content for the Access-Control-Expose-Headers header."""
        return pulumi.get(self, "expose_headers")

    @property
    @pulumi.getter(name="maxAge")
    def max_age(self) -> Optional[float]:
        """
        How long the results of a preflight request can be cached;
        translates to the content of the Access-Control-Max-Age header.
        """
        return pulumi.get(self, "max_age")

    def _translate_property(self, prop):
        # Map camelCase wire names to snake_case attribute names.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class RegionUrlMapPathMatcherRouteRuleRouteActionFaultInjectionPolicy(dict):
    """Fault injection (abort/delay) configuration for a route action."""

    def __init__(__self__, *,
                 abort: Optional['outputs.RegionUrlMapPathMatcherRouteRuleRouteActionFaultInjectionPolicyAbort'] = None,
                 delay: Optional['outputs.RegionUrlMapPathMatcherRouteRuleRouteActionFaultInjectionPolicyDelay'] = None):
        """
        :param 'RegionUrlMapPathMatcherRouteRuleRouteActionFaultInjectionPolicyAbortArgs' abort: How client requests are aborted
               as part of fault injection.
               Structure is documented below.
        :param 'RegionUrlMapPathMatcherRouteRuleRouteActionFaultInjectionPolicyDelayArgs' delay: How client requests are delayed
               as part of fault injection, before being sent to a backend
               service.
               Structure is documented below.
        """
        # Both sub-specs are optional; persist only those supplied.
        optional_fields = {"abort": abort, "delay": delay}
        for key, value in optional_fields.items():
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter
    def abort(self) -> Optional['outputs.RegionUrlMapPathMatcherRouteRuleRouteActionFaultInjectionPolicyAbort']:
        """
        How client requests are aborted as part of fault injection.
        Structure is documented below.
        """
        return pulumi.get(self, "abort")

    @property
    @pulumi.getter
    def delay(self) -> Optional['outputs.RegionUrlMapPathMatcherRouteRuleRouteActionFaultInjectionPolicyDelay']:
        """
        How client requests are delayed as part of fault injection, before
        being sent to a backend service.
        Structure is documented below.
        """
        return pulumi.get(self, "delay")

    def _translate_property(self, prop):
        # Map camelCase wire names to snake_case attribute names.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class RegionUrlMapPathMatcherRouteRuleRouteActionFaultInjectionPolicyAbort(dict):
    """Settings for aborting a fraction of requests as injected faults."""

    def __init__(__self__, *,
                 http_status: Optional[float] = None,
                 percentage: Optional[float] = None):
        """
        :param float http_status: HTTP status code used to abort the request; the
               value must be between 200 and 599 inclusive.
        :param float percentage: Percentage of traffic
               (connections/operations/requests) affected by the injected
               fault; the value must be between 0.0 and 100.0 inclusive.
               NOTE(review): the upstream doc said "delay will be introduced"
               here; per the class context this governs aborts — confirm
               against the API reference.
        """
        optional_fields = {"http_status": http_status, "percentage": percentage}
        for key, value in optional_fields.items():
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="httpStatus")
    def http_status(self) -> Optional[float]:
        """
        HTTP status code used to abort the request; the value must be
        between 200 and 599 inclusive.
        """
        return pulumi.get(self, "http_status")

    @property
    @pulumi.getter
    def percentage(self) -> Optional[float]:
        """
        Percentage of traffic (connections/operations/requests) affected by
        the injected fault; the value must be between 0.0 and 100.0
        inclusive.
        """
        return pulumi.get(self, "percentage")

    def _translate_property(self, prop):
        # Map camelCase wire names to snake_case attribute names.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class RegionUrlMapPathMatcherRouteRuleRouteActionFaultInjectionPolicyDelay(dict):
    """Settings for delaying a fraction of requests as injected faults."""

    def __init__(__self__, *,
                 fixed_delay: Optional['outputs.RegionUrlMapPathMatcherRouteRuleRouteActionFaultInjectionPolicyDelayFixedDelay'] = None,
                 percentage: Optional[float] = None):
        """
        :param 'RegionUrlMapPathMatcherRouteRuleRouteActionFaultInjectionPolicyDelayFixedDelayArgs' fixed_delay: The value of
               the fixed delay interval.
               Structure is documented below.
        :param float percentage: Percentage of traffic
               (connections/operations/requests) on which delay will be
               introduced as part of fault injection; the value must be
               between 0.0 and 100.0 inclusive.
        """
        optional_fields = {"fixed_delay": fixed_delay, "percentage": percentage}
        for key, value in optional_fields.items():
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="fixedDelay")
    def fixed_delay(self) -> Optional['outputs.RegionUrlMapPathMatcherRouteRuleRouteActionFaultInjectionPolicyDelayFixedDelay']:
        """
        The value of the fixed delay interval.
        Structure is documented below.
        """
        return pulumi.get(self, "fixed_delay")

    @property
    @pulumi.getter
    def percentage(self) -> Optional[float]:
        """
        Percentage of traffic (connections/operations/requests) on which
        delay will be introduced as part of fault injection; the value must
        be between 0.0 and 100.0 inclusive.
        """
        return pulumi.get(self, "percentage")

    def _translate_property(self, prop):
        # Map camelCase wire names to snake_case attribute names.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class RegionUrlMapPathMatcherRouteRuleRouteActionFaultInjectionPolicyDelayFixedDelay(dict):
    """A fixed time span expressed as whole seconds plus nanoseconds."""

    def __init__(__self__, *,
                 seconds: str,
                 nanos: Optional[float] = None):
        """
        :param str seconds: Span of time at a resolution of a second; must be from
               0 to 315,576,000,000 inclusive.
        :param float nanos: Span of time that is a fraction of a second at
               nanosecond resolution. Durations less than one second are
               represented with a 0 `seconds` field and a positive `nanos`
               field. Must be from 0 to 999,999,999 inclusive.
        """
        pulumi.set(__self__, "seconds", seconds)
        if nanos is not None:
            pulumi.set(__self__, "nanos", nanos)

    @property
    @pulumi.getter
    def seconds(self) -> str:
        """
        Span of time at a resolution of a second; must be from 0 to
        315,576,000,000 inclusive.
        """
        return pulumi.get(self, "seconds")

    @property
    @pulumi.getter
    def nanos(self) -> Optional[float]:
        """
        Span of time that is a fraction of a second at nanosecond
        resolution. Durations less than one second are represented with a 0
        `seconds` field and a positive `nanos` field. Must be from 0 to
        999,999,999 inclusive.
        """
        return pulumi.get(self, "nanos")

    def _translate_property(self, prop):
        # Map camelCase wire names to snake_case attribute names.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class RegionUrlMapPathMatcherRouteRuleRouteActionRequestMirrorPolicy(dict):
    """Mirroring (shadowing) target for requests on this route."""

    def __init__(__self__, *,
                 backend_service: str):
        """
        :param str backend_service: The RegionBackendService resource that requests
               are mirrored to. Before forwarding the request to the
               backendService, the loadbalancer applies any relevant
               headerActions.
               NOTE(review): upstream doc called this "the default
               RegionBackendService ... of this backendServiceWeight", which
               appears copied from the weighted-backend docs — confirm against
               the API reference.
        """
        pulumi.set(__self__, "backend_service", backend_service)

    @property
    @pulumi.getter(name="backendService")
    def backend_service(self) -> str:
        """
        The RegionBackendService resource that requests are mirrored to.
        Before forwarding the request to the backendService, the
        loadbalancer applies any relevant headerActions.
        """
        return pulumi.get(self, "backend_service")

    def _translate_property(self, prop):
        # Map camelCase wire names to snake_case attribute names.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class RegionUrlMapPathMatcherRouteRuleRouteActionRetryPolicy(dict):
    """Retry policy for a route rule: attempt count, per-try timeout, and retry conditions."""

    def __init__(__self__, *,
                 num_retries: float,
                 per_try_timeout: Optional['outputs.RegionUrlMapPathMatcherRouteRuleRouteActionRetryPolicyPerTryTimeout'] = None,
                 retry_conditions: Optional[List[str]] = None):
        """
        :param float num_retries: Specifies the allowed number retries. This number must be > 0.
        :param 'RegionUrlMapPathMatcherRouteRuleRouteActionRetryPolicyPerTryTimeoutArgs' per_try_timeout: Specifies a non-zero timeout per retry attempt.
               Structure is documented below.
        :param List[str] retry_conditions: Specifies one or more conditions when this retry rule applies. Valid values are:
               - 5xx: retry on any 5xx response code, or when the backend service does not
                 respond at all (disconnects, reset, read timeout, connection failure,
                 refused streams).
               - gateway-error: like 5xx, but limited to response codes 502, 503 and 504.
               - connect-failure: retry on failures connecting to backend services, for
                 example due to connection timeouts.
               - retriable-4xx: retry on retriable 4xx response codes; currently the only
                 retriable error supported is 409.
               - refused-stream: retry when the backend service resets the stream with a
                 REFUSED_STREAM error code (a reset type that is safe to retry).
               - cancelled: retry when the gRPC status code in the response header is
                 set to cancelled.
               - deadline-exceeded: retry when the gRPC status code in the response header
                 is set to deadline-exceeded.
               - resource-exhausted: retry when the gRPC status code in the response header
                 is set to resource-exhausted.
               - unavailable: retry when the gRPC status code in the response header is
                 set to unavailable.
        """
        pulumi.set(__self__, "num_retries", num_retries)
        # Optional fields are only stored when explicitly provided.
        for key, value in (("per_try_timeout", per_try_timeout),
                           ("retry_conditions", retry_conditions)):
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="numRetries")
    def num_retries(self) -> float:
        """
        Specifies the allowed number retries. This number must be > 0.
        """
        return pulumi.get(self, "num_retries")

    @property
    @pulumi.getter(name="perTryTimeout")
    def per_try_timeout(self) -> Optional['outputs.RegionUrlMapPathMatcherRouteRuleRouteActionRetryPolicyPerTryTimeout']:
        """
        Specifies a non-zero timeout per retry attempt.
        Structure is documented below.
        """
        return pulumi.get(self, "per_try_timeout")

    @property
    @pulumi.getter(name="retryConditions")
    def retry_conditions(self) -> Optional[List[str]]:
        """
        Specifies one or more conditions when this retry rule applies. Valid values are:
        - 5xx: retry on any 5xx response code, or when the backend service does not
          respond at all (disconnects, reset, read timeout, connection failure,
          refused streams).
        - gateway-error: like 5xx, but limited to response codes 502, 503 and 504.
        - connect-failure: retry on failures connecting to backend services, for
          example due to connection timeouts.
        - retriable-4xx: retry on retriable 4xx response codes; currently the only
          retriable error supported is 409.
        - refused-stream: retry when the backend service resets the stream with a
          REFUSED_STREAM error code (a reset type that is safe to retry).
        - cancelled: retry when the gRPC status code in the response header is
          set to cancelled.
        - deadline-exceeded: retry when the gRPC status code in the response header
          is set to deadline-exceeded.
        - resource-exhausted: retry when the gRPC status code in the response header
          is set to resource-exhausted.
        - unavailable: retry when the gRPC status code in the response header is
          set to unavailable.
        """
        return pulumi.get(self, "retry_conditions")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python attribute.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class RegionUrlMapPathMatcherRouteRuleRouteActionRetryPolicyPerTryTimeout(dict):
    """A duration expressed as whole seconds plus an optional sub-second nanosecond part."""

    def __init__(__self__, *,
                 seconds: str,
                 nanos: Optional[float] = None):
        """
        :param str seconds: Span of time at a resolution of a second. Must be from
               0 to 315,576,000,000 inclusive.
        :param float nanos: Span of time that's a fraction of a second at nanosecond
               resolution. Durations less than one second are represented with a 0
               `seconds` field and a positive `nanos` field. Must be from 0 to
               999,999,999 inclusive.
        """
        pulumi.set(__self__, "seconds", seconds)
        # `nanos` is optional; only store it when given.
        if nanos is not None:
            pulumi.set(__self__, "nanos", nanos)

    @property
    @pulumi.getter
    def seconds(self) -> str:
        """
        Span of time at a resolution of a second. Must be from 0 to
        315,576,000,000 inclusive.
        """
        return pulumi.get(self, "seconds")

    @property
    @pulumi.getter
    def nanos(self) -> Optional[float]:
        """
        Span of time that's a fraction of a second at nanosecond resolution.
        Durations less than one second are represented with a 0 `seconds` field
        and a positive `nanos` field. Must be from 0 to 999,999,999 inclusive.
        """
        return pulumi.get(self, "nanos")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python attribute.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class RegionUrlMapPathMatcherRouteRuleRouteActionTimeout(dict):
    """A duration expressed as whole seconds plus an optional sub-second nanosecond part."""

    def __init__(__self__, *,
                 seconds: str,
                 nanos: Optional[float] = None):
        """
        :param str seconds: Span of time at a resolution of a second. Must be from
               0 to 315,576,000,000 inclusive.
        :param float nanos: Span of time that's a fraction of a second at nanosecond
               resolution. Durations less than one second are represented with a 0
               `seconds` field and a positive `nanos` field. Must be from 0 to
               999,999,999 inclusive.
        """
        pulumi.set(__self__, "seconds", seconds)
        # `nanos` is optional; only store it when given.
        if nanos is not None:
            pulumi.set(__self__, "nanos", nanos)

    @property
    @pulumi.getter
    def seconds(self) -> str:
        """
        Span of time at a resolution of a second. Must be from 0 to
        315,576,000,000 inclusive.
        """
        return pulumi.get(self, "seconds")

    @property
    @pulumi.getter
    def nanos(self) -> Optional[float]:
        """
        Span of time that's a fraction of a second at nanosecond resolution.
        Durations less than one second are represented with a 0 `seconds` field
        and a positive `nanos` field. Must be from 0 to 999,999,999 inclusive.
        """
        return pulumi.get(self, "nanos")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python attribute.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class RegionUrlMapPathMatcherRouteRuleRouteActionUrlRewrite(dict):
    """URL rewrite applied before forwarding: optional host and/or path-prefix replacement."""

    def __init__(__self__, *,
                 host_rewrite: Optional[str] = None,
                 path_prefix_rewrite: Optional[str] = None):
        """
        :param str host_rewrite: Prior to forwarding the request to the selected
               service, the request's host header is replaced with contents of
               hostRewrite. The value must be between 1 and 255 characters.
        :param str path_prefix_rewrite: Prior to forwarding the request to the
               selected backend service, the matching portion of the request's path
               is replaced by pathPrefixRewrite. The value must be between 1 and
               1024 characters.
        """
        # Both fields are optional; store only those explicitly provided.
        for key, value in (("host_rewrite", host_rewrite),
                           ("path_prefix_rewrite", path_prefix_rewrite)):
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="hostRewrite")
    def host_rewrite(self) -> Optional[str]:
        """
        Prior to forwarding the request to the selected service, the request's
        host header is replaced with contents of hostRewrite. The value must be
        between 1 and 255 characters.
        """
        return pulumi.get(self, "host_rewrite")

    @property
    @pulumi.getter(name="pathPrefixRewrite")
    def path_prefix_rewrite(self) -> Optional[str]:
        """
        Prior to forwarding the request to the selected backend service, the
        matching portion of the request's path is replaced by pathPrefixRewrite.
        The value must be between 1 and 1024 characters.
        """
        return pulumi.get(self, "path_prefix_rewrite")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python attribute.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class RegionUrlMapPathMatcherRouteRuleRouteActionWeightedBackendService(dict):
    """A weighted traffic split entry: backend service, its weight, and optional header actions."""

    def __init__(__self__, *,
                 backend_service: str,
                 weight: float,
                 header_action: Optional['outputs.RegionUrlMapPathMatcherRouteRuleRouteActionWeightedBackendServiceHeaderAction'] = None):
        """
        :param str backend_service: The default RegionBackendService resource. Before
               forwarding the request to backendService, the loadbalancer applies any
               relevant headerActions specified as part of this backendServiceWeight.
        :param float weight: Specifies the fraction of traffic sent to backendService,
               computed as weight / (sum of all weightedBackendService weights in
               routeAction). The selection of a backend service is determined only for
               new traffic. Once a user's request has been directed to a backendService,
               subsequent requests will be sent to the same backendService as determined
               by the BackendService's session affinity policy. The value must be
               between 0 and 1000.
        :param 'RegionUrlMapPathMatcherRouteRuleRouteActionWeightedBackendServiceHeaderActionArgs' header_action: Specifies changes to request and response headers that need
               to take effect for the selected backendService. headerAction specified
               here take effect before headerAction in the enclosing HttpRouteRule,
               PathMatcher and UrlMap.
               Structure is documented below.
        """
        pulumi.set(__self__, "backend_service", backend_service)
        pulumi.set(__self__, "weight", weight)
        # `header_action` is optional; only store it when given.
        if header_action is not None:
            pulumi.set(__self__, "header_action", header_action)

    @property
    @pulumi.getter(name="backendService")
    def backend_service(self) -> str:
        """
        The default RegionBackendService resource. Before forwarding the request to
        backendService, the loadbalancer applies any relevant headerActions specified
        as part of this backendServiceWeight.
        """
        return pulumi.get(self, "backend_service")

    @property
    @pulumi.getter
    def weight(self) -> float:
        """
        Specifies the fraction of traffic sent to backendService, computed as
        weight / (sum of all weightedBackendService weights in routeAction).
        The selection of a backend service is determined only for new traffic.
        Once a user's request has been directed to a backendService, subsequent
        requests will be sent to the same backendService as determined by the
        BackendService's session affinity policy. The value must be between
        0 and 1000.
        """
        return pulumi.get(self, "weight")

    @property
    @pulumi.getter(name="headerAction")
    def header_action(self) -> Optional['outputs.RegionUrlMapPathMatcherRouteRuleRouteActionWeightedBackendServiceHeaderAction']:
        """
        Specifies changes to request and response headers that need to take effect
        for the selected backendService. headerAction specified here take effect
        before headerAction in the enclosing HttpRouteRule, PathMatcher and UrlMap.
        Structure is documented below.
        """
        return pulumi.get(self, "header_action")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python attribute.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class RegionUrlMapPathMatcherRouteRuleRouteActionWeightedBackendServiceHeaderAction(dict):
    """Header mutations (add/remove, request/response) applied for a weighted backend service."""

    def __init__(__self__, *,
                 request_headers_to_adds: Optional[List['outputs.RegionUrlMapPathMatcherRouteRuleRouteActionWeightedBackendServiceHeaderActionRequestHeadersToAdd']] = None,
                 request_headers_to_removes: Optional[List[str]] = None,
                 response_headers_to_adds: Optional[List['outputs.RegionUrlMapPathMatcherRouteRuleRouteActionWeightedBackendServiceHeaderActionResponseHeadersToAdd']] = None,
                 response_headers_to_removes: Optional[List[str]] = None):
        """
        :param List['RegionUrlMapPathMatcherRouteRuleRouteActionWeightedBackendServiceHeaderActionRequestHeadersToAddArgs'] request_headers_to_adds: Headers to add to a matching request prior to
               forwarding the request to the backendService.
               Structure is documented below.
        :param List[str] request_headers_to_removes: A list of header names for headers
               that need to be removed from the request prior to forwarding the request
               to the backendService.
        :param List['RegionUrlMapPathMatcherRouteRuleRouteActionWeightedBackendServiceHeaderActionResponseHeadersToAddArgs'] response_headers_to_adds: Headers to add the response prior to sending the
               response back to the client.
               Structure is documented below.
        :param List[str] response_headers_to_removes: A list of header names for headers
               that need to be removed from the response prior to sending the response
               back to the client.
        """
        # All fields are optional; store only those explicitly provided.
        for key, value in (
                ("request_headers_to_adds", request_headers_to_adds),
                ("request_headers_to_removes", request_headers_to_removes),
                ("response_headers_to_adds", response_headers_to_adds),
                ("response_headers_to_removes", response_headers_to_removes)):
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="requestHeadersToAdds")
    def request_headers_to_adds(self) -> Optional[List['outputs.RegionUrlMapPathMatcherRouteRuleRouteActionWeightedBackendServiceHeaderActionRequestHeadersToAdd']]:
        """
        Headers to add to a matching request prior to forwarding the request to
        the backendService.
        Structure is documented below.
        """
        return pulumi.get(self, "request_headers_to_adds")

    @property
    @pulumi.getter(name="requestHeadersToRemoves")
    def request_headers_to_removes(self) -> Optional[List[str]]:
        """
        A list of header names for headers that need to be removed from the
        request prior to forwarding the request to the backendService.
        """
        return pulumi.get(self, "request_headers_to_removes")

    @property
    @pulumi.getter(name="responseHeadersToAdds")
    def response_headers_to_adds(self) -> Optional[List['outputs.RegionUrlMapPathMatcherRouteRuleRouteActionWeightedBackendServiceHeaderActionResponseHeadersToAdd']]:
        """
        Headers to add the response prior to sending the response back to the client.
        Structure is documented below.
        """
        return pulumi.get(self, "response_headers_to_adds")

    @property
    @pulumi.getter(name="responseHeadersToRemoves")
    def response_headers_to_removes(self) -> Optional[List[str]]:
        """
        A list of header names for headers that need to be removed from the
        response prior to sending the response back to the client.
        """
        return pulumi.get(self, "response_headers_to_removes")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python attribute.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class RegionUrlMapPathMatcherRouteRuleRouteActionWeightedBackendServiceHeaderActionRequestHeadersToAdd(dict):
    """A single request header to add: name, value, and append-vs-replace behavior."""

    def __init__(__self__, *,
                 header_name: str,
                 header_value: str,
                 replace: bool):
        """
        :param str header_name: The name of the header.
        :param str header_value: The value of the header to add.
        :param bool replace: If false, headerValue is appended to any values that
               already exist for the header. If true, headerValue is set for the
               header, discarding any values that were set for that header.
        """
        # All three fields are required.
        pulumi.set(__self__, "header_name", header_name)
        pulumi.set(__self__, "header_value", header_value)
        pulumi.set(__self__, "replace", replace)

    @property
    @pulumi.getter(name="headerName")
    def header_name(self) -> str:
        """
        The name of the header.
        """
        return pulumi.get(self, "header_name")

    @property
    @pulumi.getter(name="headerValue")
    def header_value(self) -> str:
        """
        The value of the header to add.
        """
        return pulumi.get(self, "header_value")

    @property
    @pulumi.getter
    def replace(self) -> bool:
        """
        If false, headerValue is appended to any values that already exist for
        the header. If true, headerValue is set for the header, discarding any
        values that were set for that header.
        """
        return pulumi.get(self, "replace")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python attribute.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class RegionUrlMapPathMatcherRouteRuleRouteActionWeightedBackendServiceHeaderActionResponseHeadersToAdd(dict):
    """A single response header to add: name, value, and append-vs-replace behavior."""

    def __init__(__self__, *,
                 header_name: str,
                 header_value: str,
                 replace: bool):
        """
        :param str header_name: The name of the header.
        :param str header_value: The value of the header to add.
        :param bool replace: If false, headerValue is appended to any values that
               already exist for the header. If true, headerValue is set for the
               header, discarding any values that were set for that header.
        """
        # All three fields are required.
        pulumi.set(__self__, "header_name", header_name)
        pulumi.set(__self__, "header_value", header_value)
        pulumi.set(__self__, "replace", replace)

    @property
    @pulumi.getter(name="headerName")
    def header_name(self) -> str:
        """
        The name of the header.
        """
        return pulumi.get(self, "header_name")

    @property
    @pulumi.getter(name="headerValue")
    def header_value(self) -> str:
        """
        The value of the header to add.
        """
        return pulumi.get(self, "header_value")

    @property
    @pulumi.getter
    def replace(self) -> bool:
        """
        If false, headerValue is appended to any values that already exist for
        the header. If true, headerValue is set for the header, discarding any
        values that were set for that header.
        """
        return pulumi.get(self, "replace")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python attribute.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class RegionUrlMapPathMatcherRouteRuleUrlRedirect(dict):
    """URL redirect configuration: host/path/prefix replacement, scheme, status code, query stripping."""

    def __init__(__self__, *,
                 host_redirect: Optional[str] = None,
                 https_redirect: Optional[bool] = None,
                 path_redirect: Optional[str] = None,
                 prefix_redirect: Optional[str] = None,
                 redirect_response_code: Optional[str] = None,
                 strip_query: Optional[bool] = None):
        """
        :param str host_redirect: The host that will be used in the redirect response
               instead of the one that was supplied in the request. The value must be
               between 1 and 255 characters.
        :param bool https_redirect: If set to true, the URL scheme in the redirected
               request is set to https. If set to false, the URL scheme of the
               redirected request will remain the same as that of the request. This
               must only be set for UrlMaps used in TargetHttpProxys. Setting this true
               for TargetHttpsProxy is not permitted. The default is set to false.
        :param str path_redirect: The path that will be used in the redirect response
               instead of the one that was supplied in the request. pathRedirect cannot
               be supplied together with prefixRedirect. Supply one alone or neither.
               If neither is supplied, the path of the original request will be used
               for the redirect. The value must be between 1 and 1024 characters.
        :param str prefix_redirect: The prefix that replaces the prefixMatch specified
               in the HttpRouteRuleMatch, retaining the remaining portion of the URL
               before redirecting the request. prefixRedirect cannot be supplied
               together with pathRedirect. Supply one alone or neither. If neither is
               supplied, the path of the original request will be used for the
               redirect. The value must be between 1 and 1024 characters.
        :param str redirect_response_code: The HTTP Status code to use for this
               RedirectAction. Supported values are:
               * MOVED_PERMANENTLY_DEFAULT, which is the default value and corresponds to 301.
               * FOUND, which corresponds to 302.
               * SEE_OTHER which corresponds to 303.
               * TEMPORARY_REDIRECT, which corresponds to 307. In this case, the
                 request method will be retained.
               * PERMANENT_REDIRECT, which corresponds to 308. In this case, the
                 request method will be retained.
        :param bool strip_query: If set to true, any accompanying query portion of the
               original URL is removed prior to redirecting the request. If set to
               false, the query portion of the original URL is retained.
               This field is required to ensure an empty block is not set. The normal
               default value is false.
        """
        # All fields are optional; store only those explicitly provided.
        for key, value in (
                ("host_redirect", host_redirect),
                ("https_redirect", https_redirect),
                ("path_redirect", path_redirect),
                ("prefix_redirect", prefix_redirect),
                ("redirect_response_code", redirect_response_code),
                ("strip_query", strip_query)):
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="hostRedirect")
    def host_redirect(self) -> Optional[str]:
        """
        The host that will be used in the redirect response instead of the one
        that was supplied in the request. The value must be between 1 and 255
        characters.
        """
        return pulumi.get(self, "host_redirect")

    @property
    @pulumi.getter(name="httpsRedirect")
    def https_redirect(self) -> Optional[bool]:
        """
        If set to true, the URL scheme in the redirected request is set to https.
        If set to false, the URL scheme of the redirected request will remain the
        same as that of the request. This must only be set for UrlMaps used in
        TargetHttpProxys. Setting this true for TargetHttpsProxy is not permitted.
        The default is set to false.
        """
        return pulumi.get(self, "https_redirect")

    @property
    @pulumi.getter(name="pathRedirect")
    def path_redirect(self) -> Optional[str]:
        """
        The path that will be used in the redirect response instead of the one
        that was supplied in the request. pathRedirect cannot be supplied
        together with prefixRedirect. Supply one alone or neither. If neither is
        supplied, the path of the original request will be used for the redirect.
        The value must be between 1 and 1024 characters.
        """
        return pulumi.get(self, "path_redirect")

    @property
    @pulumi.getter(name="prefixRedirect")
    def prefix_redirect(self) -> Optional[str]:
        """
        The prefix that replaces the prefixMatch specified in the
        HttpRouteRuleMatch, retaining the remaining portion of the URL before
        redirecting the request. prefixRedirect cannot be supplied together with
        pathRedirect. Supply one alone or neither. If neither is supplied, the
        path of the original request will be used for the redirect. The value
        must be between 1 and 1024 characters.
        """
        return pulumi.get(self, "prefix_redirect")

    @property
    @pulumi.getter(name="redirectResponseCode")
    def redirect_response_code(self) -> Optional[str]:
        """
        The HTTP Status code to use for this RedirectAction. Supported values are:
        * MOVED_PERMANENTLY_DEFAULT, which is the default value and corresponds to 301.
        * FOUND, which corresponds to 302.
        * SEE_OTHER which corresponds to 303.
        * TEMPORARY_REDIRECT, which corresponds to 307. In this case, the request
          method will be retained.
        * PERMANENT_REDIRECT, which corresponds to 308. In this case, the request
          method will be retained.
        """
        return pulumi.get(self, "redirect_response_code")

    @property
    @pulumi.getter(name="stripQuery")
    def strip_query(self) -> Optional[bool]:
        """
        If set to true, any accompanying query portion of the original URL is
        removed prior to redirecting the request. If set to false, the query
        portion of the original URL is retained.
        This field is required to ensure an empty block is not set. The normal
        default value is false.
        """
        return pulumi.get(self, "strip_query")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python attribute.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class RegionUrlMapTest(dict):
    """A URL-map test case: a host/path pair and the backend service it should map to."""

    def __init__(__self__, *,
                 host: str,
                 path: str,
                 service: str,
                 description: Optional[str] = None):
        """
        :param str host: Host portion of the URL.
        :param str path: Path portion of the URL.
        :param str service: A reference to expected RegionBackendService resource the
               given URL should be mapped to.
        :param str description: Description of this test case.
        """
        pulumi.set(__self__, "host", host)
        pulumi.set(__self__, "path", path)
        pulumi.set(__self__, "service", service)
        # `description` is optional; only store it when given.
        if description is not None:
            pulumi.set(__self__, "description", description)

    @property
    @pulumi.getter
    def host(self) -> str:
        """
        Host portion of the URL.
        """
        return pulumi.get(self, "host")

    @property
    @pulumi.getter
    def path(self) -> str:
        """
        Path portion of the URL.
        """
        return pulumi.get(self, "path")

    @property
    @pulumi.getter
    def service(self) -> str:
        """
        A reference to expected RegionBackendService resource the given URL
        should be mapped to.
        """
        return pulumi.get(self, "service")

    @property
    @pulumi.getter
    def description(self) -> Optional[str]:
        """
        Description of this test case.
        """
        return pulumi.get(self, "description")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python attribute.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class ReservationSpecificReservation(dict):
    """A specific reservation: allocated resource count, instance properties, and in-use count."""

    def __init__(__self__, *,
                 count: float,
                 instance_properties: 'outputs.ReservationSpecificReservationInstanceProperties',
                 in_use_count: Optional[float] = None):
        """
        :param float count: The number of resources that are allocated.
        :param 'ReservationSpecificReservationInstancePropertiesArgs' instance_properties: The instance properties for the reservation.
               Structure is documented below.
        :param float in_use_count: (Output) How many instances are in use.
        """
        pulumi.set(__self__, "count", count)
        pulumi.set(__self__, "instance_properties", instance_properties)
        # `in_use_count` is optional (server-populated); only store it when given.
        if in_use_count is not None:
            pulumi.set(__self__, "in_use_count", in_use_count)

    @property
    @pulumi.getter
    def count(self) -> float:
        """
        The number of resources that are allocated.
        """
        return pulumi.get(self, "count")

    @property
    @pulumi.getter(name="instanceProperties")
    def instance_properties(self) -> 'outputs.ReservationSpecificReservationInstanceProperties':
        """
        The instance properties for the reservation.
        Structure is documented below.
        """
        return pulumi.get(self, "instance_properties")

    @property
    @pulumi.getter(name="inUseCount")
    def in_use_count(self) -> Optional[float]:
        """
        (Output) How many instances are in use.
        """
        return pulumi.get(self, "in_use_count")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python attribute.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class ReservationSpecificReservationInstanceProperties(dict):
    """Instance properties reserved for each VM: machine type, accelerators, local SSDs, CPU platform."""

    def __init__(__self__, *,
                 machine_type: str,
                 guest_accelerators: Optional[List['outputs.ReservationSpecificReservationInstancePropertiesGuestAccelerator']] = None,
                 local_ssds: Optional[List['outputs.ReservationSpecificReservationInstancePropertiesLocalSsd']] = None,
                 min_cpu_platform: Optional[str] = None):
        """
        :param str machine_type: The name of the machine type to reserve.
        :param List['ReservationSpecificReservationInstancePropertiesGuestAcceleratorArgs'] guest_accelerators: Guest accelerator type and count.
               Structure is documented below.
        :param List['ReservationSpecificReservationInstancePropertiesLocalSsdArgs'] local_ssds: The amount of local ssd to reserve with each instance. This
               reserves disks of type `local-ssd`.
               Structure is documented below.
        :param str min_cpu_platform: The minimum CPU platform for the reservation. For
               example, `"Intel Skylake"`. See
               [the CPU platform availability reference](https://cloud.google.com/compute/docs/instances/specify-min-cpu-platform#availablezones)
               for information on available CPU platforms.
        """
        pulumi.set(__self__, "machine_type", machine_type)
        # Optional fields are only stored when explicitly provided.
        for key, value in (("guest_accelerators", guest_accelerators),
                           ("local_ssds", local_ssds),
                           ("min_cpu_platform", min_cpu_platform)):
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="machineType")
    def machine_type(self) -> str:
        """
        The name of the machine type to reserve.
        """
        return pulumi.get(self, "machine_type")

    @property
    @pulumi.getter(name="guestAccelerators")
    def guest_accelerators(self) -> Optional[List['outputs.ReservationSpecificReservationInstancePropertiesGuestAccelerator']]:
        """
        Guest accelerator type and count.
        Structure is documented below.
        """
        return pulumi.get(self, "guest_accelerators")

    @property
    @pulumi.getter(name="localSsds")
    def local_ssds(self) -> Optional[List['outputs.ReservationSpecificReservationInstancePropertiesLocalSsd']]:
        """
        The amount of local ssd to reserve with each instance. This reserves
        disks of type `local-ssd`.
        Structure is documented below.
        """
        return pulumi.get(self, "local_ssds")

    @property
    @pulumi.getter(name="minCpuPlatform")
    def min_cpu_platform(self) -> Optional[str]:
        """
        The minimum CPU platform for the reservation. For example,
        `"Intel Skylake"`. See
        [the CPU platform availability reference](https://cloud.google.com/compute/docs/instances/specify-min-cpu-platform#availablezones)
        for information on available CPU platforms.
        """
        return pulumi.get(self, "min_cpu_platform")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python attribute.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class ReservationSpecificReservationInstancePropertiesGuestAccelerator(dict):
    """A guest accelerator attachment: accelerator type and how many cards to expose."""

    def __init__(__self__, *,
                 accelerator_count: float,
                 accelerator_type: str):
        """
        :param float accelerator_count: The number of the guest accelerator cards
               exposed to this instance.
        :param str accelerator_type: The full or partial URL of the accelerator type
               to attach to this instance. For example:
               `projects/my-project/zones/us-central1-c/acceleratorTypes/nvidia-tesla-p100`
               If you are creating an instance template, specify only the accelerator name.
        """
        # Both fields are required.
        pulumi.set(__self__, "accelerator_count", accelerator_count)
        pulumi.set(__self__, "accelerator_type", accelerator_type)

    @property
    @pulumi.getter(name="acceleratorCount")
    def accelerator_count(self) -> float:
        """
        The number of the guest accelerator cards exposed to this instance.
        """
        return pulumi.get(self, "accelerator_count")

    @property
    @pulumi.getter(name="acceleratorType")
    def accelerator_type(self) -> str:
        """
        The full or partial URL of the accelerator type to attach to this
        instance. For example:
        `projects/my-project/zones/us-central1-c/acceleratorTypes/nvidia-tesla-p100`
        If you are creating an instance template, specify only the accelerator name.
        """
        return pulumi.get(self, "accelerator_type")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python attribute.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class ReservationSpecificReservationInstancePropertiesLocalSsd(dict):
    """A reserved local SSD: disk size and the interface used to attach it."""

    def __init__(__self__, *,
                 disk_size_gb: float,
                 interface: Optional[str] = None):
        """
        :param float disk_size_gb: The size of the disk in base-2 GB.
        :param str interface: The disk interface to use for attaching this disk.
               Default value is `SCSI`.
               Possible values are `SCSI` and `NVME`.
        """
        pulumi.set(__self__, "disk_size_gb", disk_size_gb)
        # `interface` is optional; only store it when given.
        if interface is not None:
            pulumi.set(__self__, "interface", interface)

    @property
    @pulumi.getter(name="diskSizeGb")
    def disk_size_gb(self) -> float:
        """
        The size of the disk in base-2 GB.
        """
        return pulumi.get(self, "disk_size_gb")

    @property
    @pulumi.getter
    def interface(self) -> Optional[str]:
        """
        The disk interface to use for attaching this disk.
        Default value is `SCSI`.
        Possible values are `SCSI` and `NVME`.
        """
        return pulumi.get(self, "interface")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python attribute.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class ResourcePolicyGroupPlacementPolicy(dict):
    """Group placement policy: availability-domain spread, collocation mode, and VM count."""

    def __init__(__self__, *,
                 availability_domain_count: Optional[float] = None,
                 collocation: Optional[str] = None,
                 vm_count: Optional[float] = None):
        """
        :param float availability_domain_count: The number of availability domains
               instances will be spread across. If two instances are in different
               availability domain, they will not be put in the same low latency network.
        :param str collocation: Collocation specifies whether to place VMs inside the
               same availability domain on the same low-latency network. Specify
               `COLLOCATED` to enable collocation. Can only be specified with
               `vm_count`. If compute instances are created with a COLLOCATED policy,
               then exactly `vm_count` instances must be created at the same time with
               the resource policy attached.
               Possible values are `COLLOCATED`.
        :param float vm_count: Number of vms in this placement group.
        """
        # All fields are optional; store only those explicitly provided.
        for key, value in (
                ("availability_domain_count", availability_domain_count),
                ("collocation", collocation),
                ("vm_count", vm_count)):
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="availabilityDomainCount")
    def availability_domain_count(self) -> Optional[float]:
        """
        The number of availability domains instances will be spread across. If
        two instances are in different availability domain, they will not be put
        in the same low latency network.
        """
        return pulumi.get(self, "availability_domain_count")

    @property
    @pulumi.getter
    def collocation(self) -> Optional[str]:
        """
        Collocation specifies whether to place VMs inside the same availability
        domain on the same low-latency network. Specify `COLLOCATED` to enable
        collocation. Can only be specified with `vm_count`. If compute instances
        are created with a COLLOCATED policy, then exactly `vm_count` instances
        must be created at the same time with the resource policy attached.
        Possible values are `COLLOCATED`.
        """
        return pulumi.get(self, "collocation")

    @property
    @pulumi.getter(name="vmCount")
    def vm_count(self) -> Optional[float]:
        """
        Number of vms in this placement group.
        """
        return pulumi.get(self, "vm_count")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python attribute.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class ResourcePolicySnapshotSchedulePolicy(dict):
    """Snapshot schedule policy of a resource policy. `schedule` is required;
    `retention_policy` and `snapshot_properties` are stored only when supplied.
    """
    def __init__(__self__, *,
                 schedule: 'outputs.ResourcePolicySnapshotSchedulePolicySchedule',
                 retention_policy: Optional['outputs.ResourcePolicySnapshotSchedulePolicyRetentionPolicy'] = None,
                 snapshot_properties: Optional['outputs.ResourcePolicySnapshotSchedulePolicySnapshotProperties'] = None):
        """
        :param 'ResourcePolicySnapshotSchedulePolicyScheduleArgs' schedule: Contains one of an `hourlySchedule`, `dailySchedule`, or `weeklySchedule`.
               Structure is documented below.
        :param 'ResourcePolicySnapshotSchedulePolicyRetentionPolicyArgs' retention_policy: Retention policy applied to snapshots created by this resource policy.
               Structure is documented below.
        :param 'ResourcePolicySnapshotSchedulePolicySnapshotPropertiesArgs' snapshot_properties: Properties with which the snapshots are created, such as labels.
               Structure is documented below.
        """
        pulumi.set(__self__, "schedule", schedule)
        if retention_policy is not None:
            pulumi.set(__self__, "retention_policy", retention_policy)
        if snapshot_properties is not None:
            pulumi.set(__self__, "snapshot_properties", snapshot_properties)
    @property
    @pulumi.getter
    def schedule(self) -> 'outputs.ResourcePolicySnapshotSchedulePolicySchedule':
        """
        Contains one of an `hourlySchedule`, `dailySchedule`, or `weeklySchedule`.
        Structure is documented below.
        """
        return pulumi.get(self, "schedule")
    @property
    @pulumi.getter(name="retentionPolicy")
    def retention_policy(self) -> Optional['outputs.ResourcePolicySnapshotSchedulePolicyRetentionPolicy']:
        """
        Retention policy applied to snapshots created by this resource policy.
        Structure is documented below.
        """
        return pulumi.get(self, "retention_policy")
    @property
    @pulumi.getter(name="snapshotProperties")
    def snapshot_properties(self) -> Optional['outputs.ResourcePolicySnapshotSchedulePolicySnapshotProperties']:
        """
        Properties with which the snapshots are created, such as labels.
        Structure is documented below.
        """
        return pulumi.get(self, "snapshot_properties")
    def _translate_property(self, prop):
        # Translate a camelCase wire-format key to the snake_case attribute name.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ResourcePolicySnapshotSchedulePolicyRetentionPolicy(dict):
    """Retention policy for snapshots created by a snapshot schedule policy."""
    def __init__(__self__, *,
                 max_retention_days: float,
                 on_source_disk_delete: Optional[str] = None):
        """
        :param float max_retention_days: Maximum age of the snapshot that is allowed to be kept.
        :param str on_source_disk_delete: Specifies the behavior to apply to scheduled snapshots when
               the source disk is deleted.
               Default value is `KEEP_AUTO_SNAPSHOTS`.
               Possible values are `KEEP_AUTO_SNAPSHOTS` and `APPLY_RETENTION_POLICY`.
        """
        pulumi.set(__self__, "max_retention_days", max_retention_days)
        if on_source_disk_delete is not None:
            pulumi.set(__self__, "on_source_disk_delete", on_source_disk_delete)
    @property
    @pulumi.getter(name="maxRetentionDays")
    def max_retention_days(self) -> float:
        """
        Maximum age of the snapshot that is allowed to be kept.
        """
        return pulumi.get(self, "max_retention_days")
    @property
    @pulumi.getter(name="onSourceDiskDelete")
    def on_source_disk_delete(self) -> Optional[str]:
        """
        Specifies the behavior to apply to scheduled snapshots when
        the source disk is deleted.
        Default value is `KEEP_AUTO_SNAPSHOTS`.
        Possible values are `KEEP_AUTO_SNAPSHOTS` and `APPLY_RETENTION_POLICY`.
        """
        return pulumi.get(self, "on_source_disk_delete")
    def _translate_property(self, prop):
        # Translate a camelCase wire-format key to the snake_case attribute name.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ResourcePolicySnapshotSchedulePolicySchedule(dict):
    """Schedule of a snapshot schedule policy. Holds one of the daily / hourly /
    weekly sub-schedules; each is stored only when supplied.
    """
    def __init__(__self__, *,
                 daily_schedule: Optional['outputs.ResourcePolicySnapshotSchedulePolicyScheduleDailySchedule'] = None,
                 hourly_schedule: Optional['outputs.ResourcePolicySnapshotSchedulePolicyScheduleHourlySchedule'] = None,
                 weekly_schedule: Optional['outputs.ResourcePolicySnapshotSchedulePolicyScheduleWeeklySchedule'] = None):
        """
        :param 'ResourcePolicySnapshotSchedulePolicyScheduleDailyScheduleArgs' daily_schedule: The policy will execute every nth day at the specified time.
               Structure is documented below.
        :param 'ResourcePolicySnapshotSchedulePolicyScheduleHourlyScheduleArgs' hourly_schedule: The policy will execute every nth hour starting at the specified time.
               Structure is documented below.
        :param 'ResourcePolicySnapshotSchedulePolicyScheduleWeeklyScheduleArgs' weekly_schedule: Allows specifying a snapshot time for each day of the week.
               Structure is documented below.
        """
        if daily_schedule is not None:
            pulumi.set(__self__, "daily_schedule", daily_schedule)
        if hourly_schedule is not None:
            pulumi.set(__self__, "hourly_schedule", hourly_schedule)
        if weekly_schedule is not None:
            pulumi.set(__self__, "weekly_schedule", weekly_schedule)
    @property
    @pulumi.getter(name="dailySchedule")
    def daily_schedule(self) -> Optional['outputs.ResourcePolicySnapshotSchedulePolicyScheduleDailySchedule']:
        """
        The policy will execute every nth day at the specified time.
        Structure is documented below.
        """
        return pulumi.get(self, "daily_schedule")
    @property
    @pulumi.getter(name="hourlySchedule")
    def hourly_schedule(self) -> Optional['outputs.ResourcePolicySnapshotSchedulePolicyScheduleHourlySchedule']:
        """
        The policy will execute every nth hour starting at the specified time.
        Structure is documented below.
        """
        return pulumi.get(self, "hourly_schedule")
    @property
    @pulumi.getter(name="weeklySchedule")
    def weekly_schedule(self) -> Optional['outputs.ResourcePolicySnapshotSchedulePolicyScheduleWeeklySchedule']:
        """
        Allows specifying a snapshot time for each day of the week.
        Structure is documented below.
        """
        return pulumi.get(self, "weekly_schedule")
    def _translate_property(self, prop):
        # Translate a camelCase wire-format key to the snake_case attribute name.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ResourcePolicySnapshotSchedulePolicyScheduleDailySchedule(dict):
    """Daily schedule: snapshot every `days_in_cycle` days at `start_time`."""
    def __init__(__self__, *,
                 days_in_cycle: float,
                 start_time: str):
        """
        :param float days_in_cycle: The number of days between snapshots.
        :param str start_time: Time within the window to start the operations.
               It must be in format "HH:MM", where HH : [00-23] and MM : [00-00] GMT.
        """
        pulumi.set(__self__, "days_in_cycle", days_in_cycle)
        pulumi.set(__self__, "start_time", start_time)
    @property
    @pulumi.getter(name="daysInCycle")
    def days_in_cycle(self) -> float:
        """
        The number of days between snapshots.
        """
        return pulumi.get(self, "days_in_cycle")
    @property
    @pulumi.getter(name="startTime")
    def start_time(self) -> str:
        """
        Time within the window to start the operations.
        It must be in format "HH:MM", where HH : [00-23] and MM : [00-00] GMT.
        """
        return pulumi.get(self, "start_time")
    def _translate_property(self, prop):
        # Translate a camelCase wire-format key to the snake_case attribute name.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ResourcePolicySnapshotSchedulePolicyScheduleHourlySchedule(dict):
    """Hourly schedule: snapshot every `hours_in_cycle` hours from `start_time`."""
    def __init__(__self__, *,
                 hours_in_cycle: float,
                 start_time: str):
        """
        :param float hours_in_cycle: The number of hours between snapshots.
        :param str start_time: Time within the window to start the operations.
               It must be in format "HH:MM", where HH : [00-23] and MM : [00-00] GMT.
        """
        pulumi.set(__self__, "hours_in_cycle", hours_in_cycle)
        pulumi.set(__self__, "start_time", start_time)
    @property
    @pulumi.getter(name="hoursInCycle")
    def hours_in_cycle(self) -> float:
        """
        The number of hours between snapshots.
        """
        return pulumi.get(self, "hours_in_cycle")
    @property
    @pulumi.getter(name="startTime")
    def start_time(self) -> str:
        """
        Time within the window to start the operations.
        It must be in format "HH:MM", where HH : [00-23] and MM : [00-00] GMT.
        """
        return pulumi.get(self, "start_time")
    def _translate_property(self, prop):
        # Translate a camelCase wire-format key to the snake_case attribute name.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ResourcePolicySnapshotSchedulePolicyScheduleWeeklySchedule(dict):
    """Weekly schedule: a list of per-day-of-week snapshot times."""
    def __init__(__self__, *,
                 day_of_weeks: List['outputs.ResourcePolicySnapshotSchedulePolicyScheduleWeeklyScheduleDayOfWeek']):
        """
        :param List['ResourcePolicySnapshotSchedulePolicyScheduleWeeklyScheduleDayOfWeekArgs'] day_of_weeks: May contain up to seven (one for each day of the week) snapshot times.
               Structure is documented below.
        """
        pulumi.set(__self__, "day_of_weeks", day_of_weeks)
    @property
    @pulumi.getter(name="dayOfWeeks")
    def day_of_weeks(self) -> List['outputs.ResourcePolicySnapshotSchedulePolicyScheduleWeeklyScheduleDayOfWeek']:
        """
        May contain up to seven (one for each day of the week) snapshot times.
        Structure is documented below.
        """
        return pulumi.get(self, "day_of_weeks")
    def _translate_property(self, prop):
        # Translate a camelCase wire-format key to the snake_case attribute name.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ResourcePolicySnapshotSchedulePolicyScheduleWeeklyScheduleDayOfWeek(dict):
    """One entry of a weekly schedule: a weekday plus the snapshot start time."""
    def __init__(__self__, *,
                 day: str,
                 start_time: str):
        """
        :param str day: The day of the week to create the snapshot. e.g. MONDAY
               Possible values are `MONDAY`, `TUESDAY`, `WEDNESDAY`, `THURSDAY`, `FRIDAY`, `SATURDAY`, and `SUNDAY`.
        :param str start_time: Time within the window to start the operations.
               It must be in format "HH:MM", where HH : [00-23] and MM : [00-00] GMT.
        """
        pulumi.set(__self__, "day", day)
        pulumi.set(__self__, "start_time", start_time)
    @property
    @pulumi.getter
    def day(self) -> str:
        """
        The day of the week to create the snapshot. e.g. MONDAY
        Possible values are `MONDAY`, `TUESDAY`, `WEDNESDAY`, `THURSDAY`, `FRIDAY`, `SATURDAY`, and `SUNDAY`.
        """
        return pulumi.get(self, "day")
    @property
    @pulumi.getter(name="startTime")
    def start_time(self) -> str:
        """
        Time within the window to start the operations.
        It must be in format "HH:MM", where HH : [00-23] and MM : [00-00] GMT.
        """
        return pulumi.get(self, "start_time")
    def _translate_property(self, prop):
        # Translate a camelCase wire-format key to the snake_case attribute name.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ResourcePolicySnapshotSchedulePolicySnapshotProperties(dict):
    """Properties applied to snapshots created by a schedule policy
    (guest flush flag, labels, storage location). All fields optional.
    """
    def __init__(__self__, *,
                 guest_flush: Optional[bool] = None,
                 labels: Optional[Mapping[str, str]] = None,
                 storage_locations: Optional[str] = None):
        """
        :param bool guest_flush: Whether to perform a 'guest aware' snapshot.
        :param Mapping[str, str] labels: A set of key-value pairs.
        :param str storage_locations: Cloud Storage bucket location to store the auto snapshot
               (regional or multi-regional)
        """
        if guest_flush is not None:
            pulumi.set(__self__, "guest_flush", guest_flush)
        if labels is not None:
            pulumi.set(__self__, "labels", labels)
        if storage_locations is not None:
            pulumi.set(__self__, "storage_locations", storage_locations)
    @property
    @pulumi.getter(name="guestFlush")
    def guest_flush(self) -> Optional[bool]:
        """
        Whether to perform a 'guest aware' snapshot.
        """
        return pulumi.get(self, "guest_flush")
    @property
    @pulumi.getter
    def labels(self) -> Optional[Mapping[str, str]]:
        """
        A set of key-value pairs.
        """
        return pulumi.get(self, "labels")
    @property
    @pulumi.getter(name="storageLocations")
    def storage_locations(self) -> Optional[str]:
        """
        Cloud Storage bucket location to store the auto snapshot
        (regional or multi-regional)
        """
        return pulumi.get(self, "storage_locations")
    def _translate_property(self, prop):
        # Translate a camelCase wire-format key to the snake_case attribute name.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class RouterBgp(dict):
    """BGP configuration of a Cloud Router: the local ASN (required) plus
    optional advertisement mode, advertised groups, and advertised IP ranges.
    """
    def __init__(__self__, *,
                 asn: float,
                 advertise_mode: Optional[str] = None,
                 advertised_groups: Optional[List[str]] = None,
                 advertised_ip_ranges: Optional[List['outputs.RouterBgpAdvertisedIpRange']] = None):
        """
        :param float asn: Local BGP Autonomous System Number (ASN). Must be an RFC6996
               private ASN, either 16-bit or 32-bit. The value will be fixed for
               this router resource. All VPN tunnels that link to this router
               will have the same local ASN.
        :param str advertise_mode: User-specified flag to indicate which mode to use for advertisement.
               Default value is `DEFAULT`.
               Possible values are `DEFAULT` and `CUSTOM`.
        :param List[str] advertised_groups: User-specified list of prefix groups to advertise in custom mode.
               This field can only be populated if advertiseMode is CUSTOM and
               is advertised to all peers of the router. These groups will be
               advertised in addition to any specified prefixes. Leave this field
               blank to advertise no custom groups.
               This enum field has the one valid value: ALL_SUBNETS
        :param List['RouterBgpAdvertisedIpRangeArgs'] advertised_ip_ranges: User-specified list of individual IP ranges to advertise in
               custom mode. This field can only be populated if advertiseMode
               is CUSTOM and is advertised to all peers of the router. These IP
               ranges will be advertised in addition to any specified groups.
               Leave this field blank to advertise no custom IP ranges.
               Structure is documented below.
        """
        pulumi.set(__self__, "asn", asn)
        if advertise_mode is not None:
            pulumi.set(__self__, "advertise_mode", advertise_mode)
        if advertised_groups is not None:
            pulumi.set(__self__, "advertised_groups", advertised_groups)
        if advertised_ip_ranges is not None:
            pulumi.set(__self__, "advertised_ip_ranges", advertised_ip_ranges)
    @property
    @pulumi.getter
    def asn(self) -> float:
        """
        Local BGP Autonomous System Number (ASN). Must be an RFC6996
        private ASN, either 16-bit or 32-bit. The value will be fixed for
        this router resource. All VPN tunnels that link to this router
        will have the same local ASN.
        """
        return pulumi.get(self, "asn")
    @property
    @pulumi.getter(name="advertiseMode")
    def advertise_mode(self) -> Optional[str]:
        """
        User-specified flag to indicate which mode to use for advertisement.
        Default value is `DEFAULT`.
        Possible values are `DEFAULT` and `CUSTOM`.
        """
        return pulumi.get(self, "advertise_mode")
    @property
    @pulumi.getter(name="advertisedGroups")
    def advertised_groups(self) -> Optional[List[str]]:
        """
        User-specified list of prefix groups to advertise in custom mode.
        This field can only be populated if advertiseMode is CUSTOM and
        is advertised to all peers of the router. These groups will be
        advertised in addition to any specified prefixes. Leave this field
        blank to advertise no custom groups.
        This enum field has the one valid value: ALL_SUBNETS
        """
        return pulumi.get(self, "advertised_groups")
    @property
    @pulumi.getter(name="advertisedIpRanges")
    def advertised_ip_ranges(self) -> Optional[List['outputs.RouterBgpAdvertisedIpRange']]:
        """
        User-specified list of individual IP ranges to advertise in
        custom mode. This field can only be populated if advertiseMode
        is CUSTOM and is advertised to all peers of the router. These IP
        ranges will be advertised in addition to any specified groups.
        Leave this field blank to advertise no custom IP ranges.
        Structure is documented below.
        """
        return pulumi.get(self, "advertised_ip_ranges")
    def _translate_property(self, prop):
        # Translate a camelCase wire-format key to the snake_case attribute name.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class RouterBgpAdvertisedIpRange(dict):
    """A single advertised IP range of a router's BGP config: a required CIDR
    `range` and an optional `description`.
    """
    def __init__(__self__, *,
                 range: str,
                 description: Optional[str] = None):
        """
        :param str range: The IP range to advertise. The value must be a
               CIDR-formatted string.
        :param str description: User-specified description for the IP range.
        """
        pulumi.set(__self__, "range", range)
        if description is not None:
            pulumi.set(__self__, "description", description)
    @property
    @pulumi.getter
    def range(self) -> str:
        """
        The IP range to advertise. The value must be a
        CIDR-formatted string.
        """
        return pulumi.get(self, "range")
    @property
    @pulumi.getter
    def description(self) -> Optional[str]:
        """
        User-specified description for the IP range.
        """
        return pulumi.get(self, "description")
    def _translate_property(self, prop):
        # Translate a camelCase wire-format key to the snake_case attribute name.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class RouterNatLogConfig(dict):
    """Logging configuration of a Cloud NAT: whether to export logs and which
    filter to apply. Both fields are required.
    """
    def __init__(__self__, *,
                 enable: bool,
                 filter: str):
        """
        :param bool enable: Indicates whether or not to export logs.
        :param str filter: Specifies the desired filtering of logs on this NAT.
               Possible values are `ERRORS_ONLY`, `TRANSLATIONS_ONLY`, and `ALL`.
        """
        pulumi.set(__self__, "enable", enable)
        pulumi.set(__self__, "filter", filter)
    @property
    @pulumi.getter
    def enable(self) -> bool:
        """
        Indicates whether or not to export logs.
        """
        return pulumi.get(self, "enable")
    @property
    @pulumi.getter
    def filter(self) -> str:
        """
        Specifies the desired filtering of logs on this NAT.
        Possible values are `ERRORS_ONLY`, `TRANSLATIONS_ONLY`, and `ALL`.
        """
        return pulumi.get(self, "filter")
    def _translate_property(self, prop):
        # Translate a camelCase wire-format key to the snake_case attribute name.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class RouterNatSubnetwork(dict):
    """A subnetwork entry of a Cloud NAT: the subnetwork self-link, which of
    its source IP ranges get NAT, and optionally which secondary ranges.
    """
    def __init__(__self__, *,
                 name: str,
                 source_ip_ranges_to_nats: List[str],
                 secondary_ip_range_names: Optional[List[str]] = None):
        """
        :param str name: Self-link of subnetwork to NAT
        :param List[str] source_ip_ranges_to_nats: List of options for which source IPs in the subnetwork
               should have NAT enabled. Supported values include:
               `ALL_IP_RANGES`, `LIST_OF_SECONDARY_IP_RANGES`,
               `PRIMARY_IP_RANGE`.
        :param List[str] secondary_ip_range_names: List of the secondary ranges of the subnetwork that are allowed
               to use NAT. This can be populated only if
               `LIST_OF_SECONDARY_IP_RANGES` is one of the values in
               sourceIpRangesToNat
        """
        pulumi.set(__self__, "name", name)
        pulumi.set(__self__, "source_ip_ranges_to_nats", source_ip_ranges_to_nats)
        if secondary_ip_range_names is not None:
            pulumi.set(__self__, "secondary_ip_range_names", secondary_ip_range_names)
    @property
    @pulumi.getter
    def name(self) -> str:
        """
        Self-link of subnetwork to NAT
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="sourceIpRangesToNats")
    def source_ip_ranges_to_nats(self) -> List[str]:
        """
        List of options for which source IPs in the subnetwork
        should have NAT enabled. Supported values include:
        `ALL_IP_RANGES`, `LIST_OF_SECONDARY_IP_RANGES`,
        `PRIMARY_IP_RANGE`.
        """
        return pulumi.get(self, "source_ip_ranges_to_nats")
    @property
    @pulumi.getter(name="secondaryIpRangeNames")
    def secondary_ip_range_names(self) -> Optional[List[str]]:
        """
        List of the secondary ranges of the subnetwork that are allowed
        to use NAT. This can be populated only if
        `LIST_OF_SECONDARY_IP_RANGES` is one of the values in
        sourceIpRangesToNat
        """
        return pulumi.get(self, "secondary_ip_range_names")
    def _translate_property(self, prop):
        # Translate a camelCase wire-format key to the snake_case attribute name.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class RouterPeerAdvertisedIpRange(dict):
    """A single advertised IP range of a router peer: a required CIDR `range`
    and an optional `description`. Structurally identical to
    RouterBgpAdvertisedIpRange but generated as a distinct output type.
    """
    def __init__(__self__, *,
                 range: str,
                 description: Optional[str] = None):
        """
        :param str range: The IP range to advertise. The value must be a
               CIDR-formatted string.
        :param str description: User-specified description for the IP range.
        """
        pulumi.set(__self__, "range", range)
        if description is not None:
            pulumi.set(__self__, "description", description)
    @property
    @pulumi.getter
    def range(self) -> str:
        """
        The IP range to advertise. The value must be a
        CIDR-formatted string.
        """
        return pulumi.get(self, "range")
    @property
    @pulumi.getter
    def description(self) -> Optional[str]:
        """
        User-specified description for the IP range.
        """
        return pulumi.get(self, "description")
    def _translate_property(self, prop):
        # Translate a camelCase wire-format key to the snake_case attribute name.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class SecurityPolicyRule(dict):
    """One rule of a security policy: an action, a match condition, and a
    priority (all required), plus optional description and preview flag.
    """
    def __init__(__self__, *,
                 action: str,
                 match: 'outputs.SecurityPolicyRuleMatch',
                 priority: float,
                 description: Optional[str] = None,
                 preview: Optional[bool] = None):
        """
        :param str action: Action to take when `match` matches the request. Valid values:
               * "allow" : allow access to target
               * "deny(status)" : deny access to target, returns the HTTP response code specified (valid values are 403, 404 and 502)
        :param 'SecurityPolicyRuleMatchArgs' match: A match condition that incoming traffic is evaluated against.
               If it evaluates to true, the corresponding `action` is enforced. Structure is documented below.
        :param float priority: An unique positive integer indicating the priority of evaluation for a rule.
               Rules are evaluated from highest priority (lowest numerically) to lowest priority (highest numerically) in order.
        :param str description: An optional description of this rule. Max size is 64.
        :param bool preview: When set to true, the `action` specified above is not enforced.
               Stackdriver logs for requests that trigger a preview action are annotated as such.
        """
        pulumi.set(__self__, "action", action)
        pulumi.set(__self__, "match", match)
        pulumi.set(__self__, "priority", priority)
        if description is not None:
            pulumi.set(__self__, "description", description)
        if preview is not None:
            pulumi.set(__self__, "preview", preview)
    @property
    @pulumi.getter
    def action(self) -> str:
        """
        Action to take when `match` matches the request. Valid values:
        * "allow" : allow access to target
        * "deny(status)" : deny access to target, returns the HTTP response code specified (valid values are 403, 404 and 502)
        """
        return pulumi.get(self, "action")
    @property
    @pulumi.getter
    def match(self) -> 'outputs.SecurityPolicyRuleMatch':
        """
        A match condition that incoming traffic is evaluated against.
        If it evaluates to true, the corresponding `action` is enforced. Structure is documented below.
        """
        return pulumi.get(self, "match")
    @property
    @pulumi.getter
    def priority(self) -> float:
        """
        An unique positive integer indicating the priority of evaluation for a rule.
        Rules are evaluated from highest priority (lowest numerically) to lowest priority (highest numerically) in order.
        """
        return pulumi.get(self, "priority")
    @property
    @pulumi.getter
    def description(self) -> Optional[str]:
        """
        An optional description of this rule. Max size is 64.
        """
        return pulumi.get(self, "description")
    @property
    @pulumi.getter
    def preview(self) -> Optional[bool]:
        """
        When set to true, the `action` specified above is not enforced.
        Stackdriver logs for requests that trigger a preview action are annotated as such.
        """
        return pulumi.get(self, "preview")
    def _translate_property(self, prop):
        # Translate a camelCase wire-format key to the snake_case attribute name.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class SecurityPolicyRuleMatch(dict):
    """Match condition of a security policy rule: either a versioned
    expression with its config, or a user-defined CEVAL expression.
    All fields are optional and only stored when supplied.
    """
    def __init__(__self__, *,
                 config: Optional['outputs.SecurityPolicyRuleMatchConfig'] = None,
                 expr: Optional['outputs.SecurityPolicyRuleMatchExpr'] = None,
                 versioned_expr: Optional[str] = None):
        """
        :param 'SecurityPolicyRuleMatchConfigArgs' config: The configuration options available when specifying `versioned_expr`.
               This field must be specified if `versioned_expr` is specified and cannot be specified if `versioned_expr` is not specified.
               Structure is documented below.
        :param 'SecurityPolicyRuleMatchExprArgs' expr: User defined CEVAL expression. A CEVAL expression is used to specify match criteria
               such as origin.ip, source.region_code and contents in the request header.
               Structure is documented below.
        :param str versioned_expr: Predefined rule expression. If this field is specified, `config` must also be specified.
               Available options:
               * SRC_IPS_V1: Must specify the corresponding `src_ip_ranges` field in `config`.
        """
        if config is not None:
            pulumi.set(__self__, "config", config)
        if expr is not None:
            pulumi.set(__self__, "expr", expr)
        if versioned_expr is not None:
            pulumi.set(__self__, "versioned_expr", versioned_expr)
    @property
    @pulumi.getter
    def config(self) -> Optional['outputs.SecurityPolicyRuleMatchConfig']:
        """
        The configuration options available when specifying `versioned_expr`.
        This field must be specified if `versioned_expr` is specified and cannot be specified if `versioned_expr` is not specified.
        Structure is documented below.
        """
        return pulumi.get(self, "config")
    @property
    @pulumi.getter
    def expr(self) -> Optional['outputs.SecurityPolicyRuleMatchExpr']:
        """
        User defined CEVAL expression. A CEVAL expression is used to specify match criteria
        such as origin.ip, source.region_code and contents in the request header.
        Structure is documented below.
        """
        return pulumi.get(self, "expr")
    @property
    @pulumi.getter(name="versionedExpr")
    def versioned_expr(self) -> Optional[str]:
        """
        Predefined rule expression. If this field is specified, `config` must also be specified.
        Available options:
        * SRC_IPS_V1: Must specify the corresponding `src_ip_ranges` field in `config`.
        """
        return pulumi.get(self, "versioned_expr")
    def _translate_property(self, prop):
        # Translate a camelCase wire-format key to the snake_case attribute name.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class SecurityPolicyRuleMatchConfig(dict):
    """Config for a versioned match expression: the required list of source
    IP ranges (CIDR) to match against inbound traffic.
    """
    def __init__(__self__, *,
                 src_ip_ranges: List[str]):
        """
        :param List[str] src_ip_ranges: Set of IP addresses or ranges (IPV4 or IPV6) in CIDR notation
               to match against inbound traffic. There is a limit of 10 IP ranges per rule. A value of '\*' matches all IPs
               (can be used to override the default behavior).
        """
        pulumi.set(__self__, "src_ip_ranges", src_ip_ranges)
    @property
    @pulumi.getter(name="srcIpRanges")
    def src_ip_ranges(self) -> List[str]:
        """
        Set of IP addresses or ranges (IPV4 or IPV6) in CIDR notation
        to match against inbound traffic. There is a limit of 10 IP ranges per rule. A value of '\*' matches all IPs
        (can be used to override the default behavior).
        """
        return pulumi.get(self, "src_ip_ranges")
    def _translate_property(self, prop):
        # Translate a camelCase wire-format key to the snake_case attribute name.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class SecurityPolicyRuleMatchExpr(dict):
    """A user-defined match expression: one required Common Expression
    Language (CEL) expression string.
    """
    def __init__(__self__, *,
                 expression: str):
        """
        :param str expression: Textual representation of an expression in Common Expression Language syntax.
               The application context of the containing message determines which well-known feature set of CEL is supported.
        """
        pulumi.set(__self__, "expression", expression)
    @property
    @pulumi.getter
    def expression(self) -> str:
        """
        Textual representation of an expression in Common Expression Language syntax.
        The application context of the containing message determines which well-known feature set of CEL is supported.
        """
        return pulumi.get(self, "expression")
    def _translate_property(self, prop):
        # Translate a camelCase wire-format key to the snake_case attribute name.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class SecurityScanConfigAuthentication(dict):
    """Authentication configuration for a security scan: either a custom
    account or a Google account. Each is stored only when supplied.
    """
    def __init__(__self__, *,
                 custom_account: Optional['outputs.SecurityScanConfigAuthenticationCustomAccount'] = None,
                 google_account: Optional['outputs.SecurityScanConfigAuthenticationGoogleAccount'] = None):
        """
        :param 'SecurityScanConfigAuthenticationCustomAccountArgs' custom_account: Describes authentication configuration that uses a custom account.
               Structure is documented below.
        :param 'SecurityScanConfigAuthenticationGoogleAccountArgs' google_account: Describes authentication configuration that uses a Google account.
               Structure is documented below.
        """
        if custom_account is not None:
            pulumi.set(__self__, "custom_account", custom_account)
        if google_account is not None:
            pulumi.set(__self__, "google_account", google_account)
    @property
    @pulumi.getter(name="customAccount")
    def custom_account(self) -> Optional['outputs.SecurityScanConfigAuthenticationCustomAccount']:
        """
        Describes authentication configuration that uses a custom account.
        Structure is documented below.
        """
        return pulumi.get(self, "custom_account")
    @property
    @pulumi.getter(name="googleAccount")
    def google_account(self) -> Optional['outputs.SecurityScanConfigAuthenticationGoogleAccount']:
        """
        Describes authentication configuration that uses a Google account.
        Structure is documented below.
        """
        return pulumi.get(self, "google_account")
    def _translate_property(self, prop):
        # Translate a camelCase wire-format key to the snake_case attribute name.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class SecurityScanConfigAuthenticationCustomAccount(dict):
    """Custom-account scan authentication: login URL, password, and username
    (all required). The password is a sensitive value.
    """
    def __init__(__self__, *,
                 login_url: str,
                 password: str,
                 username: str):
        """
        :param str login_url: The login form URL of the website.
        :param str password: The password of the custom account. The credential is stored encrypted
               in GCP.
               **Note**: This property is sensitive and will not be displayed in the plan.
        :param str username: The user name of the custom account.
        """
        pulumi.set(__self__, "login_url", login_url)
        pulumi.set(__self__, "password", password)
        pulumi.set(__self__, "username", username)
    @property
    @pulumi.getter(name="loginUrl")
    def login_url(self) -> str:
        """
        The login form URL of the website.
        """
        return pulumi.get(self, "login_url")
    @property
    @pulumi.getter
    def password(self) -> str:
        """
        The password of the custom account. The credential is stored encrypted
        in GCP.
        **Note**: This property is sensitive and will not be displayed in the plan.
        """
        return pulumi.get(self, "password")
    @property
    @pulumi.getter
    def username(self) -> str:
        """
        The user name of the custom account.
        """
        return pulumi.get(self, "username")
    def _translate_property(self, prop):
        # Translate a camelCase wire-format key to the snake_case attribute name.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class SecurityScanConfigAuthenticationGoogleAccount(dict):
    """Google-account scan authentication: password and username (both
    required). The password is a sensitive value.
    """
    def __init__(__self__, *,
                 password: str,
                 username: str):
        """
        :param str password: The password of the custom account. The credential is stored encrypted
               in GCP.
               **Note**: This property is sensitive and will not be displayed in the plan.
        :param str username: The user name of the custom account.
        """
        pulumi.set(__self__, "password", password)
        pulumi.set(__self__, "username", username)
    @property
    @pulumi.getter
    def password(self) -> str:
        """
        The password of the custom account. The credential is stored encrypted
        in GCP.
        **Note**: This property is sensitive and will not be displayed in the plan.
        """
        return pulumi.get(self, "password")
    @property
    @pulumi.getter
    def username(self) -> str:
        """
        The user name of the custom account.
        """
        return pulumi.get(self, "username")
    def _translate_property(self, prop):
        # Translate a camelCase wire-format key to the snake_case attribute name.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class SecurityScanConfigSchedule(dict):
    """Schedule of a security scan: the required interval between runs in
    days and an optional next-run timestamp.
    """
    def __init__(__self__, *,
                 interval_duration_days: float,
                 schedule_time: Optional[str] = None):
        """
        :param float interval_duration_days: The duration of time between executions in days
        :param str schedule_time: A timestamp indicates when the next run will be scheduled. The value is refreshed
               by the server after each run. If unspecified, it will default to current server time,
               which means the scan will be scheduled to start immediately.
        """
        pulumi.set(__self__, "interval_duration_days", interval_duration_days)
        if schedule_time is not None:
            pulumi.set(__self__, "schedule_time", schedule_time)
    @property
    @pulumi.getter(name="intervalDurationDays")
    def interval_duration_days(self) -> float:
        """
        The duration of time between executions in days
        """
        return pulumi.get(self, "interval_duration_days")
    @property
    @pulumi.getter(name="scheduleTime")
    def schedule_time(self) -> Optional[str]:
        """
        A timestamp indicates when the next run will be scheduled. The value is refreshed
        by the server after each run. If unspecified, it will default to current server time,
        which means the scan will be scheduled to start immediately.
        """
        return pulumi.get(self, "schedule_time")
    def _translate_property(self, prop):
        # Translate a camelCase wire-format key to the snake_case attribute name.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class SnapshotSnapshotEncryptionKey(dict):
    """Encryption key of a snapshot: a KMS key self-link, a KMS service
    account, a raw customer-supplied key (sensitive), and/or the server-
    computed SHA-256 of the key. All fields optional; stored only when
    supplied.
    """
    def __init__(__self__, *,
                 kms_key_self_link: Optional[str] = None,
                 kms_key_service_account: Optional[str] = None,
                 raw_key: Optional[str] = None,
                 sha256: Optional[str] = None):
        """
        :param str kms_key_self_link: The name of the encryption key that is stored in Google Cloud KMS.
        :param str kms_key_service_account: The service account used for the encryption request for the given KMS key.
               If absent, the Compute Engine Service Agent service account is used.
        :param str raw_key: Specifies a 256-bit customer-supplied encryption key, encoded in
               RFC 4648 base64 to either encrypt or decrypt this resource.
               **Note**: This property is sensitive and will not be displayed in the plan.
        :param str sha256: -
               The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
               encryption key that protects this resource.
        """
        if kms_key_self_link is not None:
            pulumi.set(__self__, "kms_key_self_link", kms_key_self_link)
        if kms_key_service_account is not None:
            pulumi.set(__self__, "kms_key_service_account", kms_key_service_account)
        if raw_key is not None:
            pulumi.set(__self__, "raw_key", raw_key)
        if sha256 is not None:
            pulumi.set(__self__, "sha256", sha256)
    @property
    @pulumi.getter(name="kmsKeySelfLink")
    def kms_key_self_link(self) -> Optional[str]:
        """
        The name of the encryption key that is stored in Google Cloud KMS.
        """
        return pulumi.get(self, "kms_key_self_link")
    @property
    @pulumi.getter(name="kmsKeyServiceAccount")
    def kms_key_service_account(self) -> Optional[str]:
        """
        The service account used for the encryption request for the given KMS key.
        If absent, the Compute Engine Service Agent service account is used.
        """
        return pulumi.get(self, "kms_key_service_account")
    @property
    @pulumi.getter(name="rawKey")
    def raw_key(self) -> Optional[str]:
        """
        Specifies a 256-bit customer-supplied encryption key, encoded in
        RFC 4648 base64 to either encrypt or decrypt this resource.
        **Note**: This property is sensitive and will not be displayed in the plan.
        """
        return pulumi.get(self, "raw_key")
    @property
    @pulumi.getter
    def sha256(self) -> Optional[str]:
        """
        -
        The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
        encryption key that protects this resource.
        """
        return pulumi.get(self, "sha256")
    def _translate_property(self, prop):
        # Translate a camelCase wire-format key to the snake_case attribute name.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class SnapshotSourceDiskEncryptionKey(dict):
    def __init__(__self__, *,
                 kms_key_service_account: Optional[str] = None,
                 raw_key: Optional[str] = None):
        """
        :param str kms_key_service_account: Service account used for the encryption request
               for the given KMS key. When absent, the Compute Engine Service Agent service
               account is used.
        :param str raw_key: A 256-bit customer-supplied encryption key, encoded in RFC 4648
               base64, used to either encrypt or decrypt this resource.
               **Note**: This property is sensitive and will not be displayed in the plan.
        """
        # Record only the properties that were actually supplied.
        for attr, value in (
                ("kms_key_service_account", kms_key_service_account),
                ("raw_key", raw_key)):
            if value is not None:
                pulumi.set(__self__, attr, value)

    @property
    @pulumi.getter(name="kmsKeyServiceAccount")
    def kms_key_service_account(self) -> Optional[str]:
        """
        Service account used for the encryption request for the given KMS key.
        When absent, the Compute Engine Service Agent service account is used.
        """
        return pulumi.get(self, "kms_key_service_account")

    @property
    @pulumi.getter(name="rawKey")
    def raw_key(self) -> Optional[str]:
        """
        A 256-bit customer-supplied encryption key, encoded in RFC 4648 base64,
        used to either encrypt or decrypt this resource.
        **Note**: This property is sensitive and will not be displayed in the plan.
        """
        return pulumi.get(self, "raw_key")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute name.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class SubnetworkIAMBindingCondition(dict):
    def __init__(__self__, *,
                 expression: str,
                 title: str,
                 description: Optional[str] = None):
        """
        :param str expression: Textual representation of an expression in
               Common Expression Language syntax.
        :param str title: A title for the expression, i.e. a short string
               describing its purpose.
        :param str description: An optional description of the expression: longer
               text describing it, e.g. when hovered over in a UI.
        """
        # expression and title are required; description is optional.
        for attr, value in (("expression", expression), ("title", title)):
            pulumi.set(__self__, attr, value)
        if description is not None:
            pulumi.set(__self__, "description", description)

    @property
    @pulumi.getter
    def expression(self) -> str:
        """Textual representation of an expression in Common Expression Language syntax."""
        return pulumi.get(self, "expression")

    @property
    @pulumi.getter
    def title(self) -> str:
        """A title for the expression, i.e. a short string describing its purpose."""
        return pulumi.get(self, "title")

    @property
    @pulumi.getter
    def description(self) -> Optional[str]:
        """
        An optional description of the expression: longer text describing the
        expression, e.g. when hovered over in a UI.
        """
        return pulumi.get(self, "description")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute name.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class SubnetworkIAMMemberCondition(dict):
    def __init__(__self__, *,
                 expression: str,
                 title: str,
                 description: Optional[str] = None):
        """
        :param str expression: Textual representation of an expression in
               Common Expression Language syntax.
        :param str title: A title for the expression, i.e. a short string
               describing its purpose.
        :param str description: An optional description of the expression: longer
               text describing it, e.g. when hovered over in a UI.
        """
        # expression and title are required; description is optional.
        for attr, value in (("expression", expression), ("title", title)):
            pulumi.set(__self__, attr, value)
        if description is not None:
            pulumi.set(__self__, "description", description)

    @property
    @pulumi.getter
    def expression(self) -> str:
        """Textual representation of an expression in Common Expression Language syntax."""
        return pulumi.get(self, "expression")

    @property
    @pulumi.getter
    def title(self) -> str:
        """A title for the expression, i.e. a short string describing its purpose."""
        return pulumi.get(self, "title")

    @property
    @pulumi.getter
    def description(self) -> Optional[str]:
        """
        An optional description of the expression: longer text describing the
        expression, e.g. when hovered over in a UI.
        """
        return pulumi.get(self, "description")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute name.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class SubnetworkLogConfig(dict):
    def __init__(__self__, *,
                 aggregation_interval: Optional[str] = None,
                 filter_expr: Optional[str] = None,
                 flow_sampling: Optional[float] = None,
                 metadata: Optional[str] = None,
                 metadata_fields: Optional[List[str]] = None):
        """
        :param str aggregation_interval: Can only be specified if VPC flow logging for this
               subnetwork is enabled. Toggles the aggregation interval for collecting flow
               logs; increasing the interval reduces the amount of generated flow logs for
               long-lasting connections. Default is an interval of 5 seconds per connection.
               Default value is `INTERVAL_5_SEC`.
               Possible values are `INTERVAL_5_SEC`, `INTERVAL_30_SEC`, `INTERVAL_1_MIN`, `INTERVAL_5_MIN`, `INTERVAL_10_MIN`, and `INTERVAL_15_MIN`.
        :param str filter_expr: Export filter defining which VPC flow logs should be logged,
               as a CEL expression. See
               https://cloud.google.com/vpc/docs/flow-logs#filtering for formatting details.
        :param float flow_sampling: Can only be specified if VPC flow logging for this
               subnetwork is enabled. Must be in [0, 1]. Sets the sampling rate of VPC flow
               logs within the subnetwork: 1.0 reports all collected logs, 0.0 reports none.
               Default is 0.5, i.e. half of all collected logs are reported.
        :param str metadata: Can only be specified if VPC flow logging for this subnetwork
               is enabled. Configures whether metadata fields should be added to the
               reported VPC flow logs.
               Default value is `INCLUDE_ALL_METADATA`.
               Possible values are `EXCLUDE_ALL_METADATA`, `INCLUDE_ALL_METADATA`, and `CUSTOM_METADATA`.
        :param List[str] metadata_fields: Metadata fields to add to reported logs. Can only
               be specified if VPC flow logs for this subnetwork are enabled and "metadata"
               is set to CUSTOM_METADATA.
        """
        # Record only the properties that were actually supplied.
        for attr, value in (
                ("aggregation_interval", aggregation_interval),
                ("filter_expr", filter_expr),
                ("flow_sampling", flow_sampling),
                ("metadata", metadata),
                ("metadata_fields", metadata_fields)):
            if value is not None:
                pulumi.set(__self__, attr, value)

    @property
    @pulumi.getter(name="aggregationInterval")
    def aggregation_interval(self) -> Optional[str]:
        """
        Can only be specified if VPC flow logging for this subnetwork is enabled.
        Toggles the aggregation interval for collecting flow logs; increasing the
        interval reduces the amount of generated flow logs for long-lasting
        connections. Default is an interval of 5 seconds per connection.
        Default value is `INTERVAL_5_SEC`.
        Possible values are `INTERVAL_5_SEC`, `INTERVAL_30_SEC`, `INTERVAL_1_MIN`, `INTERVAL_5_MIN`, `INTERVAL_10_MIN`, and `INTERVAL_15_MIN`.
        """
        return pulumi.get(self, "aggregation_interval")

    @property
    @pulumi.getter(name="filterExpr")
    def filter_expr(self) -> Optional[str]:
        """
        Export filter defining which VPC flow logs should be logged, as a CEL
        expression. See https://cloud.google.com/vpc/docs/flow-logs#filtering
        for details on how to format this field.
        """
        return pulumi.get(self, "filter_expr")

    @property
    @pulumi.getter(name="flowSampling")
    def flow_sampling(self) -> Optional[float]:
        """
        Can only be specified if VPC flow logging for this subnetwork is enabled.
        Must be in [0, 1]. Sets the sampling rate of VPC flow logs within the
        subnetwork: 1.0 reports all collected logs, 0.0 reports none. Default is
        0.5, i.e. half of all collected logs are reported.
        """
        return pulumi.get(self, "flow_sampling")

    @property
    @pulumi.getter
    def metadata(self) -> Optional[str]:
        """
        Can only be specified if VPC flow logging for this subnetwork is enabled.
        Configures whether metadata fields should be added to the reported VPC
        flow logs.
        Default value is `INCLUDE_ALL_METADATA`.
        Possible values are `EXCLUDE_ALL_METADATA`, `INCLUDE_ALL_METADATA`, and `CUSTOM_METADATA`.
        """
        return pulumi.get(self, "metadata")

    @property
    @pulumi.getter(name="metadataFields")
    def metadata_fields(self) -> Optional[List[str]]:
        """
        Metadata fields to add to reported logs. Can only be specified if VPC
        flow logs for this subnetwork are enabled and "metadata" is set to
        CUSTOM_METADATA.
        """
        return pulumi.get(self, "metadata_fields")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute name.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class SubnetworkSecondaryIpRange(dict):
    def __init__(__self__, *,
                 ip_cidr_range: str,
                 range_name: str):
        """
        :param str ip_cidr_range: Range of IP addresses belonging to this subnetwork
               secondary range; provide it when creating the subnetwork. Ranges must be
               unique and non-overlapping with all primary and secondary IP ranges
               within a network. Only IPv4 is supported.
        :param str range_name: Name associated with this subnetwork secondary range,
               used when adding an alias IP range to a VM instance. Must be 1-63
               characters long, comply with RFC1035, and be unique within the subnetwork.
        """
        # Both fields are required.
        for attr, value in (("ip_cidr_range", ip_cidr_range), ("range_name", range_name)):
            pulumi.set(__self__, attr, value)

    @property
    @pulumi.getter(name="ipCidrRange")
    def ip_cidr_range(self) -> str:
        """
        Range of IP addresses belonging to this subnetwork secondary range;
        provide it when creating the subnetwork. Ranges must be unique and
        non-overlapping with all primary and secondary IP ranges within a
        network. Only IPv4 is supported.
        """
        return pulumi.get(self, "ip_cidr_range")

    @property
    @pulumi.getter(name="rangeName")
    def range_name(self) -> str:
        """
        Name associated with this subnetwork secondary range, used when adding
        an alias IP range to a VM instance. Must be 1-63 characters long,
        comply with RFC1035, and be unique within the subnetwork.
        """
        return pulumi.get(self, "range_name")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute name.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class URLMapDefaultRouteAction(dict):
    def __init__(__self__, *,
                 cors_policy: Optional['outputs.URLMapDefaultRouteActionCorsPolicy'] = None,
                 fault_injection_policy: Optional['outputs.URLMapDefaultRouteActionFaultInjectionPolicy'] = None,
                 request_mirror_policy: Optional['outputs.URLMapDefaultRouteActionRequestMirrorPolicy'] = None,
                 retry_policy: Optional['outputs.URLMapDefaultRouteActionRetryPolicy'] = None,
                 timeout: Optional['outputs.URLMapDefaultRouteActionTimeout'] = None,
                 url_rewrite: Optional['outputs.URLMapDefaultRouteActionUrlRewrite'] = None,
                 weighted_backend_services: Optional[List['outputs.URLMapDefaultRouteActionWeightedBackendService']] = None):
        """
        :param 'URLMapDefaultRouteActionCorsPolicyArgs' cors_policy: Specification for allowing
               client-side cross-origin requests. See the
               [W3C Recommendation for Cross Origin Resource Sharing](https://www.w3.org/TR/cors/).
               Structure is documented below.
        :param 'URLMapDefaultRouteActionFaultInjectionPolicyArgs' fault_injection_policy: Specification
               for fault injection introduced into traffic to test the resiliency of clients to
               backend service failure. As part of fault injection, when clients send requests to a
               backend service, delays can be introduced by the Loadbalancer on a percentage of
               requests before sending those requests to the backend service; similarly, requests
               from clients can be aborted by the Loadbalancer for a percentage of requests.
               timeout and retryPolicy will be ignored by clients configured with a
               faultInjectionPolicy. Structure is documented below.
        :param 'URLMapDefaultRouteActionRequestMirrorPolicyArgs' request_mirror_policy: Policy on how
               requests intended for the route's backends are shadowed to a separate mirrored
               backend service. The Loadbalancer does not wait for responses from the shadow
               service; prior to sending traffic there, the host / authority header is suffixed
               with -shadow. Structure is documented below.
        :param 'URLMapDefaultRouteActionRetryPolicyArgs' retry_policy: Retry policy associated with
               this route. Structure is documented below.
        :param 'URLMapDefaultRouteActionTimeoutArgs' timeout: Timeout for the selected route,
               computed from the time the request has been fully processed (i.e. end-of-stream)
               until the response has been completely processed, including all retries. If not
               specified, the largest timeout among all backend services associated with the route
               is used. Structure is documented below.
        :param 'URLMapDefaultRouteActionUrlRewriteArgs' url_rewrite: Spec to modify the URL of the
               request prior to forwarding it to the matched service. Structure is documented below.
        :param List['URLMapDefaultRouteActionWeightedBackendServiceArgs'] weighted_backend_services:
               Weighted backend services to send traffic to when a route match occurs. The weights
               determine the fraction of traffic flowing to each corresponding backend service. If
               all traffic needs to go to a single backend service, there must be one
               weightedBackendService with weight set to a non-0 number. Once a backendService is
               identified and before forwarding the request to it, advanced routing actions like
               URL rewrites and header transformations are applied depending on additional settings
               specified in this HttpRouteAction. Structure is documented below.
        """
        # Record only the properties that were actually supplied.
        for attr, value in (
                ("cors_policy", cors_policy),
                ("fault_injection_policy", fault_injection_policy),
                ("request_mirror_policy", request_mirror_policy),
                ("retry_policy", retry_policy),
                ("timeout", timeout),
                ("url_rewrite", url_rewrite),
                ("weighted_backend_services", weighted_backend_services)):
            if value is not None:
                pulumi.set(__self__, attr, value)

    @property
    @pulumi.getter(name="corsPolicy")
    def cors_policy(self) -> Optional['outputs.URLMapDefaultRouteActionCorsPolicy']:
        """
        Specification for allowing client-side cross-origin requests. See the
        [W3C Recommendation for Cross Origin Resource Sharing](https://www.w3.org/TR/cors/).
        Structure is documented below.
        """
        return pulumi.get(self, "cors_policy")

    @property
    @pulumi.getter(name="faultInjectionPolicy")
    def fault_injection_policy(self) -> Optional['outputs.URLMapDefaultRouteActionFaultInjectionPolicy']:
        """
        Specification for fault injection introduced into traffic to test the
        resiliency of clients to backend service failure. As part of fault
        injection, when clients send requests to a backend service, delays can
        be introduced by the Loadbalancer on a percentage of requests before
        sending those requests to the backend service; similarly, requests from
        clients can be aborted by the Loadbalancer for a percentage of requests.
        timeout and retryPolicy will be ignored by clients configured with a
        faultInjectionPolicy.
        Structure is documented below.
        """
        return pulumi.get(self, "fault_injection_policy")

    @property
    @pulumi.getter(name="requestMirrorPolicy")
    def request_mirror_policy(self) -> Optional['outputs.URLMapDefaultRouteActionRequestMirrorPolicy']:
        """
        Policy on how requests intended for the route's backends are shadowed
        to a separate mirrored backend service. The Loadbalancer does not wait
        for responses from the shadow service; prior to sending traffic there,
        the host / authority header is suffixed with -shadow.
        Structure is documented below.
        """
        return pulumi.get(self, "request_mirror_policy")

    @property
    @pulumi.getter(name="retryPolicy")
    def retry_policy(self) -> Optional['outputs.URLMapDefaultRouteActionRetryPolicy']:
        """
        Retry policy associated with this route.
        Structure is documented below.
        """
        return pulumi.get(self, "retry_policy")

    @property
    @pulumi.getter
    def timeout(self) -> Optional['outputs.URLMapDefaultRouteActionTimeout']:
        """
        Timeout for the selected route, computed from the time the request has
        been fully processed (i.e. end-of-stream) until the response has been
        completely processed, including all retries. If not specified, the
        largest timeout among all backend services associated with the route is
        used.
        Structure is documented below.
        """
        return pulumi.get(self, "timeout")

    @property
    @pulumi.getter(name="urlRewrite")
    def url_rewrite(self) -> Optional['outputs.URLMapDefaultRouteActionUrlRewrite']:
        """
        Spec to modify the URL of the request prior to forwarding it to the
        matched service.
        Structure is documented below.
        """
        return pulumi.get(self, "url_rewrite")

    @property
    @pulumi.getter(name="weightedBackendServices")
    def weighted_backend_services(self) -> Optional[List['outputs.URLMapDefaultRouteActionWeightedBackendService']]:
        """
        Weighted backend services to send traffic to when a route match occurs.
        The weights determine the fraction of traffic flowing to each
        corresponding backend service. If all traffic needs to go to a single
        backend service, there must be one weightedBackendService with weight
        set to a non-0 number. Once a backendService is identified and before
        forwarding the request to it, advanced routing actions like URL
        rewrites and header transformations are applied depending on additional
        settings specified in this HttpRouteAction.
        Structure is documented below.
        """
        return pulumi.get(self, "weighted_backend_services")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute name.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class URLMapDefaultRouteActionCorsPolicy(dict):
    def __init__(__self__, *,
                 allow_credentials: Optional[bool] = None,
                 allow_headers: Optional[List[str]] = None,
                 allow_methods: Optional[List[str]] = None,
                 allow_origin_regexes: Optional[List[str]] = None,
                 allow_origins: Optional[List[str]] = None,
                 disabled: Optional[bool] = None,
                 expose_headers: Optional[List[str]] = None,
                 max_age: Optional[float] = None):
        """
        :param bool allow_credentials: In response to a preflight request, setting this to
               true indicates that the actual request can include user credentials. This
               translates to the Access-Control-Allow-Credentials header.
        :param List[str] allow_headers: Content for the Access-Control-Allow-Headers header.
        :param List[str] allow_methods: Content for the Access-Control-Allow-Methods header.
        :param List[str] allow_origin_regexes: Regular expression patterns that match allowed
               origins. For regular expression grammar see
               en.cppreference.com/w/cpp/regex/ecmascript. An origin is allowed if it matches
               either an item in allowOrigins or an item in allowOriginRegexes.
        :param List[str] allow_origins: Origins that will be allowed to do CORS requests. An
               origin is allowed if it matches either an item in allowOrigins or an item in
               allowOriginRegexes.
        :param bool disabled: If true, the CORS policy is disabled. The default value is
               false, which indicates that the CORS policy is in effect.
        :param List[str] expose_headers: Content for the Access-Control-Expose-Headers header.
        :param float max_age: How long results of a preflight request can be cached in
               seconds. This translates to the Access-Control-Max-Age header.
        """
        # Record only the properties that were actually supplied.
        for attr, value in (
                ("allow_credentials", allow_credentials),
                ("allow_headers", allow_headers),
                ("allow_methods", allow_methods),
                ("allow_origin_regexes", allow_origin_regexes),
                ("allow_origins", allow_origins),
                ("disabled", disabled),
                ("expose_headers", expose_headers),
                ("max_age", max_age)):
            if value is not None:
                pulumi.set(__self__, attr, value)

    @property
    @pulumi.getter(name="allowCredentials")
    def allow_credentials(self) -> Optional[bool]:
        """
        In response to a preflight request, setting this to true indicates that
        the actual request can include user credentials. This translates to the
        Access-Control-Allow-Credentials header.
        """
        return pulumi.get(self, "allow_credentials")

    @property
    @pulumi.getter(name="allowHeaders")
    def allow_headers(self) -> Optional[List[str]]:
        """Content for the Access-Control-Allow-Headers header."""
        return pulumi.get(self, "allow_headers")

    @property
    @pulumi.getter(name="allowMethods")
    def allow_methods(self) -> Optional[List[str]]:
        """Content for the Access-Control-Allow-Methods header."""
        return pulumi.get(self, "allow_methods")

    @property
    @pulumi.getter(name="allowOriginRegexes")
    def allow_origin_regexes(self) -> Optional[List[str]]:
        """
        Regular expression patterns that match allowed origins. For regular
        expression grammar see en.cppreference.com/w/cpp/regex/ecmascript.
        An origin is allowed if it matches either an item in allowOrigins or
        an item in allowOriginRegexes.
        """
        return pulumi.get(self, "allow_origin_regexes")

    @property
    @pulumi.getter(name="allowOrigins")
    def allow_origins(self) -> Optional[List[str]]:
        """
        Origins that will be allowed to do CORS requests. An origin is allowed
        if it matches either an item in allowOrigins or an item in
        allowOriginRegexes.
        """
        return pulumi.get(self, "allow_origins")

    @property
    @pulumi.getter
    def disabled(self) -> Optional[bool]:
        """
        If true, the CORS policy is disabled. The default value is false, which
        indicates that the CORS policy is in effect.
        """
        return pulumi.get(self, "disabled")

    @property
    @pulumi.getter(name="exposeHeaders")
    def expose_headers(self) -> Optional[List[str]]:
        """Content for the Access-Control-Expose-Headers header."""
        return pulumi.get(self, "expose_headers")

    @property
    @pulumi.getter(name="maxAge")
    def max_age(self) -> Optional[float]:
        """
        How long results of a preflight request can be cached in seconds.
        This translates to the Access-Control-Max-Age header.
        """
        return pulumi.get(self, "max_age")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute name.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class URLMapDefaultRouteActionFaultInjectionPolicy(dict):
    def __init__(__self__, *,
                 abort: Optional['outputs.URLMapDefaultRouteActionFaultInjectionPolicyAbort'] = None,
                 delay: Optional['outputs.URLMapDefaultRouteActionFaultInjectionPolicyDelay'] = None):
        """
        :param 'URLMapDefaultRouteActionFaultInjectionPolicyAbortArgs' abort: Specification for
               how client requests are aborted as part of fault injection.
               Structure is documented below.
        :param 'URLMapDefaultRouteActionFaultInjectionPolicyDelayArgs' delay: Specification for
               how client requests are delayed as part of fault injection, before being sent
               to a backend service. Structure is documented below.
        """
        # Record only the properties that were actually supplied.
        for attr, value in (("abort", abort), ("delay", delay)):
            if value is not None:
                pulumi.set(__self__, attr, value)

    @property
    @pulumi.getter
    def abort(self) -> Optional['outputs.URLMapDefaultRouteActionFaultInjectionPolicyAbort']:
        """
        Specification for how client requests are aborted as part of fault injection.
        Structure is documented below.
        """
        return pulumi.get(self, "abort")

    @property
    @pulumi.getter
    def delay(self) -> Optional['outputs.URLMapDefaultRouteActionFaultInjectionPolicyDelay']:
        """
        Specification for how client requests are delayed as part of fault
        injection, before being sent to a backend service.
        Structure is documented below.
        """
        return pulumi.get(self, "delay")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute name.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class URLMapDefaultRouteActionFaultInjectionPolicyAbort(dict):
    def __init__(__self__, *,
                 http_status: Optional[float] = None,
                 percentage: Optional[float] = None):
        """
        :param float http_status: HTTP status code used to abort the request. Must be
               between 200 and 599 inclusive.
        :param float percentage: Percentage of traffic (connections/operations/requests)
               that will be aborted as part of fault injection. Must be between 0.0 and
               100.0 inclusive.
        """
        # Record only the properties that were actually supplied.
        for attr, value in (("http_status", http_status), ("percentage", percentage)):
            if value is not None:
                pulumi.set(__self__, attr, value)

    @property
    @pulumi.getter(name="httpStatus")
    def http_status(self) -> Optional[float]:
        """
        HTTP status code used to abort the request.
        Must be between 200 and 599 inclusive.
        """
        return pulumi.get(self, "http_status")

    @property
    @pulumi.getter
    def percentage(self) -> Optional[float]:
        """
        Percentage of traffic (connections/operations/requests) that will be
        aborted as part of fault injection. Must be between 0.0 and 100.0
        inclusive.
        """
        return pulumi.get(self, "percentage")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute name.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class URLMapDefaultRouteActionFaultInjectionPolicyDelay(dict):
    def __init__(__self__, *,
                 fixed_delay: Optional['outputs.URLMapDefaultRouteActionFaultInjectionPolicyDelayFixedDelay'] = None,
                 percentage: Optional[float] = None):
        """
        :param 'URLMapDefaultRouteActionFaultInjectionPolicyDelayFixedDelayArgs' fixed_delay:
               Value of the fixed delay interval. Structure is documented below.
        :param float percentage: Percentage of traffic (connections/operations/requests)
               subject to fault injection. Must be between 0.0 and 100.0 inclusive.
        """
        # Record only the properties that were actually supplied.
        for attr, value in (("fixed_delay", fixed_delay), ("percentage", percentage)):
            if value is not None:
                pulumi.set(__self__, attr, value)

    @property
    @pulumi.getter(name="fixedDelay")
    def fixed_delay(self) -> Optional['outputs.URLMapDefaultRouteActionFaultInjectionPolicyDelayFixedDelay']:
        """
        Value of the fixed delay interval.
        Structure is documented below.
        """
        return pulumi.get(self, "fixed_delay")

    @property
    @pulumi.getter
    def percentage(self) -> Optional[float]:
        """
        Percentage of traffic (connections/operations/requests) subject to
        fault injection. Must be between 0.0 and 100.0 inclusive.
        """
        return pulumi.get(self, "percentage")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute name.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class URLMapDefaultRouteActionFaultInjectionPolicyDelayFixedDelay(dict):
    def __init__(__self__, *,
                 nanos: Optional[float] = None,
                 seconds: Optional[str] = None):
        """
        :param float nanos: Span of time that's a fraction of a second at nanosecond
               resolution. Durations less than one second are represented with a 0
               seconds field and a positive nanos field. Must be from 0 to 999,999,999
               inclusive.
        :param str seconds: Span of time at a resolution of a second. Must be from 0 to
               315,576,000,000 inclusive. Note: these bounds are computed from:
               60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years
        """
        # Record only the properties that were actually supplied.
        for attr, value in (("nanos", nanos), ("seconds", seconds)):
            if value is not None:
                pulumi.set(__self__, attr, value)

    @property
    @pulumi.getter
    def nanos(self) -> Optional[float]:
        """
        Span of time that's a fraction of a second at nanosecond resolution.
        Durations less than one second are represented with a 0 seconds field
        and a positive nanos field. Must be from 0 to 999,999,999 inclusive.
        """
        return pulumi.get(self, "nanos")

    @property
    @pulumi.getter
    def seconds(self) -> Optional[str]:
        """
        Span of time at a resolution of a second. Must be from 0 to
        315,576,000,000 inclusive. Note: these bounds are computed from:
        60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years
        """
        return pulumi.get(self, "seconds")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute name.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class URLMapDefaultRouteActionRequestMirrorPolicy(dict):
    def __init__(__self__, *,
                 backend_service: str):
        """
        :param str backend_service: Full or partial URL to the BackendService resource
               being mirrored to.
        """
        # backend_service is required, so it is always recorded.
        pulumi.set(__self__, "backend_service", backend_service)

    @property
    @pulumi.getter(name="backendService")
    def backend_service(self) -> str:
        """Full or partial URL to the BackendService resource being mirrored to."""
        return pulumi.get(self, "backend_service")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute name.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class URLMapDefaultRouteActionRetryPolicy(dict):
def __init__(__self__, *,
num_retries: Optional[float] = None,
per_try_timeout: Optional['outputs.URLMapDefaultRouteActionRetryPolicyPerTryTimeout'] = None,
retry_conditions: Optional[List[str]] = None):
"""
:param float num_retries: Specifies the allowed number retries. This number must be > 0. If not specified, defaults to 1.
:param 'URLMapDefaultRouteActionRetryPolicyPerTryTimeoutArgs' per_try_timeout: Specifies a non-zero timeout per retry attempt.
If not specified, will use the timeout set in HttpRouteAction. If timeout in HttpRouteAction is not set,
will use the largest timeout among all backend services associated with the route.
Structure is documented below.
:param List[str] retry_conditions: Specfies one or more conditions when this retry rule applies. Valid values are:
5xx: Loadbalancer will attempt a retry if the backend service responds with any 5xx response code,
or if the backend service does not respond at all, example: disconnects, reset, read timeout,
connection failure, and refused streams.
gateway-error: Similar to 5xx, but only applies to response codes 502, 503 or 504.
connect-failure: Loadbalancer will retry on failures connecting to backend services,
for example due to connection timeouts.
retriable-4xx: Loadbalancer will retry for retriable 4xx response codes.
Currently the only retriable error supported is 409.
refused-stream:Loadbalancer will retry if the backend service resets the stream with a REFUSED_STREAM error code.
This reset type indicates that it is safe to retry.
cancelled: Loadbalancer will retry if the gRPC status code in the response header is set to cancelled
deadline-exceeded: Loadbalancer will retry if the gRPC status code in the response header is set to deadline-exceeded
resource-exhausted: Loadbalancer will retry if the gRPC status code in the response header is set to resource-exhausted
unavailable: Loadbalancer will retry if the gRPC status code in the response header is set to unavailable
"""
if num_retries is not None:
pulumi.set(__self__, "num_retries", num_retries)
if per_try_timeout is not None:
pulumi.set(__self__, "per_try_timeout", per_try_timeout)
if retry_conditions is not None:
pulumi.set(__self__, "retry_conditions", retry_conditions)
    @property
    @pulumi.getter(name="numRetries")
    def num_retries(self) -> Optional[float]:
        """
        Specifies the allowed number of retries. This number must be > 0. If not specified, defaults to 1.
        """
        return pulumi.get(self, "num_retries")
    @property
    @pulumi.getter(name="perTryTimeout")
    def per_try_timeout(self) -> Optional['outputs.URLMapDefaultRouteActionRetryPolicyPerTryTimeout']:
        """
        Specifies a non-zero timeout per retry attempt.
        If not specified, will use the timeout set in HttpRouteAction. If timeout in HttpRouteAction is not set,
        will use the largest timeout among all backend services associated with the route.
        Structure is documented below.
        """
        return pulumi.get(self, "per_try_timeout")
    @property
    @pulumi.getter(name="retryConditions")
    def retry_conditions(self) -> Optional[List[str]]:
        """
        Specifies one or more conditions when this retry rule applies. Valid values are:
        5xx: Loadbalancer will attempt a retry if the backend service responds with any 5xx response code,
        or if the backend service does not respond at all, example: disconnects, reset, read timeout,
        connection failure, and refused streams.
        gateway-error: Similar to 5xx, but only applies to response codes 502, 503 or 504.
        connect-failure: Loadbalancer will retry on failures connecting to backend services,
        for example due to connection timeouts.
        retriable-4xx: Loadbalancer will retry for retriable 4xx response codes.
        Currently the only retriable error supported is 409.
        refused-stream: Loadbalancer will retry if the backend service resets the stream with a REFUSED_STREAM error code.
        This reset type indicates that it is safe to retry.
        cancelled: Loadbalancer will retry if the gRPC status code in the response header is set to cancelled
        deadline-exceeded: Loadbalancer will retry if the gRPC status code in the response header is set to deadline-exceeded
        resource-exhausted: Loadbalancer will retry if the gRPC status code in the response header is set to resource-exhausted
        unavailable: Loadbalancer will retry if the gRPC status code in the response header is set to unavailable
        """
        return pulumi.get(self, "retry_conditions")
    def _translate_property(self, prop):
        # Map a camelCase wire property name to its snake_case Python
        # attribute name; fall back to the name unchanged if unknown.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class URLMapDefaultRouteActionRetryPolicyPerTryTimeout(dict):
    """A duration (seconds plus nanos) used as the per-retry-attempt timeout
    of a URL map's defaultRouteAction retry policy.
    """
    def __init__(__self__, *,
                 nanos: Optional[float] = None,
                 seconds: Optional[str] = None):
        """
        :param float nanos: Span of time that's a fraction of a second at nanosecond resolution. Durations less than one second are
               represented with a 0 seconds field and a positive nanos field. Must be from 0 to 999,999,999 inclusive.
        :param str seconds: Span of time at a resolution of a second. Must be from 0 to 315,576,000,000 inclusive.
               Note: these bounds are computed from: 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years
        """
        # Only record fields that were explicitly supplied so unset optional
        # values stay absent rather than being stored as None.
        if nanos is not None:
            pulumi.set(__self__, "nanos", nanos)
        if seconds is not None:
            pulumi.set(__self__, "seconds", seconds)
    @property
    @pulumi.getter
    def nanos(self) -> Optional[float]:
        """
        Span of time that's a fraction of a second at nanosecond resolution. Durations less than one second are
        represented with a 0 seconds field and a positive nanos field. Must be from 0 to 999,999,999 inclusive.
        """
        return pulumi.get(self, "nanos")
    @property
    @pulumi.getter
    def seconds(self) -> Optional[str]:
        """
        Span of time at a resolution of a second. Must be from 0 to 315,576,000,000 inclusive.
        Note: these bounds are computed from: 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years
        """
        return pulumi.get(self, "seconds")
    def _translate_property(self, prop):
        # Map camelCase wire names to snake_case attribute names.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class URLMapDefaultRouteActionTimeout(dict):
    """A duration (seconds plus nanos) used as the overall timeout of a URL
    map's defaultRouteAction.
    """
    def __init__(__self__, *,
                 nanos: Optional[float] = None,
                 seconds: Optional[str] = None):
        """
        :param float nanos: Span of time that's a fraction of a second at nanosecond resolution. Durations less than one second are
               represented with a 0 seconds field and a positive nanos field. Must be from 0 to 999,999,999 inclusive.
        :param str seconds: Span of time at a resolution of a second. Must be from 0 to 315,576,000,000 inclusive.
               Note: these bounds are computed from: 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years
        """
        # Only record fields that were explicitly supplied so unset optional
        # values stay absent rather than being stored as None.
        if nanos is not None:
            pulumi.set(__self__, "nanos", nanos)
        if seconds is not None:
            pulumi.set(__self__, "seconds", seconds)
    @property
    @pulumi.getter
    def nanos(self) -> Optional[float]:
        """
        Span of time that's a fraction of a second at nanosecond resolution. Durations less than one second are
        represented with a 0 seconds field and a positive nanos field. Must be from 0 to 999,999,999 inclusive.
        """
        return pulumi.get(self, "nanos")
    @property
    @pulumi.getter
    def seconds(self) -> Optional[str]:
        """
        Span of time at a resolution of a second. Must be from 0 to 315,576,000,000 inclusive.
        Note: these bounds are computed from: 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years
        """
        return pulumi.get(self, "seconds")
    def _translate_property(self, prop):
        # Map camelCase wire names to snake_case attribute names.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class URLMapDefaultRouteActionUrlRewrite(dict):
    """URL rewrite settings applied by a URL map's defaultRouteAction before
    the request is forwarded to the selected backend service.
    """
    def __init__(__self__, *,
                 host_rewrite: Optional[str] = None,
                 path_prefix_rewrite: Optional[str] = None):
        """
        :param str host_rewrite: Prior to forwarding the request to the selected service, the request's host header is replaced
               with contents of hostRewrite.
               The value must be between 1 and 255 characters.
        :param str path_prefix_rewrite: Prior to forwarding the request to the selected backend service, the matching portion of the
               request's path is replaced by pathPrefixRewrite.
               The value must be between 1 and 1024 characters.
        """
        # Only record fields that were explicitly supplied so unset optional
        # values stay absent rather than being stored as None.
        if host_rewrite is not None:
            pulumi.set(__self__, "host_rewrite", host_rewrite)
        if path_prefix_rewrite is not None:
            pulumi.set(__self__, "path_prefix_rewrite", path_prefix_rewrite)
    @property
    @pulumi.getter(name="hostRewrite")
    def host_rewrite(self) -> Optional[str]:
        """
        Prior to forwarding the request to the selected service, the request's host header is replaced
        with contents of hostRewrite.
        The value must be between 1 and 255 characters.
        """
        return pulumi.get(self, "host_rewrite")
    @property
    @pulumi.getter(name="pathPrefixRewrite")
    def path_prefix_rewrite(self) -> Optional[str]:
        """
        Prior to forwarding the request to the selected backend service, the matching portion of the
        request's path is replaced by pathPrefixRewrite.
        The value must be between 1 and 1024 characters.
        """
        return pulumi.get(self, "path_prefix_rewrite")
    def _translate_property(self, prop):
        # Map camelCase wire names to snake_case attribute names.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class URLMapDefaultRouteActionWeightedBackendService(dict):
    """One weighted backend service entry of a URL map's defaultRouteAction:
    a backend service, an optional per-service header action, and the traffic
    weight assigned to it.
    """
    def __init__(__self__, *,
                 backend_service: Optional[str] = None,
                 header_action: Optional['outputs.URLMapDefaultRouteActionWeightedBackendServiceHeaderAction'] = None,
                 weight: Optional[float] = None):
        """
        :param str backend_service: The full or partial URL to the BackendService resource being mirrored to.
               NOTE(review): "being mirrored to" looks copied from requestMirrorPolicy by the
               code generator; for a weighted backend service this is presumably the service
               receiving the weighted traffic — verify against the upstream API docs.
        :param 'URLMapDefaultRouteActionWeightedBackendServiceHeaderActionArgs' header_action: Specifies changes to request and response headers that need to take effect for
               the selected backendService.
               headerAction specified here take effect before headerAction in the enclosing
               HttpRouteRule, PathMatcher and UrlMap.
               Structure is documented below.
        :param float weight: Specifies the fraction of traffic sent to backendService, computed as
               weight / (sum of all weightedBackendService weights in routeAction) .
               The selection of a backend service is determined only for new traffic. Once a user's request
               has been directed to a backendService, subsequent requests will be sent to the same backendService
               as determined by the BackendService's session affinity policy.
               The value must be between 0 and 1000
        """
        # Only record fields that were explicitly supplied so unset optional
        # values stay absent rather than being stored as None.
        if backend_service is not None:
            pulumi.set(__self__, "backend_service", backend_service)
        if header_action is not None:
            pulumi.set(__self__, "header_action", header_action)
        if weight is not None:
            pulumi.set(__self__, "weight", weight)
    @property
    @pulumi.getter(name="backendService")
    def backend_service(self) -> Optional[str]:
        """
        The full or partial URL to the BackendService resource being mirrored to.
        """
        return pulumi.get(self, "backend_service")
    @property
    @pulumi.getter(name="headerAction")
    def header_action(self) -> Optional['outputs.URLMapDefaultRouteActionWeightedBackendServiceHeaderAction']:
        """
        Specifies changes to request and response headers that need to take effect for
        the selected backendService.
        headerAction specified here take effect before headerAction in the enclosing
        HttpRouteRule, PathMatcher and UrlMap.
        Structure is documented below.
        """
        return pulumi.get(self, "header_action")
    @property
    @pulumi.getter
    def weight(self) -> Optional[float]:
        """
        Specifies the fraction of traffic sent to backendService, computed as
        weight / (sum of all weightedBackendService weights in routeAction) .
        The selection of a backend service is determined only for new traffic. Once a user's request
        has been directed to a backendService, subsequent requests will be sent to the same backendService
        as determined by the BackendService's session affinity policy.
        The value must be between 0 and 1000
        """
        return pulumi.get(self, "weight")
    def _translate_property(self, prop):
        # Map camelCase wire names to snake_case attribute names.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class URLMapDefaultRouteActionWeightedBackendServiceHeaderAction(dict):
    """Request/response header modifications applied for one weighted backend
    service of a URL map's defaultRouteAction: headers to add and header
    names to remove, on both the request and the response path.
    """
    def __init__(__self__, *,
                 request_headers_to_adds: Optional[List['outputs.URLMapDefaultRouteActionWeightedBackendServiceHeaderActionRequestHeadersToAdd']] = None,
                 request_headers_to_removes: Optional[List[str]] = None,
                 response_headers_to_adds: Optional[List['outputs.URLMapDefaultRouteActionWeightedBackendServiceHeaderActionResponseHeadersToAdd']] = None,
                 response_headers_to_removes: Optional[List[str]] = None):
        """
        :param List['URLMapDefaultRouteActionWeightedBackendServiceHeaderActionRequestHeadersToAddArgs'] request_headers_to_adds: Headers to add to a matching request prior to forwarding the request to the backendService.
               Structure is documented below.
        :param List[str] request_headers_to_removes: A list of header names for headers that need to be removed from the request prior to
               forwarding the request to the backendService.
        :param List['URLMapDefaultRouteActionWeightedBackendServiceHeaderActionResponseHeadersToAddArgs'] response_headers_to_adds: Headers to add to the response prior to sending the response back to the client.
               Structure is documented below.
        :param List[str] response_headers_to_removes: A list of header names for headers that need to be removed from the response prior to sending the
               response back to the client.
        """
        # Only record fields that were explicitly supplied so unset optional
        # values stay absent rather than being stored as None.
        if request_headers_to_adds is not None:
            pulumi.set(__self__, "request_headers_to_adds", request_headers_to_adds)
        if request_headers_to_removes is not None:
            pulumi.set(__self__, "request_headers_to_removes", request_headers_to_removes)
        if response_headers_to_adds is not None:
            pulumi.set(__self__, "response_headers_to_adds", response_headers_to_adds)
        if response_headers_to_removes is not None:
            pulumi.set(__self__, "response_headers_to_removes", response_headers_to_removes)
    @property
    @pulumi.getter(name="requestHeadersToAdds")
    def request_headers_to_adds(self) -> Optional[List['outputs.URLMapDefaultRouteActionWeightedBackendServiceHeaderActionRequestHeadersToAdd']]:
        """
        Headers to add to a matching request prior to forwarding the request to the backendService.
        Structure is documented below.
        """
        return pulumi.get(self, "request_headers_to_adds")
    @property
    @pulumi.getter(name="requestHeadersToRemoves")
    def request_headers_to_removes(self) -> Optional[List[str]]:
        """
        A list of header names for headers that need to be removed from the request prior to
        forwarding the request to the backendService.
        """
        return pulumi.get(self, "request_headers_to_removes")
    @property
    @pulumi.getter(name="responseHeadersToAdds")
    def response_headers_to_adds(self) -> Optional[List['outputs.URLMapDefaultRouteActionWeightedBackendServiceHeaderActionResponseHeadersToAdd']]:
        """
        Headers to add to the response prior to sending the response back to the client.
        Structure is documented below.
        """
        return pulumi.get(self, "response_headers_to_adds")
    @property
    @pulumi.getter(name="responseHeadersToRemoves")
    def response_headers_to_removes(self) -> Optional[List[str]]:
        """
        A list of header names for headers that need to be removed from the response prior to sending the
        response back to the client.
        """
        return pulumi.get(self, "response_headers_to_removes")
    def _translate_property(self, prop):
        # Map camelCase wire names to snake_case attribute names.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class URLMapDefaultRouteActionWeightedBackendServiceHeaderActionRequestHeadersToAdd(dict):
    """A single request header (name/value) to add, plus a flag controlling
    whether it replaces or appends to any existing values for that header.
    """
    def __init__(__self__, *,
                 header_name: Optional[str] = None,
                 header_value: Optional[str] = None,
                 replace: Optional[bool] = None):
        """
        :param str header_name: The name of the header to add.
        :param str header_value: The value of the header to add.
        :param bool replace: If false, headerValue is appended to any values that already exist for the header.
               If true, headerValue is set for the header, discarding any values that were set for that header.
        """
        # Only record fields that were explicitly supplied so unset optional
        # values stay absent rather than being stored as None.
        if header_name is not None:
            pulumi.set(__self__, "header_name", header_name)
        if header_value is not None:
            pulumi.set(__self__, "header_value", header_value)
        if replace is not None:
            pulumi.set(__self__, "replace", replace)
    @property
    @pulumi.getter(name="headerName")
    def header_name(self) -> Optional[str]:
        """
        The name of the header to add.
        """
        return pulumi.get(self, "header_name")
    @property
    @pulumi.getter(name="headerValue")
    def header_value(self) -> Optional[str]:
        """
        The value of the header to add.
        """
        return pulumi.get(self, "header_value")
    @property
    @pulumi.getter
    def replace(self) -> Optional[bool]:
        """
        If false, headerValue is appended to any values that already exist for the header.
        If true, headerValue is set for the header, discarding any values that were set for that header.
        """
        return pulumi.get(self, "replace")
    def _translate_property(self, prop):
        # Map camelCase wire names to snake_case attribute names.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class URLMapDefaultRouteActionWeightedBackendServiceHeaderActionResponseHeadersToAdd(dict):
    """A single response header (name/value) to add, plus a flag controlling
    whether it replaces or appends to any existing values for that header.
    """
    def __init__(__self__, *,
                 header_name: Optional[str] = None,
                 header_value: Optional[str] = None,
                 replace: Optional[bool] = None):
        """
        :param str header_name: The name of the header to add.
        :param str header_value: The value of the header to add.
        :param bool replace: If false, headerValue is appended to any values that already exist for the header.
               If true, headerValue is set for the header, discarding any values that were set for that header.
        """
        # Only record fields that were explicitly supplied so unset optional
        # values stay absent rather than being stored as None.
        if header_name is not None:
            pulumi.set(__self__, "header_name", header_name)
        if header_value is not None:
            pulumi.set(__self__, "header_value", header_value)
        if replace is not None:
            pulumi.set(__self__, "replace", replace)
    @property
    @pulumi.getter(name="headerName")
    def header_name(self) -> Optional[str]:
        """
        The name of the header to add.
        """
        return pulumi.get(self, "header_name")
    @property
    @pulumi.getter(name="headerValue")
    def header_value(self) -> Optional[str]:
        """
        The value of the header to add.
        """
        return pulumi.get(self, "header_value")
    @property
    @pulumi.getter
    def replace(self) -> Optional[bool]:
        """
        If false, headerValue is appended to any values that already exist for the header.
        If true, headerValue is set for the header, discarding any values that were set for that header.
        """
        return pulumi.get(self, "replace")
    def _translate_property(self, prop):
        # Map camelCase wire names to snake_case attribute names.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class URLMapDefaultUrlRedirect(dict):
    """The redirect configuration used when no host rule of the URL map
    matches: target host/path/prefix, HTTPS upgrade, query stripping, and the
    HTTP redirect status code. Only ``strip_query`` is required.
    """
    def __init__(__self__, *,
                 strip_query: bool,
                 host_redirect: Optional[str] = None,
                 https_redirect: Optional[bool] = None,
                 path_redirect: Optional[str] = None,
                 prefix_redirect: Optional[str] = None,
                 redirect_response_code: Optional[str] = None):
        """
        :param bool strip_query: If set to true, any accompanying query portion of the original URL is removed prior
               to redirecting the request. If set to false, the query portion of the original URL is
               retained. The default is set to false.
               This field is required to ensure an empty block is not set. The normal default value is false.
        :param str host_redirect: The host that will be used in the redirect response instead of the one that was
               supplied in the request. The value must be between 1 and 255 characters.
        :param bool https_redirect: If set to true, the URL scheme in the redirected request is set to https. If set to
               false, the URL scheme of the redirected request will remain the same as that of the
               request. This must only be set for UrlMaps used in TargetHttpProxys. Setting this
               true for TargetHttpsProxy is not permitted. The default is set to false.
        :param str path_redirect: The path that will be used in the redirect response instead of the one that was
               supplied in the request. pathRedirect cannot be supplied together with
               prefixRedirect. Supply one alone or neither. If neither is supplied, the path of the
               original request will be used for the redirect. The value must be between 1 and 1024
               characters.
        :param str prefix_redirect: The prefix that replaces the prefixMatch specified in the HttpRouteRuleMatch,
               retaining the remaining portion of the URL before redirecting the request.
               prefixRedirect cannot be supplied together with pathRedirect. Supply one alone or
               neither. If neither is supplied, the path of the original request will be used for
               the redirect. The value must be between 1 and 1024 characters.
        :param str redirect_response_code: The HTTP Status code to use for this RedirectAction. Supported values are:
               * MOVED_PERMANENTLY_DEFAULT, which is the default value and corresponds to 301.
               * FOUND, which corresponds to 302.
               * SEE_OTHER which corresponds to 303.
               * TEMPORARY_REDIRECT, which corresponds to 307. In this case, the request method
               will be retained.
               * PERMANENT_REDIRECT, which corresponds to 308. In this case,
               the request method will be retained.
        """
        # strip_query is required; all other fields are recorded only when
        # explicitly supplied so unset optional values stay absent.
        pulumi.set(__self__, "strip_query", strip_query)
        if host_redirect is not None:
            pulumi.set(__self__, "host_redirect", host_redirect)
        if https_redirect is not None:
            pulumi.set(__self__, "https_redirect", https_redirect)
        if path_redirect is not None:
            pulumi.set(__self__, "path_redirect", path_redirect)
        if prefix_redirect is not None:
            pulumi.set(__self__, "prefix_redirect", prefix_redirect)
        if redirect_response_code is not None:
            pulumi.set(__self__, "redirect_response_code", redirect_response_code)
    @property
    @pulumi.getter(name="stripQuery")
    def strip_query(self) -> bool:
        """
        If set to true, any accompanying query portion of the original URL is removed prior
        to redirecting the request. If set to false, the query portion of the original URL is
        retained. The default is set to false.
        This field is required to ensure an empty block is not set. The normal default value is false.
        """
        return pulumi.get(self, "strip_query")
    @property
    @pulumi.getter(name="hostRedirect")
    def host_redirect(self) -> Optional[str]:
        """
        The host that will be used in the redirect response instead of the one that was
        supplied in the request. The value must be between 1 and 255 characters.
        """
        return pulumi.get(self, "host_redirect")
    @property
    @pulumi.getter(name="httpsRedirect")
    def https_redirect(self) -> Optional[bool]:
        """
        If set to true, the URL scheme in the redirected request is set to https. If set to
        false, the URL scheme of the redirected request will remain the same as that of the
        request. This must only be set for UrlMaps used in TargetHttpProxys. Setting this
        true for TargetHttpsProxy is not permitted. The default is set to false.
        """
        return pulumi.get(self, "https_redirect")
    @property
    @pulumi.getter(name="pathRedirect")
    def path_redirect(self) -> Optional[str]:
        """
        The path that will be used in the redirect response instead of the one that was
        supplied in the request. pathRedirect cannot be supplied together with
        prefixRedirect. Supply one alone or neither. If neither is supplied, the path of the
        original request will be used for the redirect. The value must be between 1 and 1024
        characters.
        """
        return pulumi.get(self, "path_redirect")
    @property
    @pulumi.getter(name="prefixRedirect")
    def prefix_redirect(self) -> Optional[str]:
        """
        The prefix that replaces the prefixMatch specified in the HttpRouteRuleMatch,
        retaining the remaining portion of the URL before redirecting the request.
        prefixRedirect cannot be supplied together with pathRedirect. Supply one alone or
        neither. If neither is supplied, the path of the original request will be used for
        the redirect. The value must be between 1 and 1024 characters.
        """
        return pulumi.get(self, "prefix_redirect")
    @property
    @pulumi.getter(name="redirectResponseCode")
    def redirect_response_code(self) -> Optional[str]:
        """
        The HTTP Status code to use for this RedirectAction. Supported values are:
        * MOVED_PERMANENTLY_DEFAULT, which is the default value and corresponds to 301.
        * FOUND, which corresponds to 302.
        * SEE_OTHER which corresponds to 303.
        * TEMPORARY_REDIRECT, which corresponds to 307. In this case, the request method
        will be retained.
        * PERMANENT_REDIRECT, which corresponds to 308. In this case,
        the request method will be retained.
        """
        return pulumi.get(self, "redirect_response_code")
    def _translate_property(self, prop):
        # Map camelCase wire names to snake_case attribute names.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class URLMapHeaderAction(dict):
    """Top-level header action of a URL map: headers to add and header names
    to remove, on both the request and the response path.
    """
    def __init__(__self__, *,
                 request_headers_to_adds: Optional[List['outputs.URLMapHeaderActionRequestHeadersToAdd']] = None,
                 request_headers_to_removes: Optional[List[str]] = None,
                 response_headers_to_adds: Optional[List['outputs.URLMapHeaderActionResponseHeadersToAdd']] = None,
                 response_headers_to_removes: Optional[List[str]] = None):
        """
        :param List['URLMapHeaderActionRequestHeadersToAddArgs'] request_headers_to_adds: Headers to add to a matching request prior to forwarding the request to the backendService.
               Structure is documented below.
        :param List[str] request_headers_to_removes: A list of header names for headers that need to be removed from the request prior to
               forwarding the request to the backendService.
        :param List['URLMapHeaderActionResponseHeadersToAddArgs'] response_headers_to_adds: Headers to add to the response prior to sending the response back to the client.
               Structure is documented below.
        :param List[str] response_headers_to_removes: A list of header names for headers that need to be removed from the response prior to sending the
               response back to the client.
        """
        # Only record fields that were explicitly supplied so unset optional
        # values stay absent rather than being stored as None.
        if request_headers_to_adds is not None:
            pulumi.set(__self__, "request_headers_to_adds", request_headers_to_adds)
        if request_headers_to_removes is not None:
            pulumi.set(__self__, "request_headers_to_removes", request_headers_to_removes)
        if response_headers_to_adds is not None:
            pulumi.set(__self__, "response_headers_to_adds", response_headers_to_adds)
        if response_headers_to_removes is not None:
            pulumi.set(__self__, "response_headers_to_removes", response_headers_to_removes)
    @property
    @pulumi.getter(name="requestHeadersToAdds")
    def request_headers_to_adds(self) -> Optional[List['outputs.URLMapHeaderActionRequestHeadersToAdd']]:
        """
        Headers to add to a matching request prior to forwarding the request to the backendService.
        Structure is documented below.
        """
        return pulumi.get(self, "request_headers_to_adds")
    @property
    @pulumi.getter(name="requestHeadersToRemoves")
    def request_headers_to_removes(self) -> Optional[List[str]]:
        """
        A list of header names for headers that need to be removed from the request prior to
        forwarding the request to the backendService.
        """
        return pulumi.get(self, "request_headers_to_removes")
    @property
    @pulumi.getter(name="responseHeadersToAdds")
    def response_headers_to_adds(self) -> Optional[List['outputs.URLMapHeaderActionResponseHeadersToAdd']]:
        """
        Headers to add to the response prior to sending the response back to the client.
        Structure is documented below.
        """
        return pulumi.get(self, "response_headers_to_adds")
    @property
    @pulumi.getter(name="responseHeadersToRemoves")
    def response_headers_to_removes(self) -> Optional[List[str]]:
        """
        A list of header names for headers that need to be removed from the response prior to sending the
        response back to the client.
        """
        return pulumi.get(self, "response_headers_to_removes")
    def _translate_property(self, prop):
        # Map camelCase wire names to snake_case attribute names.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class URLMapHeaderActionRequestHeadersToAdd(dict):
    """A single request header (name/value) to add, plus a flag controlling
    whether it replaces or appends to any existing values for that header.
    All three fields are required here, unlike the weighted-backend-service
    variant where they are optional.
    """
    def __init__(__self__, *,
                 header_name: str,
                 header_value: str,
                 replace: bool):
        """
        :param str header_name: The name of the header to add.
        :param str header_value: The value of the header to add.
        :param bool replace: If false, headerValue is appended to any values that already exist for the header.
               If true, headerValue is set for the header, discarding any values that were set for that header.
        """
        pulumi.set(__self__, "header_name", header_name)
        pulumi.set(__self__, "header_value", header_value)
        pulumi.set(__self__, "replace", replace)
    @property
    @pulumi.getter(name="headerName")
    def header_name(self) -> str:
        """
        The name of the header to add.
        """
        return pulumi.get(self, "header_name")
    @property
    @pulumi.getter(name="headerValue")
    def header_value(self) -> str:
        """
        The value of the header to add.
        """
        return pulumi.get(self, "header_value")
    @property
    @pulumi.getter
    def replace(self) -> bool:
        """
        If false, headerValue is appended to any values that already exist for the header.
        If true, headerValue is set for the header, discarding any values that were set for that header.
        """
        return pulumi.get(self, "replace")
    def _translate_property(self, prop):
        # Map camelCase wire names to snake_case attribute names.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class URLMapHeaderActionResponseHeadersToAdd(dict):
    """A single response header (name/value) to add, plus a flag controlling
    whether it replaces or appends to any existing values for that header.
    All three fields are required here, unlike the weighted-backend-service
    variant where they are optional.
    """
    def __init__(__self__, *,
                 header_name: str,
                 header_value: str,
                 replace: bool):
        """
        :param str header_name: The name of the header to add.
        :param str header_value: The value of the header to add.
        :param bool replace: If false, headerValue is appended to any values that already exist for the header.
               If true, headerValue is set for the header, discarding any values that were set for that header.
        """
        pulumi.set(__self__, "header_name", header_name)
        pulumi.set(__self__, "header_value", header_value)
        pulumi.set(__self__, "replace", replace)
    @property
    @pulumi.getter(name="headerName")
    def header_name(self) -> str:
        """
        The name of the header to add.
        """
        return pulumi.get(self, "header_name")
    @property
    @pulumi.getter(name="headerValue")
    def header_value(self) -> str:
        """
        The value of the header to add.
        """
        return pulumi.get(self, "header_value")
    @property
    @pulumi.getter
    def replace(self) -> bool:
        """
        If false, headerValue is appended to any values that already exist for the header.
        If true, headerValue is set for the header, discarding any values that were set for that header.
        """
        return pulumi.get(self, "replace")
    def _translate_property(self, prop):
        # Map camelCase wire names to snake_case attribute names.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class URLMapHostRule(dict):
    """A host rule of a URL map: the host patterns to match and the name of
    the PathMatcher used when a pattern matches.
    """
    def __init__(__self__, *,
                 hosts: List[str],
                 path_matcher: str,
                 description: Optional[str] = None):
        """
        :param List[str] hosts: The list of host patterns to match. They must be valid hostnames, except * will
               match any string of ([a-z0-9-.]*). In that case, * must be the first character
               and must be followed in the pattern by either - or ..
        :param str path_matcher: The name of the PathMatcher to use to match the path portion of the URL if the
               hostRule matches the URL's host portion.
        :param str description: Description of this test case.
               NOTE(review): "test case" looks like a code-generator copy error — presumably
               this is the description of this host rule; verify against the upstream API docs.
        """
        pulumi.set(__self__, "hosts", hosts)
        pulumi.set(__self__, "path_matcher", path_matcher)
        # Optional description is only stored when explicitly supplied.
        if description is not None:
            pulumi.set(__self__, "description", description)
    @property
    @pulumi.getter
    def hosts(self) -> List[str]:
        """
        The list of host patterns to match. They must be valid hostnames, except * will
        match any string of ([a-z0-9-.]*). In that case, * must be the first character
        and must be followed in the pattern by either - or ..
        """
        return pulumi.get(self, "hosts")
    @property
    @pulumi.getter(name="pathMatcher")
    def path_matcher(self) -> str:
        """
        The name of the PathMatcher to use to match the path portion of the URL if the
        hostRule matches the URL's host portion.
        """
        return pulumi.get(self, "path_matcher")
    @property
    @pulumi.getter
    def description(self) -> Optional[str]:
        """
        Description of this test case.
        """
        return pulumi.get(self, "description")
    def _translate_property(self, prop):
        # Map camelCase wire names to snake_case attribute names.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class URLMapPathMatcher(dict):
def __init__(__self__, *,
name: str,
default_route_action: Optional['outputs.URLMapPathMatcherDefaultRouteAction'] = None,
default_service: Optional[str] = None,
default_url_redirect: Optional['outputs.URLMapPathMatcherDefaultUrlRedirect'] = None,
description: Optional[str] = None,
header_action: Optional['outputs.URLMapPathMatcherHeaderAction'] = None,
path_rules: Optional[List['outputs.URLMapPathMatcherPathRule']] = None,
route_rules: Optional[List['outputs.URLMapPathMatcherRouteRule']] = None):
"""
:param str name: The name of the query parameter to match. The query parameter must exist in the
request, in the absence of which the request match fails.
:param 'URLMapPathMatcherDefaultRouteActionArgs' default_route_action: defaultRouteAction takes effect when none of the pathRules or routeRules match. The load balancer performs
advanced routing actions like URL rewrites, header transformations, etc. prior to forwarding the request
to the selected backend. If defaultRouteAction specifies any weightedBackendServices, defaultService must not be set.
Conversely if defaultService is set, defaultRouteAction cannot contain any weightedBackendServices.
Only one of defaultRouteAction or defaultUrlRedirect must be set.
Structure is documented below.
:param str default_service: The backend service or backend bucket to use when none of the given paths match.
:param 'URLMapPathMatcherDefaultUrlRedirectArgs' default_url_redirect: When none of the specified hostRules match, the request is redirected to a URL specified
by defaultUrlRedirect. If defaultUrlRedirect is specified, defaultService or
defaultRouteAction must not be set.
Structure is documented below.
:param str description: Description of this test case.
:param 'URLMapPathMatcherHeaderActionArgs' header_action: Specifies changes to request and response headers that need to take effect for
the selected backendService.
headerAction specified here take effect before headerAction in the enclosing
HttpRouteRule, PathMatcher and UrlMap.
Structure is documented below.
:param List['URLMapPathMatcherPathRuleArgs'] path_rules: The list of path rules. Use this list instead of routeRules when routing based
on simple path matching is all that's required. The order by which path rules
are specified does not matter. Matches are always done on the longest-path-first
basis. For example: a pathRule with a path /a/b/c/* will match before /a/b/*
irrespective of the order in which those paths appear in this list. Within a
given pathMatcher, only one of pathRules or routeRules must be set.
Structure is documented below.
:param List['URLMapPathMatcherRouteRuleArgs'] route_rules: The list of ordered HTTP route rules. Use this list instead of pathRules when
advanced route matching and routing actions are desired. The order of specifying
routeRules matters: the first rule that matches will cause its specified routing
action to take effect. Within a given pathMatcher, only one of pathRules or
routeRules must be set. routeRules are not supported in UrlMaps intended for
External load balancers.
Structure is documented below.
"""
pulumi.set(__self__, "name", name)
if default_route_action is not None:
pulumi.set(__self__, "default_route_action", default_route_action)
if default_service is not None:
pulumi.set(__self__, "default_service", default_service)
if default_url_redirect is not None:
pulumi.set(__self__, "default_url_redirect", default_url_redirect)
if description is not None:
pulumi.set(__self__, "description", description)
if header_action is not None:
pulumi.set(__self__, "header_action", header_action)
if path_rules is not None:
pulumi.set(__self__, "path_rules", path_rules)
if route_rules is not None:
pulumi.set(__self__, "route_rules", route_rules)
@property
@pulumi.getter
def name(self) -> str:
"""
The name of the query parameter to match. The query parameter must exist in the
request, in the absence of which the request match fails.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="defaultRouteAction")
def default_route_action(self) -> Optional['outputs.URLMapPathMatcherDefaultRouteAction']:
"""
defaultRouteAction takes effect when none of the pathRules or routeRules match. The load balancer performs
advanced routing actions like URL rewrites, header transformations, etc. prior to forwarding the request
to the selected backend. If defaultRouteAction specifies any weightedBackendServices, defaultService must not be set.
Conversely if defaultService is set, defaultRouteAction cannot contain any weightedBackendServices.
Only one of defaultRouteAction or defaultUrlRedirect must be set.
Structure is documented below.
"""
return pulumi.get(self, "default_route_action")
@property
@pulumi.getter(name="defaultService")
def default_service(self) -> Optional[str]:
"""
The backend service or backend bucket to use when none of the given paths match.
"""
return pulumi.get(self, "default_service")
@property
@pulumi.getter(name="defaultUrlRedirect")
def default_url_redirect(self) -> Optional['outputs.URLMapPathMatcherDefaultUrlRedirect']:
"""
When none of the specified hostRules match, the request is redirected to a URL specified
by defaultUrlRedirect. If defaultUrlRedirect is specified, defaultService or
defaultRouteAction must not be set.
Structure is documented below.
"""
return pulumi.get(self, "default_url_redirect")
@property
@pulumi.getter
def description(self) -> Optional[str]:
"""
Description of this test case.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter(name="headerAction")
def header_action(self) -> Optional['outputs.URLMapPathMatcherHeaderAction']:
"""
Specifies changes to request and response headers that need to take effect for
the selected backendService.
headerAction specified here take effect before headerAction in the enclosing
HttpRouteRule, PathMatcher and UrlMap.
Structure is documented below.
"""
return pulumi.get(self, "header_action")
@property
@pulumi.getter(name="pathRules")
def path_rules(self) -> Optional[List['outputs.URLMapPathMatcherPathRule']]:
"""
The list of path rules. Use this list instead of routeRules when routing based
on simple path matching is all that's required. The order by which path rules
are specified does not matter. Matches are always done on the longest-path-first
basis. For example: a pathRule with a path /a/b/c/* will match before /a/b/*
irrespective of the order in which those paths appear in this list. Within a
given pathMatcher, only one of pathRules or routeRules must be set.
Structure is documented below.
"""
return pulumi.get(self, "path_rules")
@property
@pulumi.getter(name="routeRules")
def route_rules(self) -> Optional[List['outputs.URLMapPathMatcherRouteRule']]:
"""
The list of ordered HTTP route rules. Use this list instead of pathRules when
advanced route matching and routing actions are desired. The order of specifying
routeRules matters: the first rule that matches will cause its specified routing
action to take effect. Within a given pathMatcher, only one of pathRules or
routeRules must be set. routeRules are not supported in UrlMaps intended for
External load balancers.
Structure is documented below.
"""
return pulumi.get(self, "route_rules")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class URLMapPathMatcherDefaultRouteAction(dict):
    """Advanced routing behavior applied when none of the pathRules or routeRules
    in the enclosing path matcher match the request."""

    def __init__(__self__, *,
                 cors_policy: Optional['outputs.URLMapPathMatcherDefaultRouteActionCorsPolicy'] = None,
                 fault_injection_policy: Optional['outputs.URLMapPathMatcherDefaultRouteActionFaultInjectionPolicy'] = None,
                 request_mirror_policy: Optional['outputs.URLMapPathMatcherDefaultRouteActionRequestMirrorPolicy'] = None,
                 retry_policy: Optional['outputs.URLMapPathMatcherDefaultRouteActionRetryPolicy'] = None,
                 timeout: Optional['outputs.URLMapPathMatcherDefaultRouteActionTimeout'] = None,
                 url_rewrite: Optional['outputs.URLMapPathMatcherDefaultRouteActionUrlRewrite'] = None,
                 weighted_backend_services: Optional[List['outputs.URLMapPathMatcherDefaultRouteActionWeightedBackendService']] = None):
        """
        :param cors_policy: Specification for allowing client-side cross-origin
               requests (see the W3C CORS recommendation). Structure is documented below.
        :param fault_injection_policy: Fault injection configuration (aborts/delays
               introduced by the load balancer) used to test client resiliency to
               backend failure. timeout and retryPolicy are ignored by clients
               configured with a faultInjectionPolicy. Structure is documented below.
        :param request_mirror_policy: Policy for shadowing requests to a separate
               mirrored backend service; the load balancer does not wait for the
               shadow responses, and the host/authority header is suffixed with
               -shadow. Structure is documented below.
        :param retry_policy: Retry policy associated with this route. Structure is
               documented below.
        :param timeout: Timeout for the selected route, measured from full request
               processing (end-of-stream) until the response is completely
               processed, retries included. Defaults to the largest timeout among
               associated backend services. Structure is documented below.
        :param url_rewrite: Spec to modify the request URL before forwarding to the
               matched service. Structure is documented below.
        :param weighted_backend_services: Weighted backend services receiving
               traffic on a route match; weights determine the traffic fraction per
               backend service. Structure is documented below.
        """
        # Record only the fields the caller supplied; unset fields are omitted
        # from the underlying dict representation entirely.
        for key, value in (
            ("cors_policy", cors_policy),
            ("fault_injection_policy", fault_injection_policy),
            ("request_mirror_policy", request_mirror_policy),
            ("retry_policy", retry_policy),
            ("timeout", timeout),
            ("url_rewrite", url_rewrite),
            ("weighted_backend_services", weighted_backend_services),
        ):
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="corsPolicy")
    def cors_policy(self) -> Optional['outputs.URLMapPathMatcherDefaultRouteActionCorsPolicy']:
        """
        Specification for allowing client-side cross-origin requests (see the
        W3C CORS recommendation). Structure is documented below.
        """
        return pulumi.get(self, "cors_policy")

    @property
    @pulumi.getter(name="faultInjectionPolicy")
    def fault_injection_policy(self) -> Optional['outputs.URLMapPathMatcherDefaultRouteActionFaultInjectionPolicy']:
        """
        Fault injection configuration (aborts/delays introduced by the load
        balancer) used to test client resiliency to backend failure. timeout and
        retryPolicy are ignored by clients configured with a faultInjectionPolicy.
        Structure is documented below.
        """
        return pulumi.get(self, "fault_injection_policy")

    @property
    @pulumi.getter(name="requestMirrorPolicy")
    def request_mirror_policy(self) -> Optional['outputs.URLMapPathMatcherDefaultRouteActionRequestMirrorPolicy']:
        """
        Policy for shadowing requests to a separate mirrored backend service;
        responses from the shadow service are not awaited, and the host/authority
        header is suffixed with -shadow. Structure is documented below.
        """
        return pulumi.get(self, "request_mirror_policy")

    @property
    @pulumi.getter(name="retryPolicy")
    def retry_policy(self) -> Optional['outputs.URLMapPathMatcherDefaultRouteActionRetryPolicy']:
        """
        Retry policy associated with this route. Structure is documented below.
        """
        return pulumi.get(self, "retry_policy")

    @property
    @pulumi.getter
    def timeout(self) -> Optional['outputs.URLMapPathMatcherDefaultRouteActionTimeout']:
        """
        Timeout for the selected route, measured from full request processing
        (end-of-stream) until the response is completely processed, retries
        included. Defaults to the largest timeout among associated backend
        services. Structure is documented below.
        """
        return pulumi.get(self, "timeout")

    @property
    @pulumi.getter(name="urlRewrite")
    def url_rewrite(self) -> Optional['outputs.URLMapPathMatcherDefaultRouteActionUrlRewrite']:
        """
        Spec to modify the request URL before forwarding to the matched service.
        Structure is documented below.
        """
        return pulumi.get(self, "url_rewrite")

    @property
    @pulumi.getter(name="weightedBackendServices")
    def weighted_backend_services(self) -> Optional[List['outputs.URLMapPathMatcherDefaultRouteActionWeightedBackendService']]:
        """
        Weighted backend services receiving traffic on a route match; weights
        determine the traffic fraction per backend service, and a single backend
        service still requires one entry with a non-zero weight. Structure is
        documented below.
        """
        return pulumi.get(self, "weighted_backend_services")

    def _translate_property(self, prop):
        # Map camelCase wire names to the snake_case attribute names used here.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class URLMapPathMatcherDefaultRouteActionCorsPolicy(dict):
    """CORS policy governing client-side cross-origin requests for the default
    route action."""

    def __init__(__self__, *,
                 allow_credentials: Optional[bool] = None,
                 allow_headers: Optional[List[str]] = None,
                 allow_methods: Optional[List[str]] = None,
                 allow_origin_regexes: Optional[List[str]] = None,
                 allow_origins: Optional[List[str]] = None,
                 disabled: Optional[bool] = None,
                 expose_headers: Optional[List[str]] = None,
                 max_age: Optional[float] = None):
        """
        :param allow_credentials: If true, preflight responses indicate the actual
               request may include user credentials
               (Access-Control-Allow-Credentials header).
        :param allow_headers: Content for the Access-Control-Allow-Headers header.
        :param allow_methods: Content for the Access-Control-Allow-Methods header.
        :param allow_origin_regexes: Regular expression patterns (ECMAScript
               grammar, see en.cppreference.com/w/cpp/regex/ecmascript) matching
               allowed origins. An origin is allowed if it matches an item in
               either allowOrigins or allowOriginRegexes.
        :param allow_origins: Origins allowed to make CORS requests. An origin is
               allowed if it matches an item in either allowOrigins or
               allowOriginRegexes.
        :param disabled: If true the CORS policy is disabled; defaults to false
               (policy in effect).
        :param expose_headers: Content for the Access-Control-Expose-Headers header.
        :param max_age: How long, in seconds, preflight results may be cached
               (Access-Control-Max-Age header).
        """
        # Only populate the keys the caller provided.
        for key, value in (
            ("allow_credentials", allow_credentials),
            ("allow_headers", allow_headers),
            ("allow_methods", allow_methods),
            ("allow_origin_regexes", allow_origin_regexes),
            ("allow_origins", allow_origins),
            ("disabled", disabled),
            ("expose_headers", expose_headers),
            ("max_age", max_age),
        ):
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="allowCredentials")
    def allow_credentials(self) -> Optional[bool]:
        """
        If true, preflight responses indicate the actual request may include user
        credentials (Access-Control-Allow-Credentials header).
        """
        return pulumi.get(self, "allow_credentials")

    @property
    @pulumi.getter(name="allowHeaders")
    def allow_headers(self) -> Optional[List[str]]:
        """
        Content for the Access-Control-Allow-Headers header.
        """
        return pulumi.get(self, "allow_headers")

    @property
    @pulumi.getter(name="allowMethods")
    def allow_methods(self) -> Optional[List[str]]:
        """
        Content for the Access-Control-Allow-Methods header.
        """
        return pulumi.get(self, "allow_methods")

    @property
    @pulumi.getter(name="allowOriginRegexes")
    def allow_origin_regexes(self) -> Optional[List[str]]:
        """
        Regular expression patterns (ECMAScript grammar, see
        en.cppreference.com/w/cpp/regex/ecmascript) matching allowed origins. An
        origin is allowed if it matches an item in either allowOrigins or
        allowOriginRegexes.
        """
        return pulumi.get(self, "allow_origin_regexes")

    @property
    @pulumi.getter(name="allowOrigins")
    def allow_origins(self) -> Optional[List[str]]:
        """
        Origins allowed to make CORS requests. An origin is allowed if it matches
        an item in either allowOrigins or allowOriginRegexes.
        """
        return pulumi.get(self, "allow_origins")

    @property
    @pulumi.getter
    def disabled(self) -> Optional[bool]:
        """
        If true the CORS policy is disabled; defaults to false (policy in effect).
        """
        return pulumi.get(self, "disabled")

    @property
    @pulumi.getter(name="exposeHeaders")
    def expose_headers(self) -> Optional[List[str]]:
        """
        Content for the Access-Control-Expose-Headers header.
        """
        return pulumi.get(self, "expose_headers")

    @property
    @pulumi.getter(name="maxAge")
    def max_age(self) -> Optional[float]:
        """
        How long, in seconds, preflight results may be cached
        (Access-Control-Max-Age header).
        """
        return pulumi.get(self, "max_age")

    def _translate_property(self, prop):
        # Map camelCase wire names to the snake_case attribute names used here.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class URLMapPathMatcherDefaultRouteActionFaultInjectionPolicy(dict):
    """Fault injection settings (request aborts and delays) for the default
    route action."""

    def __init__(__self__, *,
                 abort: Optional['outputs.URLMapPathMatcherDefaultRouteActionFaultInjectionPolicyAbort'] = None,
                 delay: Optional['outputs.URLMapPathMatcherDefaultRouteActionFaultInjectionPolicyDelay'] = None):
        """
        :param abort: How client requests are aborted as part of fault injection.
               Structure is documented below.
        :param delay: How client requests are delayed as part of fault injection
               before being sent to a backend service. Structure is documented
               below.
        """
        # Only populate the keys the caller provided.
        for key, value in (("abort", abort), ("delay", delay)):
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter
    def abort(self) -> Optional['outputs.URLMapPathMatcherDefaultRouteActionFaultInjectionPolicyAbort']:
        """
        How client requests are aborted as part of fault injection. Structure is
        documented below.
        """
        return pulumi.get(self, "abort")

    @property
    @pulumi.getter
    def delay(self) -> Optional['outputs.URLMapPathMatcherDefaultRouteActionFaultInjectionPolicyDelay']:
        """
        How client requests are delayed as part of fault injection before being
        sent to a backend service. Structure is documented below.
        """
        return pulumi.get(self, "delay")

    def _translate_property(self, prop):
        # Map camelCase wire names to the snake_case attribute names used here.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class URLMapPathMatcherDefaultRouteActionFaultInjectionPolicyAbort(dict):
    """Abort specification for fault injection: status code and percentage of
    traffic to abort."""

    def __init__(__self__, *,
                 http_status: Optional[float] = None,
                 percentage: Optional[float] = None):
        """
        :param http_status: HTTP status code used to abort the request; must be
               between 200 and 599 inclusive.
        :param percentage: Percentage of traffic
               (connections/operations/requests) aborted as part of fault
               injection; must be between 0.0 and 100.0 inclusive.
        """
        # Only populate the keys the caller provided.
        for key, value in (("http_status", http_status), ("percentage", percentage)):
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="httpStatus")
    def http_status(self) -> Optional[float]:
        """
        HTTP status code used to abort the request; must be between 200 and 599
        inclusive.
        """
        return pulumi.get(self, "http_status")

    @property
    @pulumi.getter
    def percentage(self) -> Optional[float]:
        """
        Percentage of traffic (connections/operations/requests) aborted as part
        of fault injection; must be between 0.0 and 100.0 inclusive.
        """
        return pulumi.get(self, "percentage")

    def _translate_property(self, prop):
        # Map camelCase wire names to the snake_case attribute names used here.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class URLMapPathMatcherDefaultRouteActionFaultInjectionPolicyDelay(dict):
    """Delay specification for fault injection: a fixed delay interval and the
    percentage of traffic it applies to."""

    def __init__(__self__, *,
                 fixed_delay: Optional['outputs.URLMapPathMatcherDefaultRouteActionFaultInjectionPolicyDelayFixedDelay'] = None,
                 percentage: Optional[float] = None):
        """
        :param fixed_delay: Value of the fixed delay interval. Structure is
               documented below.
        :param percentage: Percentage of traffic
               (connections/operations/requests) affected as part of fault
               injection; must be between 0.0 and 100.0 inclusive.
        """
        # Only populate the keys the caller provided.
        for key, value in (("fixed_delay", fixed_delay), ("percentage", percentage)):
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="fixedDelay")
    def fixed_delay(self) -> Optional['outputs.URLMapPathMatcherDefaultRouteActionFaultInjectionPolicyDelayFixedDelay']:
        """
        Value of the fixed delay interval. Structure is documented below.
        """
        return pulumi.get(self, "fixed_delay")

    @property
    @pulumi.getter
    def percentage(self) -> Optional[float]:
        """
        Percentage of traffic (connections/operations/requests) affected as part
        of fault injection; must be between 0.0 and 100.0 inclusive.
        """
        return pulumi.get(self, "percentage")

    def _translate_property(self, prop):
        # Map camelCase wire names to the snake_case attribute names used here.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class URLMapPathMatcherDefaultRouteActionFaultInjectionPolicyDelayFixedDelay(dict):
    """A duration expressed as whole seconds plus a sub-second nanosecond
    component."""

    def __init__(__self__, *,
                 nanos: Optional[float] = None,
                 seconds: Optional[str] = None):
        """
        :param nanos: Sub-second span at nanosecond resolution; durations under
               one second use a 0 seconds field with a positive nanos field. Must
               be 0 to 999,999,999 inclusive.
        :param seconds: Span of whole seconds; must be 0 to 315,576,000,000
               inclusive (60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year *
               10000 years).
        """
        # Only populate the keys the caller provided.
        for key, value in (("nanos", nanos), ("seconds", seconds)):
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter
    def nanos(self) -> Optional[float]:
        """
        Sub-second span at nanosecond resolution; durations under one second use
        a 0 seconds field with a positive nanos field. Must be 0 to 999,999,999
        inclusive.
        """
        return pulumi.get(self, "nanos")

    @property
    @pulumi.getter
    def seconds(self) -> Optional[str]:
        """
        Span of whole seconds; must be 0 to 315,576,000,000 inclusive
        (60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years).
        """
        return pulumi.get(self, "seconds")

    def _translate_property(self, prop):
        # Map camelCase wire names to the snake_case attribute names used here.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class URLMapPathMatcherDefaultRouteActionRequestMirrorPolicy(dict):
    """Policy identifying the backend service that mirrored (shadow) traffic is
    sent to."""

    def __init__(__self__, *,
                 backend_service: str):
        """
        :param backend_service: Full or partial URL of the BackendService
               resource being mirrored to.
        """
        # backend_service is required, so it is always recorded.
        pulumi.set(__self__, "backend_service", backend_service)

    @property
    @pulumi.getter(name="backendService")
    def backend_service(self) -> str:
        """
        Full or partial URL of the BackendService resource being mirrored to.
        """
        return pulumi.get(self, "backend_service")

    def _translate_property(self, prop):
        # Map camelCase wire names to the snake_case attribute names used here.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class URLMapPathMatcherDefaultRouteActionRetryPolicy(dict):
    """Retry policy for the default route action: retry count, per-try timeout,
    and the conditions that trigger a retry."""

    def __init__(__self__, *,
                 num_retries: Optional[float] = None,
                 per_try_timeout: Optional['outputs.URLMapPathMatcherDefaultRouteActionRetryPolicyPerTryTimeout'] = None,
                 retry_conditions: Optional[List[str]] = None):
        """
        :param num_retries: Allowed number of retries; must be > 0. Defaults to 1
               when unspecified.
        :param per_try_timeout: Non-zero timeout per retry attempt. When
               unspecified, the HttpRouteAction timeout is used; when that is also
               unset, the largest timeout among the route's backend services
               applies. Structure is documented below.
        :param retry_conditions: One or more conditions under which this retry
               rule applies. Valid values:
               5xx: retry on any 5xx response code, or when the backend service
               does not respond at all (disconnects, reset, read timeout,
               connection failure, refused streams).
               gateway-error: like 5xx but limited to response codes 502, 503, 504.
               connect-failure: retry on failures connecting to backend services,
               e.g. connection timeouts.
               retriable-4xx: retry on retriable 4xx response codes (currently
               only 409).
               refused-stream: retry when the backend service resets the stream
               with a REFUSED_STREAM error code (safe to retry).
               cancelled: retry when the gRPC status code in the response header
               is cancelled.
               deadline-exceeded: retry when the gRPC status code in the response
               header is deadline-exceeded.
               resource-exhausted: retry when the gRPC status code in the response
               header is resource-exhausted.
               unavailable: retry when the gRPC status code in the response header
               is unavailable.
        """
        # Only populate the keys the caller provided.
        for key, value in (
            ("num_retries", num_retries),
            ("per_try_timeout", per_try_timeout),
            ("retry_conditions", retry_conditions),
        ):
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="numRetries")
    def num_retries(self) -> Optional[float]:
        """
        Allowed number of retries; must be > 0. Defaults to 1 when unspecified.
        """
        return pulumi.get(self, "num_retries")

    @property
    @pulumi.getter(name="perTryTimeout")
    def per_try_timeout(self) -> Optional['outputs.URLMapPathMatcherDefaultRouteActionRetryPolicyPerTryTimeout']:
        """
        Non-zero timeout per retry attempt. When unspecified, the HttpRouteAction
        timeout is used; when that is also unset, the largest timeout among the
        route's backend services applies. Structure is documented below.
        """
        return pulumi.get(self, "per_try_timeout")

    @property
    @pulumi.getter(name="retryConditions")
    def retry_conditions(self) -> Optional[List[str]]:
        """
        One or more conditions under which this retry rule applies. Valid values:
        5xx: retry on any 5xx response code, or when the backend service does not
        respond at all (disconnects, reset, read timeout, connection failure,
        refused streams).
        gateway-error: like 5xx but limited to response codes 502, 503, 504.
        connect-failure: retry on failures connecting to backend services, e.g.
        connection timeouts.
        retriable-4xx: retry on retriable 4xx response codes (currently only 409).
        refused-stream: retry when the backend service resets the stream with a
        REFUSED_STREAM error code (safe to retry).
        cancelled: retry when the gRPC status code in the response header is
        cancelled.
        deadline-exceeded: retry when the gRPC status code in the response header
        is deadline-exceeded.
        resource-exhausted: retry when the gRPC status code in the response header
        is resource-exhausted.
        unavailable: retry when the gRPC status code in the response header is
        unavailable.
        """
        return pulumi.get(self, "retry_conditions")

    def _translate_property(self, prop):
        # Map camelCase wire names to the snake_case attribute names used here.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class URLMapPathMatcherDefaultRouteActionRetryPolicyPerTryTimeout(dict):
    """Per-retry timeout expressed as whole seconds plus a sub-second nanosecond
    component."""

    def __init__(__self__, *,
                 nanos: Optional[float] = None,
                 seconds: Optional[str] = None):
        """
        :param nanos: Sub-second span at nanosecond resolution; durations under
               one second use a 0 seconds field with a positive nanos field. Must
               be 0 to 999,999,999 inclusive.
        :param seconds: Span of whole seconds; must be 0 to 315,576,000,000
               inclusive (60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year *
               10000 years).
        """
        # Only populate the keys the caller provided.
        for key, value in (("nanos", nanos), ("seconds", seconds)):
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter
    def nanos(self) -> Optional[float]:
        """
        Sub-second span at nanosecond resolution; durations under one second use
        a 0 seconds field with a positive nanos field. Must be 0 to 999,999,999
        inclusive.
        """
        return pulumi.get(self, "nanos")

    @property
    @pulumi.getter
    def seconds(self) -> Optional[str]:
        """
        Span of whole seconds; must be 0 to 315,576,000,000 inclusive
        (60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years).
        """
        return pulumi.get(self, "seconds")

    def _translate_property(self, prop):
        # Map camelCase wire names to the snake_case attribute names used here.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class URLMapPathMatcherDefaultRouteActionTimeout(dict):
    """Route timeout expressed as whole seconds plus a sub-second nanosecond
    component."""

    def __init__(__self__, *,
                 nanos: Optional[float] = None,
                 seconds: Optional[str] = None):
        """
        :param nanos: Sub-second span at nanosecond resolution; durations under
               one second use a 0 seconds field with a positive nanos field. Must
               be 0 to 999,999,999 inclusive.
        :param seconds: Span of whole seconds; must be 0 to 315,576,000,000
               inclusive (60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year *
               10000 years).
        """
        # Only populate the keys the caller provided.
        for key, value in (("nanos", nanos), ("seconds", seconds)):
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter
    def nanos(self) -> Optional[float]:
        """
        Sub-second span at nanosecond resolution; durations under one second use
        a 0 seconds field with a positive nanos field. Must be 0 to 999,999,999
        inclusive.
        """
        return pulumi.get(self, "nanos")

    @property
    @pulumi.getter
    def seconds(self) -> Optional[str]:
        """
        Span of whole seconds; must be 0 to 315,576,000,000 inclusive
        (60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years).
        """
        return pulumi.get(self, "seconds")

    def _translate_property(self, prop):
        # Map camelCase wire names to the snake_case attribute names used here.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class URLMapPathMatcherDefaultRouteActionUrlRewrite(dict):
    """URL rewrite applied before forwarding the request to the matched
    service."""

    def __init__(__self__, *,
                 host_rewrite: Optional[str] = None,
                 path_prefix_rewrite: Optional[str] = None):
        """
        :param host_rewrite: Replacement for the request's host header prior to
               forwarding to the selected service; must be 1 to 255 characters.
        :param path_prefix_rewrite: Replacement for the matching portion of the
               request's path prior to forwarding to the selected backend
               service; must be 1 to 1024 characters.
        """
        # Only populate the keys the caller provided.
        for key, value in (("host_rewrite", host_rewrite), ("path_prefix_rewrite", path_prefix_rewrite)):
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="hostRewrite")
    def host_rewrite(self) -> Optional[str]:
        """
        Replacement for the request's host header prior to forwarding to the
        selected service; must be 1 to 255 characters.
        """
        return pulumi.get(self, "host_rewrite")

    @property
    @pulumi.getter(name="pathPrefixRewrite")
    def path_prefix_rewrite(self) -> Optional[str]:
        """
        Replacement for the matching portion of the request's path prior to
        forwarding to the selected backend service; must be 1 to 1024 characters.
        """
        return pulumi.get(self, "path_prefix_rewrite")

    def _translate_property(self, prop):
        # Map camelCase wire names to the snake_case attribute names used here.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class URLMapPathMatcherDefaultRouteActionWeightedBackendService(dict):
    """One weighted backend service entry of a route action: the target backend
    service, an optional per-service header action, and its traffic weight."""

    def __init__(__self__, *,
                 backend_service: Optional[str] = None,
                 header_action: Optional['outputs.URLMapPathMatcherDefaultRouteActionWeightedBackendServiceHeaderAction'] = None,
                 weight: Optional[float] = None):
        """
        :param str backend_service: Full or partial URL of the BackendService
               resource being mirrored to.
        :param 'URLMapPathMatcherDefaultRouteActionWeightedBackendServiceHeaderActionArgs' header_action:
               Request/response header changes applied for the selected
               backendService. A headerAction given here takes effect before the
               headerAction of the enclosing HttpRouteRule, PathMatcher and
               UrlMap. Structure is documented below.
        :param float weight: Fraction of traffic routed to backendService,
               computed as weight / (sum of all weightedBackendService weights
               in routeAction). Applied only to new traffic; once a user's
               request reaches a backendService, later requests follow that
               BackendService's session affinity policy. Must be between 0 and
               1000.
        """
        # Record only the fields that were explicitly provided.
        provided = (("backend_service", backend_service),
                    ("header_action", header_action),
                    ("weight", weight))
        for attr, value in provided:
            if value is not None:
                pulumi.set(__self__, attr, value)

    @property
    @pulumi.getter(name="backendService")
    def backend_service(self) -> Optional[str]:
        """Full or partial URL of the BackendService resource being mirrored to."""
        return pulumi.get(self, "backend_service")

    @property
    @pulumi.getter(name="headerAction")
    def header_action(self) -> Optional['outputs.URLMapPathMatcherDefaultRouteActionWeightedBackendServiceHeaderAction']:
        """Header changes applied for the selected backendService; takes effect
        before the headerAction of the enclosing HttpRouteRule, PathMatcher and
        UrlMap. Structure is documented below."""
        return pulumi.get(self, "header_action")

    @property
    @pulumi.getter
    def weight(self) -> Optional[float]:
        """Traffic fraction for this backendService, weight / (sum of all
        weights in routeAction); determined only for new traffic, after which
        session affinity applies. Must be between 0 and 1000."""
        return pulumi.get(self, "weight")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python attribute name.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class URLMapPathMatcherDefaultRouteActionWeightedBackendServiceHeaderAction(dict):
    """Header manipulations (adds/removes, for both request and response)
    applied for a weighted backend service."""

    def __init__(__self__, *,
                 request_headers_to_adds: Optional[List['outputs.URLMapPathMatcherDefaultRouteActionWeightedBackendServiceHeaderActionRequestHeadersToAdd']] = None,
                 request_headers_to_removes: Optional[List[str]] = None,
                 response_headers_to_adds: Optional[List['outputs.URLMapPathMatcherDefaultRouteActionWeightedBackendServiceHeaderActionResponseHeadersToAdd']] = None,
                 response_headers_to_removes: Optional[List[str]] = None):
        """
        :param List['URLMapPathMatcherDefaultRouteActionWeightedBackendServiceHeaderActionRequestHeadersToAddArgs'] request_headers_to_adds:
               Headers added to a matching request before it is forwarded to
               the backendService. Structure is documented below.
        :param List[str] request_headers_to_removes: Header names removed from
               the request before it is forwarded to the backendService.
        :param List['URLMapPathMatcherDefaultRouteActionWeightedBackendServiceHeaderActionResponseHeadersToAddArgs'] response_headers_to_adds:
               Headers added to the response before it is sent back to the
               client. Structure is documented below.
        :param List[str] response_headers_to_removes: Header names removed from
               the response before it is sent back to the client.
        """
        # Store each field only when it was supplied by the caller.
        fields = (("request_headers_to_adds", request_headers_to_adds),
                  ("request_headers_to_removes", request_headers_to_removes),
                  ("response_headers_to_adds", response_headers_to_adds),
                  ("response_headers_to_removes", response_headers_to_removes))
        for attr, value in fields:
            if value is not None:
                pulumi.set(__self__, attr, value)

    @property
    @pulumi.getter(name="requestHeadersToAdds")
    def request_headers_to_adds(self) -> Optional[List['outputs.URLMapPathMatcherDefaultRouteActionWeightedBackendServiceHeaderActionRequestHeadersToAdd']]:
        """Headers added to a matching request before forwarding to the
        backendService. Structure is documented below."""
        return pulumi.get(self, "request_headers_to_adds")

    @property
    @pulumi.getter(name="requestHeadersToRemoves")
    def request_headers_to_removes(self) -> Optional[List[str]]:
        """Header names removed from the request before forwarding to the
        backendService."""
        return pulumi.get(self, "request_headers_to_removes")

    @property
    @pulumi.getter(name="responseHeadersToAdds")
    def response_headers_to_adds(self) -> Optional[List['outputs.URLMapPathMatcherDefaultRouteActionWeightedBackendServiceHeaderActionResponseHeadersToAdd']]:
        """Headers added to the response before it is returned to the client.
        Structure is documented below."""
        return pulumi.get(self, "response_headers_to_adds")

    @property
    @pulumi.getter(name="responseHeadersToRemoves")
    def response_headers_to_removes(self) -> Optional[List[str]]:
        """Header names removed from the response before it is returned to the
        client."""
        return pulumi.get(self, "response_headers_to_removes")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python attribute name.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class URLMapPathMatcherDefaultRouteActionWeightedBackendServiceHeaderActionRequestHeadersToAdd(dict):
    """A single request header to add: name, value, and whether the value
    replaces or appends to existing values."""

    def __init__(__self__, *,
                 header_name: Optional[str] = None,
                 header_value: Optional[str] = None,
                 replace: Optional[bool] = None):
        """
        :param str header_name: Name of the header to add.
        :param str header_value: Value of the header to add.
        :param bool replace: When true, headerValue replaces any existing values
               for the header; when false, it is appended to them.
        """
        # Keep only the fields that were explicitly given.
        for attr, value in (("header_name", header_name),
                            ("header_value", header_value),
                            ("replace", replace)):
            if value is not None:
                pulumi.set(__self__, attr, value)

    @property
    @pulumi.getter(name="headerName")
    def header_name(self) -> Optional[str]:
        """Name of the header to add."""
        return pulumi.get(self, "header_name")

    @property
    @pulumi.getter(name="headerValue")
    def header_value(self) -> Optional[str]:
        """Value of the header to add."""
        return pulumi.get(self, "header_value")

    @property
    @pulumi.getter
    def replace(self) -> Optional[bool]:
        """True to replace any existing values for the header with headerValue;
        false to append headerValue to them."""
        return pulumi.get(self, "replace")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python attribute name.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class URLMapPathMatcherDefaultRouteActionWeightedBackendServiceHeaderActionResponseHeadersToAdd(dict):
    """A single response header to add: name, value, and whether the value
    replaces or appends to existing values."""

    def __init__(__self__, *,
                 header_name: Optional[str] = None,
                 header_value: Optional[str] = None,
                 replace: Optional[bool] = None):
        """
        :param str header_name: Name of the header to add.
        :param str header_value: Value of the header to add.
        :param bool replace: When true, headerValue replaces any existing values
               for the header; when false, it is appended to them.
        """
        # Keep only the fields that were explicitly given.
        for attr, value in (("header_name", header_name),
                            ("header_value", header_value),
                            ("replace", replace)):
            if value is not None:
                pulumi.set(__self__, attr, value)

    @property
    @pulumi.getter(name="headerName")
    def header_name(self) -> Optional[str]:
        """Name of the header to add."""
        return pulumi.get(self, "header_name")

    @property
    @pulumi.getter(name="headerValue")
    def header_value(self) -> Optional[str]:
        """Value of the header to add."""
        return pulumi.get(self, "header_value")

    @property
    @pulumi.getter
    def replace(self) -> Optional[bool]:
        """True to replace any existing values for the header with headerValue;
        false to append headerValue to them."""
        return pulumi.get(self, "replace")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python attribute name.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class URLMapPathMatcherDefaultUrlRedirect(dict):
    """Default URL redirect of a path matcher: host/path/prefix rewriting,
    scheme upgrade, query stripping, and the redirect status code."""

    def __init__(__self__, *,
                 strip_query: bool,
                 host_redirect: Optional[str] = None,
                 https_redirect: Optional[bool] = None,
                 path_redirect: Optional[str] = None,
                 prefix_redirect: Optional[str] = None,
                 redirect_response_code: Optional[str] = None):
        """
        :param bool strip_query: When true, the query portion of the original
               URL is removed before redirecting; when false it is retained.
               The normal default is false. This field is required so that an
               empty block is never set.
        :param str host_redirect: Host used in the redirect response instead of
               the one supplied in the request (1 to 255 characters).
        :param bool https_redirect: When true, the redirected request's URL
               scheme is set to https; when false it keeps the scheme of the
               original request. Only valid for UrlMaps used in
               TargetHttpProxys; setting it true for TargetHttpsProxy is not
               permitted. The default is false.
        :param str path_redirect: Path used in the redirect response instead of
               the one supplied in the request. Cannot be supplied together
               with prefixRedirect; supply one alone or neither (the original
               request's path is then used). 1 to 1024 characters.
        :param str prefix_redirect: Prefix replacing the prefixMatch specified
               in the HttpRouteRuleMatch, retaining the remaining portion of
               the URL before redirecting. Cannot be supplied together with
               pathRedirect; supply one alone or neither (the original
               request's path is then used). 1 to 1024 characters.
        :param str redirect_response_code: HTTP status code for this
               RedirectAction. Supported values are:
               * MOVED_PERMANENTLY_DEFAULT, the default, corresponding to 301.
               * FOUND, corresponding to 302.
               * SEE_OTHER, corresponding to 303.
               * TEMPORARY_REDIRECT, corresponding to 307; the request method
                 is retained.
               * PERMANENT_REDIRECT, corresponding to 308; the request method
                 is retained.
        """
        # strip_query is mandatory; everything else is stored only if supplied.
        pulumi.set(__self__, "strip_query", strip_query)
        optional_fields = (("host_redirect", host_redirect),
                           ("https_redirect", https_redirect),
                           ("path_redirect", path_redirect),
                           ("prefix_redirect", prefix_redirect),
                           ("redirect_response_code", redirect_response_code))
        for attr, value in optional_fields:
            if value is not None:
                pulumi.set(__self__, attr, value)

    @property
    @pulumi.getter(name="stripQuery")
    def strip_query(self) -> bool:
        """True to drop the query portion of the original URL before
        redirecting; false to retain it. The normal default is false; the field
        is required so an empty block is never set."""
        return pulumi.get(self, "strip_query")

    @property
    @pulumi.getter(name="hostRedirect")
    def host_redirect(self) -> Optional[str]:
        """Host used in the redirect response instead of the request's host
        (1 to 255 characters)."""
        return pulumi.get(self, "host_redirect")

    @property
    @pulumi.getter(name="httpsRedirect")
    def https_redirect(self) -> Optional[bool]:
        """True to force the redirected request to https; false to keep the
        original scheme. Only for UrlMaps on TargetHttpProxys — not permitted
        for TargetHttpsProxy. Defaults to false."""
        return pulumi.get(self, "https_redirect")

    @property
    @pulumi.getter(name="pathRedirect")
    def path_redirect(self) -> Optional[str]:
        """Path used in the redirect response instead of the request's path.
        Mutually exclusive with prefixRedirect; if neither is supplied the
        original path is used. 1 to 1024 characters."""
        return pulumi.get(self, "path_redirect")

    @property
    @pulumi.getter(name="prefixRedirect")
    def prefix_redirect(self) -> Optional[str]:
        """Prefix replacing the matched prefixMatch while retaining the rest of
        the URL. Mutually exclusive with pathRedirect; if neither is supplied
        the original path is used. 1 to 1024 characters."""
        return pulumi.get(self, "prefix_redirect")

    @property
    @pulumi.getter(name="redirectResponseCode")
    def redirect_response_code(self) -> Optional[str]:
        """HTTP status code for this RedirectAction. One of
        MOVED_PERMANENTLY_DEFAULT (301, the default), FOUND (302), SEE_OTHER
        (303), TEMPORARY_REDIRECT (307, request method retained), or
        PERMANENT_REDIRECT (308, request method retained)."""
        return pulumi.get(self, "redirect_response_code")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python attribute name.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class URLMapPathMatcherHeaderAction(dict):
    """Header manipulations (adds/removes, for both request and response)
    applied at the path-matcher level."""

    def __init__(__self__, *,
                 request_headers_to_adds: Optional[List['outputs.URLMapPathMatcherHeaderActionRequestHeadersToAdd']] = None,
                 request_headers_to_removes: Optional[List[str]] = None,
                 response_headers_to_adds: Optional[List['outputs.URLMapPathMatcherHeaderActionResponseHeadersToAdd']] = None,
                 response_headers_to_removes: Optional[List[str]] = None):
        """
        :param List['URLMapPathMatcherHeaderActionRequestHeadersToAddArgs'] request_headers_to_adds:
               Headers added to a matching request before it is forwarded to
               the backendService. Structure is documented below.
        :param List[str] request_headers_to_removes: Header names removed from
               the request before it is forwarded to the backendService.
        :param List['URLMapPathMatcherHeaderActionResponseHeadersToAddArgs'] response_headers_to_adds:
               Headers added to the response before it is sent back to the
               client. Structure is documented below.
        :param List[str] response_headers_to_removes: Header names removed from
               the response before it is sent back to the client.
        """
        # Store each field only when it was supplied by the caller.
        fields = (("request_headers_to_adds", request_headers_to_adds),
                  ("request_headers_to_removes", request_headers_to_removes),
                  ("response_headers_to_adds", response_headers_to_adds),
                  ("response_headers_to_removes", response_headers_to_removes))
        for attr, value in fields:
            if value is not None:
                pulumi.set(__self__, attr, value)

    @property
    @pulumi.getter(name="requestHeadersToAdds")
    def request_headers_to_adds(self) -> Optional[List['outputs.URLMapPathMatcherHeaderActionRequestHeadersToAdd']]:
        """Headers added to a matching request before forwarding to the
        backendService. Structure is documented below."""
        return pulumi.get(self, "request_headers_to_adds")

    @property
    @pulumi.getter(name="requestHeadersToRemoves")
    def request_headers_to_removes(self) -> Optional[List[str]]:
        """Header names removed from the request before forwarding to the
        backendService."""
        return pulumi.get(self, "request_headers_to_removes")

    @property
    @pulumi.getter(name="responseHeadersToAdds")
    def response_headers_to_adds(self) -> Optional[List['outputs.URLMapPathMatcherHeaderActionResponseHeadersToAdd']]:
        """Headers added to the response before it is returned to the client.
        Structure is documented below."""
        return pulumi.get(self, "response_headers_to_adds")

    @property
    @pulumi.getter(name="responseHeadersToRemoves")
    def response_headers_to_removes(self) -> Optional[List[str]]:
        """Header names removed from the response before it is returned to the
        client."""
        return pulumi.get(self, "response_headers_to_removes")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python attribute name.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class URLMapPathMatcherHeaderActionRequestHeadersToAdd(dict):
    """A single required request header to add: name, value, and whether the
    value replaces or appends to existing values."""

    def __init__(__self__, *,
                 header_name: str,
                 header_value: str,
                 replace: bool):
        """
        :param str header_name: Name of the header to add.
        :param str header_value: Value of the header to add.
        :param bool replace: When true, headerValue replaces any existing values
               for the header; when false, it is appended to them.
        """
        # All three fields are required, so each is stored unconditionally.
        for attr, value in (("header_name", header_name),
                            ("header_value", header_value),
                            ("replace", replace)):
            pulumi.set(__self__, attr, value)

    @property
    @pulumi.getter(name="headerName")
    def header_name(self) -> str:
        """Name of the header to add."""
        return pulumi.get(self, "header_name")

    @property
    @pulumi.getter(name="headerValue")
    def header_value(self) -> str:
        """Value of the header to add."""
        return pulumi.get(self, "header_value")

    @property
    @pulumi.getter
    def replace(self) -> bool:
        """True to replace any existing values for the header with headerValue;
        false to append headerValue to them."""
        return pulumi.get(self, "replace")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python attribute name.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class URLMapPathMatcherHeaderActionResponseHeadersToAdd(dict):
    """A single required response header to add: name, value, and whether the
    value replaces or appends to existing values."""

    def __init__(__self__, *,
                 header_name: str,
                 header_value: str,
                 replace: bool):
        """
        :param str header_name: Name of the header to add.
        :param str header_value: Value of the header to add.
        :param bool replace: When true, headerValue replaces any existing values
               for the header; when false, it is appended to them.
        """
        # All three fields are required, so each is stored unconditionally.
        for attr, value in (("header_name", header_name),
                            ("header_value", header_value),
                            ("replace", replace)):
            pulumi.set(__self__, attr, value)

    @property
    @pulumi.getter(name="headerName")
    def header_name(self) -> str:
        """Name of the header to add."""
        return pulumi.get(self, "header_name")

    @property
    @pulumi.getter(name="headerValue")
    def header_value(self) -> str:
        """Value of the header to add."""
        return pulumi.get(self, "header_value")

    @property
    @pulumi.getter
    def replace(self) -> bool:
        """True to replace any existing values for the header with headerValue;
        false to append headerValue to them."""
        return pulumi.get(self, "replace")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python attribute name.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class URLMapPathMatcherPathRule(dict):
    """A path rule of a path matcher: the path patterns to match and exactly
    one of a backend service, an advanced route action, or a URL redirect."""

    def __init__(__self__, *,
                 paths: List[str],
                 route_action: Optional['outputs.URLMapPathMatcherPathRuleRouteAction'] = None,
                 service: Optional[str] = None,
                 url_redirect: Optional['outputs.URLMapPathMatcherPathRuleUrlRedirect'] = None):
        """
        :param List[str] paths: Path patterns to match. Each must start with /
               and the only place a * is allowed is at the end following a /.
               The string fed to the path matcher does not include any text
               after the first ? or #, and those chars are not allowed here.
        :param 'URLMapPathMatcherPathRuleRouteActionArgs' route_action: Advanced
               routing actions (URL rewrites, header transformations, etc.)
               performed by the load balancer before forwarding the request to
               the selected backend. If routeAction specifies any
               weightedBackendServices, service must not be set; conversely if
               service is set, routeAction cannot contain any
               weightedBackendServices. Only one of routeAction or urlRedirect
               must be set. Structure is documented below.
        :param str service: Backend service or backend bucket link to match.
        :param 'URLMapPathMatcherPathRuleUrlRedirectArgs' url_redirect: Redirect
               target when this rule matches. If urlRedirect is specified,
               service or routeAction must not be set. Structure is documented
               below.
        """
        # paths is mandatory; the remaining fields are stored only if supplied.
        pulumi.set(__self__, "paths", paths)
        for attr, value in (("route_action", route_action),
                            ("service", service),
                            ("url_redirect", url_redirect)):
            if value is not None:
                pulumi.set(__self__, attr, value)

    @property
    @pulumi.getter
    def paths(self) -> List[str]:
        """Path patterns to match. Each must start with /; a * is only allowed
        at the end following a /. The matched string excludes any text after
        the first ? or #, and those chars are not allowed here."""
        return pulumi.get(self, "paths")

    @property
    @pulumi.getter(name="routeAction")
    def route_action(self) -> Optional['outputs.URLMapPathMatcherPathRuleRouteAction']:
        """Advanced routing actions applied before forwarding to the selected
        backend. Mutually constrained with service (no weightedBackendServices
        when service is set, and vice versa); only one of routeAction or
        urlRedirect must be set. Structure is documented below."""
        return pulumi.get(self, "route_action")

    @property
    @pulumi.getter
    def service(self) -> Optional[str]:
        """Backend service or backend bucket link matched by this test."""
        return pulumi.get(self, "service")

    @property
    @pulumi.getter(name="urlRedirect")
    def url_redirect(self) -> Optional['outputs.URLMapPathMatcherPathRuleUrlRedirect']:
        """Redirect applied when this rule matches; if specified, service or
        routeAction must not be set. Structure is documented below."""
        return pulumi.get(self, "url_redirect")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python attribute name.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class URLMapPathMatcherPathRuleRouteAction(dict):
    """Advanced routing actions of a path rule: CORS policy, fault injection,
    request mirroring, retries, timeout, URL rewrite, and weighted backends."""

    def __init__(__self__, *,
                 cors_policy: Optional['outputs.URLMapPathMatcherPathRuleRouteActionCorsPolicy'] = None,
                 fault_injection_policy: Optional['outputs.URLMapPathMatcherPathRuleRouteActionFaultInjectionPolicy'] = None,
                 request_mirror_policy: Optional['outputs.URLMapPathMatcherPathRuleRouteActionRequestMirrorPolicy'] = None,
                 retry_policy: Optional['outputs.URLMapPathMatcherPathRuleRouteActionRetryPolicy'] = None,
                 timeout: Optional['outputs.URLMapPathMatcherPathRuleRouteActionTimeout'] = None,
                 url_rewrite: Optional['outputs.URLMapPathMatcherPathRuleRouteActionUrlRewrite'] = None,
                 weighted_backend_services: Optional[List['outputs.URLMapPathMatcherPathRuleRouteActionWeightedBackendService']] = None):
        """
        :param 'URLMapPathMatcherPathRuleRouteActionCorsPolicyArgs' cors_policy:
               Specification for allowing client side cross-origin requests;
               see the
               [W3C Recommendation for Cross Origin Resource Sharing](https://www.w3.org/TR/cors/).
               Structure is documented below.
        :param 'URLMapPathMatcherPathRuleRouteActionFaultInjectionPolicyArgs' fault_injection_policy:
               Fault injection introduced into traffic to test client
               resiliency to backend service failure. The Loadbalancer can
               delay a percentage of requests before sending them to the
               backend service, and can abort a percentage of requests from
               clients. timeout and retryPolicy are ignored by clients
               configured with a faultInjectionPolicy. Structure is documented
               below.
        :param 'URLMapPathMatcherPathRuleRouteActionRequestMirrorPolicyArgs' request_mirror_policy:
               Policy for shadowing requests intended for the route's backends
               to a separate mirrored backend service. The Loadbalancer does
               not wait for responses from the shadow service; before sending
               traffic to it, the host / authority header is suffixed with
               -shadow. Structure is documented below.
        :param 'URLMapPathMatcherPathRuleRouteActionRetryPolicyArgs' retry_policy:
               Retry policy associated with this route. Structure is documented
               below.
        :param 'URLMapPathMatcherPathRuleRouteActionTimeoutArgs' timeout:
               Timeout for the selected route, measured from the time the
               request is fully processed (end-of-stream) until the response is
               completely processed; includes all retries. Defaults to the
               largest timeout among the route's backend services. Structure is
               documented below.
        :param 'URLMapPathMatcherPathRuleRouteActionUrlRewriteArgs' url_rewrite:
               Spec to modify the request URL before forwarding the request to
               the matched service. Structure is documented below.
        :param List['URLMapPathMatcherPathRuleRouteActionWeightedBackendServiceArgs'] weighted_backend_services:
               Weighted backend services receiving traffic on a route match;
               weights determine each service's traffic fraction. For a single
               backend service, supply one weightedBackendService with a
               non-zero weight. Once a backendService is identified, and before
               forwarding, advanced routing actions such as URL rewrites and
               header transformations are applied per this HttpRouteAction's
               settings. Structure is documented below.
        """
        # Record only the fields the caller actually supplied.
        fields = (("cors_policy", cors_policy),
                  ("fault_injection_policy", fault_injection_policy),
                  ("request_mirror_policy", request_mirror_policy),
                  ("retry_policy", retry_policy),
                  ("timeout", timeout),
                  ("url_rewrite", url_rewrite),
                  ("weighted_backend_services", weighted_backend_services))
        for attr, value in fields:
            if value is not None:
                pulumi.set(__self__, attr, value)

    @property
    @pulumi.getter(name="corsPolicy")
    def cors_policy(self) -> Optional['outputs.URLMapPathMatcherPathRuleRouteActionCorsPolicy']:
        """Specification for allowing client side cross-origin requests; see
        the [W3C Recommendation for Cross Origin Resource Sharing](https://www.w3.org/TR/cors/).
        Structure is documented below."""
        return pulumi.get(self, "cors_policy")

    @property
    @pulumi.getter(name="faultInjectionPolicy")
    def fault_injection_policy(self) -> Optional['outputs.URLMapPathMatcherPathRuleRouteActionFaultInjectionPolicy']:
        """Fault injection (delays/aborts on a percentage of requests) used to
        test client resiliency to backend failure. timeout and retryPolicy are
        ignored by clients configured with a faultInjectionPolicy. Structure is
        documented below."""
        return pulumi.get(self, "fault_injection_policy")

    @property
    @pulumi.getter(name="requestMirrorPolicy")
    def request_mirror_policy(self) -> Optional['outputs.URLMapPathMatcherPathRuleRouteActionRequestMirrorPolicy']:
        """Policy for shadowing requests to a separate mirrored backend
        service; the Loadbalancer does not wait for shadow responses, and the
        host / authority header is suffixed with -shadow. Structure is
        documented below."""
        return pulumi.get(self, "request_mirror_policy")

    @property
    @pulumi.getter(name="retryPolicy")
    def retry_policy(self) -> Optional['outputs.URLMapPathMatcherPathRuleRouteActionRetryPolicy']:
        """Retry policy associated with this route. Structure is documented
        below."""
        return pulumi.get(self, "retry_policy")

    @property
    @pulumi.getter
    def timeout(self) -> Optional['outputs.URLMapPathMatcherPathRuleRouteActionTimeout']:
        """Timeout for the selected route, from full request processing
        (end-of-stream) until the response is completely processed, including
        all retries; defaults to the largest backend-service timeout. Structure
        is documented below."""
        return pulumi.get(self, "timeout")

    @property
    @pulumi.getter(name="urlRewrite")
    def url_rewrite(self) -> Optional['outputs.URLMapPathMatcherPathRuleRouteActionUrlRewrite']:
        """Spec to modify the request URL before forwarding to the matched
        service. Structure is documented below."""
        return pulumi.get(self, "url_rewrite")

    @property
    @pulumi.getter(name="weightedBackendServices")
    def weighted_backend_services(self) -> Optional[List['outputs.URLMapPathMatcherPathRuleRouteActionWeightedBackendService']]:
        """Weighted backend services receiving traffic on a route match;
        weights set each service's traffic fraction (one entry with non-zero
        weight for a single backend). Advanced routing actions apply after a
        backendService is identified. Structure is documented below."""
        return pulumi.get(self, "weighted_backend_services")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python attribute name.
        translated = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return translated or prop
@pulumi.output_type
class URLMapPathMatcherPathRuleRouteActionCorsPolicy(dict):
    """Cross-Origin Resource Sharing (CORS) settings for a path rule's route action."""

    def __init__(__self__, *,
                 disabled: bool,
                 allow_credentials: Optional[bool] = None,
                 allow_headers: Optional[List[str]] = None,
                 allow_methods: Optional[List[str]] = None,
                 allow_origin_regexes: Optional[List[str]] = None,
                 allow_origins: Optional[List[str]] = None,
                 expose_headers: Optional[List[str]] = None,
                 max_age: Optional[float] = None):
        """
        :param bool disabled: If true, the CORS policy is disabled. The default value is
               false, which means the CORS policy is in effect.
        :param bool allow_credentials: Whether, in response to a preflight request, the
               actual request may include user credentials
               (Access-Control-Allow-Credentials header).
        :param List[str] allow_headers: Content for the Access-Control-Allow-Headers header.
        :param List[str] allow_methods: Content for the Access-Control-Allow-Methods header.
        :param List[str] allow_origin_regexes: Regular expression patterns matching allowed
               origins (ECMAScript grammar; see en.cppreference.com/w/cpp/regex/ecmascript).
               An origin is allowed if it matches an item in either allowOrigins or
               allowOriginRegexes.
        :param List[str] allow_origins: Origins allowed to make CORS requests. An origin is
               allowed if it matches an item in either allowOrigins or allowOriginRegexes.
        :param List[str] expose_headers: Content for the Access-Control-Expose-Headers header.
        :param float max_age: How long, in seconds, preflight results may be cached
               (Access-Control-Max-Age header).
        """
        pulumi.set(__self__, "disabled", disabled)
        # Persist only the optional fields the caller actually supplied.
        optionals = {
            "allow_credentials": allow_credentials,
            "allow_headers": allow_headers,
            "allow_methods": allow_methods,
            "allow_origin_regexes": allow_origin_regexes,
            "allow_origins": allow_origins,
            "expose_headers": expose_headers,
            "max_age": max_age,
        }
        for key, value in optionals.items():
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter
    def disabled(self) -> bool:
        """True if the CORS policy is disabled; defaults to false (policy in effect)."""
        return pulumi.get(self, "disabled")

    @property
    @pulumi.getter(name="allowCredentials")
    def allow_credentials(self) -> Optional[bool]:
        """Whether the actual request may include user credentials
        (Access-Control-Allow-Credentials header)."""
        return pulumi.get(self, "allow_credentials")

    @property
    @pulumi.getter(name="allowHeaders")
    def allow_headers(self) -> Optional[List[str]]:
        """Content for the Access-Control-Allow-Headers header."""
        return pulumi.get(self, "allow_headers")

    @property
    @pulumi.getter(name="allowMethods")
    def allow_methods(self) -> Optional[List[str]]:
        """Content for the Access-Control-Allow-Methods header."""
        return pulumi.get(self, "allow_methods")

    @property
    @pulumi.getter(name="allowOriginRegexes")
    def allow_origin_regexes(self) -> Optional[List[str]]:
        """Regular expression patterns (ECMAScript grammar) matching allowed origins.
        An origin is allowed if it matches an item in allowOrigins or allowOriginRegexes."""
        return pulumi.get(self, "allow_origin_regexes")

    @property
    @pulumi.getter(name="allowOrigins")
    def allow_origins(self) -> Optional[List[str]]:
        """Origins allowed to make CORS requests. An origin is allowed if it matches
        an item in allowOrigins or allowOriginRegexes."""
        return pulumi.get(self, "allow_origins")

    @property
    @pulumi.getter(name="exposeHeaders")
    def expose_headers(self) -> Optional[List[str]]:
        """Content for the Access-Control-Expose-Headers header."""
        return pulumi.get(self, "expose_headers")

    @property
    @pulumi.getter(name="maxAge")
    def max_age(self) -> Optional[float]:
        """How long, in seconds, preflight results may be cached
        (Access-Control-Max-Age header)."""
        return pulumi.get(self, "max_age")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute; unknown names pass through.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake or prop
@pulumi.output_type
class URLMapPathMatcherPathRuleRouteActionFaultInjectionPolicy(dict):
    """Fault-injection settings: optional abort and delay specifications."""

    def __init__(__self__, *,
                 abort: Optional['outputs.URLMapPathMatcherPathRuleRouteActionFaultInjectionPolicyAbort'] = None,
                 delay: Optional['outputs.URLMapPathMatcherPathRuleRouteActionFaultInjectionPolicyDelay'] = None):
        """
        :param 'URLMapPathMatcherPathRuleRouteActionFaultInjectionPolicyAbortArgs' abort:
               How client requests are aborted as part of fault injection.
               Structure is documented below.
        :param 'URLMapPathMatcherPathRuleRouteActionFaultInjectionPolicyDelayArgs' delay:
               How client requests are delayed as part of fault injection, before being
               sent to a backend service. Structure is documented below.
        """
        # Persist only the fields the caller actually supplied.
        for key, value in (("abort", abort), ("delay", delay)):
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter
    def abort(self) -> Optional['outputs.URLMapPathMatcherPathRuleRouteActionFaultInjectionPolicyAbort']:
        """How client requests are aborted as part of fault injection.
        Structure is documented below."""
        return pulumi.get(self, "abort")

    @property
    @pulumi.getter
    def delay(self) -> Optional['outputs.URLMapPathMatcherPathRuleRouteActionFaultInjectionPolicyDelay']:
        """How client requests are delayed as part of fault injection, before being sent
        to a backend service. Structure is documented below."""
        return pulumi.get(self, "delay")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute; unknown names pass through.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake or prop
@pulumi.output_type
class URLMapPathMatcherPathRuleRouteActionFaultInjectionPolicyAbort(dict):
    """Specification for aborting client requests as part of fault injection."""

    def __init__(__self__, *,
                 http_status: float,
                 percentage: float):
        """
        :param float http_status: HTTP status code used to abort the request;
               must be between 200 and 599 inclusive.
        :param float percentage: Percentage of traffic (connections/operations/requests)
               aborted as part of fault injection; must be between 0.0 and 100.0 inclusive.
        """
        for key, value in (("http_status", http_status), ("percentage", percentage)):
            pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="httpStatus")
    def http_status(self) -> float:
        """HTTP status code used to abort the request (200 to 599 inclusive)."""
        return pulumi.get(self, "http_status")

    @property
    @pulumi.getter
    def percentage(self) -> float:
        """Percentage of traffic aborted by fault injection (0.0 to 100.0 inclusive)."""
        return pulumi.get(self, "percentage")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute; unknown names pass through.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake or prop
@pulumi.output_type
class URLMapPathMatcherPathRuleRouteActionFaultInjectionPolicyDelay(dict):
    """Specification for delaying client requests as part of fault injection."""

    def __init__(__self__, *,
                 fixed_delay: 'outputs.URLMapPathMatcherPathRuleRouteActionFaultInjectionPolicyDelayFixedDelay',
                 percentage: float):
        """
        :param 'URLMapPathMatcherPathRuleRouteActionFaultInjectionPolicyDelayFixedDelayArgs' fixed_delay:
               The value of the fixed delay interval. Structure is documented below.
        :param float percentage: Percentage of traffic (connections/operations/requests)
               delayed as part of fault injection; must be between 0.0 and 100.0 inclusive.
        """
        for key, value in (("fixed_delay", fixed_delay), ("percentage", percentage)):
            pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="fixedDelay")
    def fixed_delay(self) -> 'outputs.URLMapPathMatcherPathRuleRouteActionFaultInjectionPolicyDelayFixedDelay':
        """The value of the fixed delay interval. Structure is documented below."""
        return pulumi.get(self, "fixed_delay")

    @property
    @pulumi.getter
    def percentage(self) -> float:
        """Percentage of traffic affected by fault injection (0.0 to 100.0 inclusive)."""
        return pulumi.get(self, "percentage")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute; unknown names pass through.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake or prop
@pulumi.output_type
class URLMapPathMatcherPathRuleRouteActionFaultInjectionPolicyDelayFixedDelay(dict):
    """A duration expressed as whole seconds plus an optional nanosecond fraction."""

    def __init__(__self__, *,
                 seconds: str,
                 nanos: Optional[float] = None):
        """
        :param str seconds: Whole-second span; must be from 0 to 315,576,000,000 inclusive
               (60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years).
        :param float nanos: Sub-second span at nanosecond resolution; must be from 0 to
               999,999,999 inclusive. Durations under one second use a 0 seconds field and
               a positive nanos field.
        """
        pulumi.set(__self__, "seconds", seconds)
        if nanos is not None:
            pulumi.set(__self__, "nanos", nanos)

    @property
    @pulumi.getter
    def seconds(self) -> str:
        """Whole-second span (0 to 315,576,000,000 inclusive)."""
        return pulumi.get(self, "seconds")

    @property
    @pulumi.getter
    def nanos(self) -> Optional[float]:
        """Sub-second span in nanoseconds (0 to 999,999,999 inclusive)."""
        return pulumi.get(self, "nanos")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute; unknown names pass through.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake or prop
@pulumi.output_type
class URLMapPathMatcherPathRuleRouteActionRequestMirrorPolicy(dict):
    """Request-mirroring settings for a path rule's route action."""

    def __init__(__self__, *,
                 backend_service: str):
        """
        :param str backend_service: Full or partial URL of the BackendService resource
               traffic is mirrored to.
        """
        pulumi.set(__self__, "backend_service", backend_service)

    @property
    @pulumi.getter(name="backendService")
    def backend_service(self) -> str:
        """Full or partial URL of the BackendService resource traffic is mirrored to."""
        return pulumi.get(self, "backend_service")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute; unknown names pass through.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake or prop
@pulumi.output_type
class URLMapPathMatcherPathRuleRouteActionRetryPolicy(dict):
    """Retry policy associated with a path rule's route action."""

    def __init__(__self__, *,
                 num_retries: Optional[float] = None,
                 per_try_timeout: Optional['outputs.URLMapPathMatcherPathRuleRouteActionRetryPolicyPerTryTimeout'] = None,
                 retry_conditions: Optional[List[str]] = None):
        """
        :param float num_retries: Allowed number of retries. Must be > 0; defaults to 1 if
               not specified.
        :param 'URLMapPathMatcherPathRuleRouteActionRetryPolicyPerTryTimeoutArgs' per_try_timeout:
               A non-zero timeout per retry attempt. If not specified, the timeout set in
               HttpRouteAction is used; if that is also unset, the largest timeout among
               all backend services associated with the route is used.
               Structure is documented below.
        :param List[str] retry_conditions: Specifies one or more conditions under which this
               retry rule applies. Valid values are:
               5xx: retry if the backend service responds with any 5xx response code, or
               does not respond at all (e.g. disconnects, reset, read timeout, connection
               failure, refused streams).
               gateway-error: like 5xx, but only for response codes 502, 503 or 504.
               connect-failure: retry on failures connecting to backend services, for
               example due to connection timeouts.
               retriable-4xx: retry for retriable 4xx response codes (currently only 409).
               refused-stream: retry if the backend service resets the stream with a
               REFUSED_STREAM error code; this reset type indicates it is safe to retry.
               cancelled / deadline-exceeded / resource-exhausted / unavailable: retry if
               the gRPC status code in the response header is set to that value.
        """
        # Persist only the optional fields the caller actually supplied.
        optionals = {
            "num_retries": num_retries,
            "per_try_timeout": per_try_timeout,
            "retry_conditions": retry_conditions,
        }
        for key, value in optionals.items():
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="numRetries")
    def num_retries(self) -> Optional[float]:
        """Allowed number of retries. Must be > 0; defaults to 1 if not specified."""
        return pulumi.get(self, "num_retries")

    @property
    @pulumi.getter(name="perTryTimeout")
    def per_try_timeout(self) -> Optional['outputs.URLMapPathMatcherPathRuleRouteActionRetryPolicyPerTryTimeout']:
        """Non-zero timeout per retry attempt. Falls back to the HttpRouteAction timeout,
        then to the largest timeout among the route's backend services.
        Structure is documented below."""
        return pulumi.get(self, "per_try_timeout")

    @property
    @pulumi.getter(name="retryConditions")
    def retry_conditions(self) -> Optional[List[str]]:
        """Conditions under which this retry rule applies. Valid values:
        5xx (any 5xx response or no response at all), gateway-error (502/503/504),
        connect-failure, retriable-4xx (currently only 409), refused-stream
        (REFUSED_STREAM reset, safe to retry), and the gRPC statuses cancelled,
        deadline-exceeded, resource-exhausted, unavailable."""
        return pulumi.get(self, "retry_conditions")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute; unknown names pass through.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake or prop
@pulumi.output_type
class URLMapPathMatcherPathRuleRouteActionRetryPolicyPerTryTimeout(dict):
    """A duration expressed as whole seconds plus an optional nanosecond fraction."""

    def __init__(__self__, *,
                 seconds: str,
                 nanos: Optional[float] = None):
        """
        :param str seconds: Whole-second span; must be from 0 to 315,576,000,000 inclusive
               (60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years).
        :param float nanos: Sub-second span at nanosecond resolution; must be from 0 to
               999,999,999 inclusive. Durations under one second use a 0 seconds field and
               a positive nanos field.
        """
        pulumi.set(__self__, "seconds", seconds)
        if nanos is not None:
            pulumi.set(__self__, "nanos", nanos)

    @property
    @pulumi.getter
    def seconds(self) -> str:
        """Whole-second span (0 to 315,576,000,000 inclusive)."""
        return pulumi.get(self, "seconds")

    @property
    @pulumi.getter
    def nanos(self) -> Optional[float]:
        """Sub-second span in nanoseconds (0 to 999,999,999 inclusive)."""
        return pulumi.get(self, "nanos")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute; unknown names pass through.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake or prop
@pulumi.output_type
class URLMapPathMatcherPathRuleRouteActionTimeout(dict):
    """A duration expressed as whole seconds plus an optional nanosecond fraction."""

    def __init__(__self__, *,
                 seconds: str,
                 nanos: Optional[float] = None):
        """
        :param str seconds: Whole-second span; must be from 0 to 315,576,000,000 inclusive
               (60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years).
        :param float nanos: Sub-second span at nanosecond resolution; must be from 0 to
               999,999,999 inclusive. Durations under one second use a 0 seconds field and
               a positive nanos field.
        """
        pulumi.set(__self__, "seconds", seconds)
        if nanos is not None:
            pulumi.set(__self__, "nanos", nanos)

    @property
    @pulumi.getter
    def seconds(self) -> str:
        """Whole-second span (0 to 315,576,000,000 inclusive)."""
        return pulumi.get(self, "seconds")

    @property
    @pulumi.getter
    def nanos(self) -> Optional[float]:
        """Sub-second span in nanoseconds (0 to 999,999,999 inclusive)."""
        return pulumi.get(self, "nanos")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute; unknown names pass through.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake or prop
@pulumi.output_type
class URLMapPathMatcherPathRuleRouteActionUrlRewrite(dict):
    """URL-rewrite settings applied before forwarding a request to the matched service."""

    def __init__(__self__, *,
                 host_rewrite: Optional[str] = None,
                 path_prefix_rewrite: Optional[str] = None):
        """
        :param str host_rewrite: Before forwarding the request to the selected service,
               the request's host header is replaced with this value.
               Must be between 1 and 255 characters.
        :param str path_prefix_rewrite: Before forwarding the request to the selected
               backend service, the matching portion of the request's path is replaced by
               this value. Must be between 1 and 1024 characters.
        """
        # Persist only the fields the caller actually supplied.
        for key, value in (("host_rewrite", host_rewrite),
                           ("path_prefix_rewrite", path_prefix_rewrite)):
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="hostRewrite")
    def host_rewrite(self) -> Optional[str]:
        """Replacement for the request's host header prior to forwarding
        (1 to 255 characters)."""
        return pulumi.get(self, "host_rewrite")

    @property
    @pulumi.getter(name="pathPrefixRewrite")
    def path_prefix_rewrite(self) -> Optional[str]:
        """Replacement for the matching portion of the request's path prior to forwarding
        (1 to 1024 characters)."""
        return pulumi.get(self, "path_prefix_rewrite")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute; unknown names pass through.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake or prop
@pulumi.output_type
class URLMapPathMatcherPathRuleRouteActionWeightedBackendService(dict):
    """A backend service plus the traffic weight and header actions applied to it."""

    def __init__(__self__, *,
                 backend_service: str,
                 weight: float,
                 header_action: Optional['outputs.URLMapPathMatcherPathRuleRouteActionWeightedBackendServiceHeaderAction'] = None):
        """
        :param str backend_service: Full or partial URL of the BackendService resource
               being mirrored to.
        :param float weight: Fraction of traffic sent to backendService, computed as
               weight / (sum of all weightedBackendService weights in routeAction).
               The selection of a backend service is determined only for new traffic; once
               a user's request has been directed to a backendService, subsequent requests
               go to the same backendService per its session affinity policy.
               Must be between 0 and 1000.
        :param 'URLMapPathMatcherPathRuleRouteActionWeightedBackendServiceHeaderActionArgs' header_action:
               Request and response header changes that take effect for the selected
               backendService, applied before any headerAction in the enclosing
               HttpRouteRule, PathMatcher and UrlMap. Structure is documented below.
        """
        for key, value in (("backend_service", backend_service), ("weight", weight)):
            pulumi.set(__self__, key, value)
        if header_action is not None:
            pulumi.set(__self__, "header_action", header_action)

    @property
    @pulumi.getter(name="backendService")
    def backend_service(self) -> str:
        """Full or partial URL of the BackendService resource being mirrored to."""
        return pulumi.get(self, "backend_service")

    @property
    @pulumi.getter
    def weight(self) -> float:
        """Fraction of traffic sent to backendService, computed as
        weight / (sum of all weightedBackendService weights in routeAction).
        Selection happens only for new traffic; subsequent requests follow the
        BackendService's session affinity policy. Must be between 0 and 1000."""
        return pulumi.get(self, "weight")

    @property
    @pulumi.getter(name="headerAction")
    def header_action(self) -> Optional['outputs.URLMapPathMatcherPathRuleRouteActionWeightedBackendServiceHeaderAction']:
        """Header changes for the selected backendService, applied before any headerAction
        in the enclosing HttpRouteRule, PathMatcher and UrlMap.
        Structure is documented below."""
        return pulumi.get(self, "header_action")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute; unknown names pass through.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake or prop
@pulumi.output_type
class URLMapPathMatcherPathRuleRouteActionWeightedBackendServiceHeaderAction(dict):
    """Request/response header additions and removals for a weighted backend service."""

    def __init__(__self__, *,
                 request_headers_to_adds: Optional[List['outputs.URLMapPathMatcherPathRuleRouteActionWeightedBackendServiceHeaderActionRequestHeadersToAdd']] = None,
                 request_headers_to_removes: Optional[List[str]] = None,
                 response_headers_to_adds: Optional[List['outputs.URLMapPathMatcherPathRuleRouteActionWeightedBackendServiceHeaderActionResponseHeadersToAdd']] = None,
                 response_headers_to_removes: Optional[List[str]] = None):
        """
        :param List['URLMapPathMatcherPathRuleRouteActionWeightedBackendServiceHeaderActionRequestHeadersToAddArgs'] request_headers_to_adds:
               Headers added to a matching request before forwarding it to the
               backendService. Structure is documented below.
        :param List[str] request_headers_to_removes: Header names removed from the request
               before forwarding it to the backendService.
        :param List['URLMapPathMatcherPathRuleRouteActionWeightedBackendServiceHeaderActionResponseHeadersToAddArgs'] response_headers_to_adds:
               Headers added to the response before sending it back to the client.
               Structure is documented below.
        :param List[str] response_headers_to_removes: Header names removed from the
               response before sending it back to the client.
        """
        # Persist only the optional fields the caller actually supplied.
        optionals = {
            "request_headers_to_adds": request_headers_to_adds,
            "request_headers_to_removes": request_headers_to_removes,
            "response_headers_to_adds": response_headers_to_adds,
            "response_headers_to_removes": response_headers_to_removes,
        }
        for key, value in optionals.items():
            if value is not None:
                pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="requestHeadersToAdds")
    def request_headers_to_adds(self) -> Optional[List['outputs.URLMapPathMatcherPathRuleRouteActionWeightedBackendServiceHeaderActionRequestHeadersToAdd']]:
        """Headers added to a matching request before forwarding it to the backendService.
        Structure is documented below."""
        return pulumi.get(self, "request_headers_to_adds")

    @property
    @pulumi.getter(name="requestHeadersToRemoves")
    def request_headers_to_removes(self) -> Optional[List[str]]:
        """Header names removed from the request before forwarding it to the
        backendService."""
        return pulumi.get(self, "request_headers_to_removes")

    @property
    @pulumi.getter(name="responseHeadersToAdds")
    def response_headers_to_adds(self) -> Optional[List['outputs.URLMapPathMatcherPathRuleRouteActionWeightedBackendServiceHeaderActionResponseHeadersToAdd']]:
        """Headers added to the response before sending it back to the client.
        Structure is documented below."""
        return pulumi.get(self, "response_headers_to_adds")

    @property
    @pulumi.getter(name="responseHeadersToRemoves")
    def response_headers_to_removes(self) -> Optional[List[str]]:
        """Header names removed from the response before sending it back to the client."""
        return pulumi.get(self, "response_headers_to_removes")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute; unknown names pass through.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake or prop
@pulumi.output_type
class URLMapPathMatcherPathRuleRouteActionWeightedBackendServiceHeaderActionRequestHeadersToAdd(dict):
    """A single request header to add, with append/replace behavior."""

    def __init__(__self__, *,
                 header_name: str,
                 header_value: str,
                 replace: bool):
        """
        :param str header_name: Name of the header to add.
        :param str header_value: Value of the header to add.
        :param bool replace: If false, headerValue is appended to any existing values for
               the header. If true, headerValue replaces any values previously set for
               that header.
        """
        for key, value in (("header_name", header_name),
                           ("header_value", header_value),
                           ("replace", replace)):
            pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="headerName")
    def header_name(self) -> str:
        """Name of the header to add."""
        return pulumi.get(self, "header_name")

    @property
    @pulumi.getter(name="headerValue")
    def header_value(self) -> str:
        """Value of the header to add."""
        return pulumi.get(self, "header_value")

    @property
    @pulumi.getter
    def replace(self) -> bool:
        """If false, headerValue is appended to existing values; if true, it replaces
        any values previously set for the header."""
        return pulumi.get(self, "replace")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute; unknown names pass through.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake or prop
@pulumi.output_type
class URLMapPathMatcherPathRuleRouteActionWeightedBackendServiceHeaderActionResponseHeadersToAdd(dict):
    """A single response header to add, with append/replace behavior."""

    def __init__(__self__, *,
                 header_name: str,
                 header_value: str,
                 replace: bool):
        """
        :param str header_name: Name of the header to add.
        :param str header_value: Value of the header to add.
        :param bool replace: If false, headerValue is appended to any existing values for
               the header. If true, headerValue replaces any values previously set for
               that header.
        """
        for key, value in (("header_name", header_name),
                           ("header_value", header_value),
                           ("replace", replace)):
            pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="headerName")
    def header_name(self) -> str:
        """Name of the header to add."""
        return pulumi.get(self, "header_name")

    @property
    @pulumi.getter(name="headerValue")
    def header_value(self) -> str:
        """Value of the header to add."""
        return pulumi.get(self, "header_value")

    @property
    @pulumi.getter
    def replace(self) -> bool:
        """If false, headerValue is appended to existing values; if true, it replaces
        any values previously set for the header."""
        return pulumi.get(self, "replace")

    def _translate_property(self, prop):
        # camelCase wire name -> snake_case attribute; unknown names pass through.
        snake = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return snake or prop
@pulumi.output_type
class URLMapPathMatcherPathRuleUrlRedirect(dict):
    def __init__(__self__, *,
                 strip_query: bool,
                 host_redirect: Optional[str] = None,
                 https_redirect: Optional[bool] = None,
                 path_redirect: Optional[str] = None,
                 prefix_redirect: Optional[str] = None,
                 redirect_response_code: Optional[str] = None):
        """
        :param bool strip_query: If true, any accompanying query portion of the original
               URL is removed before redirecting the request; if false, it is retained.
               The default is false. This field is required to ensure an empty block is
               not set.
        :param str host_redirect: Host used in the redirect response instead of the one
               supplied in the request. Must be between 1 and 255 characters.
        :param bool https_redirect: If true, the URL scheme in the redirected request is
               set to https; if false, the scheme is unchanged. Must only be set for
               UrlMaps used in TargetHttpProxys; setting this true for a TargetHttpsProxy
               is not permitted. The default is false.
        :param str path_redirect: Path used in the redirect response instead of the one
               supplied in the request. Cannot be supplied together with prefixRedirect;
               supply one alone or neither. If neither is supplied, the original request
               path is used. Must be between 1 and 1024 characters.
        :param str prefix_redirect: Prefix that replaces the prefixMatch specified in the
               HttpRouteRuleMatch, retaining the remaining portion of the URL before
               redirecting. Cannot be supplied together with pathRedirect; supply one
               alone or neither. If neither is supplied, the original request path is
               used. Must be between 1 and 1024 characters.
        :param str redirect_response_code: HTTP status code to use for this RedirectAction.
               Supported values:
               * MOVED_PERMANENTLY_DEFAULT (default), corresponds to 301.
               * FOUND, corresponds to 302.
               * SEE_OTHER, corresponds to 303.
               * TEMPORARY_REDIRECT, corresponds to 307; the request method is retained.
               * PERMANENT_REDIRECT, corresponds to 308; the request method is retained.
        """
        pulumi.set(__self__, "strip_query", strip_query)
        if host_redirect is not None:
            pulumi.set(__self__, "host_redirect", host_redirect)
        if https_redirect is not None:
            pulumi.set(__self__, "https_redirect", https_redirect)
        if path_redirect is not None:
            pulumi.set(__self__, "path_redirect", path_redirect)
        if prefix_redirect is not None:
            pulumi.set(__self__, "prefix_redirect", prefix_redirect)
        if redirect_response_code is not None:
            pulumi.set(__self__, "redirect_response_code", redirect_response_code)
    @property
    @pulumi.getter(name="stripQuery")
    def strip_query(self) -> bool:
        """
        If true, any accompanying query portion of the original URL is removed before
        redirecting the request; if false, it is retained. The default is false.
        This field is required to ensure an empty block is not set.
        """
        return pulumi.get(self, "strip_query")
    @property
    @pulumi.getter(name="hostRedirect")
    def host_redirect(self) -> Optional[str]:
        """
        Host used in the redirect response instead of the one supplied in the request.
        Must be between 1 and 255 characters.
        """
        return pulumi.get(self, "host_redirect")
    @property
    @pulumi.getter(name="httpsRedirect")
    def https_redirect(self) -> Optional[bool]:
        """
        If true, the URL scheme in the redirected request is set to https; if false, the
        scheme of the redirected request remains that of the original request. Must only
        be set for UrlMaps used in TargetHttpProxys; setting this true for a
        TargetHttpsProxy is not permitted. The default is false.
        """
        return pulumi.get(self, "https_redirect")
@property
@pulumi.getter(name="pathRedirect")
def path_redirect(self) -> Optional[str]:
"""
The path that will be used in the redirect response instead of the one that was
supplied in the request. pathRedirect cannot be supplied together with
prefixRedirect. Supply one alone or neither. If neither is supplied, the path of the
original request will be used for the redirect. The value must be between 1 and 1024
characters.
"""
return pulumi.get(self, "path_redirect")
@property
@pulumi.getter(name="prefixRedirect")
def prefix_redirect(self) -> Optional[str]:
"""
The prefix that replaces the prefixMatch specified in the HttpRouteRuleMatch,
retaining the remaining portion of the URL before redirecting the request.
prefixRedirect cannot be supplied together with pathRedirect. Supply one alone or
neither. If neither is supplied, the path of the original request will be used for
the redirect. The value must be between 1 and 1024 characters.
"""
return pulumi.get(self, "prefix_redirect")
@property
@pulumi.getter(name="redirectResponseCode")
def redirect_response_code(self) -> Optional[str]:
"""
The HTTP Status code to use for this RedirectAction. Supported values are:
* MOVED_PERMANENTLY_DEFAULT, which is the default value and corresponds to 301.
* FOUND, which corresponds to 302.
* SEE_OTHER which corresponds to 303.
* TEMPORARY_REDIRECT, which corresponds to 307. In this case, the request method
will be retained.
* PERMANENT_REDIRECT, which corresponds to 308. In this case,
the request method will be retained.
"""
return pulumi.get(self, "redirect_response_code")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class URLMapPathMatcherRouteRule(dict):
    """
    A single routeRule within a URL map pathMatcher: requests are matched via
    ``match_rules`` and then handled by ``route_action``, ``service`` or
    ``url_redirect``, with rules evaluated in order of ``priority``.
    """
    def __init__(__self__, *,
                 priority: float,
                 header_action: Optional['outputs.URLMapPathMatcherRouteRuleHeaderAction'] = None,
                 match_rules: Optional[List['outputs.URLMapPathMatcherRouteRuleMatchRule']] = None,
                 route_action: Optional['outputs.URLMapPathMatcherRouteRuleRouteAction'] = None,
                 service: Optional[str] = None,
                 url_redirect: Optional['outputs.URLMapPathMatcherRouteRuleUrlRedirect'] = None):
        """
        :param float priority: For routeRules within a given pathMatcher, priority determines the order
               in which load balancer will interpret routeRules. RouteRules are evaluated
               in order of priority, from the lowest to highest number. The priority of
               a rule decreases as its number increases (1, 2, 3, N+1). The first rule
               that matches the request is applied.
               You cannot configure two or more routeRules with the same priority.
               Priority for each rule must be set to a number between 0 and
               2147483647 inclusive.
               Priority numbers can have gaps, which enable you to add or remove rules
               in the future without affecting the rest of the rules. For example,
               1, 2, 3, 4, 5, 9, 12, 16 is a valid series of priority numbers to which
               you could add rules numbered from 6 to 8, 10 to 11, and 13 to 15 in the
               future without any impact on existing rules.
        :param 'URLMapPathMatcherRouteRuleHeaderActionArgs' header_action: Specifies changes to request and response headers that need to take effect for
               the selected backendService.
               headerAction specified here take effect before headerAction in the enclosing
               HttpRouteRule, PathMatcher and UrlMap.
               Structure is documented below.
        :param List['URLMapPathMatcherRouteRuleMatchRuleArgs'] match_rules: The rules for determining a match.
               Structure is documented below.
        :param 'URLMapPathMatcherRouteRuleRouteActionArgs' route_action: In response to a matching matchRule, the load balancer performs advanced routing
               actions like URL rewrites, header transformations, etc. prior to forwarding the
               request to the selected backend. If routeAction specifies any
               weightedBackendServices, service must not be set. Conversely if service is set,
               routeAction cannot contain any weightedBackendServices. Only one of routeAction
               or urlRedirect must be set.
               Structure is documented below.
        :param str service: The backend service or backend bucket link that should be matched by this test.
        :param 'URLMapPathMatcherRouteRuleUrlRedirectArgs' url_redirect: When this rule is matched, the request is redirected to a URL specified by
               urlRedirect. If urlRedirect is specified, service or routeAction must not be
               set.
               Structure is documented below.
        """
        pulumi.set(__self__, "priority", priority)
        if header_action is not None:
            pulumi.set(__self__, "header_action", header_action)
        if match_rules is not None:
            pulumi.set(__self__, "match_rules", match_rules)
        if route_action is not None:
            pulumi.set(__self__, "route_action", route_action)
        if service is not None:
            pulumi.set(__self__, "service", service)
        if url_redirect is not None:
            pulumi.set(__self__, "url_redirect", url_redirect)
    @property
    @pulumi.getter
    def priority(self) -> float:
        """
        For routeRules within a given pathMatcher, priority determines the order
        in which load balancer will interpret routeRules. RouteRules are evaluated
        in order of priority, from the lowest to highest number. The priority of
        a rule decreases as its number increases (1, 2, 3, N+1). The first rule
        that matches the request is applied.
        You cannot configure two or more routeRules with the same priority.
        Priority for each rule must be set to a number between 0 and
        2147483647 inclusive.
        Priority numbers can have gaps, which enable you to add or remove rules
        in the future without affecting the rest of the rules. For example,
        1, 2, 3, 4, 5, 9, 12, 16 is a valid series of priority numbers to which
        you could add rules numbered from 6 to 8, 10 to 11, and 13 to 15 in the
        future without any impact on existing rules.
        """
        return pulumi.get(self, "priority")
    @property
    @pulumi.getter(name="headerAction")
    def header_action(self) -> Optional['outputs.URLMapPathMatcherRouteRuleHeaderAction']:
        """
        Specifies changes to request and response headers that need to take effect for
        the selected backendService.
        headerAction specified here take effect before headerAction in the enclosing
        HttpRouteRule, PathMatcher and UrlMap.
        Structure is documented below.
        """
        return pulumi.get(self, "header_action")
    @property
    @pulumi.getter(name="matchRules")
    def match_rules(self) -> Optional[List['outputs.URLMapPathMatcherRouteRuleMatchRule']]:
        """
        The rules for determining a match.
        Structure is documented below.
        """
        return pulumi.get(self, "match_rules")
    @property
    @pulumi.getter(name="routeAction")
    def route_action(self) -> Optional['outputs.URLMapPathMatcherRouteRuleRouteAction']:
        """
        In response to a matching matchRule, the load balancer performs advanced routing
        actions like URL rewrites, header transformations, etc. prior to forwarding the
        request to the selected backend. If routeAction specifies any
        weightedBackendServices, service must not be set. Conversely if service is set,
        routeAction cannot contain any weightedBackendServices. Only one of routeAction
        or urlRedirect must be set.
        Structure is documented below.
        """
        return pulumi.get(self, "route_action")
    @property
    @pulumi.getter
    def service(self) -> Optional[str]:
        """
        The backend service or backend bucket link that should be matched by this test.
        """
        return pulumi.get(self, "service")
    @property
    @pulumi.getter(name="urlRedirect")
    def url_redirect(self) -> Optional['outputs.URLMapPathMatcherRouteRuleUrlRedirect']:
        """
        When this rule is matched, the request is redirected to a URL specified by
        urlRedirect. If urlRedirect is specified, service or routeAction must not be
        set.
        Structure is documented below.
        """
        return pulumi.get(self, "url_redirect")
    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class URLMapPathMatcherRouteRuleHeaderAction(dict):
    """
    Header manipulations (additions and removals) applied to requests before
    they reach the backendService and to responses before they return to the
    client.
    """
    def __init__(__self__, *,
                 request_headers_to_adds: Optional[List['outputs.URLMapPathMatcherRouteRuleHeaderActionRequestHeadersToAdd']] = None,
                 request_headers_to_removes: Optional[List[str]] = None,
                 response_headers_to_adds: Optional[List['outputs.URLMapPathMatcherRouteRuleHeaderActionResponseHeadersToAdd']] = None,
                 response_headers_to_removes: Optional[List[str]] = None):
        """
        :param List['URLMapPathMatcherRouteRuleHeaderActionRequestHeadersToAddArgs'] request_headers_to_adds: Headers to add to a matching request prior to forwarding the request to the backendService.
               Structure is documented below.
        :param List[str] request_headers_to_removes: A list of header names for headers that need to be removed from the request prior to
               forwarding the request to the backendService.
        :param List['URLMapPathMatcherRouteRuleHeaderActionResponseHeadersToAddArgs'] response_headers_to_adds: Headers to add the response prior to sending the response back to the client.
               Structure is documented below.
        :param List[str] response_headers_to_removes: A list of header names for headers that need to be removed from the response prior to sending the
               response back to the client.
        """
        if request_headers_to_adds is not None:
            pulumi.set(__self__, "request_headers_to_adds", request_headers_to_adds)
        if request_headers_to_removes is not None:
            pulumi.set(__self__, "request_headers_to_removes", request_headers_to_removes)
        if response_headers_to_adds is not None:
            pulumi.set(__self__, "response_headers_to_adds", response_headers_to_adds)
        if response_headers_to_removes is not None:
            pulumi.set(__self__, "response_headers_to_removes", response_headers_to_removes)
    @property
    @pulumi.getter(name="requestHeadersToAdds")
    def request_headers_to_adds(self) -> Optional[List['outputs.URLMapPathMatcherRouteRuleHeaderActionRequestHeadersToAdd']]:
        """
        Headers to add to a matching request prior to forwarding the request to the backendService.
        Structure is documented below.
        """
        return pulumi.get(self, "request_headers_to_adds")
    @property
    @pulumi.getter(name="requestHeadersToRemoves")
    def request_headers_to_removes(self) -> Optional[List[str]]:
        """
        A list of header names for headers that need to be removed from the request prior to
        forwarding the request to the backendService.
        """
        return pulumi.get(self, "request_headers_to_removes")
    @property
    @pulumi.getter(name="responseHeadersToAdds")
    def response_headers_to_adds(self) -> Optional[List['outputs.URLMapPathMatcherRouteRuleHeaderActionResponseHeadersToAdd']]:
        """
        Headers to add the response prior to sending the response back to the client.
        Structure is documented below.
        """
        return pulumi.get(self, "response_headers_to_adds")
    @property
    @pulumi.getter(name="responseHeadersToRemoves")
    def response_headers_to_removes(self) -> Optional[List[str]]:
        """
        A list of header names for headers that need to be removed from the response prior to sending the
        response back to the client.
        """
        return pulumi.get(self, "response_headers_to_removes")
    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class URLMapPathMatcherRouteRuleHeaderActionRequestHeadersToAdd(dict):
    def __init__(__self__, *,
                 header_name: str,
                 header_value: str,
                 replace: bool):
        """
        A single header (name/value pair) added to a matching request before
        it is forwarded to the backendService.

        :param str header_name: Name of the header being added.
        :param str header_value: Value of the header being added.
        :param bool replace: When true the header is set to headerValue,
               discarding any values previously set for that header; when
               false headerValue is appended to whatever values already exist.
        """
        for key, val in (("header_name", header_name),
                         ("header_value", header_value),
                         ("replace", replace)):
            pulumi.set(__self__, key, val)
    @property
    @pulumi.getter(name="headerName")
    def header_name(self) -> str:
        """
        Name of the header being added.
        """
        return pulumi.get(self, "header_name")
    @property
    @pulumi.getter(name="headerValue")
    def header_value(self) -> str:
        """
        Value of the header being added.
        """
        return pulumi.get(self, "header_value")
    @property
    @pulumi.getter
    def replace(self) -> bool:
        """
        When true the header is set to headerValue, discarding any values
        previously set for that header; when false headerValue is appended to
        the values that already exist.
        """
        return pulumi.get(self, "replace")
    def _translate_property(self, prop):
        # camelCase -> snake_case translation used by the Pulumi SDK.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class URLMapPathMatcherRouteRuleHeaderActionResponseHeadersToAdd(dict):
    def __init__(__self__, *,
                 header_name: str,
                 header_value: str,
                 replace: bool):
        """
        A single header (name/value pair) added to a response before it is
        sent back to the client.

        :param str header_name: Name of the header being added.
        :param str header_value: Value of the header being added.
        :param bool replace: When true the header is set to headerValue and
               any previously set values are discarded; when false
               headerValue is appended to the existing values.
        """
        for field, content in (("header_name", header_name),
                               ("header_value", header_value),
                               ("replace", replace)):
            pulumi.set(__self__, field, content)
    @property
    @pulumi.getter(name="headerName")
    def header_name(self) -> str:
        """
        Name of the header being added.
        """
        return pulumi.get(self, "header_name")
    @property
    @pulumi.getter(name="headerValue")
    def header_value(self) -> str:
        """
        Value of the header being added.
        """
        return pulumi.get(self, "header_value")
    @property
    @pulumi.getter
    def replace(self) -> bool:
        """
        When true the header is set to headerValue and any previously set
        values are discarded; when false headerValue is appended to the
        existing values.
        """
        return pulumi.get(self, "replace")
    def _translate_property(self, prop):
        # camelCase -> snake_case translation used by the Pulumi SDK.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class URLMapPathMatcherRouteRuleMatchRule(dict):
    """
    The criteria a request must satisfy for this routeRule to apply: a path
    match (prefix, full path or regex) plus optional header, query-parameter
    and xDS metadata-filter constraints.
    """
    def __init__(__self__, *,
                 full_path_match: Optional[str] = None,
                 header_matches: Optional[List['outputs.URLMapPathMatcherRouteRuleMatchRuleHeaderMatch']] = None,
                 ignore_case: Optional[bool] = None,
                 metadata_filters: Optional[List['outputs.URLMapPathMatcherRouteRuleMatchRuleMetadataFilter']] = None,
                 prefix_match: Optional[str] = None,
                 query_parameter_matches: Optional[List['outputs.URLMapPathMatcherRouteRuleMatchRuleQueryParameterMatch']] = None,
                 regex_match: Optional[str] = None):
        """
        :param str full_path_match: For satisfying the matchRule condition, the path of the request must exactly
               match the value specified in fullPathMatch after removing any query parameters
               and anchor that may be part of the original URL. FullPathMatch must be between 1
               and 1024 characters. Only one of prefixMatch, fullPathMatch or regexMatch must
               be specified.
        :param List['URLMapPathMatcherRouteRuleMatchRuleHeaderMatchArgs'] header_matches: Specifies a list of header match criteria, all of which must match corresponding
               headers in the request.
               Structure is documented below.
        :param bool ignore_case: Specifies that prefixMatch and fullPathMatch matches are case sensitive.
               Defaults to false.
        :param List['URLMapPathMatcherRouteRuleMatchRuleMetadataFilterArgs'] metadata_filters: Opaque filter criteria used by Loadbalancer to restrict routing configuration to
               a limited set xDS compliant clients. In their xDS requests to Loadbalancer, xDS
               clients present node metadata. If a match takes place, the relevant routing
               configuration is made available to those proxies. For each metadataFilter in
               this list, if its filterMatchCriteria is set to MATCH_ANY, at least one of the
               filterLabels must match the corresponding label provided in the metadata. If its
               filterMatchCriteria is set to MATCH_ALL, then all of its filterLabels must match
               with corresponding labels in the provided metadata. metadataFilters specified
               here can override those specified in ForwardingRule that refers to this
               UrlMap. metadataFilters only applies to Loadbalancers that have their
               loadBalancingScheme set to INTERNAL_SELF_MANAGED.
               Structure is documented below.
        :param str prefix_match: For satisfying the matchRule condition, the request's path must begin with the
               specified prefixMatch. prefixMatch must begin with a /. The value must be
               between 1 and 1024 characters. Only one of prefixMatch, fullPathMatch or
               regexMatch must be specified.
        :param List['URLMapPathMatcherRouteRuleMatchRuleQueryParameterMatchArgs'] query_parameter_matches: Specifies a list of query parameter match criteria, all of which must match
               corresponding query parameters in the request.
               Structure is documented below.
        :param str regex_match: For satisfying the matchRule condition, the path of the request must satisfy the
               regular expression specified in regexMatch after removing any query parameters
               and anchor supplied with the original URL. For the regular expression grammar,
               please see en.cppreference.com/w/cpp/regex/ecmascript Only one of prefixMatch,
               fullPathMatch or regexMatch must be specified.
        """
        if full_path_match is not None:
            pulumi.set(__self__, "full_path_match", full_path_match)
        if header_matches is not None:
            pulumi.set(__self__, "header_matches", header_matches)
        if ignore_case is not None:
            pulumi.set(__self__, "ignore_case", ignore_case)
        if metadata_filters is not None:
            pulumi.set(__self__, "metadata_filters", metadata_filters)
        if prefix_match is not None:
            pulumi.set(__self__, "prefix_match", prefix_match)
        if query_parameter_matches is not None:
            pulumi.set(__self__, "query_parameter_matches", query_parameter_matches)
        if regex_match is not None:
            pulumi.set(__self__, "regex_match", regex_match)
    @property
    @pulumi.getter(name="fullPathMatch")
    def full_path_match(self) -> Optional[str]:
        """
        For satisfying the matchRule condition, the path of the request must exactly
        match the value specified in fullPathMatch after removing any query parameters
        and anchor that may be part of the original URL. FullPathMatch must be between 1
        and 1024 characters. Only one of prefixMatch, fullPathMatch or regexMatch must
        be specified.
        """
        return pulumi.get(self, "full_path_match")
    @property
    @pulumi.getter(name="headerMatches")
    def header_matches(self) -> Optional[List['outputs.URLMapPathMatcherRouteRuleMatchRuleHeaderMatch']]:
        """
        Specifies a list of header match criteria, all of which must match corresponding
        headers in the request.
        Structure is documented below.
        """
        return pulumi.get(self, "header_matches")
    @property
    @pulumi.getter(name="ignoreCase")
    def ignore_case(self) -> Optional[bool]:
        """
        Specifies that prefixMatch and fullPathMatch matches are case sensitive.
        Defaults to false.
        """
        return pulumi.get(self, "ignore_case")
    @property
    @pulumi.getter(name="metadataFilters")
    def metadata_filters(self) -> Optional[List['outputs.URLMapPathMatcherRouteRuleMatchRuleMetadataFilter']]:
        """
        Opaque filter criteria used by Loadbalancer to restrict routing configuration to
        a limited set xDS compliant clients. In their xDS requests to Loadbalancer, xDS
        clients present node metadata. If a match takes place, the relevant routing
        configuration is made available to those proxies. For each metadataFilter in
        this list, if its filterMatchCriteria is set to MATCH_ANY, at least one of the
        filterLabels must match the corresponding label provided in the metadata. If its
        filterMatchCriteria is set to MATCH_ALL, then all of its filterLabels must match
        with corresponding labels in the provided metadata. metadataFilters specified
        here can override those specified in ForwardingRule that refers to this
        UrlMap. metadataFilters only applies to Loadbalancers that have their
        loadBalancingScheme set to INTERNAL_SELF_MANAGED.
        Structure is documented below.
        """
        return pulumi.get(self, "metadata_filters")
    @property
    @pulumi.getter(name="prefixMatch")
    def prefix_match(self) -> Optional[str]:
        """
        For satisfying the matchRule condition, the request's path must begin with the
        specified prefixMatch. prefixMatch must begin with a /. The value must be
        between 1 and 1024 characters. Only one of prefixMatch, fullPathMatch or
        regexMatch must be specified.
        """
        return pulumi.get(self, "prefix_match")
    @property
    @pulumi.getter(name="queryParameterMatches")
    def query_parameter_matches(self) -> Optional[List['outputs.URLMapPathMatcherRouteRuleMatchRuleQueryParameterMatch']]:
        """
        Specifies a list of query parameter match criteria, all of which must match
        corresponding query parameters in the request.
        Structure is documented below.
        """
        return pulumi.get(self, "query_parameter_matches")
    @property
    @pulumi.getter(name="regexMatch")
    def regex_match(self) -> Optional[str]:
        """
        For satisfying the matchRule condition, the path of the request must satisfy the
        regular expression specified in regexMatch after removing any query parameters
        and anchor supplied with the original URL. For the regular expression grammar,
        please see en.cppreference.com/w/cpp/regex/ecmascript Only one of prefixMatch,
        fullPathMatch or regexMatch must be specified.
        """
        return pulumi.get(self, "regex_match")
    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class URLMapPathMatcherRouteRuleMatchRuleHeaderMatch(dict):
    """
    Match criteria applied to a single HTTP header of the request. Exactly
    one of exactMatch, prefixMatch, suffixMatch, regexMatch, presentMatch or
    rangeMatch must be set.
    """
    def __init__(__self__, *,
                 header_name: str,
                 exact_match: Optional[str] = None,
                 invert_match: Optional[bool] = None,
                 prefix_match: Optional[str] = None,
                 present_match: Optional[bool] = None,
                 range_match: Optional['outputs.URLMapPathMatcherRouteRuleMatchRuleHeaderMatchRangeMatch'] = None,
                 regex_match: Optional[str] = None,
                 suffix_match: Optional[str] = None):
        """
        :param str header_name: The name of the HTTP header to match against.
        :param str exact_match: The value of the header must exactly match the contents of exactMatch.
               Only one of exactMatch, prefixMatch, suffixMatch, regexMatch, presentMatch or
               rangeMatch must be set.
        :param bool invert_match: If set to false, the headerMatch is considered a match if the match criteria
               above are met. If set to true, the headerMatch is considered a match if the
               match criteria above are NOT met. Defaults to false.
        :param str prefix_match: The value of the header must start with the contents of prefixMatch. Only one of
               exactMatch, prefixMatch, suffixMatch, regexMatch, presentMatch or rangeMatch
               must be set.
        :param bool present_match: A header with the name specified in headerName must exist, whether or not the
               header has a value. Only one of exactMatch, prefixMatch, suffixMatch,
               regexMatch, presentMatch or rangeMatch must be set.
        :param 'URLMapPathMatcherRouteRuleMatchRuleHeaderMatchRangeMatchArgs' range_match: The header value must be an integer and its value must be in the range specified
               in rangeMatch. If the header does not contain an integer, number or is empty,
               the match fails. For example for a range [-5, 0] - -3 will match. - 0 will
               not match. - 0.25 will not match. - -3someString will not match. Only one of
               exactMatch, prefixMatch, suffixMatch, regexMatch, presentMatch or rangeMatch
               must be set.
               Structure is documented below.
        :param str regex_match: The value of the header must match the regular expression specified in
               regexMatch. For the regular expression grammar, please see
               en.cppreference.com/w/cpp/regex/ecmascript Only one of exactMatch,
               prefixMatch, suffixMatch, regexMatch, presentMatch or rangeMatch must be set.
        :param str suffix_match: The value of the header must end with the contents of suffixMatch. Only one of
               exactMatch, prefixMatch, suffixMatch, regexMatch, presentMatch or rangeMatch
               must be set.
        """
        pulumi.set(__self__, "header_name", header_name)
        if exact_match is not None:
            pulumi.set(__self__, "exact_match", exact_match)
        if invert_match is not None:
            pulumi.set(__self__, "invert_match", invert_match)
        if prefix_match is not None:
            pulumi.set(__self__, "prefix_match", prefix_match)
        if present_match is not None:
            pulumi.set(__self__, "present_match", present_match)
        if range_match is not None:
            pulumi.set(__self__, "range_match", range_match)
        if regex_match is not None:
            pulumi.set(__self__, "regex_match", regex_match)
        if suffix_match is not None:
            pulumi.set(__self__, "suffix_match", suffix_match)
    @property
    @pulumi.getter(name="headerName")
    def header_name(self) -> str:
        """
        The name of the HTTP header to match against.
        """
        return pulumi.get(self, "header_name")
    @property
    @pulumi.getter(name="exactMatch")
    def exact_match(self) -> Optional[str]:
        """
        The value of the header must exactly match the contents of exactMatch.
        Only one of exactMatch, prefixMatch, suffixMatch, regexMatch, presentMatch or
        rangeMatch must be set.
        """
        return pulumi.get(self, "exact_match")
    @property
    @pulumi.getter(name="invertMatch")
    def invert_match(self) -> Optional[bool]:
        """
        If set to false, the headerMatch is considered a match if the match criteria
        above are met. If set to true, the headerMatch is considered a match if the
        match criteria above are NOT met. Defaults to false.
        """
        return pulumi.get(self, "invert_match")
    @property
    @pulumi.getter(name="prefixMatch")
    def prefix_match(self) -> Optional[str]:
        """
        The value of the header must start with the contents of prefixMatch. Only one of
        exactMatch, prefixMatch, suffixMatch, regexMatch, presentMatch or rangeMatch
        must be set.
        """
        return pulumi.get(self, "prefix_match")
    @property
    @pulumi.getter(name="presentMatch")
    def present_match(self) -> Optional[bool]:
        """
        A header with the name specified in headerName must exist, whether or not the
        header has a value. Only one of exactMatch, prefixMatch, suffixMatch,
        regexMatch, presentMatch or rangeMatch must be set.
        """
        return pulumi.get(self, "present_match")
    @property
    @pulumi.getter(name="rangeMatch")
    def range_match(self) -> Optional['outputs.URLMapPathMatcherRouteRuleMatchRuleHeaderMatchRangeMatch']:
        """
        The header value must be an integer and its value must be in the range specified
        in rangeMatch. If the header does not contain an integer, number or is empty,
        the match fails. For example for a range [-5, 0] - -3 will match. - 0 will
        not match. - 0.25 will not match. - -3someString will not match. Only one of
        exactMatch, prefixMatch, suffixMatch, regexMatch, presentMatch or rangeMatch
        must be set.
        Structure is documented below.
        """
        return pulumi.get(self, "range_match")
    @property
    @pulumi.getter(name="regexMatch")
    def regex_match(self) -> Optional[str]:
        """
        The value of the header must match the regular expression specified in
        regexMatch. For the regular expression grammar, please see
        en.cppreference.com/w/cpp/regex/ecmascript Only one of exactMatch,
        prefixMatch, suffixMatch, regexMatch, presentMatch or rangeMatch must be set.
        """
        return pulumi.get(self, "regex_match")
    @property
    @pulumi.getter(name="suffixMatch")
    def suffix_match(self) -> Optional[str]:
        """
        The value of the header must end with the contents of suffixMatch. Only one of
        exactMatch, prefixMatch, suffixMatch, regexMatch, presentMatch or rangeMatch
        must be set.
        """
        return pulumi.get(self, "suffix_match")
    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class URLMapPathMatcherRouteRuleMatchRuleHeaderMatchRangeMatch(dict):
    def __init__(__self__, *,
                 range_end: float,
                 range_start: float):
        """
        An integer interval [range_start, range_end) that a header value must
        fall into for the rangeMatch to succeed.

        :param float range_end: End of the range, exclusive.
        :param float range_start: Start of the range, inclusive.
        """
        for key, val in (("range_end", range_end),
                         ("range_start", range_start)):
            pulumi.set(__self__, key, val)
    @property
    @pulumi.getter(name="rangeEnd")
    def range_end(self) -> float:
        """
        End of the range, exclusive.
        """
        return pulumi.get(self, "range_end")
    @property
    @pulumi.getter(name="rangeStart")
    def range_start(self) -> float:
        """
        Start of the range, inclusive.
        """
        return pulumi.get(self, "range_start")
    def _translate_property(self, prop):
        # camelCase -> snake_case translation used by the Pulumi SDK.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class URLMapPathMatcherRouteRuleMatchRuleMetadataFilter(dict):
    def __init__(__self__, *,
                 filter_labels: List['outputs.URLMapPathMatcherRouteRuleMatchRuleMetadataFilterFilterLabel'],
                 filter_match_criteria: str):
        """
        Opaque filter criteria compared against the node metadata that xDS
        clients present to the load balancer.

        :param List['URLMapPathMatcherRouteRuleMatchRuleMetadataFilterFilterLabelArgs'] filter_labels: Label name/value pairs that are compared, according to
               filterMatchCriteria, against the labels in the provided metadata. This list
               must not be empty and may contain at most 64 entries.
               Structure is documented below.
        :param str filter_match_criteria: How individual filterLabel matches combine into the overall
               metadataFilter match. Supported values are:
               - MATCH_ANY: at least one of the filterLabels must have a matching label in
               the provided metadata.
               - MATCH_ALL: all filterLabels must have matching labels in the provided
               metadata.
               Possible values are `MATCH_ALL` and `MATCH_ANY`.
        """
        for key, val in (("filter_labels", filter_labels),
                         ("filter_match_criteria", filter_match_criteria)):
            pulumi.set(__self__, key, val)
    @property
    @pulumi.getter(name="filterLabels")
    def filter_labels(self) -> List['outputs.URLMapPathMatcherRouteRuleMatchRuleMetadataFilterFilterLabel']:
        """
        Label name/value pairs compared, per filterMatchCriteria, against the
        labels in the provided metadata. The list must not be empty and may
        contain at most 64 entries.
        Structure is documented below.
        """
        return pulumi.get(self, "filter_labels")
    @property
    @pulumi.getter(name="filterMatchCriteria")
    def filter_match_criteria(self) -> str:
        """
        How individual filterLabel matches combine into the overall match:
        MATCH_ANY requires at least one filterLabel to have a matching label
        in the provided metadata, MATCH_ALL requires all of them to match.
        Possible values are `MATCH_ALL` and `MATCH_ANY`.
        """
        return pulumi.get(self, "filter_match_criteria")
    def _translate_property(self, prop):
        # camelCase -> snake_case translation used by the Pulumi SDK.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class URLMapPathMatcherRouteRuleMatchRuleMetadataFilterFilterLabel(dict):
    """
    A single name/value label that a metadataFilter compares against the node
    metadata presented by xDS clients.
    """
    def __init__(__self__, *,
                 name: str,
                 value: str):
        """
        :param str name: Name of the metadata label. The name can have a maximum length of 1024
               characters and must be at least 1 character long.
        :param str value: The value of the label must match the specified value. value can have a maximum
               length of 1024 characters.
        """
        pulumi.set(__self__, "name", name)
        pulumi.set(__self__, "value", value)
    @property
    @pulumi.getter
    def name(self) -> str:
        """
        Name of the metadata label. The name can have a maximum length of 1024
        characters and must be at least 1 character long.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def value(self) -> str:
        """
        The value of the label must match the specified value. value can have a maximum
        length of 1024 characters.
        """
        return pulumi.get(self, "value")
    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class URLMapPathMatcherRouteRuleMatchRuleQueryParameterMatch(dict):
    def __init__(__self__, *,
                 name: str,
                 exact_match: Optional[str] = None,
                 present_match: Optional[bool] = None,
                 regex_match: Optional[str] = None):
        """
        Match criteria applied to a single query parameter of the request.

        :param str name: Name of the query parameter that must be present in the request; the
               match fails when the parameter is absent.
        :param str exact_match: Matches when the parameter value equals the contents of exactMatch
               exactly. Only one of presentMatch, exactMatch and regexMatch must be set.
        :param bool present_match: Matches when the query parameter exists in the request, whether or
               not it carries a value. Only one of presentMatch, exactMatch and regexMatch
               must be set.
        :param str regex_match: Matches when the parameter value satisfies the regular expression in
               regexMatch (ECMAScript grammar, see en.cppreference.com/w/cpp/regex/ecmascript).
               Only one of presentMatch, exactMatch and regexMatch must be set.
        """
        pulumi.set(__self__, "name", name)
        optional_fields = (("exact_match", exact_match),
                           ("present_match", present_match),
                           ("regex_match", regex_match))
        for key, val in optional_fields:
            if val is not None:
                pulumi.set(__self__, key, val)
    @property
    @pulumi.getter
    def name(self) -> str:
        """
        Name of the query parameter that must be present in the request; the
        match fails when the parameter is absent.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="exactMatch")
    def exact_match(self) -> Optional[str]:
        """
        Matches when the parameter value equals the contents of exactMatch
        exactly. Only one of presentMatch, exactMatch and regexMatch must be
        set.
        """
        return pulumi.get(self, "exact_match")
    @property
    @pulumi.getter(name="presentMatch")
    def present_match(self) -> Optional[bool]:
        """
        Matches when the query parameter exists in the request, whether or
        not it carries a value. Only one of presentMatch, exactMatch and
        regexMatch must be set.
        """
        return pulumi.get(self, "present_match")
    @property
    @pulumi.getter(name="regexMatch")
    def regex_match(self) -> Optional[str]:
        """
        Matches when the parameter value satisfies the regular expression in
        regexMatch (ECMAScript grammar, see
        en.cppreference.com/w/cpp/regex/ecmascript). Only one of presentMatch,
        exactMatch and regexMatch must be set.
        """
        return pulumi.get(self, "regex_match")
    def _translate_property(self, prop):
        # camelCase -> snake_case translation used by the Pulumi SDK.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class URLMapPathMatcherRouteRuleRouteAction(dict):
    """Advanced routing actions (CORS, fault injection, request mirroring,
    retries, timeout, URL rewrite, weighted backends) applied when a route
    rule matches."""

    def __init__(__self__, *,
                 cors_policy: Optional['outputs.URLMapPathMatcherRouteRuleRouteActionCorsPolicy'] = None,
                 fault_injection_policy: Optional['outputs.URLMapPathMatcherRouteRuleRouteActionFaultInjectionPolicy'] = None,
                 request_mirror_policy: Optional['outputs.URLMapPathMatcherRouteRuleRouteActionRequestMirrorPolicy'] = None,
                 retry_policy: Optional['outputs.URLMapPathMatcherRouteRuleRouteActionRetryPolicy'] = None,
                 timeout: Optional['outputs.URLMapPathMatcherRouteRuleRouteActionTimeout'] = None,
                 url_rewrite: Optional['outputs.URLMapPathMatcherRouteRuleRouteActionUrlRewrite'] = None,
                 weighted_backend_services: Optional[List['outputs.URLMapPathMatcherRouteRuleRouteActionWeightedBackendService']] = None):
        """
        :param 'URLMapPathMatcherRouteRuleRouteActionCorsPolicyArgs' cors_policy: Policy for allowing
               client-side cross-origin requests; see the W3C CORS Recommendation
               (https://www.w3.org/TR/cors/). Structure is documented below.
        :param 'URLMapPathMatcherRouteRuleRouteActionFaultInjectionPolicyArgs' fault_injection_policy: Fault
               injection introduced into traffic to test client resiliency to backend
               failure: the Loadbalancer can delay or abort a percentage of requests
               before they reach the backend service. timeout and retryPolicy are
               ignored by clients configured with a faultInjectionPolicy.
               Structure is documented below.
        :param 'URLMapPathMatcherRouteRuleRouteActionRequestMirrorPolicyArgs' request_mirror_policy: Policy
               for shadowing requests intended for the route's backends to a separate
               mirrored backend service. The Loadbalancer does not wait for shadow
               responses; before traffic is sent to the shadow service the
               host / authority header is suffixed with -shadow.
               Structure is documented below.
        :param 'URLMapPathMatcherRouteRuleRouteActionRetryPolicyArgs' retry_policy: Retry policy associated
               with this route. Structure is documented below.
        :param 'URLMapPathMatcherRouteRuleRouteActionTimeoutArgs' timeout: Timeout for the selected route,
               measured from full request processing (end-of-stream) until the
               response is completely processed, including all retries. Defaults to
               the largest timeout among the route's backend services.
               Structure is documented below.
        :param 'URLMapPathMatcherRouteRuleRouteActionUrlRewriteArgs' url_rewrite: Spec to modify the request
               URL before forwarding the request to the matched service.
               Structure is documented below.
        :param List['URLMapPathMatcherRouteRuleRouteActionWeightedBackendServiceArgs'] weighted_backend_services: Weighted
               backend services receiving traffic on a route match; weights determine
               each service's traffic fraction. For a single backend service, supply
               one weightedBackendService with a non-zero weight. After a
               backendService is chosen and before forwarding, advanced routing
               actions (URL rewrites, header transformations) from this
               HttpRouteAction are applied. Structure is documented below.
        """
        optional_fields = (
            ("cors_policy", cors_policy),
            ("fault_injection_policy", fault_injection_policy),
            ("request_mirror_policy", request_mirror_policy),
            ("retry_policy", retry_policy),
            ("timeout", timeout),
            ("url_rewrite", url_rewrite),
            ("weighted_backend_services", weighted_backend_services),
        )
        for key, val in optional_fields:
            if val is not None:
                pulumi.set(__self__, key, val)

    @property
    @pulumi.getter(name="corsPolicy")
    def cors_policy(self) -> Optional['outputs.URLMapPathMatcherRouteRuleRouteActionCorsPolicy']:
        """
        Policy for allowing client-side cross-origin requests; see the W3C CORS
        Recommendation (https://www.w3.org/TR/cors/).
        Structure is documented below.
        """
        return pulumi.get(self, "cors_policy")

    @property
    @pulumi.getter(name="faultInjectionPolicy")
    def fault_injection_policy(self) -> Optional['outputs.URLMapPathMatcherRouteRuleRouteActionFaultInjectionPolicy']:
        """
        Fault injection introduced into traffic to test client resiliency to
        backend failure: the Loadbalancer can delay or abort a percentage of
        requests before they reach the backend service. timeout and retryPolicy
        are ignored by clients configured with a faultInjectionPolicy.
        Structure is documented below.
        """
        return pulumi.get(self, "fault_injection_policy")

    @property
    @pulumi.getter(name="requestMirrorPolicy")
    def request_mirror_policy(self) -> Optional['outputs.URLMapPathMatcherRouteRuleRouteActionRequestMirrorPolicy']:
        """
        Policy for shadowing requests intended for the route's backends to a
        separate mirrored backend service. The Loadbalancer does not wait for
        shadow responses; before traffic is sent to the shadow service the
        host / authority header is suffixed with -shadow.
        Structure is documented below.
        """
        return pulumi.get(self, "request_mirror_policy")

    @property
    @pulumi.getter(name="retryPolicy")
    def retry_policy(self) -> Optional['outputs.URLMapPathMatcherRouteRuleRouteActionRetryPolicy']:
        """
        Retry policy associated with this route.
        Structure is documented below.
        """
        return pulumi.get(self, "retry_policy")

    @property
    @pulumi.getter
    def timeout(self) -> Optional['outputs.URLMapPathMatcherRouteRuleRouteActionTimeout']:
        """
        Timeout for the selected route, measured from full request processing
        (end-of-stream) until the response is completely processed, including
        all retries. Defaults to the largest timeout among the route's backend
        services. Structure is documented below.
        """
        return pulumi.get(self, "timeout")

    @property
    @pulumi.getter(name="urlRewrite")
    def url_rewrite(self) -> Optional['outputs.URLMapPathMatcherRouteRuleRouteActionUrlRewrite']:
        """
        Spec to modify the request URL before forwarding the request to the
        matched service. Structure is documented below.
        """
        return pulumi.get(self, "url_rewrite")

    @property
    @pulumi.getter(name="weightedBackendServices")
    def weighted_backend_services(self) -> Optional[List['outputs.URLMapPathMatcherRouteRuleRouteActionWeightedBackendService']]:
        """
        Weighted backend services receiving traffic on a route match; weights
        determine each service's traffic fraction. For a single backend
        service, supply one weightedBackendService with a non-zero weight.
        After a backendService is chosen and before forwarding, advanced
        routing actions (URL rewrites, header transformations) from this
        HttpRouteAction are applied. Structure is documented below.
        """
        return pulumi.get(self, "weighted_backend_services")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python equivalent.
        mapped = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return mapped or prop
@pulumi.output_type
class URLMapPathMatcherRouteRuleRouteActionCorsPolicy(dict):
    """CORS policy: controls which cross-origin requests are allowed and which
    Access-Control-* headers are emitted for this route action."""

    def __init__(__self__, *,
                 allow_credentials: Optional[bool] = None,
                 allow_headers: Optional[List[str]] = None,
                 allow_methods: Optional[List[str]] = None,
                 allow_origin_regexes: Optional[List[str]] = None,
                 allow_origins: Optional[List[str]] = None,
                 disabled: Optional[bool] = None,
                 expose_headers: Optional[List[str]] = None,
                 max_age: Optional[float] = None):
        """
        :param bool allow_credentials: When true, the actual request following a
               preflight may include user credentials; maps to the
               Access-Control-Allow-Credentials header.
        :param List[str] allow_headers: Content of the Access-Control-Allow-Headers header.
        :param List[str] allow_methods: Content of the Access-Control-Allow-Methods header.
        :param List[str] allow_origin_regexes: Regular expression patterns
               (ECMAScript grammar, see en.cppreference.com/w/cpp/regex/ecmascript)
               matching allowed origins. An origin is allowed if it matches an item
               in either allowOrigins or allowOriginRegexes.
        :param List[str] allow_origins: Origins allowed to make CORS requests.
               An origin is allowed if it matches an item in either allowOrigins or
               allowOriginRegexes.
        :param bool disabled: When true the CORS policy is disabled; the default,
               false, leaves the policy in effect.
        :param List[str] expose_headers: Content of the Access-Control-Expose-Headers header.
        :param float max_age: Seconds for which a preflight result may be cached;
               maps to the Access-Control-Max-Age header.
        """
        optional_fields = (
            ("allow_credentials", allow_credentials),
            ("allow_headers", allow_headers),
            ("allow_methods", allow_methods),
            ("allow_origin_regexes", allow_origin_regexes),
            ("allow_origins", allow_origins),
            ("disabled", disabled),
            ("expose_headers", expose_headers),
            ("max_age", max_age),
        )
        for key, val in optional_fields:
            if val is not None:
                pulumi.set(__self__, key, val)

    @property
    @pulumi.getter(name="allowCredentials")
    def allow_credentials(self) -> Optional[bool]:
        """
        When true, the actual request following a preflight may include user
        credentials; maps to the Access-Control-Allow-Credentials header.
        """
        return pulumi.get(self, "allow_credentials")

    @property
    @pulumi.getter(name="allowHeaders")
    def allow_headers(self) -> Optional[List[str]]:
        """
        Content of the Access-Control-Allow-Headers header.
        """
        return pulumi.get(self, "allow_headers")

    @property
    @pulumi.getter(name="allowMethods")
    def allow_methods(self) -> Optional[List[str]]:
        """
        Content of the Access-Control-Allow-Methods header.
        """
        return pulumi.get(self, "allow_methods")

    @property
    @pulumi.getter(name="allowOriginRegexes")
    def allow_origin_regexes(self) -> Optional[List[str]]:
        """
        Regular expression patterns (ECMAScript grammar) matching allowed
        origins. An origin is allowed if it matches an item in either
        allowOrigins or allowOriginRegexes.
        """
        return pulumi.get(self, "allow_origin_regexes")

    @property
    @pulumi.getter(name="allowOrigins")
    def allow_origins(self) -> Optional[List[str]]:
        """
        Origins allowed to make CORS requests. An origin is allowed if it
        matches an item in either allowOrigins or allowOriginRegexes.
        """
        return pulumi.get(self, "allow_origins")

    @property
    @pulumi.getter
    def disabled(self) -> Optional[bool]:
        """
        When true the CORS policy is disabled; the default, false, leaves the
        policy in effect.
        """
        return pulumi.get(self, "disabled")

    @property
    @pulumi.getter(name="exposeHeaders")
    def expose_headers(self) -> Optional[List[str]]:
        """
        Content of the Access-Control-Expose-Headers header.
        """
        return pulumi.get(self, "expose_headers")

    @property
    @pulumi.getter(name="maxAge")
    def max_age(self) -> Optional[float]:
        """
        Seconds for which a preflight result may be cached; maps to the
        Access-Control-Max-Age header.
        """
        return pulumi.get(self, "max_age")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python equivalent.
        mapped = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return mapped or prop
@pulumi.output_type
class URLMapPathMatcherRouteRuleRouteActionFaultInjectionPolicy(dict):
    """Fault injection policy: optional abort and delay specifications applied
    to a fraction of client requests."""

    def __init__(__self__, *,
                 abort: Optional['outputs.URLMapPathMatcherRouteRuleRouteActionFaultInjectionPolicyAbort'] = None,
                 delay: Optional['outputs.URLMapPathMatcherRouteRuleRouteActionFaultInjectionPolicyDelay'] = None):
        """
        :param 'URLMapPathMatcherRouteRuleRouteActionFaultInjectionPolicyAbortArgs' abort: How client
               requests are aborted as part of fault injection.
               Structure is documented below.
        :param 'URLMapPathMatcherRouteRuleRouteActionFaultInjectionPolicyDelayArgs' delay: How client
               requests are delayed as part of fault injection before being sent to
               a backend service. Structure is documented below.
        """
        for key, val in (("abort", abort), ("delay", delay)):
            if val is not None:
                pulumi.set(__self__, key, val)

    @property
    @pulumi.getter
    def abort(self) -> Optional['outputs.URLMapPathMatcherRouteRuleRouteActionFaultInjectionPolicyAbort']:
        """
        How client requests are aborted as part of fault injection.
        Structure is documented below.
        """
        return pulumi.get(self, "abort")

    @property
    @pulumi.getter
    def delay(self) -> Optional['outputs.URLMapPathMatcherRouteRuleRouteActionFaultInjectionPolicyDelay']:
        """
        How client requests are delayed as part of fault injection before being
        sent to a backend service. Structure is documented below.
        """
        return pulumi.get(self, "delay")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python equivalent.
        mapped = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return mapped or prop
@pulumi.output_type
class URLMapPathMatcherRouteRuleRouteActionFaultInjectionPolicyAbort(dict):
    """Abort fault: return a fixed HTTP status for a percentage of requests."""

    def __init__(__self__, *,
                 http_status: Optional[float] = None,
                 percentage: Optional[float] = None):
        """
        :param float http_status: HTTP status code used to abort the request;
               must be between 200 and 599 inclusive.
        :param float percentage: Percentage of traffic
               (connections/operations/requests) aborted as part of fault
               injection; must be between 0.0 and 100.0 inclusive.
        """
        for key, val in (("http_status", http_status), ("percentage", percentage)):
            if val is not None:
                pulumi.set(__self__, key, val)

    @property
    @pulumi.getter(name="httpStatus")
    def http_status(self) -> Optional[float]:
        """
        HTTP status code used to abort the request; must be between 200 and
        599 inclusive.
        """
        return pulumi.get(self, "http_status")

    @property
    @pulumi.getter
    def percentage(self) -> Optional[float]:
        """
        Percentage of traffic (connections/operations/requests) aborted as
        part of fault injection; must be between 0.0 and 100.0 inclusive.
        """
        return pulumi.get(self, "percentage")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python equivalent.
        mapped = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return mapped or prop
@pulumi.output_type
class URLMapPathMatcherRouteRuleRouteActionFaultInjectionPolicyDelay(dict):
    """Delay fault: inject a fixed delay into a percentage of requests."""

    def __init__(__self__, *,
                 fixed_delay: Optional['outputs.URLMapPathMatcherRouteRuleRouteActionFaultInjectionPolicyDelayFixedDelay'] = None,
                 percentage: Optional[float] = None):
        """
        :param 'URLMapPathMatcherRouteRuleRouteActionFaultInjectionPolicyDelayFixedDelayArgs' fixed_delay: Value
               of the fixed delay interval. Structure is documented below.
        :param float percentage: Percentage of traffic
               (connections/operations/requests) aborted as part of fault
               injection; must be between 0.0 and 100.0 inclusive.
        """
        for key, val in (("fixed_delay", fixed_delay), ("percentage", percentage)):
            if val is not None:
                pulumi.set(__self__, key, val)

    @property
    @pulumi.getter(name="fixedDelay")
    def fixed_delay(self) -> Optional['outputs.URLMapPathMatcherRouteRuleRouteActionFaultInjectionPolicyDelayFixedDelay']:
        """
        Value of the fixed delay interval. Structure is documented below.
        """
        return pulumi.get(self, "fixed_delay")

    @property
    @pulumi.getter
    def percentage(self) -> Optional[float]:
        """
        Percentage of traffic (connections/operations/requests) aborted as
        part of fault injection; must be between 0.0 and 100.0 inclusive.
        """
        return pulumi.get(self, "percentage")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python equivalent.
        mapped = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return mapped or prop
@pulumi.output_type
class URLMapPathMatcherRouteRuleRouteActionFaultInjectionPolicyDelayFixedDelay(dict):
    """A duration expressed as whole seconds plus an optional nanosecond part."""

    def __init__(__self__, *,
                 seconds: str,
                 nanos: Optional[float] = None):
        """
        :param str seconds: Whole-second span of time, 0 to 315,576,000,000
               inclusive (60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year
               * 10000 years).
        :param float nanos: Sub-second span at nanosecond resolution, 0 to
               999,999,999 inclusive; durations under one second use a 0
               seconds field and a positive nanos field.
        """
        fields = {"seconds": seconds}
        if nanos is not None:
            fields["nanos"] = nanos
        for key, val in fields.items():
            pulumi.set(__self__, key, val)

    @property
    @pulumi.getter
    def seconds(self) -> str:
        """
        Whole-second span of time, 0 to 315,576,000,000 inclusive (10,000
        years at 365.25 days/year).
        """
        return pulumi.get(self, "seconds")

    @property
    @pulumi.getter
    def nanos(self) -> Optional[float]:
        """
        Sub-second span at nanosecond resolution, 0 to 999,999,999 inclusive;
        durations under one second use a 0 seconds field and a positive nanos
        field.
        """
        return pulumi.get(self, "nanos")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python equivalent.
        mapped = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return mapped or prop
@pulumi.output_type
class URLMapPathMatcherRouteRuleRouteActionRequestMirrorPolicy(dict):
    """Request mirroring policy: names the backend service that receives
    shadowed copies of matched requests."""

    def __init__(__self__, *,
                 backend_service: str):
        """
        :param str backend_service: Full or partial URL of the BackendService
               resource being mirrored to.
        """
        pulumi.set(__self__, "backend_service", backend_service)

    @property
    @pulumi.getter(name="backendService")
    def backend_service(self) -> str:
        """
        Full or partial URL of the BackendService resource being mirrored to.
        """
        mirrored_to = pulumi.get(self, "backend_service")
        return mirrored_to

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python equivalent.
        mapped = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return mapped or prop
@pulumi.output_type
class URLMapPathMatcherRouteRuleRouteActionRetryPolicy(dict):
    """Retry policy for a route: retry count, per-try timeout and the
    conditions under which a retry is attempted."""

    def __init__(__self__, *,
                 num_retries: float,
                 per_try_timeout: Optional['outputs.URLMapPathMatcherRouteRuleRouteActionRetryPolicyPerTryTimeout'] = None,
                 retry_conditions: Optional[List[str]] = None):
        """
        :param float num_retries: Allowed number of retries; must be > 0 and
               defaults to 1 when unspecified.
        :param 'URLMapPathMatcherRouteRuleRouteActionRetryPolicyPerTryTimeoutArgs' per_try_timeout: Non-zero
               timeout per retry attempt. Falls back to the HttpRouteAction timeout,
               and failing that, to the largest timeout among the route's backend
               services. Structure is documented below.
        :param List[str] retry_conditions: One or more conditions under which this
               retry rule applies. Valid values:
               5xx: retry on any 5xx response code or when the backend does not
               respond at all (disconnects, reset, read timeout, connection
               failure, refused streams).
               gateway-error: like 5xx but limited to response codes 502, 503, 504.
               connect-failure: retry on failures connecting to the backend, e.g.
               connection timeouts.
               retriable-4xx: retry on retriable 4xx codes (currently only 409).
               refused-stream: retry when the backend resets the stream with a
               REFUSED_STREAM error code, which indicates a retry is safe.
               cancelled: retry when the gRPC status code in the response header
               is cancelled.
               deadline-exceeded: retry when the gRPC status code is
               deadline-exceeded.
               resource-exhausted: retry when the gRPC status code is
               resource-exhausted.
               unavailable: retry when the gRPC status code is unavailable.
        """
        pulumi.set(__self__, "num_retries", num_retries)
        optional_fields = (
            ("per_try_timeout", per_try_timeout),
            ("retry_conditions", retry_conditions),
        )
        for key, val in optional_fields:
            if val is not None:
                pulumi.set(__self__, key, val)

    @property
    @pulumi.getter(name="numRetries")
    def num_retries(self) -> float:
        """
        Allowed number of retries; must be > 0 and defaults to 1 when
        unspecified.
        """
        return pulumi.get(self, "num_retries")

    @property
    @pulumi.getter(name="perTryTimeout")
    def per_try_timeout(self) -> Optional['outputs.URLMapPathMatcherRouteRuleRouteActionRetryPolicyPerTryTimeout']:
        """
        Non-zero timeout per retry attempt. Falls back to the HttpRouteAction
        timeout, and failing that, to the largest timeout among the route's
        backend services. Structure is documented below.
        """
        return pulumi.get(self, "per_try_timeout")

    @property
    @pulumi.getter(name="retryConditions")
    def retry_conditions(self) -> Optional[List[str]]:
        """
        One or more conditions under which this retry rule applies. Valid
        values: 5xx (any 5xx response or no response at all), gateway-error
        (502/503/504 only), connect-failure (connection errors such as
        timeouts), retriable-4xx (currently only 409), refused-stream
        (backend reset the stream with REFUSED_STREAM, safe to retry), and
        the gRPC statuses cancelled, deadline-exceeded, resource-exhausted
        and unavailable reported in the response header.
        """
        return pulumi.get(self, "retry_conditions")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python equivalent.
        mapped = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return mapped or prop
@pulumi.output_type
class URLMapPathMatcherRouteRuleRouteActionRetryPolicyPerTryTimeout(dict):
    """A duration expressed as whole seconds plus an optional nanosecond part."""

    def __init__(__self__, *,
                 seconds: str,
                 nanos: Optional[float] = None):
        """
        :param str seconds: Whole-second span of time, 0 to 315,576,000,000
               inclusive (60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year
               * 10000 years).
        :param float nanos: Sub-second span at nanosecond resolution, 0 to
               999,999,999 inclusive; durations under one second use a 0
               seconds field and a positive nanos field.
        """
        fields = {"seconds": seconds}
        if nanos is not None:
            fields["nanos"] = nanos
        for key, val in fields.items():
            pulumi.set(__self__, key, val)

    @property
    @pulumi.getter
    def seconds(self) -> str:
        """
        Whole-second span of time, 0 to 315,576,000,000 inclusive (10,000
        years at 365.25 days/year).
        """
        return pulumi.get(self, "seconds")

    @property
    @pulumi.getter
    def nanos(self) -> Optional[float]:
        """
        Sub-second span at nanosecond resolution, 0 to 999,999,999 inclusive;
        durations under one second use a 0 seconds field and a positive nanos
        field.
        """
        return pulumi.get(self, "nanos")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python equivalent.
        mapped = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return mapped or prop
@pulumi.output_type
class URLMapPathMatcherRouteRuleRouteActionTimeout(dict):
    """A duration expressed as whole seconds plus an optional nanosecond part."""

    def __init__(__self__, *,
                 seconds: str,
                 nanos: Optional[float] = None):
        """
        :param str seconds: Whole-second span of time, 0 to 315,576,000,000
               inclusive (60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year
               * 10000 years).
        :param float nanos: Sub-second span at nanosecond resolution, 0 to
               999,999,999 inclusive; durations under one second use a 0
               seconds field and a positive nanos field.
        """
        fields = {"seconds": seconds}
        if nanos is not None:
            fields["nanos"] = nanos
        for key, val in fields.items():
            pulumi.set(__self__, key, val)

    @property
    @pulumi.getter
    def seconds(self) -> str:
        """
        Whole-second span of time, 0 to 315,576,000,000 inclusive (10,000
        years at 365.25 days/year).
        """
        return pulumi.get(self, "seconds")

    @property
    @pulumi.getter
    def nanos(self) -> Optional[float]:
        """
        Sub-second span at nanosecond resolution, 0 to 999,999,999 inclusive;
        durations under one second use a 0 seconds field and a positive nanos
        field.
        """
        return pulumi.get(self, "nanos")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python equivalent.
        mapped = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return mapped or prop
@pulumi.output_type
class URLMapPathMatcherRouteRuleRouteActionUrlRewrite(dict):
    """URL rewrite spec applied before the request is forwarded to the
    selected backend service."""

    def __init__(__self__, *,
                 host_rewrite: Optional[str] = None,
                 path_prefix_rewrite: Optional[str] = None):
        """
        :param str host_rewrite: Replacement for the request's host header
               before forwarding to the selected service; must be between 1 and
               255 characters.
        :param str path_prefix_rewrite: Replacement for the matching portion of
               the request's path before forwarding to the selected backend
               service; must be between 1 and 1024 characters.
        """
        optional_fields = (
            ("host_rewrite", host_rewrite),
            ("path_prefix_rewrite", path_prefix_rewrite),
        )
        for key, val in optional_fields:
            if val is not None:
                pulumi.set(__self__, key, val)

    @property
    @pulumi.getter(name="hostRewrite")
    def host_rewrite(self) -> Optional[str]:
        """
        Replacement for the request's host header before forwarding to the
        selected service; must be between 1 and 255 characters.
        """
        return pulumi.get(self, "host_rewrite")

    @property
    @pulumi.getter(name="pathPrefixRewrite")
    def path_prefix_rewrite(self) -> Optional[str]:
        """
        Replacement for the matching portion of the request's path before
        forwarding to the selected backend service; must be between 1 and 1024
        characters.
        """
        return pulumi.get(self, "path_prefix_rewrite")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python equivalent.
        mapped = _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop)
        return mapped or prop
@pulumi.output_type
class URLMapPathMatcherRouteRuleRouteActionWeightedBackendService(dict):
    """One weighted backend service in a route action's traffic split."""

    def __init__(__self__, *,
                 backend_service: str,
                 weight: float,
                 header_action: Optional['outputs.URLMapPathMatcherRouteRuleRouteActionWeightedBackendServiceHeaderAction'] = None):
        """
        :param str backend_service: The full or partial URL to the BackendService resource that traffic
               matching this weighted entry is routed to.
        :param float weight: Specifies the fraction of traffic sent to backendService, computed as
               weight / (sum of all weightedBackendService weights in routeAction) .
               The selection of a backend service is determined only for new traffic. Once a user's request
               has been directed to a backendService, subsequent requests will be sent to the same backendService
               as determined by the BackendService's session affinity policy.
               The value must be between 0 and 1000
        :param 'URLMapPathMatcherRouteRuleRouteActionWeightedBackendServiceHeaderActionArgs' header_action: Specifies changes to request and response headers that need to take effect for
               the selected backendService.
               headerAction specified here take effect before headerAction in the enclosing
               HttpRouteRule, PathMatcher and UrlMap.
               Structure is documented below.
        """
        pulumi.set(__self__, "backend_service", backend_service)
        pulumi.set(__self__, "weight", weight)
        if header_action is not None:
            pulumi.set(__self__, "header_action", header_action)

    @property
    @pulumi.getter(name="backendService")
    def backend_service(self) -> str:
        """
        The full or partial URL to the BackendService resource that traffic
        matching this weighted entry is routed to.
        """
        return pulumi.get(self, "backend_service")

    @property
    @pulumi.getter
    def weight(self) -> float:
        """
        Specifies the fraction of traffic sent to backendService, computed as
        weight / (sum of all weightedBackendService weights in routeAction) .
        The selection of a backend service is determined only for new traffic. Once a user's request
        has been directed to a backendService, subsequent requests will be sent to the same backendService
        as determined by the BackendService's session affinity policy.
        The value must be between 0 and 1000
        """
        return pulumi.get(self, "weight")

    @property
    @pulumi.getter(name="headerAction")
    def header_action(self) -> Optional['outputs.URLMapPathMatcherRouteRuleRouteActionWeightedBackendServiceHeaderAction']:
        """
        Specifies changes to request and response headers that need to take effect for
        the selected backendService.
        headerAction specified here take effect before headerAction in the enclosing
        HttpRouteRule, PathMatcher and UrlMap.
        Structure is documented below.
        """
        return pulumi.get(self, "header_action")

    def _translate_property(self, prop):
        # Map a camelCase wire name to its snake_case Python equivalent.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class URLMapPathMatcherRouteRuleRouteActionWeightedBackendServiceHeaderAction(dict):
    """
    Header modifications (headers to add / header names to remove, for both
    request and response) applied for the selected weighted backend service.
    All four fields are optional; unset fields are not stored on the instance.
    """
    def __init__(__self__, *,
                 request_headers_to_adds: Optional[List['outputs.URLMapPathMatcherRouteRuleRouteActionWeightedBackendServiceHeaderActionRequestHeadersToAdd']] = None,
                 request_headers_to_removes: Optional[List[str]] = None,
                 response_headers_to_adds: Optional[List['outputs.URLMapPathMatcherRouteRuleRouteActionWeightedBackendServiceHeaderActionResponseHeadersToAdd']] = None,
                 response_headers_to_removes: Optional[List[str]] = None):
        """
        :param List['URLMapPathMatcherRouteRuleRouteActionWeightedBackendServiceHeaderActionRequestHeadersToAddArgs'] request_headers_to_adds: Headers to add to a matching request prior to forwarding the request to the backendService.
               Structure is documented below.
        :param List[str] request_headers_to_removes: A list of header names for headers that need to be removed from the request prior to
               forwarding the request to the backendService.
        :param List['URLMapPathMatcherRouteRuleRouteActionWeightedBackendServiceHeaderActionResponseHeadersToAddArgs'] response_headers_to_adds: Headers to add the response prior to sending the response back to the client.
               Structure is documented below.
        :param List[str] response_headers_to_removes: A list of header names for headers that need to be removed from the response prior to sending the
               response back to the client.
        """
        # Only set fields that were provided, so absent optional fields stay
        # unset rather than being stored as None.
        if request_headers_to_adds is not None:
            pulumi.set(__self__, "request_headers_to_adds", request_headers_to_adds)
        if request_headers_to_removes is not None:
            pulumi.set(__self__, "request_headers_to_removes", request_headers_to_removes)
        if response_headers_to_adds is not None:
            pulumi.set(__self__, "response_headers_to_adds", response_headers_to_adds)
        if response_headers_to_removes is not None:
            pulumi.set(__self__, "response_headers_to_removes", response_headers_to_removes)
    @property
    @pulumi.getter(name="requestHeadersToAdds")
    def request_headers_to_adds(self) -> Optional[List['outputs.URLMapPathMatcherRouteRuleRouteActionWeightedBackendServiceHeaderActionRequestHeadersToAdd']]:
        """
        Headers to add to a matching request prior to forwarding the request to the backendService.
        Structure is documented below.
        """
        return pulumi.get(self, "request_headers_to_adds")
    @property
    @pulumi.getter(name="requestHeadersToRemoves")
    def request_headers_to_removes(self) -> Optional[List[str]]:
        """
        A list of header names for headers that need to be removed from the request prior to
        forwarding the request to the backendService.
        """
        return pulumi.get(self, "request_headers_to_removes")
    @property
    @pulumi.getter(name="responseHeadersToAdds")
    def response_headers_to_adds(self) -> Optional[List['outputs.URLMapPathMatcherRouteRuleRouteActionWeightedBackendServiceHeaderActionResponseHeadersToAdd']]:
        """
        Headers to add the response prior to sending the response back to the client.
        Structure is documented below.
        """
        return pulumi.get(self, "response_headers_to_adds")
    @property
    @pulumi.getter(name="responseHeadersToRemoves")
    def response_headers_to_removes(self) -> Optional[List[str]]:
        """
        A list of header names for headers that need to be removed from the response prior to sending the
        response back to the client.
        """
        return pulumi.get(self, "response_headers_to_removes")
    def _translate_property(self, prop):
        # Map camelCase wire names to snake_case Python attribute names.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class URLMapPathMatcherRouteRuleRouteActionWeightedBackendServiceHeaderActionRequestHeadersToAdd(dict):
    """
    A single request header to add before forwarding to the backend service:
    a (name, value) pair plus a replace flag controlling append-vs-overwrite.
    All three fields are required.
    """
    def __init__(__self__, *,
                 header_name: str,
                 header_value: str,
                 replace: bool):
        """
        :param str header_name: The name of the header to add.
        :param str header_value: The value of the header to add.
        :param bool replace: If false, headerValue is appended to any values that already exist for the header.
               If true, headerValue is set for the header, discarding any values that were set for that header.
        """
        pulumi.set(__self__, "header_name", header_name)
        pulumi.set(__self__, "header_value", header_value)
        pulumi.set(__self__, "replace", replace)
    @property
    @pulumi.getter(name="headerName")
    def header_name(self) -> str:
        """
        The name of the header to add.
        """
        return pulumi.get(self, "header_name")
    @property
    @pulumi.getter(name="headerValue")
    def header_value(self) -> str:
        """
        The value of the header to add.
        """
        return pulumi.get(self, "header_value")
    @property
    @pulumi.getter
    def replace(self) -> bool:
        """
        If false, headerValue is appended to any values that already exist for the header.
        If true, headerValue is set for the header, discarding any values that were set for that header.
        """
        return pulumi.get(self, "replace")
    def _translate_property(self, prop):
        # Map camelCase wire names to snake_case Python attribute names.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class URLMapPathMatcherRouteRuleRouteActionWeightedBackendServiceHeaderActionResponseHeadersToAdd(dict):
    """
    A single response header to add before returning the response to the
    client: a (name, value) pair plus a replace flag controlling
    append-vs-overwrite. All three fields are required. Structurally identical
    to the RequestHeadersToAdd type; kept separate to mirror the API schema.
    """
    def __init__(__self__, *,
                 header_name: str,
                 header_value: str,
                 replace: bool):
        """
        :param str header_name: The name of the header to add.
        :param str header_value: The value of the header to add.
        :param bool replace: If false, headerValue is appended to any values that already exist for the header.
               If true, headerValue is set for the header, discarding any values that were set for that header.
        """
        pulumi.set(__self__, "header_name", header_name)
        pulumi.set(__self__, "header_value", header_value)
        pulumi.set(__self__, "replace", replace)
    @property
    @pulumi.getter(name="headerName")
    def header_name(self) -> str:
        """
        The name of the header to add.
        """
        return pulumi.get(self, "header_name")
    @property
    @pulumi.getter(name="headerValue")
    def header_value(self) -> str:
        """
        The value of the header to add.
        """
        return pulumi.get(self, "header_value")
    @property
    @pulumi.getter
    def replace(self) -> bool:
        """
        If false, headerValue is appended to any values that already exist for the header.
        If true, headerValue is set for the header, discarding any values that were set for that header.
        """
        return pulumi.get(self, "replace")
    def _translate_property(self, prop):
        # Map camelCase wire names to snake_case Python attribute names.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class URLMapPathMatcherRouteRuleUrlRedirect(dict):
    """
    URL redirect configuration for a route rule: optional host/path/prefix
    rewrites, HTTPS upgrade, redirect status code, and query stripping.
    Every field is optional; unset fields are not stored on the instance.
    Note (per field docs below): pathRedirect and prefixRedirect are mutually
    exclusive — supply one alone or neither.
    """
    def __init__(__self__, *,
                 host_redirect: Optional[str] = None,
                 https_redirect: Optional[bool] = None,
                 path_redirect: Optional[str] = None,
                 prefix_redirect: Optional[str] = None,
                 redirect_response_code: Optional[str] = None,
                 strip_query: Optional[bool] = None):
        """
        :param str host_redirect: The host that will be used in the redirect response instead of the one that was
               supplied in the request. The value must be between 1 and 255 characters.
        :param bool https_redirect: If set to true, the URL scheme in the redirected request is set to https. If set to
               false, the URL scheme of the redirected request will remain the same as that of the
               request. This must only be set for UrlMaps used in TargetHttpProxys. Setting this
               true for TargetHttpsProxy is not permitted. The default is set to false.
        :param str path_redirect: The path that will be used in the redirect response instead of the one that was
               supplied in the request. pathRedirect cannot be supplied together with
               prefixRedirect. Supply one alone or neither. If neither is supplied, the path of the
               original request will be used for the redirect. The value must be between 1 and 1024
               characters.
        :param str prefix_redirect: The prefix that replaces the prefixMatch specified in the HttpRouteRuleMatch,
               retaining the remaining portion of the URL before redirecting the request.
               prefixRedirect cannot be supplied together with pathRedirect. Supply one alone or
               neither. If neither is supplied, the path of the original request will be used for
               the redirect. The value must be between 1 and 1024 characters.
        :param str redirect_response_code: The HTTP Status code to use for this RedirectAction. Supported values are:
               * MOVED_PERMANENTLY_DEFAULT, which is the default value and corresponds to 301.
               * FOUND, which corresponds to 302.
               * SEE_OTHER which corresponds to 303.
               * TEMPORARY_REDIRECT, which corresponds to 307. In this case, the request method
               will be retained.
               * PERMANENT_REDIRECT, which corresponds to 308. In this case,
               the request method will be retained.
        :param bool strip_query: If set to true, any accompanying query portion of the original URL is removed prior
               to redirecting the request. If set to false, the query portion of the original URL is
               retained. The default is set to false.
               This field is required to ensure an empty block is not set. The normal default value is false.
        """
        # Only set fields that were provided, so absent optional fields stay
        # unset rather than being stored as None.
        if host_redirect is not None:
            pulumi.set(__self__, "host_redirect", host_redirect)
        if https_redirect is not None:
            pulumi.set(__self__, "https_redirect", https_redirect)
        if path_redirect is not None:
            pulumi.set(__self__, "path_redirect", path_redirect)
        if prefix_redirect is not None:
            pulumi.set(__self__, "prefix_redirect", prefix_redirect)
        if redirect_response_code is not None:
            pulumi.set(__self__, "redirect_response_code", redirect_response_code)
        if strip_query is not None:
            pulumi.set(__self__, "strip_query", strip_query)
    @property
    @pulumi.getter(name="hostRedirect")
    def host_redirect(self) -> Optional[str]:
        """
        The host that will be used in the redirect response instead of the one that was
        supplied in the request. The value must be between 1 and 255 characters.
        """
        return pulumi.get(self, "host_redirect")
    @property
    @pulumi.getter(name="httpsRedirect")
    def https_redirect(self) -> Optional[bool]:
        """
        If set to true, the URL scheme in the redirected request is set to https. If set to
        false, the URL scheme of the redirected request will remain the same as that of the
        request. This must only be set for UrlMaps used in TargetHttpProxys. Setting this
        true for TargetHttpsProxy is not permitted. The default is set to false.
        """
        return pulumi.get(self, "https_redirect")
    @property
    @pulumi.getter(name="pathRedirect")
    def path_redirect(self) -> Optional[str]:
        """
        The path that will be used in the redirect response instead of the one that was
        supplied in the request. pathRedirect cannot be supplied together with
        prefixRedirect. Supply one alone or neither. If neither is supplied, the path of the
        original request will be used for the redirect. The value must be between 1 and 1024
        characters.
        """
        return pulumi.get(self, "path_redirect")
    @property
    @pulumi.getter(name="prefixRedirect")
    def prefix_redirect(self) -> Optional[str]:
        """
        The prefix that replaces the prefixMatch specified in the HttpRouteRuleMatch,
        retaining the remaining portion of the URL before redirecting the request.
        prefixRedirect cannot be supplied together with pathRedirect. Supply one alone or
        neither. If neither is supplied, the path of the original request will be used for
        the redirect. The value must be between 1 and 1024 characters.
        """
        return pulumi.get(self, "prefix_redirect")
    @property
    @pulumi.getter(name="redirectResponseCode")
    def redirect_response_code(self) -> Optional[str]:
        """
        The HTTP Status code to use for this RedirectAction. Supported values are:
        * MOVED_PERMANENTLY_DEFAULT, which is the default value and corresponds to 301.
        * FOUND, which corresponds to 302.
        * SEE_OTHER which corresponds to 303.
        * TEMPORARY_REDIRECT, which corresponds to 307. In this case, the request method
        will be retained.
        * PERMANENT_REDIRECT, which corresponds to 308. In this case,
        the request method will be retained.
        """
        return pulumi.get(self, "redirect_response_code")
    @property
    @pulumi.getter(name="stripQuery")
    def strip_query(self) -> Optional[bool]:
        """
        If set to true, any accompanying query portion of the original URL is removed prior
        to redirecting the request. If set to false, the query portion of the original URL is
        retained. The default is set to false.
        This field is required to ensure an empty block is not set. The normal default value is false.
        """
        return pulumi.get(self, "strip_query")
    def _translate_property(self, prop):
        # Map camelCase wire names to snake_case Python attribute names.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class URLMapTest(dict):
    """
    A single UrlMap test case: a (host, path) request that must resolve to the
    given backend service or bucket. host, path and service are required;
    description is optional.
    """
    def __init__(__self__, *,
                 host: str,
                 path: str,
                 service: str,
                 description: Optional[str] = None):
        """
        :param str host: Host portion of the URL.
        :param str path: Path portion of the URL.
        :param str service: The backend service or backend bucket link that should be matched by this test.
        :param str description: Description of this test case.
        """
        pulumi.set(__self__, "host", host)
        pulumi.set(__self__, "path", path)
        pulumi.set(__self__, "service", service)
        if description is not None:
            pulumi.set(__self__, "description", description)
    @property
    @pulumi.getter
    def host(self) -> str:
        """
        Host portion of the URL.
        """
        return pulumi.get(self, "host")
    @property
    @pulumi.getter
    def path(self) -> str:
        """
        Path portion of the URL.
        """
        return pulumi.get(self, "path")
    @property
    @pulumi.getter
    def service(self) -> str:
        """
        The backend service or backend bucket link that should be matched by this test.
        """
        return pulumi.get(self, "service")
    @property
    @pulumi.getter
    def description(self) -> Optional[str]:
        """
        Description of this test case.
        """
        return pulumi.get(self, "description")
    def _translate_property(self, prop):
        # Map camelCase wire names to snake_case Python attribute names.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class GetBackendBucketCdnPolicyResult(dict):
    """
    Data-source result for a backend bucket's CDN policy; carries only the
    signed-URL cache max age.
    """
    def __init__(__self__, *,
                 signed_url_cache_max_age_sec: float):
        """
        :param float signed_url_cache_max_age_sec: Maximum number of seconds the response to a signed URL request will be considered fresh. After this time period, the response will be revalidated before being served. When serving responses to signed URL requests, Cloud CDN will internally behave as though all responses from this backend had a "Cache-Control: public, max-age=[TTL]" header, regardless of any existing Cache-Control header. The actual headers served in responses will not be altered.
        """
        pulumi.set(__self__, "signed_url_cache_max_age_sec", signed_url_cache_max_age_sec)
    @property
    @pulumi.getter(name="signedUrlCacheMaxAgeSec")
    def signed_url_cache_max_age_sec(self) -> float:
        """
        Maximum number of seconds the response to a signed URL request will be considered fresh. After this time period, the response will be revalidated before being served. When serving responses to signed URL requests, Cloud CDN will internally behave as though all responses from this backend had a "Cache-Control: public, max-age=[TTL]" header, regardless of any existing Cache-Control header. The actual headers served in responses will not be altered.
        """
        return pulumi.get(self, "signed_url_cache_max_age_sec")
@pulumi.output_type
class GetBackendServiceBackendResult(dict):
    """
    Data-source result describing one backend of a Backend Service: its group,
    balancing mode, capacity scaler, and the various max-connection/max-rate/
    max-utilization limits. All fields are required; only ``description`` is
    documented by the upstream schema.
    """
    def __init__(__self__, *,
                 balancing_mode: str,
                 capacity_scaler: float,
                 description: str,
                 group: str,
                 max_connections: float,
                 max_connections_per_endpoint: float,
                 max_connections_per_instance: float,
                 max_rate: float,
                 max_rate_per_endpoint: float,
                 max_rate_per_instance: float,
                 max_utilization: float):
        """
        :param str description: Textual description for the Backend Service.
        """
        pulumi.set(__self__, "balancing_mode", balancing_mode)
        pulumi.set(__self__, "capacity_scaler", capacity_scaler)
        pulumi.set(__self__, "description", description)
        pulumi.set(__self__, "group", group)
        pulumi.set(__self__, "max_connections", max_connections)
        pulumi.set(__self__, "max_connections_per_endpoint", max_connections_per_endpoint)
        pulumi.set(__self__, "max_connections_per_instance", max_connections_per_instance)
        pulumi.set(__self__, "max_rate", max_rate)
        pulumi.set(__self__, "max_rate_per_endpoint", max_rate_per_endpoint)
        pulumi.set(__self__, "max_rate_per_instance", max_rate_per_instance)
        pulumi.set(__self__, "max_utilization", max_utilization)
    @property
    @pulumi.getter(name="balancingMode")
    def balancing_mode(self) -> str:
        return pulumi.get(self, "balancing_mode")
    @property
    @pulumi.getter(name="capacityScaler")
    def capacity_scaler(self) -> float:
        return pulumi.get(self, "capacity_scaler")
    @property
    @pulumi.getter
    def description(self) -> str:
        """
        Textual description for the Backend Service.
        """
        return pulumi.get(self, "description")
    @property
    @pulumi.getter
    def group(self) -> str:
        return pulumi.get(self, "group")
    @property
    @pulumi.getter(name="maxConnections")
    def max_connections(self) -> float:
        return pulumi.get(self, "max_connections")
    @property
    @pulumi.getter(name="maxConnectionsPerEndpoint")
    def max_connections_per_endpoint(self) -> float:
        return pulumi.get(self, "max_connections_per_endpoint")
    @property
    @pulumi.getter(name="maxConnectionsPerInstance")
    def max_connections_per_instance(self) -> float:
        return pulumi.get(self, "max_connections_per_instance")
    @property
    @pulumi.getter(name="maxRate")
    def max_rate(self) -> float:
        return pulumi.get(self, "max_rate")
    @property
    @pulumi.getter(name="maxRatePerEndpoint")
    def max_rate_per_endpoint(self) -> float:
        return pulumi.get(self, "max_rate_per_endpoint")
    @property
    @pulumi.getter(name="maxRatePerInstance")
    def max_rate_per_instance(self) -> float:
        return pulumi.get(self, "max_rate_per_instance")
    @property
    @pulumi.getter(name="maxUtilization")
    def max_utilization(self) -> float:
        return pulumi.get(self, "max_utilization")
@pulumi.output_type
class GetBackendServiceCdnPolicyResult(dict):
    """
    Data-source result for a Backend Service's CDN policy: its cache key
    policies and the signed-URL cache max age. Both fields are required.
    """
    def __init__(__self__, *,
                 cache_key_policies: List['outputs.GetBackendServiceCdnPolicyCacheKeyPolicyResult'],
                 signed_url_cache_max_age_sec: float):
        pulumi.set(__self__, "cache_key_policies", cache_key_policies)
        pulumi.set(__self__, "signed_url_cache_max_age_sec", signed_url_cache_max_age_sec)
    @property
    @pulumi.getter(name="cacheKeyPolicies")
    def cache_key_policies(self) -> List['outputs.GetBackendServiceCdnPolicyCacheKeyPolicyResult']:
        return pulumi.get(self, "cache_key_policies")
    @property
    @pulumi.getter(name="signedUrlCacheMaxAgeSec")
    def signed_url_cache_max_age_sec(self) -> float:
        return pulumi.get(self, "signed_url_cache_max_age_sec")
@pulumi.output_type
class GetBackendServiceCdnPolicyCacheKeyPolicyResult(dict):
    """
    Data-source result for a CDN cache key policy: which request components
    (host, protocol, query string) are part of the cache key, plus query-string
    blacklists/whitelists. All fields are required.
    """
    def __init__(__self__, *,
                 include_host: bool,
                 include_protocol: bool,
                 include_query_string: bool,
                 query_string_blacklists: List[str],
                 query_string_whitelists: List[str]):
        pulumi.set(__self__, "include_host", include_host)
        pulumi.set(__self__, "include_protocol", include_protocol)
        pulumi.set(__self__, "include_query_string", include_query_string)
        pulumi.set(__self__, "query_string_blacklists", query_string_blacklists)
        pulumi.set(__self__, "query_string_whitelists", query_string_whitelists)
    @property
    @pulumi.getter(name="includeHost")
    def include_host(self) -> bool:
        return pulumi.get(self, "include_host")
    @property
    @pulumi.getter(name="includeProtocol")
    def include_protocol(self) -> bool:
        return pulumi.get(self, "include_protocol")
    @property
    @pulumi.getter(name="includeQueryString")
    def include_query_string(self) -> bool:
        return pulumi.get(self, "include_query_string")
    @property
    @pulumi.getter(name="queryStringBlacklists")
    def query_string_blacklists(self) -> List[str]:
        return pulumi.get(self, "query_string_blacklists")
    @property
    @pulumi.getter(name="queryStringWhitelists")
    def query_string_whitelists(self) -> List[str]:
        return pulumi.get(self, "query_string_whitelists")
@pulumi.output_type
class GetBackendServiceCircuitBreakerResult(dict):
    """
    Data-source result for a Backend Service's circuit breaker settings:
    connect timeouts and the max connection/request/retry limits.
    All fields are required.
    """
    def __init__(__self__, *,
                 connect_timeouts: List['outputs.GetBackendServiceCircuitBreakerConnectTimeoutResult'],
                 max_connections: float,
                 max_pending_requests: float,
                 max_requests: float,
                 max_requests_per_connection: float,
                 max_retries: float):
        pulumi.set(__self__, "connect_timeouts", connect_timeouts)
        pulumi.set(__self__, "max_connections", max_connections)
        pulumi.set(__self__, "max_pending_requests", max_pending_requests)
        pulumi.set(__self__, "max_requests", max_requests)
        pulumi.set(__self__, "max_requests_per_connection", max_requests_per_connection)
        pulumi.set(__self__, "max_retries", max_retries)
    @property
    @pulumi.getter(name="connectTimeouts")
    def connect_timeouts(self) -> List['outputs.GetBackendServiceCircuitBreakerConnectTimeoutResult']:
        return pulumi.get(self, "connect_timeouts")
    @property
    @pulumi.getter(name="maxConnections")
    def max_connections(self) -> float:
        return pulumi.get(self, "max_connections")
    @property
    @pulumi.getter(name="maxPendingRequests")
    def max_pending_requests(self) -> float:
        return pulumi.get(self, "max_pending_requests")
    @property
    @pulumi.getter(name="maxRequests")
    def max_requests(self) -> float:
        return pulumi.get(self, "max_requests")
    @property
    @pulumi.getter(name="maxRequestsPerConnection")
    def max_requests_per_connection(self) -> float:
        return pulumi.get(self, "max_requests_per_connection")
    @property
    @pulumi.getter(name="maxRetries")
    def max_retries(self) -> float:
        return pulumi.get(self, "max_retries")
@pulumi.output_type
class GetBackendServiceCircuitBreakerConnectTimeoutResult(dict):
    """
    A duration value as a (seconds, nanos) pair, as returned for the circuit
    breaker's connect timeout.
    """
    def __init__(__self__, *,
                 nanos: float,
                 seconds: float):
        pulumi.set(__self__, "nanos", nanos)
        pulumi.set(__self__, "seconds", seconds)
    @property
    @pulumi.getter
    def nanos(self) -> float:
        return pulumi.get(self, "nanos")
    @property
    @pulumi.getter
    def seconds(self) -> float:
        return pulumi.get(self, "seconds")
@pulumi.output_type
class GetBackendServiceConsistentHashResult(dict):
    """
    Data-source result for a Backend Service's consistent-hash settings:
    HTTP cookies, the HTTP header name used for hashing, and the minimum
    ring size. All fields are required.
    """
    def __init__(__self__, *,
                 http_cookies: List['outputs.GetBackendServiceConsistentHashHttpCookyResult'],
                 http_header_name: str,
                 minimum_ring_size: float):
        pulumi.set(__self__, "http_cookies", http_cookies)
        pulumi.set(__self__, "http_header_name", http_header_name)
        pulumi.set(__self__, "minimum_ring_size", minimum_ring_size)
    @property
    @pulumi.getter(name="httpCookies")
    def http_cookies(self) -> List['outputs.GetBackendServiceConsistentHashHttpCookyResult']:
        return pulumi.get(self, "http_cookies")
    @property
    @pulumi.getter(name="httpHeaderName")
    def http_header_name(self) -> str:
        return pulumi.get(self, "http_header_name")
    @property
    @pulumi.getter(name="minimumRingSize")
    def minimum_ring_size(self) -> float:
        return pulumi.get(self, "minimum_ring_size")
@pulumi.output_type
class GetBackendServiceConsistentHashHttpCookyResult(dict):
    """
    Data-source result for an HTTP cookie used in consistent hashing: name,
    path, and TTL durations. All fields are required.

    NOTE(review): "Cooky" is the generated singularization of "Cookies" in the
    upstream schema; the name is part of the public API and must not be fixed here.
    """
    def __init__(__self__, *,
                 name: str,
                 path: str,
                 ttls: List['outputs.GetBackendServiceConsistentHashHttpCookyTtlResult']):
        """
        :param str name: The name of the Backend Service.
        """
        pulumi.set(__self__, "name", name)
        pulumi.set(__self__, "path", path)
        pulumi.set(__self__, "ttls", ttls)
    @property
    @pulumi.getter
    def name(self) -> str:
        """
        The name of the Backend Service.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def path(self) -> str:
        return pulumi.get(self, "path")
    @property
    @pulumi.getter
    def ttls(self) -> List['outputs.GetBackendServiceConsistentHashHttpCookyTtlResult']:
        return pulumi.get(self, "ttls")
@pulumi.output_type
class GetBackendServiceConsistentHashHttpCookyTtlResult(dict):
    """
    A duration value as a (seconds, nanos) pair, as returned for a consistent-
    hash cookie TTL.
    """
    def __init__(__self__, *,
                 nanos: float,
                 seconds: float):
        pulumi.set(__self__, "nanos", nanos)
        pulumi.set(__self__, "seconds", seconds)
    @property
    @pulumi.getter
    def nanos(self) -> float:
        return pulumi.get(self, "nanos")
    @property
    @pulumi.getter
    def seconds(self) -> float:
        return pulumi.get(self, "seconds")
@pulumi.output_type
class GetBackendServiceIapResult(dict):
    """
    Data-source result for a Backend Service's Identity-Aware Proxy settings:
    the OAuth2 client id, secret, and the secret's SHA-256. All fields are
    required. The raw client secret is sensitive — avoid logging it.
    """
    def __init__(__self__, *,
                 oauth2_client_id: str,
                 oauth2_client_secret: str,
                 oauth2_client_secret_sha256: str):
        pulumi.set(__self__, "oauth2_client_id", oauth2_client_id)
        pulumi.set(__self__, "oauth2_client_secret", oauth2_client_secret)
        pulumi.set(__self__, "oauth2_client_secret_sha256", oauth2_client_secret_sha256)
    @property
    @pulumi.getter(name="oauth2ClientId")
    def oauth2_client_id(self) -> str:
        return pulumi.get(self, "oauth2_client_id")
    @property
    @pulumi.getter(name="oauth2ClientSecret")
    def oauth2_client_secret(self) -> str:
        return pulumi.get(self, "oauth2_client_secret")
    @property
    @pulumi.getter(name="oauth2ClientSecretSha256")
    def oauth2_client_secret_sha256(self) -> str:
        return pulumi.get(self, "oauth2_client_secret_sha256")
@pulumi.output_type
class GetBackendServiceLogConfigResult(dict):
    """
    Data-source result for a Backend Service's logging configuration:
    whether logging is enabled and the sampling rate. Both fields are required.
    """
    def __init__(__self__, *,
                 enable: bool,
                 sample_rate: float):
        pulumi.set(__self__, "enable", enable)
        pulumi.set(__self__, "sample_rate", sample_rate)
    @property
    @pulumi.getter
    def enable(self) -> bool:
        return pulumi.get(self, "enable")
    @property
    @pulumi.getter(name="sampleRate")
    def sample_rate(self) -> float:
        return pulumi.get(self, "sample_rate")
@pulumi.output_type
class GetBackendServiceOutlierDetectionResult(dict):
    """
    Data-source result for a Backend Service's outlier detection settings:
    ejection timing, consecutive-error/gateway-failure thresholds, enforcement
    percentages, detection intervals, and success-rate parameters.
    All fields are required.
    """
    def __init__(__self__, *,
                 base_ejection_times: List['outputs.GetBackendServiceOutlierDetectionBaseEjectionTimeResult'],
                 consecutive_errors: float,
                 consecutive_gateway_failure: float,
                 enforcing_consecutive_errors: float,
                 enforcing_consecutive_gateway_failure: float,
                 enforcing_success_rate: float,
                 intervals: List['outputs.GetBackendServiceOutlierDetectionIntervalResult'],
                 max_ejection_percent: float,
                 success_rate_minimum_hosts: float,
                 success_rate_request_volume: float,
                 success_rate_stdev_factor: float):
        pulumi.set(__self__, "base_ejection_times", base_ejection_times)
        pulumi.set(__self__, "consecutive_errors", consecutive_errors)
        pulumi.set(__self__, "consecutive_gateway_failure", consecutive_gateway_failure)
        pulumi.set(__self__, "enforcing_consecutive_errors", enforcing_consecutive_errors)
        pulumi.set(__self__, "enforcing_consecutive_gateway_failure", enforcing_consecutive_gateway_failure)
        pulumi.set(__self__, "enforcing_success_rate", enforcing_success_rate)
        pulumi.set(__self__, "intervals", intervals)
        pulumi.set(__self__, "max_ejection_percent", max_ejection_percent)
        pulumi.set(__self__, "success_rate_minimum_hosts", success_rate_minimum_hosts)
        pulumi.set(__self__, "success_rate_request_volume", success_rate_request_volume)
        pulumi.set(__self__, "success_rate_stdev_factor", success_rate_stdev_factor)
    @property
    @pulumi.getter(name="baseEjectionTimes")
    def base_ejection_times(self) -> List['outputs.GetBackendServiceOutlierDetectionBaseEjectionTimeResult']:
        return pulumi.get(self, "base_ejection_times")
    @property
    @pulumi.getter(name="consecutiveErrors")
    def consecutive_errors(self) -> float:
        return pulumi.get(self, "consecutive_errors")
    @property
    @pulumi.getter(name="consecutiveGatewayFailure")
    def consecutive_gateway_failure(self) -> float:
        return pulumi.get(self, "consecutive_gateway_failure")
    @property
    @pulumi.getter(name="enforcingConsecutiveErrors")
    def enforcing_consecutive_errors(self) -> float:
        return pulumi.get(self, "enforcing_consecutive_errors")
    @property
    @pulumi.getter(name="enforcingConsecutiveGatewayFailure")
    def enforcing_consecutive_gateway_failure(self) -> float:
        return pulumi.get(self, "enforcing_consecutive_gateway_failure")
    @property
    @pulumi.getter(name="enforcingSuccessRate")
    def enforcing_success_rate(self) -> float:
        return pulumi.get(self, "enforcing_success_rate")
    @property
    @pulumi.getter
    def intervals(self) -> List['outputs.GetBackendServiceOutlierDetectionIntervalResult']:
        return pulumi.get(self, "intervals")
    @property
    @pulumi.getter(name="maxEjectionPercent")
    def max_ejection_percent(self) -> float:
        return pulumi.get(self, "max_ejection_percent")
    @property
    @pulumi.getter(name="successRateMinimumHosts")
    def success_rate_minimum_hosts(self) -> float:
        return pulumi.get(self, "success_rate_minimum_hosts")
    @property
    @pulumi.getter(name="successRateRequestVolume")
    def success_rate_request_volume(self) -> float:
        return pulumi.get(self, "success_rate_request_volume")
    @property
    @pulumi.getter(name="successRateStdevFactor")
    def success_rate_stdev_factor(self) -> float:
        return pulumi.get(self, "success_rate_stdev_factor")
@pulumi.output_type
class GetBackendServiceOutlierDetectionBaseEjectionTimeResult(dict):
    """
    A duration value as a (seconds, nanos) pair, as returned for outlier
    detection's base ejection time.
    """
    def __init__(__self__, *,
                 nanos: float,
                 seconds: float):
        pulumi.set(__self__, "nanos", nanos)
        pulumi.set(__self__, "seconds", seconds)
    @property
    @pulumi.getter
    def nanos(self) -> float:
        return pulumi.get(self, "nanos")
    @property
    @pulumi.getter
    def seconds(self) -> float:
        return pulumi.get(self, "seconds")
@pulumi.output_type
class GetBackendServiceOutlierDetectionIntervalResult(dict):
    """
    A duration value as a (seconds, nanos) pair, as returned for outlier
    detection's analysis interval.
    """
    def __init__(__self__, *,
                 nanos: float,
                 seconds: float):
        pulumi.set(__self__, "nanos", nanos)
        pulumi.set(__self__, "seconds", seconds)
    @property
    @pulumi.getter
    def nanos(self) -> float:
        return pulumi.get(self, "nanos")
    @property
    @pulumi.getter
    def seconds(self) -> float:
        return pulumi.get(self, "seconds")
@pulumi.output_type
class GetInstanceAttachedDiskResult(dict):
    """
    Data-source result describing a disk attached to a Compute instance:
    device name, encryption key material (raw key and its SHA-256, or a KMS
    key self-link), access mode, and the disk source. All fields are required.
    The raw encryption key is sensitive — avoid logging it.
    """
    def __init__(__self__, *,
                 device_name: str,
                 disk_encryption_key_raw: str,
                 disk_encryption_key_sha256: str,
                 kms_key_self_link: str,
                 mode: str,
                 source: str):
        """
        :param str device_name: Name with which the attached disk is accessible
               under `/dev/disk/by-id/`
        :param str mode: Read/write mode for the disk. One of `"READ_ONLY"` or `"READ_WRITE"`.
        :param str source: The name or self_link of the disk attached to this instance.
        """
        pulumi.set(__self__, "device_name", device_name)
        pulumi.set(__self__, "disk_encryption_key_raw", disk_encryption_key_raw)
        pulumi.set(__self__, "disk_encryption_key_sha256", disk_encryption_key_sha256)
        pulumi.set(__self__, "kms_key_self_link", kms_key_self_link)
        pulumi.set(__self__, "mode", mode)
        pulumi.set(__self__, "source", source)
    @property
    @pulumi.getter(name="deviceName")
    def device_name(self) -> str:
        """
        Name with which the attached disk is accessible
        under `/dev/disk/by-id/`
        """
        return pulumi.get(self, "device_name")
    @property
    @pulumi.getter(name="diskEncryptionKeyRaw")
    def disk_encryption_key_raw(self) -> str:
        return pulumi.get(self, "disk_encryption_key_raw")
    @property
    @pulumi.getter(name="diskEncryptionKeySha256")
    def disk_encryption_key_sha256(self) -> str:
        return pulumi.get(self, "disk_encryption_key_sha256")
    @property
    @pulumi.getter(name="kmsKeySelfLink")
    def kms_key_self_link(self) -> str:
        return pulumi.get(self, "kms_key_self_link")
    @property
    @pulumi.getter
    def mode(self) -> str:
        """
        Read/write mode for the disk. One of `"READ_ONLY"` or `"READ_WRITE"`.
        """
        return pulumi.get(self, "mode")
    @property
    @pulumi.getter
    def source(self) -> str:
        """
        The name or self_link of the disk attached to this instance.
        """
        return pulumi.get(self, "source")
@pulumi.output_type
class GetInstanceBootDiskResult(dict):
def __init__(__self__, *,
auto_delete: bool,
device_name: str,
disk_encryption_key_raw: str,
disk_encryption_key_sha256: str,
initialize_params: List['outputs.GetInstanceBootDiskInitializeParamResult'],
kms_key_self_link: str,
mode: str,
source: str):
"""
:param bool auto_delete: Whether the disk will be auto-deleted when the instance is deleted.
:param str device_name: Name with which the attached disk is accessible
under `/dev/disk/by-id/`
:param List['GetInstanceBootDiskInitializeParamArgs'] initialize_params: Parameters with which a disk was created alongside the instance.
Structure is documented below.
:param str mode: Read/write mode for the disk. One of `"READ_ONLY"` or `"READ_WRITE"`.
:param str source: The name or self_link of the disk attached to this instance.
"""
pulumi.set(__self__, "auto_delete", auto_delete)
pulumi.set(__self__, "device_name", device_name)
pulumi.set(__self__, "disk_encryption_key_raw", disk_encryption_key_raw)
pulumi.set(__self__, "disk_encryption_key_sha256", disk_encryption_key_sha256)
pulumi.set(__self__, "initialize_params", initialize_params)
pulumi.set(__self__, "kms_key_self_link", kms_key_self_link)
pulumi.set(__self__, "mode", mode)
pulumi.set(__self__, "source", source)
@property
@pulumi.getter(name="autoDelete")
def auto_delete(self) -> bool:
"""
Whether the disk will be auto-deleted when the instance is deleted.
"""
return pulumi.get(self, "auto_delete")
@property
@pulumi.getter(name="deviceName")
def device_name(self) -> str:
"""
Name with which the attached disk is accessible
under `/dev/disk/by-id/`
"""
return pulumi.get(self, "device_name")
@property
@pulumi.getter(name="diskEncryptionKeyRaw")
def disk_encryption_key_raw(self) -> str:
return pulumi.get(self, "disk_encryption_key_raw")
@property
@pulumi.getter(name="diskEncryptionKeySha256")
def disk_encryption_key_sha256(self) -> str:
return pulumi.get(self, "disk_encryption_key_sha256")
@property
@pulumi.getter(name="initializeParams")
def initialize_params(self) -> List['outputs.GetInstanceBootDiskInitializeParamResult']:
"""
Parameters with which a disk was created alongside the instance.
Structure is documented below.
"""
return pulumi.get(self, "initialize_params")
@property
@pulumi.getter(name="kmsKeySelfLink")
def kms_key_self_link(self) -> str:
    """
    self_link of the KMS key used to encrypt the disk.
    NOTE(review): undocumented upstream; inferred from the attribute name.
    """
    return pulumi.get(self, "kms_key_self_link")
@property
@pulumi.getter
def mode(self) -> str:
    """
    Read/write mode for the disk. One of `"READ_ONLY"` or `"READ_WRITE"`.
    """
    return pulumi.get(self, "mode")
@property
@pulumi.getter
def source(self) -> str:
    """
    The name or self_link of the disk attached to this instance.
    """
    return pulumi.get(self, "source")
@pulumi.output_type
class GetInstanceBootDiskInitializeParamResult(dict):
    """Parameters with which the queried instance's boot disk was created."""
    def __init__(__self__, *,
                 image: str,
                 labels: Mapping[str, Any],
                 size: float,
                 type: str):
        """
        :param str image: The image from which this disk was initialised.
        :param Mapping[str, Any] labels: A set of key/value label pairs assigned to the instance.
        :param float size: The size of the image in gigabytes.
        :param str type: The disk type. (NOTE(review): upstream docstring said
               "accelerator type ... nvidia-tesla-k80", an apparent codegen
               copy/paste error for a disk field.)
        """
        pulumi.set(__self__, "image", image)
        pulumi.set(__self__, "labels", labels)
        pulumi.set(__self__, "size", size)
        pulumi.set(__self__, "type", type)

    @property
    @pulumi.getter
    def image(self) -> str:
        """
        The image from which this disk was initialised.
        """
        return pulumi.get(self, "image")

    @property
    @pulumi.getter
    def labels(self) -> Mapping[str, Any]:
        """
        A set of key/value label pairs assigned to the instance.
        """
        return pulumi.get(self, "labels")

    @property
    @pulumi.getter
    def size(self) -> float:
        """
        The size of the image in gigabytes.
        """
        return pulumi.get(self, "size")

    @property
    @pulumi.getter
    def type(self) -> str:
        """
        The disk type. (NOTE(review): upstream docstring described this as an
        accelerator type — apparent codegen copy/paste error.)
        """
        return pulumi.get(self, "type")
@pulumi.output_type
class GetInstanceConfidentialInstanceConfigResult(dict):
    """Confidential-instance configuration of the queried instance."""
    def __init__(__self__, *,
                 enable_confidential_compute: bool):
        # Undocumented upstream; whether confidential compute is enabled
        # (inferred from the attribute name).
        pulumi.set(__self__, "enable_confidential_compute", enable_confidential_compute)

    @property
    @pulumi.getter(name="enableConfidentialCompute")
    def enable_confidential_compute(self) -> bool:
        """
        Whether confidential compute is enabled. (Undocumented upstream;
        inferred from the attribute name.)
        """
        return pulumi.get(self, "enable_confidential_compute")
@pulumi.output_type
class GetInstanceGroupNamedPortResult(dict):
    """A named (name, port) pair defined on the queried instance group."""
    def __init__(__self__, *,
                 name: str,
                 port: float):
        """
        :param str name: The name of the instance group. Either `name` or `self_link` must be provided.
        """
        pulumi.set(__self__, "name", name)
        pulumi.set(__self__, "port", port)

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        The name of the instance group. Either `name` or `self_link` must be provided.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def port(self) -> float:
        """
        The port number. (Undocumented upstream; inferred from the matching
        GetRegionInstanceGroupInstanceNamedPortResult.port docstring.)
        """
        return pulumi.get(self, "port")
@pulumi.output_type
class GetInstanceGuestAcceleratorResult(dict):
    """A guest accelerator (GPU) attachment of the queried instance."""
    def __init__(__self__, *,
                 count: float,
                 type: str):
        """
        :param float count: The number of the guest accelerator cards exposed to this instance.
        :param str type: The accelerator type resource exposed to this instance. E.g. `nvidia-tesla-k80`.
        """
        pulumi.set(__self__, "count", count)
        pulumi.set(__self__, "type", type)

    @property
    @pulumi.getter
    def count(self) -> float:
        """
        The number of the guest accelerator cards exposed to this instance.
        """
        return pulumi.get(self, "count")

    @property
    @pulumi.getter
    def type(self) -> str:
        """
        The accelerator type resource exposed to this instance. E.g. `nvidia-tesla-k80`.
        """
        return pulumi.get(self, "type")
@pulumi.output_type
class GetInstanceNetworkInterfaceResult(dict):
    """A network interface attached to the queried instance."""
    def __init__(__self__, *,
                 access_configs: List['outputs.GetInstanceNetworkInterfaceAccessConfigResult'],
                 alias_ip_ranges: List['outputs.GetInstanceNetworkInterfaceAliasIpRangeResult'],
                 name: str,
                 network: str,
                 network_ip: str,
                 subnetwork: str,
                 subnetwork_project: str):
        """
        :param List['GetInstanceNetworkInterfaceAccessConfigArgs'] access_configs: Access configurations, i.e. IPs via which this
               instance can be accessed via the Internet. Structure documented below.
        :param List['GetInstanceNetworkInterfaceAliasIpRangeArgs'] alias_ip_ranges: An array of alias IP ranges for this network interface. Structure documented below.
        :param str name: The name of the instance. One of `name` or `self_link` must be provided.
        :param str network: The name or self_link of the network attached to this interface.
        :param str network_ip: The private IP address assigned to the instance.
        :param str subnetwork: The name or self_link of the subnetwork attached to this interface.
        :param str subnetwork_project: The project in which the subnetwork belongs.
        """
        pulumi.set(__self__, "access_configs", access_configs)
        pulumi.set(__self__, "alias_ip_ranges", alias_ip_ranges)
        pulumi.set(__self__, "name", name)
        pulumi.set(__self__, "network", network)
        pulumi.set(__self__, "network_ip", network_ip)
        pulumi.set(__self__, "subnetwork", subnetwork)
        pulumi.set(__self__, "subnetwork_project", subnetwork_project)

    @property
    @pulumi.getter(name="accessConfigs")
    def access_configs(self) -> List['outputs.GetInstanceNetworkInterfaceAccessConfigResult']:
        """
        Access configurations, i.e. IPs via which this
        instance can be accessed via the Internet. Structure documented below.
        """
        return pulumi.get(self, "access_configs")

    @property
    @pulumi.getter(name="aliasIpRanges")
    def alias_ip_ranges(self) -> List['outputs.GetInstanceNetworkInterfaceAliasIpRangeResult']:
        """
        An array of alias IP ranges for this network interface. Structure documented below.
        """
        return pulumi.get(self, "alias_ip_ranges")

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        The name of the instance. One of `name` or `self_link` must be provided.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def network(self) -> str:
        """
        The name or self_link of the network attached to this interface.
        """
        return pulumi.get(self, "network")

    @property
    @pulumi.getter(name="networkIp")
    def network_ip(self) -> str:
        """
        The private IP address assigned to the instance.
        """
        return pulumi.get(self, "network_ip")

    @property
    @pulumi.getter
    def subnetwork(self) -> str:
        """
        The name or self_link of the subnetwork attached to this interface.
        """
        return pulumi.get(self, "subnetwork")

    @property
    @pulumi.getter(name="subnetworkProject")
    def subnetwork_project(self) -> str:
        """
        The project in which the subnetwork belongs.
        """
        return pulumi.get(self, "subnetwork_project")
@pulumi.output_type
class GetInstanceNetworkInterfaceAccessConfigResult(dict):
    """An external-access configuration of a network interface on the queried instance."""
    def __init__(__self__, *,
                 nat_ip: str,
                 network_tier: str,
                 public_ptr_domain_name: str):
        """
        :param str nat_ip: The IP address that is 1:1 mapped to the instance's
               network ip.
        :param str network_tier: The [networking tier][network-tier] used for configuring this instance. One of `PREMIUM` or `STANDARD`.
        :param str public_ptr_domain_name: The DNS domain name for the public PTR record.
        """
        pulumi.set(__self__, "nat_ip", nat_ip)
        pulumi.set(__self__, "network_tier", network_tier)
        pulumi.set(__self__, "public_ptr_domain_name", public_ptr_domain_name)

    @property
    @pulumi.getter(name="natIp")
    def nat_ip(self) -> str:
        """
        The IP address that is 1:1 mapped to the instance's
        network ip.
        """
        return pulumi.get(self, "nat_ip")

    @property
    @pulumi.getter(name="networkTier")
    def network_tier(self) -> str:
        """
        The [networking tier][network-tier] used for configuring this instance. One of `PREMIUM` or `STANDARD`.
        """
        return pulumi.get(self, "network_tier")

    @property
    @pulumi.getter(name="publicPtrDomainName")
    def public_ptr_domain_name(self) -> str:
        """
        The DNS domain name for the public PTR record.
        """
        return pulumi.get(self, "public_ptr_domain_name")
@pulumi.output_type
class GetInstanceNetworkInterfaceAliasIpRangeResult(dict):
    """An alias IP range of a network interface on the queried instance."""
    def __init__(__self__, *,
                 ip_cidr_range: str,
                 subnetwork_range_name: str):
        """
        :param str ip_cidr_range: The IP CIDR range represented by this alias IP range.
        :param str subnetwork_range_name: The subnetwork secondary range name specifying
               the secondary range from which to allocate the IP CIDR range for this alias IP
               range.
        """
        pulumi.set(__self__, "ip_cidr_range", ip_cidr_range)
        pulumi.set(__self__, "subnetwork_range_name", subnetwork_range_name)

    @property
    @pulumi.getter(name="ipCidrRange")
    def ip_cidr_range(self) -> str:
        """
        The IP CIDR range represented by this alias IP range.
        """
        return pulumi.get(self, "ip_cidr_range")

    @property
    @pulumi.getter(name="subnetworkRangeName")
    def subnetwork_range_name(self) -> str:
        """
        The subnetwork secondary range name specifying
        the secondary range from which to allocate the IP CIDR range for this alias IP
        range.
        """
        return pulumi.get(self, "subnetwork_range_name")
@pulumi.output_type
class GetInstanceSchedulingResult(dict):
    """Scheduling configuration of the queried instance."""
    def __init__(__self__, *,
                 automatic_restart: bool,
                 min_node_cpus: float,
                 node_affinities: List['outputs.GetInstanceSchedulingNodeAffinityResult'],
                 on_host_maintenance: str,
                 preemptible: bool):
        """
        :param bool automatic_restart: Specifies if the instance should be
               restarted if it was terminated by Compute Engine (not a user).
        :param float min_node_cpus: (Undocumented upstream.)
        :param List['GetInstanceSchedulingNodeAffinityArgs'] node_affinities: (Undocumented upstream.)
        :param str on_host_maintenance: Describes maintenance behavior for the
               instance. One of `MIGRATE` or `TERMINATE`, for more info, read
               [here](https://cloud.google.com/compute/docs/instances/setting-instance-scheduling-options)
        :param bool preemptible: Whether the instance is preemptible.
        """
        pulumi.set(__self__, "automatic_restart", automatic_restart)
        pulumi.set(__self__, "min_node_cpus", min_node_cpus)
        pulumi.set(__self__, "node_affinities", node_affinities)
        pulumi.set(__self__, "on_host_maintenance", on_host_maintenance)
        pulumi.set(__self__, "preemptible", preemptible)

    @property
    @pulumi.getter(name="automaticRestart")
    def automatic_restart(self) -> bool:
        """
        Specifies if the instance should be
        restarted if it was terminated by Compute Engine (not a user).
        """
        return pulumi.get(self, "automatic_restart")

    @property
    @pulumi.getter(name="minNodeCpus")
    def min_node_cpus(self) -> float:
        # Undocumented upstream.
        return pulumi.get(self, "min_node_cpus")

    @property
    @pulumi.getter(name="nodeAffinities")
    def node_affinities(self) -> List['outputs.GetInstanceSchedulingNodeAffinityResult']:
        # Undocumented upstream.
        return pulumi.get(self, "node_affinities")

    @property
    @pulumi.getter(name="onHostMaintenance")
    def on_host_maintenance(self) -> str:
        """
        Describes maintenance behavior for the
        instance. One of `MIGRATE` or `TERMINATE`, for more info, read
        [here](https://cloud.google.com/compute/docs/instances/setting-instance-scheduling-options)
        """
        return pulumi.get(self, "on_host_maintenance")

    @property
    @pulumi.getter
    def preemptible(self) -> bool:
        """
        Whether the instance is preemptible.
        """
        return pulumi.get(self, "preemptible")
@pulumi.output_type
class GetInstanceSchedulingNodeAffinityResult(dict):
    """A node-affinity rule (key/operator/values) of the queried instance's scheduling config."""
    def __init__(__self__, *,
                 key: str,
                 operator: str,
                 values: List[str]):
        # All fields undocumented upstream.
        pulumi.set(__self__, "key", key)
        pulumi.set(__self__, "operator", operator)
        pulumi.set(__self__, "values", values)

    @property
    @pulumi.getter
    def key(self) -> str:
        return pulumi.get(self, "key")

    @property
    @pulumi.getter
    def operator(self) -> str:
        return pulumi.get(self, "operator")

    @property
    @pulumi.getter
    def values(self) -> List[str]:
        return pulumi.get(self, "values")
@pulumi.output_type
class GetInstanceScratchDiskResult(dict):
    """A scratch disk attached to the queried instance."""
    def __init__(__self__, *,
                 interface: str):
        """
        :param str interface: The disk interface used for attaching this disk. One of `SCSI` or `NVME`.
        """
        pulumi.set(__self__, "interface", interface)

    @property
    @pulumi.getter
    def interface(self) -> str:
        """
        The disk interface used for attaching this disk. One of `SCSI` or `NVME`.
        """
        return pulumi.get(self, "interface")
@pulumi.output_type
class GetInstanceServiceAccountResult(dict):
    """The service account (email + scopes) attached to the queried instance."""
    def __init__(__self__, *,
                 email: str,
                 scopes: List[str]):
        """
        :param str email: The service account e-mail address.
        :param List[str] scopes: A list of service scopes.
        """
        pulumi.set(__self__, "email", email)
        pulumi.set(__self__, "scopes", scopes)

    @property
    @pulumi.getter
    def email(self) -> str:
        """
        The service account e-mail address.
        """
        return pulumi.get(self, "email")

    @property
    @pulumi.getter
    def scopes(self) -> List[str]:
        """
        A list of service scopes.
        """
        return pulumi.get(self, "scopes")
@pulumi.output_type
class GetInstanceShieldedInstanceConfigResult(dict):
    """Shielded-VM settings of the queried instance (all fields undocumented upstream)."""
    def __init__(__self__, *,
                 enable_integrity_monitoring: bool,
                 enable_secure_boot: bool,
                 enable_vtpm: bool):
        pulumi.set(__self__, "enable_integrity_monitoring", enable_integrity_monitoring)
        pulumi.set(__self__, "enable_secure_boot", enable_secure_boot)
        pulumi.set(__self__, "enable_vtpm", enable_vtpm)

    @property
    @pulumi.getter(name="enableIntegrityMonitoring")
    def enable_integrity_monitoring(self) -> bool:
        return pulumi.get(self, "enable_integrity_monitoring")

    @property
    @pulumi.getter(name="enableSecureBoot")
    def enable_secure_boot(self) -> bool:
        return pulumi.get(self, "enable_secure_boot")

    @property
    @pulumi.getter(name="enableVtpm")
    def enable_vtpm(self) -> bool:
        return pulumi.get(self, "enable_vtpm")
@pulumi.output_type
class GetRegionInstanceGroupInstanceResult(dict):
    """One instance entry of the queried regional instance group."""
    def __init__(__self__, *,
                 instance: str,
                 named_ports: List['outputs.GetRegionInstanceGroupInstanceNamedPortResult'],
                 status: str):
        """
        :param str instance: URL to the instance.
        :param List['GetRegionInstanceGroupInstanceNamedPortArgs'] named_ports: List of named ports in the group, as a list of resources, each containing:
        :param str status: String description of current state of the instance.
        """
        pulumi.set(__self__, "instance", instance)
        pulumi.set(__self__, "named_ports", named_ports)
        pulumi.set(__self__, "status", status)

    @property
    @pulumi.getter
    def instance(self) -> str:
        """
        URL to the instance.
        """
        return pulumi.get(self, "instance")

    @property
    @pulumi.getter(name="namedPorts")
    def named_ports(self) -> List['outputs.GetRegionInstanceGroupInstanceNamedPortResult']:
        """
        List of named ports in the group, as a list of resources, each containing:
        """
        return pulumi.get(self, "named_ports")

    @property
    @pulumi.getter
    def status(self) -> str:
        """
        String description of current state of the instance.
        """
        return pulumi.get(self, "status")
@pulumi.output_type
class GetRegionInstanceGroupInstanceNamedPortResult(dict):
    """A named (name, port) pair on an instance of the queried regional instance group."""
    def __init__(__self__, *,
                 name: str,
                 port: float):
        """
        :param str name: The name of the instance group. One of `name` or `self_link` must be provided.
        :param float port: Integer port number
        """
        pulumi.set(__self__, "name", name)
        pulumi.set(__self__, "port", port)

    @property
    @pulumi.getter
    def name(self) -> str:
        """
        The name of the instance group. One of `name` or `self_link` must be provided.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def port(self) -> float:
        """
        Integer port number
        """
        return pulumi.get(self, "port")
@pulumi.output_type
class GetRouterBgpResult(dict):
    """BGP configuration of the queried router (all fields undocumented upstream)."""
    def __init__(__self__, *,
                 advertise_mode: str,
                 advertised_groups: List[str],
                 advertised_ip_ranges: List['outputs.GetRouterBgpAdvertisedIpRangeResult'],
                 asn: float):
        pulumi.set(__self__, "advertise_mode", advertise_mode)
        pulumi.set(__self__, "advertised_groups", advertised_groups)
        pulumi.set(__self__, "advertised_ip_ranges", advertised_ip_ranges)
        pulumi.set(__self__, "asn", asn)

    @property
    @pulumi.getter(name="advertiseMode")
    def advertise_mode(self) -> str:
        return pulumi.get(self, "advertise_mode")

    @property
    @pulumi.getter(name="advertisedGroups")
    def advertised_groups(self) -> List[str]:
        return pulumi.get(self, "advertised_groups")

    @property
    @pulumi.getter(name="advertisedIpRanges")
    def advertised_ip_ranges(self) -> List['outputs.GetRouterBgpAdvertisedIpRangeResult']:
        return pulumi.get(self, "advertised_ip_ranges")

    @property
    @pulumi.getter
    def asn(self) -> float:
        return pulumi.get(self, "asn")
@pulumi.output_type
class GetRouterBgpAdvertisedIpRangeResult(dict):
    """An advertised IP range entry of the queried router's BGP config (undocumented upstream)."""
    def __init__(__self__, *,
                 description: str,
                 range: str):
        pulumi.set(__self__, "description", description)
        pulumi.set(__self__, "range", range)

    @property
    @pulumi.getter
    def description(self) -> str:
        return pulumi.get(self, "description")

    @property
    @pulumi.getter
    def range(self) -> str:
        return pulumi.get(self, "range")
@pulumi.output_type
class GetSubnetworkSecondaryIpRangeResult(dict):
    """A secondary IP range of the queried subnetwork."""
    def __init__(__self__, *,
                 ip_cidr_range: str,
                 range_name: str):
        """
        :param str ip_cidr_range: The range of IP addresses belonging to this subnetwork
               secondary range.
        :param str range_name: The name associated with this subnetwork secondary range, used
               when adding an alias IP range to a VM instance.
        """
        pulumi.set(__self__, "ip_cidr_range", ip_cidr_range)
        pulumi.set(__self__, "range_name", range_name)

    @property
    @pulumi.getter(name="ipCidrRange")
    def ip_cidr_range(self) -> str:
        """
        The range of IP addresses belonging to this subnetwork
        secondary range.
        """
        return pulumi.get(self, "ip_cidr_range")

    @property
    @pulumi.getter(name="rangeName")
    def range_name(self) -> str:
        """
        The name associated with this subnetwork secondary range, used
        when adding an alias IP range to a VM instance.
        """
        return pulumi.get(self, "range_name")
| 46.014892
| 501
| 0.667813
| 115,337
| 970,224
| 5.454928
| 0.019577
| 0.016851
| 0.024341
| 0.035575
| 0.885601
| 0.878944
| 0.873082
| 0.865812
| 0.861018
| 0.855784
| 0
| 0.006885
| 0.26048
| 970,224
| 21,084
| 502
| 46.017075
| 0.869986
| 0.462126
| 0
| 0.834025
| 1
| 0
| 0.171977
| 0.107407
| 0
| 0
| 0
| 0
| 0
| 1
| 0.186317
| false
| 0.00081
| 0.000607
| 0.046352
| 0.373242
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
2d44db217bf5d4003f105a6341c461b9e4e540da
| 52,125
|
py
|
Python
|
glavni.py
|
MatijaGH/wololo
|
99ad22cd48f3af18ecfa45104f663d4262b93172
|
[
"MIT"
] | null | null | null |
glavni.py
|
MatijaGH/wololo
|
99ad22cd48f3af18ecfa45104f663d4262b93172
|
[
"MIT"
] | 2
|
2019-04-25T13:24:39.000Z
|
2019-05-13T20:54:49.000Z
|
glavni.py
|
MatijaGH/wololo
|
99ad22cd48f3af18ecfa45104f663d4262b93172
|
[
"MIT"
] | null | null | null |
import bottle
from bottle import *
#import auth as auth
import psycopg2, psycopg2.extensions, psycopg2.extras
import hashlib
import webbrowser
from datetime import date
# Connect to the database.
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)  # so Slovenian accented characters work
# NOTE(review): credentials are hard-coded; the commented-out auth module
# below was presumably intended to hold them. Never commit secrets.
baza = psycopg2.connect(database='sem2019_matijagh', host='baza.fmf.uni-lj.si', user='matijagh', password='f3wl64em')
#baza = psycopg2.connect(database=auth.db, host=auth.host, user=auth.user, password=auth.password)
baza.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)  # disable transactions (autocommit)
# Shared module-level cursor; DictCursor lets rows be indexed by column name too.
cur = baza.cursor(cursor_factory=psycopg2.extras.DictCursor)
###Pomožne funkcije
def is_int(input):
    """Return True if *input* can be parsed as an int, False otherwise.

    Also returns False for None and other non-parsable types: callers pass
    `request.forms.get('search')`, which is None when the field is missing,
    and the original `int(None)` raised TypeError instead of returning False.

    :param input: value to test (typically a form string, possibly None)
    :return: bool
    """
    try:
        int(input)
    except (ValueError, TypeError):
        return False
    return True
### State 0 - the person has not yet seen the offer
### State 1 - the person has accepted the offer
### State 2 - the person has declined the offer
################
#test priklopa na bazo(ni še v redu, popraviti moram program za tabelo)
##def test():
## cur.execute('''
## SELECT * FROM uporabnik WHERE vloga = 'agent;'
## ''')
## return (cur.fetchall())
##
##print(test())
##def test2():
## cur.execute('''
## SELECT * FROM uporabnik WHERE vloga = 'igralec;'
## ''')
## return (cur.fetchall())
##print(test2())
################
# Bottle setup: helper constants.
static_dir = "./static"  # root directory for /static/... files
# Cookie-signing secret.  NOTE(review): hard-coded in source; should be
# loaded from configuration/environment instead.
secret = "to skrivnost je zelo tezko uganiti 1094107c907cw982982c42"
def vloga(user):
    """Return the 'pooblastilo' (permission/role) column for the given username."""
    cur.execute("SELECT pooblastilo FROM uporabnik WHERE username=%s",
                [user])
    return cur.fetchone()[0]
# NOTE(review): the def line of password_md5 is commented out, but its
# docstring below is still a live bare-string expression (a harmless no-op).
# Translation: "Return the MD5 hash of the given UTF-8 string. Passwords are
# always stored in the database encoded with this function."
#def password_md5(s):
"""Vrni MD5 hash danega UTF-8 niza. Gesla vedno spravimo v bazo
kodirana s to funkcijo."""
#p = hashlib.md5()
#p.update(s.encode('utf-8'))
#return p.hexdigest()
def get_user(auto_login = True, auto_redir=False):
    """Identify the logged-in user from the signed 'username' cookie.

    Behaviour:
      * auto_redir=True and a cookie is present: redirect to the
        role-specific index page (tmp[3] is the role column) and never return.
      * otherwise: return the user's data if found; when nobody is logged in,
        redirect to /login/ (auto_login=True) or return None (auto_login=False).

    NOTE(review): if the cookie names a user that no longer exists, `tmp` is
    None and `tmp[3]` below raises TypeError — confirm intent.
    NOTE(review): the inner query selects a column named 'uporabnik' from
    table 'uporabnik'; verify against the schema.
    """
    # Read the username out of the cookie.
    username = request.get_cookie('username', secret=secret)
    print(username)
    c = baza.cursor()  # NOTE(review): this cursor is unused and shadowed below
    cur.execute('''
        SELECT * FROM uporabnik WHERE uporabnisko_ime=%s
    ''', [username])
    tmp = cur.fetchone()
    # Check whether this user exists.
    if username is not None:
        # If the user is already logged in, there is no point in staying on /login.
        if auto_redir:
            if tmp[3] == 'igralec;':
                redirect('/index-igralec/')
            elif tmp[3] == 'agent;':
                redirect("/index-agent/")
            else:
                redirect("/index-klub/")
        else:
            c = baza.cursor()
            c.execute("SELECT uporabnik FROM uporabnik WHERE uporabnisko_ime=%s",
                      [username])
            r = c.fetchone()
            c.close()
            if r is not None:
                # The user exists; return their data.
                return r
    # If we get here, nobody is logged in: redirect or signal with None.
    if auto_login:
        redirect('/login/')
    else:
        return None
# TODO: the redirect target pages still need to be created, and the exact parameters reviewed.
def preusmeri(parameter, pooblastilo):
    """Redirect to the index page matching *parameter* ('agent' or 'igralec').

    The *pooblastilo* argument is currently unused; unknown parameters fall
    through without redirecting.
    """
    targets = {
        "agent": '/index-agent/',
        "igralec": '/index-igralec/',
    }
    if parameter in targets:
        redirect(targets[parameter])
@route("/static/<filename:path>")
def static(filename):
    """Generic handler serving every static file under /static/... from static_dir."""
    return static_file(filename, root=static_dir)
################
#bottle routes
@get("/")
def zero_get():
    """Root URL: show the login page.

    get_user(auto_redir=True) redirects to the role-specific index page when
    a valid login cookie exists, so the template is only rendered for
    anonymous visitors.  (Fix: the original bound the call to an unused
    local `curuser`; the binding is removed.)
    """
    get_user(auto_login=False, auto_redir=True)
    return template("login.html",
                    napaka=None,
                    username=None)
@get("/login/")
def login_get():
    """Serve the login form.

    get_user(auto_redir=True) redirects already-logged-in users to their
    role-specific index page.  (Fix: the original bound the call to an
    unused local `curuser`; the binding is removed.)
    """
    get_user(auto_login=False, auto_redir=True)
    return template("login.html",
                    napaka=None,
                    username=None)
@post('/login/', method='post')
def do_login():
    """Process the submitted login form.

    Looks the (username, password) pair up directly in the 'uporabnik' table
    and, on success, sets the signed 'username' cookie and redirects to the
    role-specific index page (tmp[3] is the role column).

    NOTE(review): the password is compared in plain text — no hashing
    (see the commented-out password_md5 above).
    """
    # Username the user typed into the form.
    username = request.forms.get('username')
    # Password to compare against the database.
    password = request.forms.get('password')
    ## print(username)
    ## print(password)
    # Check whether the user logged in correctly.
    c = baza.cursor()  # NOTE(review): cursor created but never used or closed
    cur.execute('''
        SELECT * FROM uporabnik WHERE uporabnisko_ime=%s AND geslo=%s
    ''', [username, password])
    tmp = cur.fetchone()
    # Check whether the user exists in the database.
    if tmp is None:
        return template("login.html",
                        napaka="Nepravilna prijava.",
                        username=None
                        )
    else:
        response.set_cookie('username', username, path='/', secret=secret)
        if tmp[3] == 'igralec;':
            redirect('/index-igralec/')
        elif tmp[3] == 'agent;':
            redirect("/index-agent/")
        else:
            redirect("/index-klub/")
#    else:
#        # Everything OK: set the cookie and redirect to the main page.
#        response.set_cookie('username', username, path='/', secret=secret)
#        redirect("/index/")
### GET methods so the home page renders correctly
@get("/index-agent/")
def index_agent_get():
    """Render index-agent.html for the logged-in agent.

    Resolves the agent via the cookie username and counts pending transfer
    offers (stanje_klub=1, stanje_agent=0) for the message badge.
    """
    username = request.get_cookie('username', secret=secret)
    cur.execute('''
        SELECT * FROM uporabnik WHERE uporabnisko_ime=%s
    ''', [username])
    tmp = cur.fetchone()
    ID = tmp[0]
    cur.execute(''' SELECT * FROM agent WHERE ID = %s''', [ID])
    podatki = cur.fetchone()
    ime = podatki[1]      # first name
    priimek = podatki[2]  # last name
    cur.execute('''SELECT * FROM prestop WHERE agent = %s AND stanje_klub = %s AND stanje_agent = %s''', [ID, 1, 0])
    tmp2 = cur.fetchall()
    stevilo_sporocil = len(tmp2)
    return template("index-agent.html", ime=ime, priimek=priimek, username=username, napaka=None, stevilo_sporocil=stevilo_sporocil)
@get("/index-igralec/")
def index_igralec_get():
    """Render index-igralec.html for the logged-in player.

    Resolves the player via the cookie username, then their club and agent,
    and counts transfer offers already approved by club and agent but not
    yet seen by the player (stanje_klub=1, stanje_agent=1, stanje_igralec=0)
    for the message badge.
    """
    username = request.get_cookie('username', secret=secret)
    cur.execute('''
        SELECT * FROM uporabnik WHERE uporabnisko_ime=%s
    ''', [username])
    tmp = cur.fetchone()
    ID = tmp[0]
    print(ID)
    cur.execute(''' SELECT * FROM igralci WHERE ID = %s''', [ID])
    podatki = cur.fetchone()
    ime = podatki[1]            # first name
    priimek = podatki[2]        # last name
    drzava = podatki[3]         # country
    placa = podatki[4]          # salary
    datum_rojstva = podatki[5]  # date of birth
    vrednost = podatki[6]       # market value
    klub_id = podatki[7]
    agent_id = podatki[8]
    cur.execute('''SELECT * FROM prestop WHERE igralec = %s AND stanje_klub = %s AND stanje_agent = %s AND stanje_igralec=%s''', [ID, 1, 1, 0])
    tmp = cur.fetchall()
    stevilo_sporocil = len(tmp)
    cur.execute('''SELECT * FROM klub WHERE id = %s''', [klub_id])
    klub_vse = cur.fetchone()
    klub = klub_vse[1]
    klub_naslov = klub_vse[2]
    cur.execute('''SELECT * FROM agent WHERE id = %s''', [agent_id])
    agent_vse = cur.fetchone()
    agent_ime = agent_vse[1]
    agent_priimek = agent_vse[2]
    # Collect all offers addressed to the player and notify them.
    # NOTE(review): this duplicates the count query above but passes the
    # states as strings ('1') instead of ints; the result is only printed.
    cur.execute('''SELECT * FROM prestop WHERE igralec = %s
        AND stanje_agent = %s AND stanje_klub = %s
        AND stanje_igralec = %s''', [ID, '1', '1', '0'])
    ponudbe = cur.fetchall()
    print(ponudbe)
    cas = request.forms.get('datetime')  # NOTE(review): reading forms in a GET handler likely yields None — confirm
    print(cas)
    return template("index-igralec.html", klub=klub, klub_naslov=klub_naslov,
                    ime=ime, priimek=priimek, drzava=drzava, placa=placa,
                    datum_rojstva=datum_rojstva, vrednost=vrednost,
                    agent_ime=agent_ime, agent_priimek=agent_priimek, username=username, stevilo_sporocil=stevilo_sporocil,
                    napaka=None, rezultat=[[0, None]])
@get("/index-klub/")
def index_klub_get():
    """Render index-klub.html for the logged-in club.

    Computes the squad value as the sum of the club's players' 'vrednost'
    (idiom fix: `sum()` instead of a manual accumulation loop) and counts
    unseen incoming transfer offers (stanje_klub=0) for the message badge.
    """
    username = request.get_cookie('username', secret=secret)
    cur.execute('''
        SELECT * FROM uporabnik WHERE uporabnisko_ime=%s
    ''', [username])
    tmp = cur.fetchone()
    ID = tmp[0]
    cur.execute(''' SELECT * FROM klub WHERE ID = %s''', [ID])
    podatki = cur.fetchone()
    ime = podatki[1]    # club name
    naslov = podatki[2] # club address
    cur.execute('''SELECT * FROM prestop WHERE v_klub = %s AND stanje_klub = %s''', [ID, 0])
    stevilo_sporocil = len(cur.fetchall())
    # Squad value currently only counts players in the club; could be refined.
    cur.execute('''SELECT vrednost FROM igralci WHERE klub=%s''', [ID])
    vrednost = sum(row[0] for row in cur.fetchall())
    cur.execute('''SELECT * FROM igralci WHERE klub=%s ORDER BY vrednost DESC''', [ID])
    nogometasi = cur.fetchall()
    print(nogometasi)
    return template("index-klub.html", vrednost=vrednost, stevilo_sporocil=stevilo_sporocil, ime=ime,
                    naslov=naslov, username=username, rezultat=[[0, None]], napaka=None,
                    nogometasi=nogometasi)
@post("/index-klub/")
def index_klub_post():
    """Handle the search form on index-klub.html.

    Numeric queries search players/agents/clubs by id, other strings by
    exact name.  Fixes two defects in the original:
      * the "not found" check for the id search compared against None, but
        cursor.fetchall() always returns a list, so the error message was
        unreachable; it now compares against empty lists, matching the
        name-search branch;
      * a missing 'search' field reached the final return with
        `rezultat_poizvedbe` unbound (NameError); it now renders the page
        with no search results.
    """
    username = request.get_cookie('username', secret=secret)
    cur.execute('''
        SELECT * FROM uporabnik WHERE uporabnisko_ime=%s
    ''', [username])
    tmp = cur.fetchone()
    ID = tmp[0]
    cur.execute(''' SELECT * FROM klub WHERE ID = %s''', [ID])
    podatki = cur.fetchone()
    ime = podatki[1]
    naslov = podatki[2]
    cur.execute('''SELECT * FROM prestop WHERE v_klub = %s AND stanje_klub = %s''', [ID, 0])
    stevilo_sporocil = len(cur.fetchall())
    cur.execute('''SELECT vrednost FROM igralci WHERE klub=%s''', [ID])
    vrednost = sum(row[0] for row in cur.fetchall())
    cur.execute('''SELECT * FROM igralci WHERE klub=%s ORDER BY vrednost DESC''', [ID])
    nogometasi = cur.fetchall()
    poizvedba = request.forms.get('search')
    if poizvedba is None:
        # No search submitted: render the page without results.
        return template("index-klub.html", vrednost=vrednost, stevilo_sporocil=stevilo_sporocil,
                        ime=ime, rezultat=[[0, None]], naslov=naslov, username=username,
                        napaka=None, nogometasi=nogometasi)
    if is_int(poizvedba):
        # Search all three entity tables by id.
        cur.execute('''SELECT * FROM igralci WHERE id = %s''', [poizvedba])
        rezultat_poizvedbe_igralec = cur.fetchall()
        cur.execute('''SELECT * FROM agent WHERE id = %s''', [poizvedba])
        rezultat_poizvedbe_agent = cur.fetchall()
        cur.execute('''SELECT * FROM klub WHERE id = %s''', [poizvedba])
        rezultat_poizvedbe_klub = cur.fetchall()
        napaka = "Uporabnik z iskanim ID ne obstaja!"
    else:
        # Name search: the name must currently match the stored value exactly.
        cur.execute('''SELECT * FROM igralci WHERE ime = %s''', [poizvedba])
        rezultat_poizvedbe_igralec = cur.fetchall()
        cur.execute('''SELECT * FROM agent WHERE ime = %s''', [poizvedba])
        rezultat_poizvedbe_agent = cur.fetchall()
        cur.execute('''SELECT * FROM klub WHERE ime = %s''', [poizvedba])
        rezultat_poizvedbe_klub = cur.fetchall()
        napaka = 'Ni rezultatov iskanja'
    rezultat_poizvedbe = [[1, rezultat_poizvedbe_igralec], [2, rezultat_poizvedbe_agent], [3, rezultat_poizvedbe_klub]]
    if rezultat_poizvedbe != [[1, []], [2, []], [3, []]]:
        # At least one table matched: clear the "not found" message.
        napaka = None
    return template("index-klub.html", vrednost=vrednost, stevilo_sporocil=stevilo_sporocil,
                    ime=ime, rezultat=rezultat_poizvedbe, naslov=naslov, username=username,
                    napaka=napaka, nogometasi=nogometasi)
@get("/index-igralec/menjaj-agenta.html")
def index_igralec_menjaj_get():
    """Render the change-agent page for the logged-in player.

    Loads the player's row via the cookie username, resolves their club and
    current agent, and lists all other agents as candidates.
    """
    username = request.get_cookie('username', secret=secret)
    cur.execute('''
        SELECT * FROM uporabnik WHERE uporabnisko_ime=%s
    ''', [username])
    tmp = cur.fetchone()
    ID = tmp[0]
    cur.execute(''' SELECT * FROM igralci WHERE ID = %s''', [ID])
    podatki = cur.fetchone()
    ime = podatki[1]            # first name
    priimek = podatki[2]        # last name
    drzava = podatki[3]         # country
    placa = podatki[4]          # salary
    datum_rojstva = podatki[5]  # date of birth
    vrednost = podatki[6]       # market value
    klub_id = podatki[7]
    agent_id = podatki[8]
    cur.execute('''SELECT * FROM klub WHERE id = %s''', [klub_id])
    klub_vse = cur.fetchone()
    klub = klub_vse[1]
    klub_naslov = klub_vse[2]
    cur.execute('''SELECT * FROM agent WHERE id = %s''', [agent_id])
    agent_vse = cur.fetchone()
    agent_ime = agent_vse[1]
    agent_priimek = agent_vse[2]
    # Every agent except the player's current one is a candidate.
    cur.execute('''SELECT * FROM agent WHERE id != %s''', [agent_id])
    ostali_agenti = cur.fetchall()
    return template("menjaj-agenta.html", klub=klub, klub_naslov=klub_naslov,
                    ime=ime, priimek=priimek, drzava=drzava, placa=placa,
                    datum_rojstva=datum_rojstva, vrednost=vrednost,
                    agent_ime=agent_ime, agent_priimek=agent_priimek, username=username,
                    ostali_agenti=ostali_agenti, napaka=None)
@post("/index-igralec/menjaj-agenta.html")
def index_igralec_menjaj_post():
    """Handle the change-agent form for the logged-in player.

    Fixes from the original:
      * the UPDATE had no WHERE clause, so choosing a new agent reassigned
        EVERY player's agent; it now updates only the logged-in player's row;
      * removed the unreachable search-handling code that followed the
        if/else (both branches already returned), and merged the two
        identical return statements.
    """
    username = request.get_cookie('username', secret=secret)
    cur.execute('''
        SELECT * FROM uporabnik WHERE uporabnisko_ime=%s
    ''', [username])
    tmp = cur.fetchone()
    ID = tmp[0]
    cur.execute(''' SELECT * FROM igralci WHERE ID = %s''', [ID])
    podatki = cur.fetchone()
    ime = podatki[1]            # first name
    priimek = podatki[2]        # last name
    drzava = podatki[3]         # country
    placa = podatki[4]          # salary
    datum_rojstva = podatki[5]  # date of birth
    vrednost = podatki[6]       # market value
    klub_id = podatki[7]
    agent_id = podatki[8]
    cur.execute('''SELECT * FROM klub WHERE id = %s''', [klub_id])
    klub_vse = cur.fetchone()
    klub = klub_vse[1]
    klub_naslov = klub_vse[2]
    cur.execute('''SELECT * FROM agent WHERE id = %s''', [agent_id])
    agent_vse = cur.fetchone()
    agent_ime = agent_vse[1]
    agent_priimek = agent_vse[2]
    cur.execute('''SELECT * FROM agent WHERE id != %s''', [agent_id])
    ostali_agenti = cur.fetchall()
    izbrani_agent = request.forms.get('select')
    if izbrani_agent is not None:
        # Only change the agent of the player who is logged in.
        cur.execute('''UPDATE igralci SET agent = %s WHERE id = %s''', [izbrani_agent, ID])
        baza.commit()
    return template("menjaj-agenta.html", klub=klub, klub_naslov=klub_naslov,
                    ime=ime, priimek=priimek, drzava=drzava, placa=placa,
                    datum_rojstva=datum_rojstva, vrednost=vrednost,
                    agent_ime=agent_ime, agent_priimek=agent_priimek, username=username,
                    ostali_agenti=ostali_agenti, napaka=None)
### POST handlers, so each page keeps working correctly after any form submission
@post("/index-igralec/")
def index_igralec_post():
    """Handle the search form POST on the player index page.

    Resolves the logged-in player from the signed ``username`` cookie,
    loads his record plus his club and agent rows and the count of
    pending transfer offers, then runs the search from the ``search``
    form field: a numeric query is matched against ids, any other
    string against exact names, across igralci/agent/klub.
    """
    username = request.get_cookie('username', secret = secret)
    # Map the cookie username to the numeric user id (shared across tables).
    cur.execute('''
    SELECT * FROM uporabnik WHERE uporabnisko_ime=%s
    ''', [username])
    tmp = cur.fetchone()
    ID = tmp[0]
    cur.execute(''' SELECT * FROM igralci WHERE ID = %s''', [ID])
    podatki = cur.fetchone()
    # Positional igralci columns: 1=ime, 2=priimek, 3=drzava, 4=placa,
    # 5=datum_rojstva, 6=vrednost, 7=klub, 8=agent.
    ime = podatki[1]
    priimek = podatki[2]
    drzava = podatki[3]
    placa = podatki[4]
    datum_rojstva = podatki[5]
    vrednost = podatki[6]
    klub_id = podatki[7]
    agent_id = podatki[8]
    # Offers already confirmed by club and agent but not yet by this player.
    cur.execute('''SELECT * FROM prestop WHERE igralec = %s AND stanje_klub = %s AND stanje_agent = %s AND stanje_igralec=%s''',[ID,1,1,0])
    tmp = cur.fetchall()
    stevilo_sporocil = len(tmp)
    cur.execute('''SELECT * FROM klub WHERE id = %s''',[klub_id])
    klub_vse = cur.fetchone()
    klub = klub_vse[1]
    klub_naslov = klub_vse[2]
    cur.execute('''SELECT * FROM agent WHERE id = %s''', [agent_id])
    agent_vse = cur.fetchone()
    agent_ime = agent_vse[1]
    agent_priimek = agent_vse[2]
    # For now search works on ids; searching by surname etc. could be added.
    # It used to crash when a string was entered instead of a number.
    poizvedba = request.forms.get('search')
    if is_int(poizvedba):
        # Numeric query: try the id in all three entity tables.
        cur.execute('''SELECT * FROM igralci WHERE id = %s''', [poizvedba])
        rezultat_poizvedbe_igralec = cur.fetchone()
        cur.execute('''SELECT * FROM agent WHERE id = %s''', [poizvedba])
        rezultat_poizvedbe_agent = cur.fetchone()
        cur.execute('''SELECT * FROM klub WHERE id = %s''', [poizvedba])
        rezultat_poizvedbe_klub = cur.fetchone()
        # Tag each result with the entity kind (1=player, 2=agent, 3=club).
        rezultat_poizvedbe = [[1,rezultat_poizvedbe_igralec], [2,rezultat_poizvedbe_agent], [3,rezultat_poizvedbe_klub]]
        if rezultat_poizvedbe == [[1,None], [2,None], [3,None]]:
            return template("index-igralec.html", klub = klub, klub_naslov = klub_naslov, ime = ime, priimek = priimek, drzava = drzava, placa = placa,
            datum_rojstva = datum_rojstva, vrednost = vrednost, rezultat = rezultat_poizvedbe,
            agent_ime = agent_ime, agent_priimek = agent_priimek, username = username, stevilo_sporocil=stevilo_sporocil,
            napaka = "Oseba z iskanim ID ne obstaja!")
        else:
            return template("index-igralec.html", klub = klub, klub_naslov = klub_naslov, ime = ime, priimek = priimek, drzava = drzava, placa = placa,
            datum_rojstva = datum_rojstva, vrednost = vrednost,
            agent_ime = agent_ime, agent_priimek = agent_priimek, username = username,stevilo_sporocil=stevilo_sporocil,rezultat = rezultat_poizvedbe,
            napaka = None)
    elif isinstance(poizvedba, str):
        # Exact-name search: the name must be typed exactly as stored in
        # the DB; normalising the query string would improve this.
        cur.execute('''SELECT * FROM igralci WHERE ime = %s''', [poizvedba])
        rezultat_poizvedbe_igralec = cur.fetchall()
        cur.execute('''SELECT * FROM agent WHERE ime = %s''', [poizvedba])
        rezultat_poizvedbe_agent = cur.fetchall()
        cur.execute('''SELECT * FROM klub WHERE ime = %s''', [poizvedba])
        rezultat_poizvedbe_klub = cur.fetchall()
        rezultat_poizvedbe = [[1, rezultat_poizvedbe_igralec], [2, rezultat_poizvedbe_agent], [3, rezultat_poizvedbe_klub]]
        print(rezultat_poizvedbe)
        if rezultat_poizvedbe == [[1,[]], [2,[]], [3,[]]]:
            return template("index-igralec.html", klub = klub, klub_naslov = klub_naslov, ime = ime, priimek = priimek, drzava = drzava, placa = placa,
            datum_rojstva = datum_rojstva, vrednost = vrednost,
            agent_ime = agent_ime, agent_priimek = agent_priimek, username = username, stevilo_sporocil=stevilo_sporocil,
            napaka = "Uporabnik z iskanim imenom ne obstaja!", rezultat = rezultat_poizvedbe)
        else:
            return template("index-igralec.html", klub = klub, klub_naslov = klub_naslov, ime = ime, priimek = priimek, drzava = drzava, placa = placa,
            datum_rojstva = datum_rojstva, vrednost = vrednost,stevilo_sporocil=stevilo_sporocil,
            agent_ime = agent_ime, agent_priimek = agent_priimek, username = username, napaka = None, rezultat=rezultat_poizvedbe)
    # NOTE(review): this fallback runs only when ``poizvedba`` is None (no
    # 'search' field submitted), and then ``rezultat_poizvedbe`` is
    # undefined — this path would raise NameError; confirm reachability.
    return template("index-igralec.html", klub = klub, klub_naslov = klub_naslov, ime = ime, priimek = priimek, drzava = drzava, placa = placa,
    datum_rojstva = datum_rojstva, vrednost = vrednost,
    agent_ime = agent_ime, agent_priimek = agent_priimek, username = username,stevilo_sporocil=stevilo_sporocil,
    napaka = None, rezultat = rezultat_poizvedbe)
@post("/index-agent/")
def index_agent_post():
    """Handle the search form POST on the agent index page.

    Resolves the logged-in agent from the signed ``username`` cookie and
    searches players, agents and clubs either by id (numeric query) or
    by exact name (any other string).  Only an error message — not the
    result rows — is passed to the template.
    """
    username = request.get_cookie('username', secret = secret)
    cur.execute('''
    SELECT * FROM uporabnik WHERE uporabnisko_ime=%s
    ''', [username])
    tmp = cur.fetchone()
    ID = tmp[0]
    cur.execute(''' SELECT * FROM agent WHERE ID = %s''', [ID])
    podatki = cur.fetchone()
    ime = podatki[1]
    priimek = podatki[2]
    # BUG FIX: this query previously referenced an undefined name
    # ``agent_id`` (guaranteed NameError); the agent's id is ``ID``.
    cur.execute('''SELECT * FROM prestop WHERE agent = %s AND stanje_klub = %s AND stanje_agent = %s''', [ID, 1, 0])
    tmp = cur.fetchall()
    stevilo_sporocil = len(tmp)
    poizvedba = request.forms.get('search')
    if is_int(poizvedba):
        # Numeric query: try the id in all three entity tables.
        cur.execute('''SELECT * FROM igralci WHERE id = %s''', [poizvedba])
        rezultat_poizvedbe_igralec = cur.fetchone()
        cur.execute('''SELECT * FROM agent WHERE id = %s''', [poizvedba])
        rezultat_poizvedbe_agent = cur.fetchone()
        cur.execute('''SELECT * FROM klub WHERE id = %s''', [poizvedba])
        rezultat_poizvedbe_klub = cur.fetchone()
        rezultat_poizvedbe = [rezultat_poizvedbe_igralec, rezultat_poizvedbe_agent, rezultat_poizvedbe_klub]
        if rezultat_poizvedbe == [None, None, None]:
            return template("index-agent.html", ime = ime, priimek = priimek, username = username,
                            napaka = "Uporabnik z iskanim ID ne obstaja!")
        else:
            return template("index-agent.html", ime = ime, priimek = priimek, username = username, napaka = None)
    elif isinstance(poizvedba, str):
        # Exact-name search: the name must be typed exactly as stored in
        # the DB; normalising the query string would improve this.
        cur.execute('''SELECT * FROM igralci WHERE ime = %s''', [poizvedba])
        rezultat_poizvedbe_igralec = cur.fetchone()
        cur.execute('''SELECT * FROM agent WHERE ime = %s''', [poizvedba])
        rezultat_poizvedbe_agent = cur.fetchone()
        cur.execute('''SELECT * FROM klub WHERE ime = %s''', [poizvedba])
        rezultat_poizvedbe_klub = cur.fetchone()
        rezultat_poizvedbe = [rezultat_poizvedbe_igralec, rezultat_poizvedbe_agent, rezultat_poizvedbe_klub]
        if rezultat_poizvedbe == [None, None, None]:
            return template("index-agent.html", ime = ime, priimek = priimek, username = username,
                            napaka = "Uporabnik z iskanim imenom ne obstaja!")
        else:
            return template("index-agent.html", ime = ime, priimek = priimek, username = username, napaka = None)
    # Fallback: no 'search' field was submitted at all.
    return template("index-agent.html", ime = ime, priimek = priimek, username = username, napaka = None)
@get("/prestopi/")
def prestopi_get():
    """Show the logged-in player his fully-confirmed transfer offers.

    An offer is shown once both club and agent accepted it
    (stanje_klub = stanje_agent = 1) while the player has not decided
    yet (stanje_igralec = 0).  For every offer the target club's name,
    address and total squad value are collected for display.
    """
    username = request.get_cookie('username', secret = secret)
    # Map the cookie username to the player's user id.
    cur.execute('''SELECT * FROM uporabnik WHERE uporabnisko_ime=%s
    ''', [username])
    tmp = cur.fetchone()
    igralec_id = tmp[0]
    cur.execute('''SELECT * FROM prestop WHERE igralec = %s AND stanje_klub = %s AND stanje_agent = %s AND stanje_igralec=%s''',
    [igralec_id, 1,1,0])
    v_ponudbe = cur.fetchall()
    nogometasi = []
    cur.execute(''' SELECT * FROM igralci WHERE ID = %s''', [igralec_id])
    podatki = cur.fetchone()
    ime = podatki[1]
    priimek = podatki[2]
    # Same filter again, only to count offers for the notification badge.
    cur.execute('''SELECT * FROM prestop WHERE igralec = %s AND stanje_klub = %s AND stanje_agent = %s AND stanje_igralec=%s''',[igralec_id,1,1,0])
    tmp = cur.fetchall()
    stevilo_sporocil = len(tmp)
    for v_klub in v_ponudbe:
        # prestop columns (positional): 1=cena, 2=placa, 6=v_klub.
        cur.execute('''SELECT * FROM klub WHERE id=%s''',[v_klub[6]])
        tmp = cur.fetchone()
        klub_ime = tmp[1]
        klub_naslov = tmp[2]
        # Club value = sum of the values of all its current players.
        cur.execute('''SELECT vrednost FROM igralci WHERE klub=%s''',[v_klub[6]])
        tmp2=cur.fetchall()
        vrednost = 0
        placa = v_klub[2]
        cena_prestopa = v_klub[1]
        for i in tmp2:
            vrednost += i[0]
        nogometasi.append([klub_ime,klub_naslov,vrednost, placa, cena_prestopa])
    return template("prestopi.html", v_ponudbe = v_ponudbe,
    nogometasi = nogometasi, ime = ime, priimek = priimek, napaka = None,
    stevilo_sporocil = stevilo_sporocil, username = username,uspesen_prestop=None)
@post("/prestopi/")
def prestopi_post():
    """Let the player accept ('s') or decline any of his confirmed offers.

    The pressed button's value is an action letter followed by the
    ``prestop`` row id.  On accept the player's club, salary and value
    are updated from the *accepted* offer row; either way the offer's
    ``stanje_igralec`` is updated and the change is committed.
    """
    username = request.get_cookie('username', secret = secret)
    cur.execute('''SELECT * FROM uporabnik WHERE uporabnisko_ime=%s
    ''', [username])
    tmp = cur.fetchone()
    igralec_id = tmp[0]
    cur.execute('''SELECT * FROM prestop WHERE igralec = %s AND stanje_klub = %s AND stanje_agent = %s AND stanje_igralec=%s''',
                [igralec_id, 1, 1, 0])
    v_ponudbe = cur.fetchall()
    nogometasi = []
    cur.execute(''' SELECT * FROM igralci WHERE ID = %s''', [igralec_id])
    podatki = cur.fetchone()
    ime = podatki[1]
    priimek = podatki[2]
    # Same filter again, only to count offers for the notification badge.
    cur.execute('''SELECT * FROM prestop WHERE igralec = %s AND stanje_klub = %s AND stanje_agent = %s AND stanje_igralec=%s''', [igralec_id, 1, 1, 0])
    tmp = cur.fetchall()
    stevilo_sporocil = len(tmp)
    for v_klub in v_ponudbe:
        # prestop columns (positional): 1=cena, 2=placa, 6=v_klub.
        cur.execute('''SELECT * FROM klub WHERE id=%s''', [v_klub[6]])
        tmp = cur.fetchone()
        klub_ime = tmp[1]
        klub_naslov = tmp[2]
        # Club value = sum of the values of all its current players.
        cur.execute('''SELECT vrednost FROM igralci WHERE klub=%s''', [v_klub[6]])
        tmp2 = cur.fetchall()
        vrednost = 0
        placa = v_klub[2]
        cena_prestopa = v_klub[1]
        for i in tmp2:
            vrednost += i[0]
        nogometasi.append([klub_ime, klub_naslov, vrednost, placa, cena_prestopa])
    gumb = request.forms.get('select')
    akcija = gumb[0]
    # Despite the original name, this is the prestop (offer) row id.
    ponudba_id = int(gumb[1:])
    if akcija == 's':
        # BUG FIX: the original used the leftover loop variable ``v_klub``
        # (i.e. whichever offer happened to be last in the list) instead
        # of the offer the player actually accepted.  Re-read the chosen
        # offer row explicitly.
        cur.execute('''SELECT * FROM prestop WHERE id = %s''', [ponudba_id])
        sprejeta = cur.fetchone()
        cur.execute('''UPDATE prestop SET stanje_igralec = %s WHERE id = %s''',
                    [1, ponudba_id])
        cur.execute('''UPDATE igralci SET klub = %s WHERE id =%s''',
                    [sprejeta[6], igralec_id])
        cur.execute('''UPDATE igralci SET plača = %s WHERE id =%s''',
                    [sprejeta[2], igralec_id])
        cur.execute('''UPDATE igralci SET vrednost = %s WHERE id =%s''',
                    [sprejeta[1], igralec_id])
        # BUG FIX: persist the transfer — other write paths in this app
        # commit explicitly (e.g. the agent-change handler).
        baza.commit()
        return template("prestopi.html", v_ponudbe = v_ponudbe,
                        nogometasi = nogometasi, ime = ime, priimek = priimek, napaka = None,
                        stevilo_sporocil = stevilo_sporocil, username = username,
                        uspesen_prestop="Uspešno ste opravili prestop, vaša nova plača je "+str(sprejeta[2])+", vaša nova vrednost pa je "+str(sprejeta[1]))
    else:
        # Decline: mark the offer rejected and refresh the pending list.
        cur.execute('''UPDATE prestop SET stanje_igralec = %s WHERE id = %s''',
                    [2, ponudba_id])
        baza.commit()
        cur.execute('''SELECT * FROM prestop WHERE igralec = %s AND stanje_klub = %s AND stanje_agent =%s And stanje_igralec=%s''',
                    [igralec_id, 1, 1, 0])
        v_ponudbe = cur.fetchall()
        return template("prestopi.html", v_ponudbe = v_ponudbe,
                        nogometasi = nogometasi, ime = ime, priimek = priimek, napaka = None,
                        stevilo_sporocil = stevilo_sporocil, username = username, uspesen_prestop=None)
@get("/register/")
def register_get():
    """Serve the registration form with every field blank."""
    # Redirect away if someone is already logged in; no auto-login here.
    get_user(auto_login = False, auto_redir = True)
    # All template variables start out empty (None).
    prazna_polja = dict.fromkeys(
        ['username', 'ime', 'priimek', 'vloga', 'email',
         'DatumRojstva', 'geslo', 'geslo2', 'naslov', 'država', 'napaka'])
    return template("register.html", **prazna_polja)
@post("/register/", method = 'post')
def nov_zahtevek():
    """Process a registration request for an agent, player or club.

    ``vloga`` selects the role ('1' agent, '2' player, '3' club).  The
    matching set of form fields is read, the username is checked for
    uniqueness and the two passwords for equality; then a ``uporabnik``
    row plus the role-specific row is inserted and committed.
    """
    vloga = request.forms.get('vloga')
    # Defaults so the template calls below never touch an undefined name,
    # whichever branch ran (the club branch previously left ``priimek``
    # and ``email`` unset, and an unknown vloga left everything unset).
    username = ime = priimek = email = None
    DatumRojstva = država = naslov = None
    geslo = geslo2 = None
    # Player registration fields
    if vloga == '2':
        username = request.forms.get('username_igralec')
        ime = request.forms.get('ime_igralec')
        priimek = request.forms.get('priimek_igralec')
        email = request.forms.get('email')
        DatumRojstva = request.forms.get('DatumRojstva_igralec')
        država = request.forms.get('drzava')
        geslo = request.forms.get('geslo_igralec')
        geslo2 = request.forms.get('geslo2_igralec')
    # Club registration fields
    elif vloga == '3':
        # BUG FIX: these were stored into ``username_klub``/``ime_klub``/
        # ``naslov_klub`` while the rest of the function reads
        # ``username``/``ime``/``naslov`` — club registration raised
        # NameError before.
        username = request.forms.get('username_klub')
        ime = request.forms.get('ime_klub')
        naslov = request.forms.get('naslov_klub')
        geslo = request.forms.get('geslo_klub')
        geslo2 = request.forms.get('geslo2_klub')
    # Agent registration fields
    elif vloga == '1':
        username = request.forms.get('username_agent')
        ime = request.forms.get('ime_agent')
        priimek = request.forms.get('priimek_agent')
        email = request.forms.get('email_agent')
        geslo = request.forms.get('geslo_agent')
        geslo2 = request.forms.get('geslo2_agent')
    c1 = baza.cursor()
    # Reject an already-taken username.
    c1.execute("SELECT * FROM uporabnik WHERE uporabnisko_ime=%s",
               [username])
    tmp = c1.fetchone()
    if tmp is not None:
        return template("register.html", username = username, ime = ime,
                        priimek = priimek, vloga = vloga, email = email,
                        geslo = geslo, geslo2 = geslo2, napaka="Uporabniško ime je že zavzeto, izberi novega.")
    # The two password fields must match.
    if geslo != geslo2:
        return template("register.html", username = username, ime = ime,
                        priimek = priimek, vloga = vloga, email = email, napaka="Gesli se ne ujemata!")
    # All checks passed — allocate the next id after the current maximum.
    # (``id`` renamed to ``nov_id`` so the builtin is not shadowed.)
    c1.execute("SELECT id FROM uporabnik ORDER BY id DESC LIMIT 1")
    dolzina = c1.fetchone()[0]
    nov_id = dolzina + 1
    c = baza.cursor()
    # Insert the agent
    if vloga == '1':
        c.execute("""INSERT INTO uporabnik (id, uporabnisko_ime, geslo, vloga)
                     VALUES (%s, %s, %s, %s)""",
                  [nov_id, username, geslo, 'agent;'])
        c.execute("""INSERT INTO agent (id, ime, priimek)
                     VALUES (%s, %s, %s) """,
                  [nov_id, ime, priimek])
        # BUG FIX: persist the new rows (other write paths commit).
        baza.commit()
        return template("register.html", username = None, ime = None,
                        priimek = None, vloga = None, email = None,
                        starost = None, geslo = None, geslo2 = None,
                        naslov = None, napaka="Prošnja poslana uspešno!")
    # Insert the player
    elif vloga == '2':
        c.execute("""INSERT INTO uporabnik (id, uporabnisko_ime, geslo, vloga)
                     VALUES (%s, %s, %s, %s)""",
                  [nov_id, username, geslo, 'igralec;'])
        c.execute("""INSERT INTO igralci (id, ime, priimek, država, plača, datum_rojstva, vrednost, klub, agent)
                     VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s) """,
                  [nov_id, ime, priimek, država, '0', DatumRojstva, '0', None, None])
        baza.commit()
        return template("register.html", username = None, ime = None,
                        priimek = None, vloga = None, email = None,
                        starost = None, geslo = None, geslo2 = None,
                        naslov = None, napaka="Prošnja poslana uspešno!")
    # Insert the club
    elif vloga == '3':
        c.execute("""INSERT INTO uporabnik (id, uporabnisko_ime, geslo, vloga)
                     VALUES (%s, %s, %s, %s)""",
                  [nov_id, username, geslo, 'klub;'])
        c.execute("""INSERT INTO klub (id, ime, naslov)
                     VALUES (%s, %s, %s) """,
                  [nov_id, ime, naslov])
        baza.commit()
        return template("register.html", username = None, ime = None,
                        priimek = None, vloga = None, email = None,
                        starost = None, geslo = None, geslo2 = None,
                        naslov = None, napaka="Prošnja poslana uspešno!")
@get("/logout/")
def logout():
    """Drop the login cookie, then send the user back to the login page."""
    # Deleting the signed cookie is what actually logs the user out.
    response.delete_cookie('username', path='/', secret=secret)
    redirect('/login/')
@get("/forget-pass/")
def forget_pass_get():
    """Serve the forgotten-password form."""
    # Redirect away if someone is already logged in; no auto-login here.
    get_user(auto_login = False, auto_redir = True)
    return template("forget-pass.html")
@get("/form/")
def form_get():
    """Serve the transfer-offer form for the logged-in agent.

    Collects the agent's own data, the players he represents and the
    full list of clubs to populate the form's select boxes.
    """
    username = request.get_cookie('username', secret = secret)
    # Cookie username -> agent's user id.
    cur.execute('''SELECT * FROM uporabnik WHERE uporabnisko_ime=%s
    ''', [username])
    uporabnik_vrstica = cur.fetchone()
    agent_id = uporabnik_vrstica[0]
    cur.execute(''' SELECT * FROM agent WHERE ID = %s''', [agent_id])
    agent_vrstica = cur.fetchone()
    ime = agent_vrstica[1]
    print(ime)
    priimek = agent_vrstica[2]
    # Players represented by this agent.
    cur.execute('''SELECT * FROM igralci WHERE agent=%s''',
                [agent_id])
    agentovi_igralci = cur.fetchall()
    # Every club is a possible offer target.
    cur.execute('''SELECT * FROM klub''')
    vsi_klubi = cur.fetchall()
    # NOTE: not quite right yet — a player can still be offered to the
    # club he already plays for.
    return template("form.html", username=username, ime=ime, priimek=priimek,
                    agentovi_igralci = agentovi_igralci,
                    vsi_klubi = vsi_klubi, sporocilo = None)
@post("/form/")
def form_post():
    """Process the agent's transfer-offer form.

    Validates the salary and transfer fee, rejects an offer to the club
    the player is already in, and otherwise inserts a new ``prestop``
    row pre-approved by the agent (stanje_agent = 1) and commits it.
    """
    username = request.get_cookie('username', secret = secret)
    cur.execute('''SELECT * FROM uporabnik WHERE uporabnisko_ime=%s
    ''', [username])
    tmp = cur.fetchone()
    agent_id = tmp[0]
    cur.execute(''' SELECT * FROM agent WHERE ID = %s''', [agent_id])
    podatki = cur.fetchone()
    ime = podatki[1]
    priimek = podatki[2]
    cur.execute('''SELECT * FROM igralci WHERE agent=%s''',
                [agent_id])
    agentovi_igralci = cur.fetchall()
    cur.execute('''SELECT * FROM klub''')
    vsi_klubi = cur.fetchall()
    # Form fields: transfer fee, salary, selected player id, target club id.
    znesek_prestopa = request.forms.get('cc-payment')
    placa = request.forms.get('cc-placa')
    select = request.forms.get('select')
    izbran_klub = request.forms.get('izbran_klub')
    cur.execute('''SELECT klub FROM igralci WHERE id = %s''', [select])
    iz_kluba = cur.fetchone()[0]
    if iz_kluba == int(izbran_klub):
        # Offering a player to his current club makes no sense.
        return template("form.html", username=username, ime=ime, priimek=priimek, agentovi_igralci = agentovi_igralci,
                        vsi_klubi = vsi_klubi,
                        sporocilo = 'Igralec je že v tem klubu.')
    else:
        danes = date.today()
        # checkbox1 == 'option1' clears the flag stored in the
        # ``renegotiable`` column — presumably "not renegotiable";
        # TODO confirm against the template.
        checkbox = request.forms.get('checkbox1')
        if checkbox == 'option1':
            oznaka = False
        else:
            oznaka = True
        if is_int(placa) and is_int(znesek_prestopa) and int(placa) > 0 and int(znesek_prestopa) > 1000:
            cur.execute('''INSERT INTO prestop (cena, placa, datum, igralec,iz_kluba,
                        v_klub, agent,
                        stanje_agent, stanje_klub, stanje_igralec, renegotiable)
                        VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)''',
                        [znesek_prestopa, placa, danes, select, iz_kluba, izbran_klub,
                         agent_id, 1, 0, 0, oznaka])
            # BUG FIX: persist the new offer (other write paths commit).
            baza.commit()
            return template("form.html", username=username, ime=ime, priimek=priimek, agentovi_igralci = agentovi_igralci,
                            vsi_klubi = vsi_klubi, sporocilo = 'Ponudba uspešno poslana.')
        else:
            return template("form.html", username=username, ime=ime, priimek=priimek, agentovi_igralci = agentovi_igralci,
                            vsi_klubi = vsi_klubi,
                            sporocilo = 'Podatki niso pravilni, vnesite številsko vrednost, cena mora biti večja od 1000, placa pa vecja od 0.')
@get("/form-klub/")
def form_klub_get():
    """Serve the club's transfer-offer form.

    BUG FIX: renamed from ``form_get`` — this module defined ``form_get``
    twice (for /form/ and /form-klub/), so the later definition shadowed
    the earlier one at module level.  The route and behaviour are
    unchanged (Bottle binds handlers at decoration time).
    """
    username = request.get_cookie('username', secret = secret)
    cur.execute('''SELECT * FROM uporabnik WHERE uporabnisko_ime=%s
    ''', [username])
    tmp = cur.fetchone()
    klub_id = tmp[0]
    # Every player not currently at this club can receive an offer.
    cur.execute('''SELECT * FROM igralci WHERE klub != %s''', [klub_id])
    vsi_ostali_igralci = cur.fetchall()
    # Count of incoming offers the club has not yet acted on.
    cur.execute('''SELECT * FROM prestop WHERE v_klub = %s AND stanje_klub = %s''', [klub_id, 0])
    tmp = cur.fetchall()
    stevilo_sporocil = len(tmp)
    cur.execute(''' SELECT * FROM klub WHERE ID = %s''', [klub_id])
    podatki = cur.fetchone()
    ime = podatki[1]
    naslov = podatki[2]
    # NOTE: not quite right yet — a player can still be offered to the
    # club he already plays for.
    return template("form-klub.html", stevilo_sporocil = stevilo_sporocil,
                    vsi_ostali_igralci = vsi_ostali_igralci,
                    sporocilo = None, napaka = None, ime = ime, naslov = naslov, username = username)
@post("/form-klub/")
def form_klub_post():
    """Process the club's transfer-offer form.

    BUG FIX: renamed from ``form_post`` (duplicate definition shadowing
    the /form/ handler at module level; route unchanged).  Validates
    that salary and fee are numeric, then inserts a ``prestop`` row
    pre-approved by the club (stanje_klub = 1) and commits it.
    """
    username = request.get_cookie('username', secret = secret)
    cur.execute('''SELECT * FROM uporabnik WHERE uporabnisko_ime=%s
    ''', [username])
    tmp = cur.fetchone()
    klub_id = tmp[0]
    cur.execute('''SELECT * FROM igralci WHERE klub != %s''', [klub_id])
    vsi_ostali_igralci = cur.fetchall()
    # Form fields: transfer fee, salary, selected player id.
    znesek_prestopa = request.forms.get('cc-payment')
    placa = request.forms.get('cc-placa')
    select = request.forms.get('select')
    cur.execute('''SELECT klub FROM igralci WHERE id = %s''', [select])
    iz_kluba = cur.fetchone()[0]
    danes = date.today()
    # The offer also has to be confirmed by the player's agent.
    cur.execute('''SELECT agent FROM igralci WHERE id = %s''', [select])
    agent_id = cur.fetchone()[0]
    cur.execute(''' SELECT * FROM klub WHERE ID = %s''', [klub_id])
    podatki = cur.fetchone()
    ime = podatki[1]
    naslov = podatki[2]
    cur.execute('''SELECT * FROM prestop WHERE v_klub = %s AND stanje_klub = %s''', [klub_id, 0])
    tmp = cur.fetchall()
    stevilo_sporocil = len(tmp)
    # checkbox1 == 'option1' clears the flag stored in ``renegotiable``;
    # presumably "not renegotiable" — TODO confirm against the template.
    checkbox = request.forms.get('checkbox1')
    if checkbox == 'option1':
        oznaka = False
    else:
        oznaka = True
    # Guard clause replaces the original ``if cond: None / else: return``.
    if not (is_int(placa) and is_int(znesek_prestopa)):
        return template("form-klub.html", vsi_ostali_igralci = vsi_ostali_igralci,
                        sporocilo = 'Vnesi številsko vrednost za plačo in ceno prestopa.',
                        stevilo_sporocil = stevilo_sporocil, napaka = None, ime = ime,
                        naslov = naslov, username = username)
    cur.execute('''INSERT INTO prestop (cena,placa, datum, igralec,iz_kluba,
                v_klub, agent,
                stanje_agent, stanje_klub, stanje_igralec, renegotiable)
                VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)''',
                [znesek_prestopa, placa, danes, select, iz_kluba, klub_id,
                 agent_id, 0, 1, 0, oznaka])
    # BUG FIX: persist the new offer (other write paths commit).
    baza.commit()
    return template("form-klub.html", vsi_ostali_igralci = vsi_ostali_igralci,
                    sporocilo = 'Ponudba uspešno poslana.', stevilo_sporocil = stevilo_sporocil, napaka = None,
                    ime = ime, naslov = naslov, username = username)
@get("/ponudbe-zame/")
def ponudbe_get():
    """Show the logged-in club its pending incoming transfer offers.

    Pending means stanje_klub = 0 for an offer targeting this club; for
    every offer the affected player's personal data is collected for
    display alongside the offer row.
    """
    username = request.get_cookie('username', secret = secret)
    # Map the cookie username to the club's user id.
    cur.execute('''SELECT * FROM uporabnik WHERE uporabnisko_ime=%s
    ''', [username])
    tmp = cur.fetchone()
    klub_id = tmp[0]
    cur.execute('''SELECT * FROM prestop WHERE v_klub = %s AND stanje_klub = %s''',
    [klub_id, 0])
    v_ponudbe = cur.fetchall()
    nogometasi = []
    cur.execute(''' SELECT * FROM klub WHERE ID = %s''', [klub_id])
    podatki = cur.fetchone()
    ime = podatki[1]
    naslov = podatki[2]
    # Same filter again, only to count offers for the notification badge.
    cur.execute('''SELECT * FROM prestop WHERE v_klub = %s AND stanje_klub = %s''',[klub_id,0])
    tmp = cur.fetchall()
    stevilo_sporocil = len(tmp)
    for igralec in v_ponudbe:
        # prestop column 4 holds the player id of the offer.
        cur.execute('''SELECT * FROM igralci WHERE id=%s''',[igralec[4]])
        tmp = cur.fetchone()
        igralec_ime = tmp[1]
        igralec_priimek = tmp[2]
        igralec_drzava = tmp[3]
        igralec_rojstvo = tmp[5]
        igralec_vrednost = tmp[6]
        nogometasi.append([igralec_ime,igralec_priimek,igralec_drzava,igralec_rojstvo,
        igralec_vrednost])
    return template("ponudbe-zame.html", v_ponudbe = v_ponudbe,
    nogometasi = nogometasi, ime = ime, naslov = naslov, napaka = None,
    stevilo_sporocil = stevilo_sporocil, username = username)
@post("/ponudbe-zame/")
def ponudbe_post():
    """Let the club accept ('s'), counter ('p') or decline (any other
    action letter) one of its pending incoming offers.

    The pressed button's value is an action letter followed by the
    ``prestop`` row id; every state change is committed.
    """
    username = request.get_cookie('username', secret = secret)
    cur.execute('''SELECT * FROM uporabnik WHERE uporabnisko_ime=%s
    ''', [username])
    tmp = cur.fetchone()
    klub_id = tmp[0]
    cur.execute('''SELECT * FROM prestop WHERE v_klub = %s AND stanje_klub = %s''',
                [klub_id, 0])
    v_ponudbe = cur.fetchall()
    nogometasi = []
    cur.execute(''' SELECT * FROM klub WHERE ID = %s''', [klub_id])
    podatki = cur.fetchone()
    ime = podatki[1]
    naslov = podatki[2]
    # Same filter again, only to count offers for the notification badge.
    cur.execute('''SELECT * FROM prestop WHERE v_klub = %s AND stanje_klub = %s''', [klub_id, 0])
    tmp = cur.fetchall()
    stevilo_sporocil = len(tmp)
    for igralec in v_ponudbe:
        # prestop column 4 holds the player id of the offer.
        cur.execute('''SELECT * FROM igralci WHERE id=%s''', [igralec[4]])
        tmp = cur.fetchone()
        igralec_ime = tmp[1]
        igralec_priimek = tmp[2]
        igralec_drzava = tmp[3]
        igralec_rojstvo = tmp[5]
        igralec_vrednost = tmp[6]
        nogometasi.append([igralec_ime, igralec_priimek, igralec_drzava, igralec_rojstvo,
                           igralec_vrednost])
    gumb = request.forms.get('select')
    akcija = gumb[0]
    # Despite the original name, this is the prestop (offer) row id.
    ponudba_id = int(gumb[1:])
    if akcija == 's':
        cur.execute('''UPDATE prestop SET stanje_klub = %s WHERE id = %s''',
                    [1, ponudba_id])
        # BUG FIX: persist the state change (other write paths commit).
        baza.commit()
        cur.execute('''SELECT * FROM prestop WHERE v_klub = %s AND stanje_klub = %s''',
                    [klub_id, 0])
        v_ponudbe = cur.fetchall()
    elif akcija == 'p':
        # Counter-offer: mark the offer rejected and reopen the offer form
        # pre-filled with just this player.
        cur.execute('''SELECT igralec FROM prestop WHERE id = %s''', [ponudba_id])
        tmp = cur.fetchone()[0]
        cur.execute('''SELECT id, ime, priimek FROM igralci WHERE id=%s''', [tmp])
        tmp = cur.fetchone()
        cur.execute('''UPDATE prestop SET stanje_klub = %s WHERE id = %s''',
                    [2, ponudba_id])
        baza.commit()
        cur.execute('''SELECT * FROM prestop WHERE v_klub = %s AND stanje_klub = %s''',
                    [klub_id, 0])
        v_ponudbe = cur.fetchall()
        return template("form-klub.html", vsi_ostali_igralci = [tmp],
                        sporocilo = 'Če zapustiš to stran, se bo štelo, kot da si ponudbo zavrnil.',
                        ime = ime, naslov = naslov, stevilo_sporocil = stevilo_sporocil, napaka = None,
                        username = username)
    else:
        cur.execute('''UPDATE prestop SET stanje_klub = %s WHERE id = %s''',
                    [2, ponudba_id])
        baza.commit()
        cur.execute('''SELECT * FROM prestop WHERE v_klub = %s AND stanje_klub = %s''',
                    [klub_id, 0])
        v_ponudbe = cur.fetchall()
    return template("ponudbe-zame.html", v_ponudbe = v_ponudbe,
                    nogometasi=nogometasi, ime = ime, naslov = naslov, stevilo_sporocil = stevilo_sporocil,
                    napaka = None, username = username)
@get("/ponudba-zame-agent/")
def ponudbe_agent_get():
    """Show the logged-in agent his pending transfer offers.

    Pending means stanje_agent = 0 for this agent; for every offer the
    affected player's personal data is collected for display.
    """
    username = request.get_cookie('username', secret = secret)
    # Map the cookie username to the agent's user id.
    cur.execute('''SELECT * FROM uporabnik WHERE uporabnisko_ime=%s
    ''', [username])
    tmp = cur.fetchone()
    agent_id = tmp[0]
    print(agent_id)  # debug output
    cur.execute('''SELECT * FROM prestop WHERE agent = %s AND stanje_agent = %s''',
    [agent_id, 0])
    v_ponudbe = cur.fetchall()
    print(v_ponudbe)  # debug output
    nogometasi = []
    cur.execute(''' SELECT * FROM agent WHERE ID = %s''', [agent_id])
    podatki = cur.fetchone()
    print(podatki)  # debug output
    ime = podatki[1]
    priimek = podatki[2]
    # Same filter again, only to count offers for the notification badge.
    cur.execute('''SELECT * FROM prestop WHERE agent = %s AND stanje_agent = %s''',[agent_id,0])
    tmp = cur.fetchall()
    stevilo_sporocil = len(tmp)
    for igralec in v_ponudbe:
        # prestop column 4 holds the player id of the offer.
        cur.execute('''SELECT * FROM igralci WHERE id=%s''',[igralec[4]])
        tmp = cur.fetchone()
        igralec_ime = tmp[1]
        igralec_priimek = tmp[2]
        igralec_drzava = tmp[3]
        igralec_rojstvo = tmp[5]
        igralec_vrednost = tmp[6]
        nogometasi.append([igralec_ime,igralec_priimek,igralec_drzava,igralec_rojstvo,
        igralec_vrednost])
    return template("ponudba-zame-agent.html", v_ponudbe = v_ponudbe,
    nogometasi = nogometasi, ime = ime, priimek = priimek, napaka = None,
    stevilo_sporocil = stevilo_sporocil, username = username)
@post("/ponudba-zame-agent/")
def ponudbe_agent_post():
    """Let the agent accept ('s'), counter ('p') or decline (any other
    action letter) one of his pending offers.

    The pressed button's value is an action letter followed by the
    ``prestop`` row id; every state change is committed.
    """
    username = request.get_cookie('username', secret = secret)
    cur.execute('''SELECT * FROM uporabnik WHERE uporabnisko_ime=%s
    ''', [username])
    tmp = cur.fetchone()
    agent_id = tmp[0]
    cur.execute('''SELECT * FROM prestop WHERE agent = %s AND stanje_agent = %s''',
                [agent_id, 0])
    v_ponudbe = cur.fetchall()
    nogometasi = []
    cur.execute(''' SELECT * FROM agent WHERE ID = %s''', [agent_id])
    podatki = cur.fetchone()
    ime = podatki[1]
    priimek = podatki[2]
    # Same filter again, only to count offers for the notification badge.
    cur.execute('''SELECT * FROM prestop WHERE agent = %s AND stanje_agent = %s''', [agent_id, 0])
    tmp = cur.fetchall()
    stevilo_sporocil = len(tmp)
    for igralec in v_ponudbe:
        # prestop column 4 holds the player id of the offer.
        cur.execute('''SELECT * FROM igralci WHERE id=%s''', [igralec[4]])
        tmp = cur.fetchone()
        igralec_ime = tmp[1]
        igralec_priimek = tmp[2]
        igralec_drzava = tmp[3]
        igralec_rojstvo = tmp[5]
        igralec_vrednost = tmp[6]
        nogometasi.append([igralec_ime, igralec_priimek, igralec_drzava, igralec_rojstvo,
                           igralec_vrednost])
    gumb = request.forms.get('select')
    akcija = gumb[0]
    # Despite the original name, this is the prestop (offer) row id.
    ponudba_id = int(gumb[1:])
    if akcija == 's':
        cur.execute('''UPDATE prestop SET stanje_agent = %s WHERE id = %s''',
                    [1, ponudba_id])
        # BUG FIX: persist the state change (other write paths commit).
        baza.commit()
        cur.execute('''SELECT * FROM prestop WHERE agent = %s AND stanje_agent = %s''',
                    [agent_id, 0])
        v_ponudbe = cur.fetchall()
    elif akcija == 'p':
        # Counter-offer: mark the offer rejected and reopen the offer form
        # pre-filled with just this player.
        cur.execute('''SELECT igralec FROM prestop WHERE id = %s''', [ponudba_id])
        tmp = cur.fetchone()[0]
        cur.execute('''SELECT id, ime, priimek FROM igralci WHERE id=%s''', [tmp])
        tmp = cur.fetchone()
        cur.execute('''UPDATE prestop SET stanje_agent = %s WHERE id = %s''',
                    [2, ponudba_id])
        baza.commit()
        cur.execute('''SELECT * FROM prestop WHERE agent = %s AND stanje_agent = %s''',
                    [agent_id, 0])
        v_ponudbe = cur.fetchall()
        # BUG FIX: the original passed an undefined ``naslov`` here
        # (NameError at runtime) — an agent has no address row, so None
        # is supplied for the template's naslov slot.
        return template("form-klub.html", vsi_ostali_igralci = [tmp],
                        sporocilo = 'Če zapustiš to stran, se bo štelo, kot da si ponudbo zavrnil.',
                        ime = ime, naslov = None, stevilo_sporocil = stevilo_sporocil, napaka = None,
                        username = username)
    else:
        cur.execute('''UPDATE prestop SET stanje_agent = %s WHERE id = %s''',
                    [2, ponudba_id])
        baza.commit()
        cur.execute('''SELECT * FROM prestop WHERE agent = %s AND stanje_agent = %s''',
                    [agent_id, 0])
        v_ponudbe = cur.fetchall()
    return template("ponudba-zame-agent.html", v_ponudbe = v_ponudbe,
                    nogometasi=nogometasi, ime = ime, priimek = priimek, stevilo_sporocil = stevilo_sporocil,
                    napaka = None, username = username)
# Start Bottle's built-in development server on localhost:8080.
run(host='localhost', port=8080)
| 41.968599
| 162
| 0.608595
| 6,252
| 52,125
| 4.943058
| 0.066859
| 0.045949
| 0.06627
| 0.075071
| 0.830022
| 0.805009
| 0.79624
| 0.784106
| 0.775725
| 0.766988
| 0
| 0.008936
| 0.261487
| 52,125
| 1,241
| 163
| 42.002417
| 0.793869
| 0.069257
| 0
| 0.769467
| 0
| 0.012295
| 0.233132
| 0.003254
| 0
| 0
| 0
| 0
| 0
| 1
| 0.030738
| false
| 0.006148
| 0.006148
| 0
| 0.096311
| 0.018443
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
742d0c5e6dfce17d9b5c1d865679491a76a26bae
| 10,088
|
py
|
Python
|
tests/frontend/test_authentication.py
|
FlorianRhiem/sampledb
|
3363adbe5f2771d1178a5b6d530be960ce41c560
|
[
"MIT"
] | null | null | null |
tests/frontend/test_authentication.py
|
FlorianRhiem/sampledb
|
3363adbe5f2771d1178a5b6d530be960ce41c560
|
[
"MIT"
] | null | null | null |
tests/frontend/test_authentication.py
|
FlorianRhiem/sampledb
|
3363adbe5f2771d1178a5b6d530be960ce41c560
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
"""
import requests
import pytest
from bs4 import BeautifulSoup
import sampledb
import sampledb.models
from tests.test_utils import flask_server, app
@pytest.fixture
def user(flask_server):
    """Create and commit a basic person-type user for the tests.

    Runs inside the Flask app context of the test server.  The assertion
    on ``user.id`` both sanity-checks the commit and forces an attribute
    refresh so the returned instance carries its primary key.
    """
    with flask_server.app.app_context():
        user = sampledb.models.User(name="Basic User", email="example@fz-juelich.de", type=sampledb.models.UserType.PERSON)
        sampledb.db.session.add(user)
        sampledb.db.session.commit()
        # force attribute refresh
        assert user.id is not None
    return user
def test_sign_in(flask_server):
    """Sign in through the web form and verify the session is logged in."""
    session = requests.session()
    # A fresh session starts logged out.
    assert session.get(flask_server.base_url + 'users/me/loginstatus').json() is False
    # initially, a link to the sign in page will be displayed
    r = session.get(flask_server.base_url)
    assert r.status_code == 200
    assert '/users/me/sign_in' in r.content.decode('utf-8')
    # the sign in page contains a form with fields for username, password and a remember_me checkbox
    r = session.get(flask_server.base_url + 'users/me/sign_in')
    assert r.status_code == 200
    document = BeautifulSoup(r.content, 'html.parser')
    assert document.find('input', {'name': 'username', 'type': 'text'}) is not None
    assert document.find('input', {'name': 'password', 'type': 'password'}) is not None
    assert document.find('input', {'name': 'remember_me', 'type': 'checkbox'}) is not None
    # it also contains a hidden CSRF token
    assert document.find('input', {'name': 'csrf_token', 'type': 'hidden'}) is not None
    csrf_token = document.find('input', {'name': 'csrf_token'})['value']
    # submit the form with the LDAP test credentials from the app config
    r = session.post(flask_server.base_url + 'users/me/sign_in', {
        'username': flask_server.app.config['TESTING_LDAP_LOGIN'],
        'password': flask_server.app.config['TESTING_LDAP_PW'],
        'remember_me': False,
        'csrf_token': csrf_token
    })
    assert r.status_code == 200
    # The session cookie should now report a logged-in user.
    assert session.get(flask_server.base_url + 'users/me/loginstatus').json() is True
def test_sign_in_redirect(flask_server):
    """A valid sign-in with a relative ``next`` URL redirects there."""
    http = requests.session()
    base = flask_server.base_url
    assert http.get(base + 'users/me/loginstatus').json() is False
    # before signing in, the landing page links to the sign-in form
    response = http.get(base)
    assert response.status_code == 200
    assert '/users/me/sign_in' in response.content.decode('utf-8')
    # the sign-in form offers username, password and a remember-me checkbox,
    # plus a hidden CSRF token
    response = http.get(base + 'users/me/sign_in')
    assert response.status_code == 200
    page = BeautifulSoup(response.content, 'html.parser')
    for field_attrs in (
        {'name': 'username', 'type': 'text'},
        {'name': 'password', 'type': 'password'},
        {'name': 'remember_me', 'type': 'checkbox'},
        {'name': 'csrf_token', 'type': 'hidden'},
    ):
        assert page.find('input', field_attrs) is not None
    token = page.find('input', {'name': 'csrf_token'})['value']
    # signing in with ?next=/actions/ must redirect to that page
    response = http.post(
        base + 'users/me/sign_in?next=/actions/',
        {
            'username': flask_server.app.config['TESTING_LDAP_LOGIN'],
            'password': flask_server.app.config['TESTING_LDAP_PW'],
            'remember_me': False,
            'csrf_token': token,
        },
        allow_redirects=False,
    )
    assert response.status_code == 302
    assert response.headers['Location'] == base + 'actions/'
    assert http.get(base + 'users/me/loginstatus').json() is True
def test_sign_in_invalid_redirect(flask_server):
    """An external ``next`` URL is rejected; redirect goes to the index."""
    http = requests.session()
    base = flask_server.base_url
    assert http.get(base + 'users/me/loginstatus').json() is False
    # before signing in, the landing page links to the sign-in form
    response = http.get(base)
    assert response.status_code == 200
    assert '/users/me/sign_in' in response.content.decode('utf-8')
    # the sign-in form offers username, password and a remember-me checkbox,
    # plus a hidden CSRF token
    response = http.get(base + 'users/me/sign_in')
    assert response.status_code == 200
    page = BeautifulSoup(response.content, 'html.parser')
    for field_attrs in (
        {'name': 'username', 'type': 'text'},
        {'name': 'password', 'type': 'password'},
        {'name': 'remember_me', 'type': 'checkbox'},
        {'name': 'csrf_token', 'type': 'hidden'},
    ):
        assert page.find('input', field_attrs) is not None
    token = page.find('input', {'name': 'csrf_token'})['value']
    # an absolute, off-site next URL must not be followed
    response = http.post(
        base + 'users/me/sign_in?next=http://google.de/',
        {
            'username': flask_server.app.config['TESTING_LDAP_LOGIN'],
            'password': flask_server.app.config['TESTING_LDAP_PW'],
            'remember_me': False,
            'csrf_token': token,
        },
        allow_redirects=False,
    )
    assert response.status_code == 302
    assert response.headers['Location'] == base
    assert http.get(base + 'users/me/loginstatus').json() is True
def test_sign_in_invalid_password(flask_server):
    """Submitting a wrong password must not establish a session."""
    http = requests.session()
    base = flask_server.base_url
    assert http.get(base + 'users/me/loginstatus').json() is False
    # fetch the form once, only to obtain the hidden CSRF token
    page = BeautifulSoup(
        http.get(base + 'users/me/sign_in').content, 'html.parser')
    token = page.find('input', {'name': 'csrf_token'})['value']
    # submit the form with an incorrect password
    response = http.post(base + 'users/me/sign_in', {
        'username': flask_server.app.config['TESTING_LDAP_LOGIN'],
        'password': 'invalid',
        'remember_me': False,
        'csrf_token': token,
    })
    assert response.status_code == 200
    assert http.get(base + 'users/me/loginstatus').json() is False
def test_sign_in_missing_password(flask_server):
    """Omitting the password field entirely must not establish a session."""
    http = requests.session()
    base = flask_server.base_url
    assert http.get(base + 'users/me/loginstatus').json() is False
    # fetch the form once, only to obtain the hidden CSRF token
    page = BeautifulSoup(
        http.get(base + 'users/me/sign_in').content, 'html.parser')
    token = page.find('input', {'name': 'csrf_token'})['value']
    # submit the form without any password field
    response = http.post(base + 'users/me/sign_in', {
        'username': flask_server.app.config['TESTING_LDAP_LOGIN'],
        'remember_me': False,
        'csrf_token': token,
    })
    assert response.status_code == 200
    assert http.get(base + 'users/me/loginstatus').json() is False
def test_sign_in_authenticated(flask_server, user):
    """An already-authenticated user is redirected away from the sign-in page."""
    http = requests.session()
    base = flask_server.base_url
    assert http.get(base + 'users/{}/autologin'.format(user.id)).status_code == 200
    assert http.get(base + 'users/me/loginstatus').json() is True
    # the sign-in page should instantly redirect instead of rendering the form
    response = http.get(base + 'users/me/sign_in', allow_redirects=False)
    assert response.status_code == 302
def test_sign_out_navbar(flask_server, user):
    """Sign out via the navbar form shown on every page while signed in."""
    http = requests.session()
    base = flask_server.base_url
    assert http.get(base + 'users/{}/autologin'.format(user.id)).status_code == 200
    assert http.get(base + 'users/me/loginstatus').json() is True
    # the navbar contains a POST form targeting the sign-out endpoint
    response = http.get(base + '')
    assert response.status_code == 200
    page = BeautifulSoup(response.content, 'html.parser')
    form = page.find('form', {'action': '/users/me/sign_out', 'method': 'post'})
    assert form is not None
    # the form has a submit button and a hidden CSRF token
    assert form.find('button', {'type': 'submit'}) is not None
    token_input = form.find('input', {'name': 'csrf_token', 'type': 'hidden'})
    assert token_input is not None
    # submitting the form ends the session
    http.post(base + 'users/me/sign_out', {
        'csrf_token': token_input['value']
    })
    assert http.get(base + 'users/me/loginstatus').json() is False
def test_sign_out_page(flask_server, user):
    """Sign out via the dedicated sign-out page."""
    http = requests.session()
    base = flask_server.base_url
    assert http.get(base + 'users/{}/autologin'.format(user.id)).status_code == 200
    assert http.get(base + 'users/me/loginstatus').json() is True
    # while signed in, the sign-out page renders a POST form
    response = http.get(base + 'users/me/sign_out')
    assert response.status_code == 200
    page = BeautifulSoup(response.content, 'html.parser')
    form = page.find('form', {'action': '/users/me/sign_out', 'method': 'post'})
    assert form is not None
    # the form has a submit button and a hidden CSRF token
    assert form.find('button', {'type': 'submit'}) is not None
    token_input = form.find('input', {'name': 'csrf_token', 'type': 'hidden'})
    assert token_input is not None
    # submitting the form ends the session
    http.post(base + 'users/me/sign_out', {
        'csrf_token': token_input['value']
    })
    assert http.get(base + 'users/me/loginstatus').json() is False
def test_sign_out_page_unauthenticated(flask_server, user):
    """Visiting the sign-out page while signed out redirects to sign-in."""
    http = requests.session()
    base = flask_server.base_url
    assert http.get(base + 'users/me/loginstatus').json() is False
    # unauthenticated visitors are sent to the sign-in page instead
    response = http.get(base + 'users/me/sign_out', allow_redirects=False)
    assert response.status_code == 302
    assert response.headers['Location'].startswith(base + 'users/me/sign_in')
    assert http.get(base + 'users/me/loginstatus').json() is False
| 47.584906
| 123
| 0.692605
| 1,443
| 10,088
| 4.662509
| 0.087318
| 0.101367
| 0.093639
| 0.112366
| 0.909483
| 0.907253
| 0.907253
| 0.907253
| 0.901457
| 0.896849
| 0
| 0.007033
| 0.168418
| 10,088
| 211
| 124
| 47.810427
| 0.79497
| 0.114889
| 0
| 0.785714
| 0
| 0
| 0.217748
| 0.005849
| 0
| 0
| 0
| 0
| 0.38961
| 1
| 0.064935
| false
| 0.058442
| 0.038961
| 0
| 0.11039
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
743ca65fc7631a889a0f30b8f51c20038cf15669
| 74,789
|
py
|
Python
|
venv/lib/python3.8/site-packages/azureml/_restclient/operations/artifact_operations.py
|
amcclead7336/Enterprise_Data_Science_Final
|
ccdc0aa08d4726bf82d71c11a1cc0c63eb301a28
|
[
"Unlicense",
"MIT"
] | null | null | null |
venv/lib/python3.8/site-packages/azureml/_restclient/operations/artifact_operations.py
|
amcclead7336/Enterprise_Data_Science_Final
|
ccdc0aa08d4726bf82d71c11a1cc0c63eb301a28
|
[
"Unlicense",
"MIT"
] | null | null | null |
venv/lib/python3.8/site-packages/azureml/_restclient/operations/artifact_operations.py
|
amcclead7336/Enterprise_Data_Science_Final
|
ccdc0aa08d4726bf82d71c11a1cc0c63eb301a28
|
[
"Unlicense",
"MIT"
] | 2
|
2021-05-23T16:46:31.000Z
|
2021-05-26T23:51:09.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator 2.3.33.0
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.pipeline import ClientRawResponse
from .. import models
class ArtifactOperations(object):
"""ArtifactOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = models
def __init__(self, client, config, serializer, deserializer):
    """Store the shared service client, configuration and the
    (de)serialization helpers used by every operation in this group.

    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """
    self.config = config
    self._client = client
    self._serialize = serializer
    self._deserialize = deserializer
def create(
        self, subscription_id, resource_group_name, workspace_name, artifact_dto=None, custom_headers=None, raw=False, **operation_config):
    """Create an artifact metadata record in the given workspace.

    :param subscription_id: The Azure Subscription ID.
    :type subscription_id: str
    :param resource_group_name: The Name of the resource group in which
     the workspace is located.
    :type resource_group_name: str
    :param workspace_name: The name of the workspace.
    :type workspace_name: str
    :param artifact_dto:
    :type artifact_dto: ~_restclient.models.ArtifactDto
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: ArtifactDto or ClientRawResponse if raw=true
    :rtype: ~_restclient.models.ArtifactDto or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`ErrorResponseException<_restclient.models.ErrorResponseException>`
    """
    # Construct URL — the route template lives on the function object
    # (see the ``create.metadata`` assignment below).
    url = self.create.metadata['url']
    path_format_arguments = {
        'subscriptionId': self._serialize.url("subscription_id", subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters (this operation takes no query parameters)
    query_parameters = {}

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json-patch+json; charset=utf-8'
    if custom_headers:
        header_parameters.update(custom_headers)

    # Construct body — omitted entirely when no DTO is supplied
    if artifact_dto is not None:
        body_content = self._serialize.body(artifact_dto, 'ArtifactDto')
    else:
        body_content = None

    # Construct and send request
    request = self._client.post(url, query_parameters)
    response = self._client.send(
        request, header_parameters, body_content, stream=False, **operation_config)

    # Any status other than 200 is surfaced as an ErrorResponseException
    if response.status_code not in [200]:
        raise models.ErrorResponseException(self._deserialize, response)

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('ArtifactDto', response)

    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        return client_raw_response

    return deserialized
create.metadata = {'url': '/artifact/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/artifacts/metadata'}
def get(
        self, subscription_id, resource_group_name, workspace_name, origin, container, path, custom_headers=None, raw=False, **operation_config):
    """Get the metadata of a single artifact.

    :param subscription_id: The Azure Subscription ID.
    :type subscription_id: str
    :param resource_group_name: The Name of the resource group in which
     the workspace is located.
    :type resource_group_name: str
    :param workspace_name: The name of the workspace.
    :type workspace_name: str
    :param origin:
    :type origin: str
    :param container:
    :type container: str
    :param path:
    :type path: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: ArtifactDto or ClientRawResponse if raw=true
    :rtype: ~_restclient.models.ArtifactDto or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`ErrorResponseException<_restclient.models.ErrorResponseException>`
    """
    # Construct URL from the template stored on the function object below
    url = self.get.metadata['url']
    path_format_arguments = {
        'subscriptionId': self._serialize.url("subscription_id", subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        'origin': self._serialize.url("origin", origin, 'str'),
        'container': self._serialize.url("container", container, 'str'),
        # skip_quote=True leaves the path segment unquoted, so embedded
        # '/' separators survive in the final URL
        'path': self._serialize.url("path", path, 'str', skip_quote=True)
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters (none for this operation)
    query_parameters = {}

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if custom_headers:
        header_parameters.update(custom_headers)

    # Construct and send request
    request = self._client.get(url, query_parameters)
    response = self._client.send(request, header_parameters, stream=False, **operation_config)

    # Any status other than 200 is surfaced as an ErrorResponseException
    if response.status_code not in [200]:
        raise models.ErrorResponseException(self._deserialize, response)

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('ArtifactDto', response)

    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        return client_raw_response

    return deserialized
get.metadata = {'url': '/artifact/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/artifacts/metadata/{origin}/{container}/{path}'}
def delete_meta_data(
        self, subscription_id, resource_group_name, workspace_name, origin, container, path, hard_delete=None, custom_headers=None, raw=False, **operation_config):
    """Delete the metadata of an artifact.

    :param subscription_id:
    :type subscription_id: str
    :param resource_group_name:
    :type resource_group_name: str
    :param workspace_name:
    :type workspace_name: str
    :param origin:
    :type origin: str
    :param container:
    :type container: str
    :param path:
    :type path: str
    :param hard_delete:
    :type hard_delete: bool
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: None or ClientRawResponse if raw=true
    :rtype: None or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`ErrorResponseException<_restclient.models.ErrorResponseException>`
    """
    # Construct URL from the template stored on the function object below
    url = self.delete_meta_data.metadata['url']
    path_format_arguments = {
        'subscriptionId': self._serialize.url("subscription_id", subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        'origin': self._serialize.url("origin", origin, 'str'),
        'container': self._serialize.url("container", container, 'str'),
        # skip_quote=True leaves the path segment unquoted, so embedded
        # '/' separators survive in the final URL
        'path': self._serialize.url("path", path, 'str', skip_quote=True)
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters — hardDelete is only sent when explicitly given
    query_parameters = {}
    if hard_delete is not None:
        query_parameters['hardDelete'] = self._serialize.query("hard_delete", hard_delete, 'bool')

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if custom_headers:
        header_parameters.update(custom_headers)

    # Construct and send request
    request = self._client.delete(url, query_parameters)
    response = self._client.send(request, header_parameters, stream=False, **operation_config)

    # Any status other than 200 is surfaced as an ErrorResponseException
    if response.status_code not in [200]:
        raise models.ErrorResponseException(self._deserialize, response)

    # Returns None implicitly on success unless the raw response is requested
    if raw:
        client_raw_response = ClientRawResponse(None, response)
        return client_raw_response
delete_meta_data.metadata = {'url': '/artifact/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/artifacts/metadata/{origin}/{container}/{path}'}
def list_in_container(
        self, subscription_id, resource_group_name, workspace_name, origin, container, continuation_token=None, custom_headers=None, raw=False, **operation_config):
    """List the artifacts in a container, one page per call.

    :param subscription_id: The Azure Subscription ID.
    :type subscription_id: str
    :param resource_group_name: The Name of the resource group in which
     the workspace is located.
    :type resource_group_name: str
    :param workspace_name: The name of the workspace.
    :type workspace_name: str
    :param origin:
    :type origin: str
    :param container:
    :type container: str
    :param continuation_token: Token from a previous page to fetch the next
     one.
    :type continuation_token: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: PaginatedArtifactDto or ClientRawResponse if raw=true
    :rtype: ~_restclient.models.PaginatedArtifactDto or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`ErrorResponseException<_restclient.models.ErrorResponseException>`
    """
    # Construct URL from the template stored on the function object below
    url = self.list_in_container.metadata['url']
    path_format_arguments = {
        'subscriptionId': self._serialize.url("subscription_id", subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        'origin': self._serialize.url("origin", origin, 'str'),
        'container': self._serialize.url("container", container, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters — the continuation token drives paging
    query_parameters = {}
    if continuation_token is not None:
        query_parameters['continuationToken'] = self._serialize.query("continuation_token", continuation_token, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if custom_headers:
        header_parameters.update(custom_headers)

    # Construct and send request
    request = self._client.get(url, query_parameters)
    response = self._client.send(request, header_parameters, stream=False, **operation_config)

    # Any status other than 200 is surfaced as an ErrorResponseException
    if response.status_code not in [200]:
        raise models.ErrorResponseException(self._deserialize, response)

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('PaginatedArtifactDto', response)

    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        return client_raw_response

    return deserialized
list_in_container.metadata = {'url': '/artifact/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/artifacts/{origin}/{container}'}
def list_in_path(
        self, subscription_id, resource_group_name, workspace_name, origin, container, path, continuation_token=None, custom_headers=None, raw=False, **operation_config):
    """List the artifacts under a path within a container, one page per call.

    :param subscription_id:
    :type subscription_id: str
    :param resource_group_name:
    :type resource_group_name: str
    :param workspace_name:
    :type workspace_name: str
    :param origin:
    :type origin: str
    :param container:
    :type container: str
    :param path:
    :type path: str
    :param continuation_token: Token from a previous page to fetch the next
     one.
    :type continuation_token: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: PaginatedArtifactDto or ClientRawResponse if raw=true
    :rtype: ~_restclient.models.PaginatedArtifactDto or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`ErrorResponseException<_restclient.models.ErrorResponseException>`
    """
    # Construct URL from the template stored on the function object below
    url = self.list_in_path.metadata['url']
    path_format_arguments = {
        'subscriptionId': self._serialize.url("subscription_id", subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        'origin': self._serialize.url("origin", origin, 'str'),
        'container': self._serialize.url("container", container, 'str'),
        # skip_quote=True leaves the path segment unquoted, so embedded
        # '/' separators survive in the final URL
        'path': self._serialize.url("path", path, 'str', skip_quote=True)
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters — the continuation token drives paging
    query_parameters = {}
    if continuation_token is not None:
        query_parameters['continuationToken'] = self._serialize.query("continuation_token", continuation_token, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if custom_headers:
        header_parameters.update(custom_headers)

    # Construct and send request
    request = self._client.get(url, query_parameters)
    response = self._client.send(request, header_parameters, stream=False, **operation_config)

    # Any status other than 200 is surfaced as an ErrorResponseException
    if response.status_code not in [200]:
        raise models.ErrorResponseException(self._deserialize, response)

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('PaginatedArtifactDto', response)

    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        return client_raw_response

    return deserialized
list_in_path.metadata = {'url': '/artifact/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/artifacts/{origin}/{container}/{path}'}
def download(
        self, subscription_id, resource_group_name, workspace_name, origin, container, path, custom_headers=None, raw=False, callback=None, **operation_config):
    """Stream the content of an artifact.

    :param subscription_id: The Azure Subscription ID.
    :type subscription_id: str
    :param resource_group_name: The Name of the resource group in which
     the workspace is located.
    :type resource_group_name: str
    :param workspace_name: The name of the workspace.
    :type workspace_name: str
    :param origin:
    :type origin: str
    :param container:
    :type container: str
    :param path:
    :type path: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param callback: When specified, will be called with each chunk of
     data that is streamed. The callback should take two arguments, the
     bytes of the current chunk of data and the response object. If the
     data is uploading, response will be None.
    :type callback: Callable[Bytes, response=None]
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: object or ClientRawResponse if raw=true
    :rtype: Generator or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`ErrorResponseException<_restclient.models.ErrorResponseException>`
    """
    # Construct URL from the template stored on the function object below
    url = self.download.metadata['url']
    path_format_arguments = {
        'subscriptionId': self._serialize.url("subscription_id", subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        'origin': self._serialize.url("origin", origin, 'str'),
        'container': self._serialize.url("container", container, 'str'),
        # skip_quote=True leaves the path segment unquoted, so embedded
        # '/' separators survive in the final URL
        'path': self._serialize.url("path", path, 'str', skip_quote=True)
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters (none for this operation)
    query_parameters = {}

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if custom_headers:
        header_parameters.update(custom_headers)

    # Construct and send request — stream=True so the body is not read
    # into memory up front
    request = self._client.get(url, query_parameters)
    response = self._client.send(request, header_parameters, stream=True, **operation_config)

    # Any status other than 200 is surfaced as an ErrorResponseException
    if response.status_code not in [200]:
        raise models.ErrorResponseException(self._deserialize, response)

    deserialized = None
    if response.status_code == 200:
        # stream_download yields the body in chunks, invoking the optional
        # callback for each chunk
        deserialized = self._client.stream_download(response, callback)

    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        return client_raw_response

    return deserialized
download.metadata = {'url': '/artifact/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/artifacts/content/{origin}/{container}/{path}'}
def upload(
        self, subscription_id, resource_group_name, workspace_name, origin, container, path, content=None, index=None, append=None, custom_headers=None, raw=False, **operation_config):
    """Upload content to an artifact.

    :param subscription_id: The Azure Subscription ID.
    :type subscription_id: str
    :param resource_group_name: The Name of the resource group in which
     the workspace is located.
    :type resource_group_name: str
    :param workspace_name: The name of the workspace.
    :type workspace_name: str
    :param origin:
    :type origin: str
    :param container:
    :type container: str
    :param path:
    :type path: str
    :param content:
    :type content: str
    :param index:
    :type index: int
    :param append:
    :type append: bool
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: ArtifactDto or ClientRawResponse if raw=true
    :rtype: ~_restclient.models.ArtifactDto or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`ErrorResponseException<_restclient.models.ErrorResponseException>`
    """
    # Construct URL from the template stored on the function object below
    url = self.upload.metadata['url']
    path_format_arguments = {
        'subscriptionId': self._serialize.url("subscription_id", subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        'origin': self._serialize.url("origin", origin, 'str'),
        'container': self._serialize.url("container", container, 'str'),
        # skip_quote=True leaves the path segment unquoted, so embedded
        # '/' separators survive in the final URL
        'path': self._serialize.url("path", path, 'str', skip_quote=True)
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters — index/append are only sent when explicitly given
    query_parameters = {}
    if index is not None:
        query_parameters['index'] = self._serialize.query("index", index, 'int')
    if append is not None:
        query_parameters['append'] = self._serialize.query("append", append, 'bool')

    # Construct headers — content is sent as a raw octet stream
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/octet-stream'
    if custom_headers:
        header_parameters.update(custom_headers)

    # Construct body — omitted entirely when no content is supplied
    if content is not None:
        body_content = self._serialize.body(content, 'str')
    else:
        body_content = None

    # Construct and send request
    request = self._client.post(url, query_parameters)
    response = self._client.send(
        request, header_parameters, body_content, stream=False, **operation_config)

    # Any status other than 200 is surfaced as an ErrorResponseException
    if response.status_code not in [200]:
        raise models.ErrorResponseException(self._deserialize, response)

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('ArtifactDto', response)

    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        return client_raw_response

    return deserialized
upload.metadata = {'url': '/artifact/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/artifacts/content/{origin}/{container}/{path}'}
def get_content_information(
        self, subscription_id, resource_group_name, workspace_name, origin, container, path, custom_headers=None, raw=False, **operation_config):
    """Get content information for an artifact.

    :param subscription_id: The Azure Subscription ID.
    :type subscription_id: str
    :param resource_group_name: The Name of the resource group in which
     the workspace is located.
    :type resource_group_name: str
    :param workspace_name: The name of the workspace.
    :type workspace_name: str
    :param origin:
    :type origin: str
    :param container:
    :type container: str
    :param path:
    :type path: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: ArtifactContentInformationDto or ClientRawResponse if
     raw=true
    :rtype: ~_restclient.models.ArtifactContentInformationDto or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`ErrorResponseException<_restclient.models.ErrorResponseException>`
    """
    # Construct URL from the template stored on the function object below
    url = self.get_content_information.metadata['url']
    path_format_arguments = {
        'subscriptionId': self._serialize.url("subscription_id", subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        'origin': self._serialize.url("origin", origin, 'str'),
        'container': self._serialize.url("container", container, 'str'),
        # skip_quote=True leaves the path segment unquoted, so embedded
        # '/' separators survive in the final URL
        'path': self._serialize.url("path", path, 'str', skip_quote=True)
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters (none for this operation)
    query_parameters = {}

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if custom_headers:
        header_parameters.update(custom_headers)

    # Construct and send request
    request = self._client.get(url, query_parameters)
    response = self._client.send(request, header_parameters, stream=False, **operation_config)

    # Any status other than 200 is surfaced as an ErrorResponseException
    if response.status_code not in [200]:
        raise models.ErrorResponseException(self._deserialize, response)

    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('ArtifactContentInformationDto', response)

    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        return client_raw_response

    return deserialized
get_content_information.metadata = {'url': '/artifact/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/artifacts/contentinfo/{origin}/{container}/{path}'}
def get_write_sas(
        self, subscription_id, resource_group_name, workspace_name, origin, container, path, custom_headers=None, raw=False, **operation_config):
    """Get content information (write SAS) for a single artifact path.

    :param subscription_id: The Azure Subscription ID.
    :type subscription_id: str
    :param resource_group_name: The Name of the resource group in which
     the workspace is located.
    :type resource_group_name: str
    :param workspace_name: The name of the workspace.
    :type workspace_name: str
    :param origin:
    :type origin: str
    :param container:
    :type container: str
    :param path:
    :type path: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: ArtifactContentInformationDto or ClientRawResponse if
     raw=true
    :rtype: ~_restclient.models.ArtifactContentInformationDto or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`ErrorResponseException<_restclient.models.ErrorResponseException>`
    """
    # Expand the URL template; the artifact path keeps its embedded
    # slashes (skip_quote) so nested paths are preserved.
    path_args = {
        'subscriptionId': self._serialize.url("subscription_id", subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        'origin': self._serialize.url("origin", origin, 'str'),
        'container': self._serialize.url("container", container, 'str'),
        'path': self._serialize.url("path", path, 'str', skip_quote=True)
    }
    url = self._client.format_url(self.get_write_sas.metadata['url'], **path_args)

    # No query string for this operation.
    query = {}

    # JSON content type, extended with any caller-supplied headers.
    headers = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        headers.update(custom_headers)

    # Issue the GET; any non-200 status becomes a service exception.
    request = self._client.get(url, query)
    response = self._client.send(request, headers, stream=False, **operation_config)
    if response.status_code not in [200]:
        raise models.ErrorResponseException(self._deserialize, response)

    deserialized = self._deserialize('ArtifactContentInformationDto', response) if response.status_code == 200 else None

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
get_write_sas.metadata = {'url': '/artifact/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/artifacts/write/{origin}/{container}/{path}'}
def list_sas_by_prefix(
        self, subscription_id, resource_group_name, workspace_name, origin, container, path, continuation_token=None, custom_headers=None, raw=False, **operation_config):
    """List content information for all artifacts under a path prefix.

    :param subscription_id: The Azure Subscription ID.
    :type subscription_id: str
    :param resource_group_name: The Name of the resource group in which
     the workspace is located.
    :type resource_group_name: str
    :param workspace_name: The name of the workspace.
    :type workspace_name: str
    :param origin:
    :type origin: str
    :param container:
    :type container: str
    :param path:
    :type path: str
    :param continuation_token:
    :type continuation_token: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: PaginatedArtifactContentInformationDto or ClientRawResponse
     if raw=true
    :rtype: ~_restclient.models.PaginatedArtifactContentInformationDto or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`ErrorResponseException<_restclient.models.ErrorResponseException>`
    """
    # Build the request URL from the serialized path segments.
    path_args = {
        'subscriptionId': self._serialize.url("subscription_id", subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        'origin': self._serialize.url("origin", origin, 'str'),
        'container': self._serialize.url("container", container, 'str'),
        'path': self._serialize.url("path", path, 'str', skip_quote=True)
    }
    url = self._client.format_url(self.list_sas_by_prefix.metadata['url'], **path_args)

    # Pagination cursor is the only (optional) query parameter.
    query = {}
    if continuation_token is not None:
        query['continuationToken'] = self._serialize.query("continuation_token", continuation_token, 'str')

    headers = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        headers.update(custom_headers)

    # Fire the GET; non-200 responses raise the service exception.
    request = self._client.get(url, query)
    response = self._client.send(request, headers, stream=False, **operation_config)
    if response.status_code not in [200]:
        raise models.ErrorResponseException(self._deserialize, response)

    deserialized = self._deserialize('PaginatedArtifactContentInformationDto', response) if response.status_code == 200 else None

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
list_sas_by_prefix.metadata = {'url': '/artifact/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/artifacts/prefix/contentinfo/{origin}/{container}/{path}'}
def batch_ingest_from_sas(
        self, subscription_id, resource_group_name, workspace_name, origin, container, command=None, custom_headers=None, raw=False, **operation_config):
    """Ingest a batch of artifacts from a container SAS.

    :param subscription_id: The Azure Subscription ID.
    :type subscription_id: str
    :param resource_group_name: The Name of the resource group in which
     the workspace is located.
    :type resource_group_name: str
    :param workspace_name: The name of the workspace.
    :type workspace_name: str
    :param origin:
    :type origin: str
    :param container:
    :type container: str
    :param command:
    :type command:
     ~_restclient.models.BatchArtifactContainerSasIngestCommand
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: PaginatedArtifactDto or ClientRawResponse if raw=true
    :rtype: ~_restclient.models.PaginatedArtifactDto or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`ErrorResponseException<_restclient.models.ErrorResponseException>`
    """
    # Resolve the templated URL with the serialized path segments.
    path_args = {
        'subscriptionId': self._serialize.url("subscription_id", subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        'origin': self._serialize.url("origin", origin, 'str'),
        'container': self._serialize.url("container", container, 'str')
    }
    url = self._client.format_url(self.batch_ingest_from_sas.metadata['url'], **path_args)

    # No query string for this operation.
    query = {}

    headers = {'Content-Type': 'application/json-patch+json; charset=utf-8'}
    if custom_headers:
        headers.update(custom_headers)

    # The command is optional; omit the body entirely when absent.
    body_content = self._serialize.body(command, 'BatchArtifactContainerSasIngestCommand') if command is not None else None

    # POST the ingest command; non-200 raises the service exception.
    request = self._client.post(url, query)
    response = self._client.send(
        request, headers, body_content, stream=False, **operation_config)
    if response.status_code not in [200]:
        raise models.ErrorResponseException(self._deserialize, response)

    deserialized = self._deserialize('PaginatedArtifactDto', response) if response.status_code == 200 else None

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
batch_ingest_from_sas.metadata = {'url': '/artifact/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/artifacts/batch/ingest/containersas/{origin}/{container}'}
def batch_create_empty_artifacts(
        self, subscription_id, resource_group_name, workspace_name, origin, container, datastore_name=None, command=None, custom_headers=None, raw=False, **operation_config):
    """Create a batch of empty artifact records in a container.

    :param subscription_id: The Azure Subscription ID.
    :type subscription_id: str
    :param resource_group_name: The Name of the resource group in which
     the workspace is located.
    :type resource_group_name: str
    :param workspace_name: The name of the workspace.
    :type workspace_name: str
    :param origin:
    :type origin: str
    :param container:
    :type container: str
    :param datastore_name: The name of the datastore used when uploading
     the artifact.
    :type datastore_name: str
    :param command:
    :type command: ~_restclient.models.BatchArtifactCreateCommand
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: BatchArtifactContentInformationDto or ClientRawResponse if
     raw=true
    :rtype: ~_restclient.models.BatchArtifactContentInformationDto or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`ErrorResponseException<_restclient.models.ErrorResponseException>`
    """
    # Resolve the templated URL with the serialized path segments.
    path_args = {
        'subscriptionId': self._serialize.url("subscription_id", subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        'origin': self._serialize.url("origin", origin, 'str'),
        'container': self._serialize.url("container", container, 'str')
    }
    url = self._client.format_url(self.batch_create_empty_artifacts.metadata['url'], **path_args)

    # Optional datastore override travels as a query parameter.
    query = {}
    if datastore_name is not None:
        query['datastoreName'] = self._serialize.query("datastore_name", datastore_name, 'str')

    headers = {'Content-Type': 'application/json-patch+json; charset=utf-8'}
    if custom_headers:
        headers.update(custom_headers)

    # The create command is optional; send no body when absent.
    body_content = self._serialize.body(command, 'BatchArtifactCreateCommand') if command is not None else None

    # POST the command; non-200 raises the service exception.
    request = self._client.post(url, query)
    response = self._client.send(
        request, headers, body_content, stream=False, **operation_config)
    if response.status_code not in [200]:
        raise models.ErrorResponseException(self._deserialize, response)

    deserialized = self._deserialize('BatchArtifactContentInformationDto', response) if response.status_code == 200 else None

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
batch_create_empty_artifacts.metadata = {'url': '/artifact/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/artifacts/batch/metadata/{origin}/{container}'}
def delete_meta_data_in_container(
        self, subscription_id, resource_group_name, workspace_name, origin, container, hard_delete=None, custom_headers=None, raw=False, **operation_config):
    """Delete all artifact metadata within a container.

    :param subscription_id:
    :type subscription_id: str
    :param resource_group_name:
    :type resource_group_name: str
    :param workspace_name:
    :type workspace_name: str
    :param origin:
    :type origin: str
    :param container:
    :type container: str
    :param hard_delete:
    :type hard_delete: bool
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: None or ClientRawResponse if raw=true
    :rtype: None or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`ErrorResponseException<_restclient.models.ErrorResponseException>`
    """
    # Resolve the templated URL with the serialized path segments.
    path_args = {
        'subscriptionId': self._serialize.url("subscription_id", subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        'origin': self._serialize.url("origin", origin, 'str'),
        'container': self._serialize.url("container", container, 'str')
    }
    url = self._client.format_url(self.delete_meta_data_in_container.metadata['url'], **path_args)

    # Optional hardDelete flag goes on the query string.
    query = {}
    if hard_delete is not None:
        query['hardDelete'] = self._serialize.query("hard_delete", hard_delete, 'bool')

    headers = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        headers.update(custom_headers)

    # Issue the DELETE; non-200 raises the service exception.
    request = self._client.delete(url, query)
    response = self._client.send(request, headers, stream=False, **operation_config)
    if response.status_code not in [200]:
        raise models.ErrorResponseException(self._deserialize, response)

    # No response body to deserialize; surface the raw response on demand.
    if raw:
        return ClientRawResponse(None, response)
delete_meta_data_in_container.metadata = {'url': '/artifact/v2.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/artifacts/batch/{origin}/{container}'}
def delete_all_in_workspace(
        self, workspace_id, custom_headers=None, raw=False, **operation_config):
    """Delete all artifacts in a workspace.

    :param workspace_id:
    :type workspace_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: None or ClientRawResponse if raw=true
    :rtype: None or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`ErrorResponseException<_restclient.models.ErrorResponseException>`
    """
    # Only one path segment to substitute for this legacy v1.0 route.
    path_args = {
        'workspaceId': self._serialize.url("workspace_id", workspace_id, 'str')
    }
    url = self._client.format_url(self.delete_all_in_workspace.metadata['url'], **path_args)

    # No query string for this operation.
    query = {}

    headers = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        headers.update(custom_headers)

    # Issue the DELETE; non-200 raises the service exception.
    request = self._client.delete(url, query)
    response = self._client.send(request, headers, stream=False, **operation_config)
    if response.status_code not in [200]:
        raise models.ErrorResponseException(self._deserialize, response)

    # No response body; surface the raw response on demand.
    if raw:
        return ClientRawResponse(None, response)
delete_all_in_workspace.metadata = {'url': '/artifact/v1.0/workspaces/{workspaceId}'}
def upsert_asset(
        self, subscription_id, resource_group_name, workspace_name, project_name, origin, container, path, asset_dto=None, custom_headers=None, raw=False, **operation_config):
    """Create or update an asset at the given artifact path.

    :param subscription_id: The Azure Subscription ID.
    :type subscription_id: str
    :param resource_group_name: The Name of the resource group in which
     the workspace is located.
    :type resource_group_name: str
    :param workspace_name: The name of the workspace.
    :type workspace_name: str
    :param project_name:
    :type project_name: str
    :param origin:
    :type origin: str
    :param container:
    :type container: str
    :param path:
    :type path: str
    :param asset_dto:
    :type asset_dto: ~_restclient.models.AssetDto
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: AssetDto or ClientRawResponse if raw=true
    :rtype: ~_restclient.models.AssetDto or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`ErrorResponseException<_restclient.models.ErrorResponseException>`
    """
    # Expand the URL template; the artifact path keeps its slashes.
    path_args = {
        'subscriptionId': self._serialize.url("subscription_id", subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        'projectName': self._serialize.url("project_name", project_name, 'str'),
        'origin': self._serialize.url("origin", origin, 'str'),
        'container': self._serialize.url("container", container, 'str'),
        'path': self._serialize.url("path", path, 'str', skip_quote=True)
    }
    url = self._client.format_url(self.upsert_asset.metadata['url'], **path_args)

    # No query string for this operation.
    query = {}

    headers = {'Content-Type': 'application/json-patch+json; charset=utf-8'}
    if custom_headers:
        headers.update(custom_headers)

    # The DTO is optional; omit the body entirely when absent.
    body_content = self._serialize.body(asset_dto, 'AssetDto') if asset_dto is not None else None

    # PUT the asset; non-200 raises the service exception.
    request = self._client.put(url, query)
    response = self._client.send(
        request, headers, body_content, stream=False, **operation_config)
    if response.status_code not in [200]:
        raise models.ErrorResponseException(self._deserialize, response)

    deserialized = self._deserialize('AssetDto', response) if response.status_code == 200 else None

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
upsert_asset.metadata = {'url': '/artifact/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/projects/{projectName}/artifacts/asset/{origin}/{container}/{path}'}
def create_new_asset(
        self, subscription_id, resource_group_name, workspace_name, project_name, origin, container, path, asset_dto=None, custom_headers=None, raw=False, **operation_config):
    """Create a new asset at the given artifact path.

    :param subscription_id: The Azure Subscription ID.
    :type subscription_id: str
    :param resource_group_name: The Name of the resource group in which
     the workspace is located.
    :type resource_group_name: str
    :param workspace_name: The name of the workspace.
    :type workspace_name: str
    :param project_name:
    :type project_name: str
    :param origin:
    :type origin: str
    :param container:
    :type container: str
    :param path:
    :type path: str
    :param asset_dto:
    :type asset_dto: ~_restclient.models.AssetDto
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: AssetDto or ClientRawResponse if raw=true
    :rtype: ~_restclient.models.AssetDto or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`ErrorResponseException<_restclient.models.ErrorResponseException>`
    """
    # Expand the URL template; the artifact path keeps its slashes.
    path_args = {
        'subscriptionId': self._serialize.url("subscription_id", subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        'projectName': self._serialize.url("project_name", project_name, 'str'),
        'origin': self._serialize.url("origin", origin, 'str'),
        'container': self._serialize.url("container", container, 'str'),
        'path': self._serialize.url("path", path, 'str', skip_quote=True)
    }
    url = self._client.format_url(self.create_new_asset.metadata['url'], **path_args)

    # No query string for this operation.
    query = {}

    headers = {'Content-Type': 'application/json-patch+json; charset=utf-8'}
    if custom_headers:
        headers.update(custom_headers)

    # The DTO is optional; omit the body entirely when absent.
    body_content = self._serialize.body(asset_dto, 'AssetDto') if asset_dto is not None else None

    # POST the asset; non-200 raises the service exception.
    request = self._client.post(url, query)
    response = self._client.send(
        request, headers, body_content, stream=False, **operation_config)
    if response.status_code not in [200]:
        raise models.ErrorResponseException(self._deserialize, response)

    deserialized = self._deserialize('AssetDto', response) if response.status_code == 200 else None

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
create_new_asset.metadata = {'url': '/artifact/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/projects/{projectName}/artifacts/asset/{origin}/{container}/{path}'}
def get_assets(
        self, subscription_id, resource_group_name, workspace_name, project_name, custom_headers=None, raw=False, **operation_config):
    """List all assets in a project.

    :param subscription_id: The Azure Subscription ID.
    :type subscription_id: str
    :param resource_group_name: The Name of the resource group in which
     the workspace is located.
    :type resource_group_name: str
    :param workspace_name: The name of the workspace.
    :type workspace_name: str
    :param project_name:
    :type project_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: PaginatedAssetDto or ClientRawResponse if raw=true
    :rtype: ~_restclient.models.PaginatedAssetDto or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`ErrorResponseException<_restclient.models.ErrorResponseException>`
    """
    # Resolve the templated URL with the serialized path segments.
    path_args = {
        'subscriptionId': self._serialize.url("subscription_id", subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        'projectName': self._serialize.url("project_name", project_name, 'str')
    }
    url = self._client.format_url(self.get_assets.metadata['url'], **path_args)

    # No query string for this operation.
    query = {}

    headers = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        headers.update(custom_headers)

    # Issue the GET; non-200 raises the service exception.
    request = self._client.get(url, query)
    response = self._client.send(request, headers, stream=False, **operation_config)
    if response.status_code not in [200]:
        raise models.ErrorResponseException(self._deserialize, response)

    deserialized = self._deserialize('PaginatedAssetDto', response) if response.status_code == 200 else None

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
get_assets.metadata = {'url': '/artifact/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/projects/{projectName}/assets'}
def get_asset_by_name(
        self, subscription_id, resource_group_name, workspace_name, project_name, name, custom_headers=None, raw=False, **operation_config):
    """Get a single asset by its name.

    :param subscription_id: The Azure Subscription ID.
    :type subscription_id: str
    :param resource_group_name: The Name of the resource group in which
     the workspace is located.
    :type resource_group_name: str
    :param workspace_name: The name of the workspace.
    :type workspace_name: str
    :param project_name:
    :type project_name: str
    :param name:
    :type name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: AssetDto or ClientRawResponse if raw=true
    :rtype: ~_restclient.models.AssetDto or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`ErrorResponseException<_restclient.models.ErrorResponseException>`
    """
    # Expand the URL template; the asset name is not URL-quoted
    # (skip_quote), matching the service's routing expectations.
    path_args = {
        'subscriptionId': self._serialize.url("subscription_id", subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        'projectName': self._serialize.url("project_name", project_name, 'str'),
        'name': self._serialize.url("name", name, 'str', skip_quote=True)
    }
    url = self._client.format_url(self.get_asset_by_name.metadata['url'], **path_args)

    # No query string for this operation.
    query = {}

    headers = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        headers.update(custom_headers)

    # Issue the GET; non-200 raises the service exception.
    request = self._client.get(url, query)
    response = self._client.send(request, headers, stream=False, **operation_config)
    if response.status_code not in [200]:
        raise models.ErrorResponseException(self._deserialize, response)

    deserialized = self._deserialize('AssetDto', response) if response.status_code == 200 else None

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
get_asset_by_name.metadata = {'url': '/artifact/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/projects/{projectName}/assets/name/{name}'}
def get_asset_by_id(
        self, subscription_id, resource_group_name, workspace_name, project_name, id, custom_headers=None, raw=False, **operation_config):
    """Get a single asset by its identifier.

    :param subscription_id: The Azure Subscription ID.
    :type subscription_id: str
    :param resource_group_name: The Name of the resource group in which
     the workspace is located.
    :type resource_group_name: str
    :param workspace_name: The name of the workspace.
    :type workspace_name: str
    :param project_name:
    :type project_name: str
    :param id:
    :type id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: AssetDto or ClientRawResponse if raw=true
    :rtype: ~_restclient.models.AssetDto or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`ErrorResponseException<_restclient.models.ErrorResponseException>`
    """
    # Resolve the templated URL with the serialized path segments.
    path_args = {
        'subscriptionId': self._serialize.url("subscription_id", subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        'projectName': self._serialize.url("project_name", project_name, 'str'),
        'id': self._serialize.url("id", id, 'str')
    }
    url = self._client.format_url(self.get_asset_by_id.metadata['url'], **path_args)

    # No query string for this operation.
    query = {}

    headers = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        headers.update(custom_headers)

    # Issue the GET; non-200 raises the service exception.
    request = self._client.get(url, query)
    response = self._client.send(request, headers, stream=False, **operation_config)
    if response.status_code not in [200]:
        raise models.ErrorResponseException(self._deserialize, response)

    deserialized = self._deserialize('AssetDto', response) if response.status_code == 200 else None

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
get_asset_by_id.metadata = {'url': '/artifact/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/projects/{projectName}/assets/id/{id}'}
def get_asset_versions_by_asset_name(
        self, subscription_id, resource_group_name, workspace_name, project_name, name, custom_headers=None, raw=False, **operation_config):
    """List all versions of an asset, looked up by asset name.

    :param subscription_id: The Azure Subscription ID.
    :type subscription_id: str
    :param resource_group_name: The Name of the resource group in which
     the workspace is located.
    :type resource_group_name: str
    :param workspace_name: The name of the workspace.
    :type workspace_name: str
    :param project_name:
    :type project_name: str
    :param name:
    :type name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: PaginatedAssetVersionDto or ClientRawResponse if raw=true
    :rtype: ~_restclient.models.PaginatedAssetVersionDto or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`ErrorResponseException<_restclient.models.ErrorResponseException>`
    """
    # Expand the URL template; the asset name is not URL-quoted
    # (skip_quote), matching the service's routing expectations.
    path_args = {
        'subscriptionId': self._serialize.url("subscription_id", subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        'projectName': self._serialize.url("project_name", project_name, 'str'),
        'name': self._serialize.url("name", name, 'str', skip_quote=True)
    }
    url = self._client.format_url(self.get_asset_versions_by_asset_name.metadata['url'], **path_args)

    # No query string for this operation.
    query = {}

    headers = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        headers.update(custom_headers)

    # Issue the GET; non-200 raises the service exception.
    request = self._client.get(url, query)
    response = self._client.send(request, headers, stream=False, **operation_config)
    if response.status_code not in [200]:
        raise models.ErrorResponseException(self._deserialize, response)

    deserialized = self._deserialize('PaginatedAssetVersionDto', response) if response.status_code == 200 else None

    if raw:
        return ClientRawResponse(deserialized, response)
    return deserialized
get_asset_versions_by_asset_name.metadata = {'url': '/artifact/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/projects/{projectName}/assets/name/{name}/versions'}
def get_single_asset_version_by_asset_name(
        self, subscription_id, resource_group_name, workspace_name, project_name, name, version, custom_headers=None, raw=False, **operation_config):
    """Fetch a single version of an asset, addressed by asset name.

    :param subscription_id: The Azure Subscription ID.
    :type subscription_id: str
    :param resource_group_name: The Name of the resource group in which
     the workspace is located.
    :type resource_group_name: str
    :param workspace_name: The name of the workspace.
    :type workspace_name: str
    :param project_name:
    :type project_name: str
    :param name: Asset name; inserted into the URL path without quoting
     (``skip_quote=True``).
    :type name: str
    :param version:
    :type version: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: AssetVersionDto or ClientRawResponse if raw=true
    :rtype: ~_restclient.models.AssetVersionDto or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`ErrorResponseException<_restclient.models.ErrorResponseException>`
    """
    # Expand the route template with the serialized path arguments.
    path_args = {
        'subscriptionId': self._serialize.url("subscription_id", subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        'projectName': self._serialize.url("project_name", project_name, 'str'),
        'name': self._serialize.url("name", name, 'str', skip_quote=True),
        'version': self._serialize.url("version", version, 'str'),
    }
    url = self._client.format_url(
        self.get_single_asset_version_by_asset_name.metadata['url'], **path_args)

    # This operation takes no query parameters; only content-type (plus any
    # caller-supplied headers) goes on the wire.
    headers = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        headers.update(custom_headers)

    # Issue the GET and convert non-200 replies into a typed exception.
    response = self._client.send(
        self._client.get(url, {}), headers, stream=False, **operation_config)
    if response.status_code not in [200]:
        raise models.ErrorResponseException(self._deserialize, response)

    body = self._deserialize('AssetVersionDto', response) if response.status_code == 200 else None
    if raw:
        return ClientRawResponse(body, response)
    return body
get_single_asset_version_by_asset_name.metadata = {'url': '/artifact/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/projects/{projectName}/assets/name/{name}/versions/{version}'}
def get_asset_versions_by_asset_id(
        self, subscription_id, resource_group_name, workspace_name, project_name, id, custom_headers=None, raw=False, **operation_config):
    """List the versions of an asset, addressed by asset id.

    :param subscription_id: The Azure Subscription ID.
    :type subscription_id: str
    :param resource_group_name: The Name of the resource group in which
     the workspace is located.
    :type resource_group_name: str
    :param workspace_name: The name of the workspace.
    :type workspace_name: str
    :param project_name:
    :type project_name: str
    :param id:
    :type id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: PaginatedAssetVersionDto or ClientRawResponse if raw=true
    :rtype: ~_restclient.models.PaginatedAssetVersionDto or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`ErrorResponseException<_restclient.models.ErrorResponseException>`
    """
    # NOTE: the `id` parameter shadows the builtin; kept for wire/API
    # compatibility with existing callers.
    path_args = {
        'subscriptionId': self._serialize.url("subscription_id", subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        'projectName': self._serialize.url("project_name", project_name, 'str'),
        'id': self._serialize.url("id", id, 'str'),
    }
    url = self._client.format_url(
        self.get_asset_versions_by_asset_id.metadata['url'], **path_args)

    # No query parameters for this operation.
    headers = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        headers.update(custom_headers)

    # Send the GET; anything other than 200 becomes a typed exception.
    response = self._client.send(
        self._client.get(url, {}), headers, stream=False, **operation_config)
    if response.status_code not in [200]:
        raise models.ErrorResponseException(self._deserialize, response)

    body = self._deserialize('PaginatedAssetVersionDto', response) if response.status_code == 200 else None
    if raw:
        return ClientRawResponse(body, response)
    return body
get_asset_versions_by_asset_id.metadata = {'url': '/artifact/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/projects/{projectName}/assets/id/{id}/versions'}
def get_single_asset_version_by_asset_id(
        self, subscription_id, resource_group_name, workspace_name, project_name, id, version, custom_headers=None, raw=False, **operation_config):
    """Fetch a single version of an asset, addressed by asset id.

    :param subscription_id: The Azure Subscription ID.
    :type subscription_id: str
    :param resource_group_name: The Name of the resource group in which
     the workspace is located.
    :type resource_group_name: str
    :param workspace_name: The name of the workspace.
    :type workspace_name: str
    :param project_name:
    :type project_name: str
    :param id:
    :type id: str
    :param version:
    :type version: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: AssetVersionDto or ClientRawResponse if raw=true
    :rtype: ~_restclient.models.AssetVersionDto or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`ErrorResponseException<_restclient.models.ErrorResponseException>`
    """
    # NOTE: the `id` parameter shadows the builtin; kept for wire/API
    # compatibility with existing callers.
    path_args = {
        'subscriptionId': self._serialize.url("subscription_id", subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        'projectName': self._serialize.url("project_name", project_name, 'str'),
        'id': self._serialize.url("id", id, 'str'),
        'version': self._serialize.url("version", version, 'str'),
    }
    url = self._client.format_url(
        self.get_single_asset_version_by_asset_id.metadata['url'], **path_args)

    # No query parameters for this operation.
    headers = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        headers.update(custom_headers)

    # Send the GET; anything other than 200 becomes a typed exception.
    response = self._client.send(
        self._client.get(url, {}), headers, stream=False, **operation_config)
    if response.status_code not in [200]:
        raise models.ErrorResponseException(self._deserialize, response)

    body = self._deserialize('AssetVersionDto', response) if response.status_code == 200 else None
    if raw:
        return ClientRawResponse(body, response)
    return body
get_single_asset_version_by_asset_id.metadata = {'url': '/artifact/v1.0/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/projects/{projectName}/assets/id/{id}/versions/{version}'}
| 46.194565
| 278
| 0.656393
| 7,523
| 74,789
| 6.311977
| 0.028579
| 0.037507
| 0.041108
| 0.021312
| 0.952385
| 0.94613
| 0.941855
| 0.940255
| 0.934358
| 0.932526
| 0
| 0.003627
| 0.251615
| 74,789
| 1,618
| 279
| 46.223115
| 0.844759
| 0.299723
| 0
| 0.802594
| 0
| 0.0317
| 0.20448
| 0.104108
| 0
| 0
| 0
| 0
| 0
| 1
| 0.034582
| false
| 0
| 0.002882
| 0
| 0.102305
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7ac833f94759d44f24c46e9c941cfc3f31e1fded
| 152
|
py
|
Python
|
ast-transformations-core/src/test/resources/org/jetbrains/research/ml/ast/transformations/constantfolding/data/wrong_assoc/out_2.py
|
JetBrains-Research/ast-transformations
|
0ab408af3275b520cc87a473f418c4b4dfcb0284
|
[
"MIT"
] | 8
|
2021-01-19T21:15:54.000Z
|
2022-02-23T19:16:25.000Z
|
ast-transformations-core/src/test/resources/org/jetbrains/research/ml/ast/transformations/constantfolding/data/wrong_assoc/out_2.py
|
JetBrains-Research/ast-transformations
|
0ab408af3275b520cc87a473f418c4b4dfcb0284
|
[
"MIT"
] | 4
|
2020-11-17T14:28:25.000Z
|
2022-02-24T07:54:28.000Z
|
ast-transformations-core/src/test/resources/org/jetbrains/research/ml/ast/transformations/constantfolding/data/wrong_assoc/out_2.py
|
nbirillo/ast-transformations
|
717706765a2da29087a0de768fc851698886dd65
|
[
"MIT"
] | 1
|
2022-02-23T19:16:30.000Z
|
2022-02-23T19:16:30.000Z
|
# Read one integer from stdin, then evaluate a series of throwaway sums
# mixing the runtime value with negated operands (results are discarded).
x = int(input())
_ = x + -3
_ = x + x
_ = x + -x
_ = -x + -x
_ = -x + -1
_ = -x + -8
_ = -x + -3
| 15.2
| 21
| 0.197368
| 17
| 152
| 1.352941
| 0.352941
| 0.521739
| 0.652174
| 0.695652
| 0.304348
| 0.304348
| 0
| 0
| 0
| 0
| 0
| 0.035398
| 0.256579
| 152
| 9
| 22
| 16.888889
| 0.168142
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7ae098a57d725b0da0428f9f1ad67983a2fb63e6
| 45,647
|
py
|
Python
|
sdk/python/pulumi_pagerduty/service.py
|
pulumi/pulumi-pagerduty
|
1c08849cda3d5fccf5eb9f615dc004b1f8f90555
|
[
"ECL-2.0",
"Apache-2.0"
] | 5
|
2020-05-27T08:18:35.000Z
|
2021-07-31T08:40:03.000Z
|
sdk/python/pulumi_pagerduty/service.py
|
pulumi/pulumi-pagerduty
|
1c08849cda3d5fccf5eb9f615dc004b1f8f90555
|
[
"ECL-2.0",
"Apache-2.0"
] | 48
|
2020-05-26T10:59:40.000Z
|
2022-03-31T15:41:54.000Z
|
sdk/python/pulumi_pagerduty/service.py
|
pulumi/pulumi-pagerduty
|
1c08849cda3d5fccf5eb9f615dc004b1f8f90555
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2020-05-26T17:51:56.000Z
|
2020-05-26T17:51:56.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
from . import outputs
from ._inputs import *
__all__ = ['ServiceArgs', 'Service']
@pulumi.input_type
class ServiceArgs:
    # Generated by the Pulumi Terraform Bridge (tfgen). The @pulumi.input_type
    # decorator derives the input-property plumbing from the annotated
    # properties declared below, so the class shape must not be restructured.
    def __init__(__self__, *,
                 escalation_policy: pulumi.Input[str],
                 acknowledgement_timeout: Optional[pulumi.Input[str]] = None,
                 alert_creation: Optional[pulumi.Input[str]] = None,
                 alert_grouping: Optional[pulumi.Input[str]] = None,
                 alert_grouping_parameters: Optional[pulumi.Input['ServiceAlertGroupingParametersArgs']] = None,
                 alert_grouping_timeout: Optional[pulumi.Input[int]] = None,
                 auto_resolve_timeout: Optional[pulumi.Input[str]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 incident_urgency_rule: Optional[pulumi.Input['ServiceIncidentUrgencyRuleArgs']] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 scheduled_actions: Optional[pulumi.Input[Sequence[pulumi.Input['ServiceScheduledActionArgs']]]] = None,
                 support_hours: Optional[pulumi.Input['ServiceSupportHoursArgs']] = None):
        """
        The set of arguments for constructing a Service resource.
        :param pulumi.Input[str] escalation_policy: The escalation policy used by this service.
        :param pulumi.Input[str] acknowledgement_timeout: Time in seconds that an incident changes to the Triggered State after being Acknowledged. Disabled if set to the `"null"` string.
        :param pulumi.Input[str] alert_creation: Must be one of two values. PagerDuty receives events from your monitoring systems and can then create incidents in different ways. Value "create_incidents" is default: events will create an incident that cannot be merged. Value "create_alerts_and_incidents" is the alternative: events will create an alert and then add it to a new incident, these incidents can be merged. This option is recommended.
        :param pulumi.Input[str] alert_grouping: (Deprecated) Defines how alerts on this service will be automatically grouped into incidents. Note that the alert grouping features are available only on certain plans. If not set, each alert will create a separate incident; If value is set to `time`: All alerts within a specified duration will be grouped into the same incident. This duration is set in the `alert_grouping_timeout` setting (described below). Available on Standard, Enterprise, and Event Intelligence plans; If value is set to `intelligent` - Alerts will be intelligently grouped based on a machine learning model that looks at the alert summary, timing, and the history of grouped alerts. Available on Enterprise and Event Intelligence plan.
        :param pulumi.Input['ServiceAlertGroupingParametersArgs'] alert_grouping_parameters: Defines how alerts on this service will be automatically grouped into incidents. Note that the alert grouping features are available only on certain plans. If not set, each alert will create a separate incident.
        :param pulumi.Input[int] alert_grouping_timeout: (Deprecated) The duration in minutes within which to automatically group incoming alerts. This setting applies only when `alert_grouping` is set to `time`. To continue grouping alerts until the incident is resolved, set this value to `0`.
        :param pulumi.Input[str] auto_resolve_timeout: Time in seconds that an incident is automatically resolved if left open for that long. Disabled if set to the `"null"` string.
        :param pulumi.Input[str] description: Description of the service; substituted with 'Managed by Pulumi' when not supplied.
        :param pulumi.Input[str] name: The name of the service.
        """
        # Required argument is always recorded.
        pulumi.set(__self__, "escalation_policy", escalation_policy)
        # Optional arguments are only recorded when explicitly provided.
        if acknowledgement_timeout is not None:
            pulumi.set(__self__, "acknowledgement_timeout", acknowledgement_timeout)
        if alert_creation is not None:
            pulumi.set(__self__, "alert_creation", alert_creation)
        if alert_grouping is not None:
            pulumi.set(__self__, "alert_grouping", alert_grouping)
        if alert_grouping_parameters is not None:
            pulumi.set(__self__, "alert_grouping_parameters", alert_grouping_parameters)
        if alert_grouping_timeout is not None:
            pulumi.set(__self__, "alert_grouping_timeout", alert_grouping_timeout)
        if auto_resolve_timeout is not None:
            pulumi.set(__self__, "auto_resolve_timeout", auto_resolve_timeout)
        # A missing description is defaulted, so the second guard is always
        # true afterwards; redundant but kept as generated.
        if description is None:
            description = 'Managed by Pulumi'
        if description is not None:
            pulumi.set(__self__, "description", description)
        if incident_urgency_rule is not None:
            pulumi.set(__self__, "incident_urgency_rule", incident_urgency_rule)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if scheduled_actions is not None:
            pulumi.set(__self__, "scheduled_actions", scheduled_actions)
        if support_hours is not None:
            pulumi.set(__self__, "support_hours", support_hours)

    @property
    @pulumi.getter(name="escalationPolicy")
    def escalation_policy(self) -> pulumi.Input[str]:
        """
        The escalation policy used by this service.
        """
        return pulumi.get(self, "escalation_policy")

    @escalation_policy.setter
    def escalation_policy(self, value: pulumi.Input[str]):
        pulumi.set(self, "escalation_policy", value)

    @property
    @pulumi.getter(name="acknowledgementTimeout")
    def acknowledgement_timeout(self) -> Optional[pulumi.Input[str]]:
        """
        Time in seconds that an incident changes to the Triggered State after being Acknowledged. Disabled if set to the `"null"` string.
        """
        return pulumi.get(self, "acknowledgement_timeout")

    @acknowledgement_timeout.setter
    def acknowledgement_timeout(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "acknowledgement_timeout", value)

    @property
    @pulumi.getter(name="alertCreation")
    def alert_creation(self) -> Optional[pulumi.Input[str]]:
        """
        Must be one of two values. PagerDuty receives events from your monitoring systems and can then create incidents in different ways. Value "create_incidents" is default: events will create an incident that cannot be merged. Value "create_alerts_and_incidents" is the alternative: events will create an alert and then add it to a new incident, these incidents can be merged. This option is recommended.
        """
        return pulumi.get(self, "alert_creation")

    @alert_creation.setter
    def alert_creation(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "alert_creation", value)

    @property
    @pulumi.getter(name="alertGrouping")
    def alert_grouping(self) -> Optional[pulumi.Input[str]]:
        """
        (Deprecated) Defines how alerts on this service will be automatically grouped into incidents. Note that the alert grouping features are available only on certain plans. If not set, each alert will create a separate incident; If value is set to `time`: All alerts within a specified duration will be grouped into the same incident. This duration is set in the `alert_grouping_timeout` setting (described below). Available on Standard, Enterprise, and Event Intelligence plans; If value is set to `intelligent` - Alerts will be intelligently grouped based on a machine learning model that looks at the alert summary, timing, and the history of grouped alerts. Available on Enterprise and Event Intelligence plan.
        """
        return pulumi.get(self, "alert_grouping")

    @alert_grouping.setter
    def alert_grouping(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "alert_grouping", value)

    @property
    @pulumi.getter(name="alertGroupingParameters")
    def alert_grouping_parameters(self) -> Optional[pulumi.Input['ServiceAlertGroupingParametersArgs']]:
        """
        Defines how alerts on this service will be automatically grouped into incidents. Note that the alert grouping features are available only on certain plans. If not set, each alert will create a separate incident.
        """
        return pulumi.get(self, "alert_grouping_parameters")

    @alert_grouping_parameters.setter
    def alert_grouping_parameters(self, value: Optional[pulumi.Input['ServiceAlertGroupingParametersArgs']]):
        pulumi.set(self, "alert_grouping_parameters", value)

    @property
    @pulumi.getter(name="alertGroupingTimeout")
    def alert_grouping_timeout(self) -> Optional[pulumi.Input[int]]:
        """
        (Deprecated) The duration in minutes within which to automatically group incoming alerts. This setting applies only when `alert_grouping` is set to `time`. To continue grouping alerts until the incident is resolved, set this value to `0`.
        """
        return pulumi.get(self, "alert_grouping_timeout")

    @alert_grouping_timeout.setter
    def alert_grouping_timeout(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "alert_grouping_timeout", value)

    @property
    @pulumi.getter(name="autoResolveTimeout")
    def auto_resolve_timeout(self) -> Optional[pulumi.Input[str]]:
        """
        Time in seconds that an incident is automatically resolved if left open for that long. Disabled if set to the `"null"` string.
        """
        return pulumi.get(self, "auto_resolve_timeout")

    @auto_resolve_timeout.setter
    def auto_resolve_timeout(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "auto_resolve_timeout", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        Description of the service; the constructor substitutes 'Managed by Pulumi' when unset.
        """
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter(name="incidentUrgencyRule")
    def incident_urgency_rule(self) -> Optional[pulumi.Input['ServiceIncidentUrgencyRuleArgs']]:
        # NOTE(review): semantics defined by ServiceIncidentUrgencyRuleArgs —
        # confirm against the PagerDuty provider docs.
        return pulumi.get(self, "incident_urgency_rule")

    @incident_urgency_rule.setter
    def incident_urgency_rule(self, value: Optional[pulumi.Input['ServiceIncidentUrgencyRuleArgs']]):
        pulumi.set(self, "incident_urgency_rule", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the service.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="scheduledActions")
    def scheduled_actions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ServiceScheduledActionArgs']]]]:
        # NOTE(review): semantics defined by ServiceScheduledActionArgs —
        # confirm against the PagerDuty provider docs.
        return pulumi.get(self, "scheduled_actions")

    @scheduled_actions.setter
    def scheduled_actions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ServiceScheduledActionArgs']]]]):
        pulumi.set(self, "scheduled_actions", value)

    @property
    @pulumi.getter(name="supportHours")
    def support_hours(self) -> Optional[pulumi.Input['ServiceSupportHoursArgs']]:
        # NOTE(review): semantics defined by ServiceSupportHoursArgs —
        # confirm against the PagerDuty provider docs.
        return pulumi.get(self, "support_hours")

    @support_hours.setter
    def support_hours(self, value: Optional[pulumi.Input['ServiceSupportHoursArgs']]):
        pulumi.set(self, "support_hours", value)
@pulumi.input_type
class _ServiceState:
def __init__(__self__, *,
acknowledgement_timeout: Optional[pulumi.Input[str]] = None,
alert_creation: Optional[pulumi.Input[str]] = None,
alert_grouping: Optional[pulumi.Input[str]] = None,
alert_grouping_parameters: Optional[pulumi.Input['ServiceAlertGroupingParametersArgs']] = None,
alert_grouping_timeout: Optional[pulumi.Input[int]] = None,
auto_resolve_timeout: Optional[pulumi.Input[str]] = None,
created_at: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
escalation_policy: Optional[pulumi.Input[str]] = None,
html_url: Optional[pulumi.Input[str]] = None,
incident_urgency_rule: Optional[pulumi.Input['ServiceIncidentUrgencyRuleArgs']] = None,
last_incident_timestamp: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
scheduled_actions: Optional[pulumi.Input[Sequence[pulumi.Input['ServiceScheduledActionArgs']]]] = None,
status: Optional[pulumi.Input[str]] = None,
support_hours: Optional[pulumi.Input['ServiceSupportHoursArgs']] = None):
"""
Input properties used for looking up and filtering Service resources.
:param pulumi.Input[str] acknowledgement_timeout: Time in seconds that an incident changes to the Triggered State after being Acknowledged. Disabled if set to the `"null"` string.
:param pulumi.Input[str] alert_creation: Must be one of two values. PagerDuty receives events from your monitoring systems and can then create incidents in different ways. Value "create_incidents" is default: events will create an incident that cannot be merged. Value "create_alerts_and_incidents" is the alternative: events will create an alert and then add it to a new incident, these incidents can be merged. This option is recommended.
:param pulumi.Input[str] alert_grouping: (Deprecated) Defines how alerts on this service will be automatically grouped into incidents. Note that the alert grouping features are available only on certain plans. If not set, each alert will create a separate incident; If value is set to `time`: All alerts within a specified duration will be grouped into the same incident. This duration is set in the `alert_grouping_timeout` setting (described below). Available on Standard, Enterprise, and Event Intelligence plans; If value is set to `intelligent` - Alerts will be intelligently grouped based on a machine learning model that looks at the alert summary, timing, and the history of grouped alerts. Available on Enterprise and Event Intelligence plan.
:param pulumi.Input['ServiceAlertGroupingParametersArgs'] alert_grouping_parameters: Defines how alerts on this service will be automatically grouped into incidents. Note that the alert grouping features are available only on certain plans. If not set, each alert will create a separate incident.
:param pulumi.Input[int] alert_grouping_timeout: (Deprecated) The duration in minutes within which to automatically group incoming alerts. This setting applies only when `alert_grouping` is set to `time`. To continue grouping alerts until the incident is resolved, set this value to `0`.
:param pulumi.Input[str] auto_resolve_timeout: Time in seconds that an incident is automatically resolved if left open for that long. Disabled if set to the `"null"` string.
:param pulumi.Input[str] escalation_policy: The escalation policy used by this service.
:param pulumi.Input[str] name: The name of the service.
"""
if acknowledgement_timeout is not None:
pulumi.set(__self__, "acknowledgement_timeout", acknowledgement_timeout)
if alert_creation is not None:
pulumi.set(__self__, "alert_creation", alert_creation)
if alert_grouping is not None:
pulumi.set(__self__, "alert_grouping", alert_grouping)
if alert_grouping_parameters is not None:
pulumi.set(__self__, "alert_grouping_parameters", alert_grouping_parameters)
if alert_grouping_timeout is not None:
pulumi.set(__self__, "alert_grouping_timeout", alert_grouping_timeout)
if auto_resolve_timeout is not None:
pulumi.set(__self__, "auto_resolve_timeout", auto_resolve_timeout)
if created_at is not None:
pulumi.set(__self__, "created_at", created_at)
if description is None:
description = 'Managed by Pulumi'
if description is not None:
pulumi.set(__self__, "description", description)
if escalation_policy is not None:
pulumi.set(__self__, "escalation_policy", escalation_policy)
if html_url is not None:
pulumi.set(__self__, "html_url", html_url)
if incident_urgency_rule is not None:
pulumi.set(__self__, "incident_urgency_rule", incident_urgency_rule)
if last_incident_timestamp is not None:
pulumi.set(__self__, "last_incident_timestamp", last_incident_timestamp)
if name is not None:
pulumi.set(__self__, "name", name)
if scheduled_actions is not None:
pulumi.set(__self__, "scheduled_actions", scheduled_actions)
if status is not None:
pulumi.set(__self__, "status", status)
if support_hours is not None:
pulumi.set(__self__, "support_hours", support_hours)
@property
@pulumi.getter(name="acknowledgementTimeout")
def acknowledgement_timeout(self) -> Optional[pulumi.Input[str]]:
"""
Time in seconds that an incident changes to the Triggered State after being Acknowledged. Disabled if set to the `"null"` string.
"""
return pulumi.get(self, "acknowledgement_timeout")
@acknowledgement_timeout.setter
def acknowledgement_timeout(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "acknowledgement_timeout", value)
@property
@pulumi.getter(name="alertCreation")
def alert_creation(self) -> Optional[pulumi.Input[str]]:
"""
Must be one of two values. PagerDuty receives events from your monitoring systems and can then create incidents in different ways. Value "create_incidents" is default: events will create an incident that cannot be merged. Value "create_alerts_and_incidents" is the alternative: events will create an alert and then add it to a new incident, these incidents can be merged. This option is recommended.
"""
return pulumi.get(self, "alert_creation")
@alert_creation.setter
def alert_creation(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "alert_creation", value)
@property
@pulumi.getter(name="alertGrouping")
def alert_grouping(self) -> Optional[pulumi.Input[str]]:
"""
(Deprecated) Defines how alerts on this service will be automatically grouped into incidents. Note that the alert grouping features are available only on certain plans. If not set, each alert will create a separate incident; If value is set to `time`: All alerts within a specified duration will be grouped into the same incident. This duration is set in the `alert_grouping_timeout` setting (described below). Available on Standard, Enterprise, and Event Intelligence plans; If value is set to `intelligent` - Alerts will be intelligently grouped based on a machine learning model that looks at the alert summary, timing, and the history of grouped alerts. Available on Enterprise and Event Intelligence plan.
"""
return pulumi.get(self, "alert_grouping")
@alert_grouping.setter
def alert_grouping(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "alert_grouping", value)
@property
@pulumi.getter(name="alertGroupingParameters")
def alert_grouping_parameters(self) -> Optional[pulumi.Input['ServiceAlertGroupingParametersArgs']]:
"""
Defines how alerts on this service will be automatically grouped into incidents. Note that the alert grouping features are available only on certain plans. If not set, each alert will create a separate incident.
"""
return pulumi.get(self, "alert_grouping_parameters")
@alert_grouping_parameters.setter
def alert_grouping_parameters(self, value: Optional[pulumi.Input['ServiceAlertGroupingParametersArgs']]):
pulumi.set(self, "alert_grouping_parameters", value)
@property
@pulumi.getter(name="alertGroupingTimeout")
def alert_grouping_timeout(self) -> Optional[pulumi.Input[int]]:
"""
(Deprecated) The duration in minutes within which to automatically group incoming alerts. This setting applies only when `alert_grouping` is set to `time`. To continue grouping alerts until the incident is resolved, set this value to `0`.
"""
return pulumi.get(self, "alert_grouping_timeout")
@alert_grouping_timeout.setter
def alert_grouping_timeout(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "alert_grouping_timeout", value)
@property
@pulumi.getter(name="autoResolveTimeout")
def auto_resolve_timeout(self) -> Optional[pulumi.Input[str]]:
"""
Time in seconds that an incident is automatically resolved if left open for that long. Disabled if set to the `"null"` string.
"""
return pulumi.get(self, "auto_resolve_timeout")
@auto_resolve_timeout.setter
def auto_resolve_timeout(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "auto_resolve_timeout", value)
@property
@pulumi.getter(name="createdAt")
def created_at(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "created_at")
@created_at.setter
def created_at(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "created_at", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter(name="escalationPolicy")
def escalation_policy(self) -> Optional[pulumi.Input[str]]:
"""
The escalation policy used by this service.
"""
return pulumi.get(self, "escalation_policy")
@escalation_policy.setter
def escalation_policy(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "escalation_policy", value)
@property
@pulumi.getter(name="htmlUrl")
def html_url(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "html_url")
@html_url.setter
def html_url(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "html_url", value)
@property
@pulumi.getter(name="incidentUrgencyRule")
def incident_urgency_rule(self) -> Optional[pulumi.Input['ServiceIncidentUrgencyRuleArgs']]:
return pulumi.get(self, "incident_urgency_rule")
@incident_urgency_rule.setter
def incident_urgency_rule(self, value: Optional[pulumi.Input['ServiceIncidentUrgencyRuleArgs']]):
pulumi.set(self, "incident_urgency_rule", value)
@property
@pulumi.getter(name="lastIncidentTimestamp")
def last_incident_timestamp(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "last_incident_timestamp")
@last_incident_timestamp.setter
def last_incident_timestamp(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "last_incident_timestamp", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the service.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="scheduledActions")
def scheduled_actions(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ServiceScheduledActionArgs']]]]:
return pulumi.get(self, "scheduled_actions")
@scheduled_actions.setter
def scheduled_actions(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ServiceScheduledActionArgs']]]]):
pulumi.set(self, "scheduled_actions", value)
@property
@pulumi.getter
def status(self) -> Optional[pulumi.Input[str]]:
    """State value for ``status``."""
    return pulumi.get(self, "status")

@status.setter
def status(self, value: Optional[pulumi.Input[str]]):
    """Set the ``status`` state value."""
    pulumi.set(self, "status", value)
@property
@pulumi.getter(name="supportHours")
def support_hours(self) -> Optional[pulumi.Input['ServiceSupportHoursArgs']]:
    """State value for ``support_hours``."""
    return pulumi.get(self, "support_hours")

@support_hours.setter
def support_hours(self, value: Optional[pulumi.Input['ServiceSupportHoursArgs']]):
    """Set the ``support_hours`` state value."""
    pulumi.set(self, "support_hours", value)
class Service(pulumi.CustomResource):
    """A PagerDuty service resource (generated Pulumi wrapper).

    The two ``@overload`` ``__init__`` signatures exist only for typing/IDE help;
    the undecorated ``__init__`` dispatches to ``_internal_init``, which registers
    the resource with the Pulumi engine.
    """
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 acknowledgement_timeout: Optional[pulumi.Input[str]] = None,
                 alert_creation: Optional[pulumi.Input[str]] = None,
                 alert_grouping: Optional[pulumi.Input[str]] = None,
                 alert_grouping_parameters: Optional[pulumi.Input[pulumi.InputType['ServiceAlertGroupingParametersArgs']]] = None,
                 alert_grouping_timeout: Optional[pulumi.Input[int]] = None,
                 auto_resolve_timeout: Optional[pulumi.Input[str]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 escalation_policy: Optional[pulumi.Input[str]] = None,
                 incident_urgency_rule: Optional[pulumi.Input[pulumi.InputType['ServiceIncidentUrgencyRuleArgs']]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 scheduled_actions: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ServiceScheduledActionArgs']]]]] = None,
                 support_hours: Optional[pulumi.Input[pulumi.InputType['ServiceSupportHoursArgs']]] = None,
                 __props__=None):
        """
        A [service](https://developer.pagerduty.com/api-reference/reference/REST/openapiv3.json/paths/~1services/get) represents something you monitor (like a web service, email service, or database service). It is a container for related incidents that associates them with escalation policies.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_pagerduty as pagerduty

        example_user = pagerduty.User("exampleUser",
            email="125.greenholt.earline@graham.name",
            teams=[pagerduty_team["example"]["id"]])
        foo = pagerduty.EscalationPolicy("foo",
            num_loops=2,
            rules=[pagerduty.EscalationPolicyRuleArgs(
                escalation_delay_in_minutes=10,
                targets=[pagerduty.EscalationPolicyRuleTargetArgs(
                    type="user",
                    id=example_user.id,
                )],
            )])
        example_service = pagerduty.Service("exampleService",
            auto_resolve_timeout="14400",
            acknowledgement_timeout="600",
            escalation_policy=pagerduty_escalation_policy["example"]["id"],
            alert_creation="create_alerts_and_incidents")
        ```

        ## Import

        Services can be imported using the `id`, e.g.

        ```sh
        $ pulumi import pagerduty:index/service:Service main PLBP09X
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] acknowledgement_timeout: Time in seconds that an incident changes to the Triggered State after being Acknowledged. Disabled if set to the `"null"` string.
        :param pulumi.Input[str] alert_creation: Must be one of two values. PagerDuty receives events from your monitoring systems and can then create incidents in different ways. Value "create_incidents" is default: events will create an incident that cannot be merged. Value "create_alerts_and_incidents" is the alternative: events will create an alert and then add it to a new incident, these incidents can be merged. This option is recommended.
        :param pulumi.Input[str] alert_grouping: (Deprecated) Defines how alerts on this service will be automatically grouped into incidents. Note that the alert grouping features are available only on certain plans. If not set, each alert will create a separate incident; If value is set to `time`: All alerts within a specified duration will be grouped into the same incident. This duration is set in the `alert_grouping_timeout` setting (described below). Available on Standard, Enterprise, and Event Intelligence plans; If value is set to `intelligent` - Alerts will be intelligently grouped based on a machine learning model that looks at the alert summary, timing, and the history of grouped alerts. Available on Enterprise and Event Intelligence plan.
        :param pulumi.Input[pulumi.InputType['ServiceAlertGroupingParametersArgs']] alert_grouping_parameters: Defines how alerts on this service will be automatically grouped into incidents. Note that the alert grouping features are available only on certain plans. If not set, each alert will create a separate incident.
        :param pulumi.Input[int] alert_grouping_timeout: (Deprecated) The duration in minutes within which to automatically group incoming alerts. This setting applies only when `alert_grouping` is set to `time`. To continue grouping alerts until the incident is resolved, set this value to `0`.
        :param pulumi.Input[str] auto_resolve_timeout: Time in seconds that an incident is automatically resolved if left open for that long. Disabled if set to the `"null"` string.
        :param pulumi.Input[str] escalation_policy: The escalation policy used by this service.
        :param pulumi.Input[str] name: The name of the service.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: ServiceArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        A [service](https://developer.pagerduty.com/api-reference/reference/REST/openapiv3.json/paths/~1services/get) represents something you monitor (like a web service, email service, or database service). It is a container for related incidents that associates them with escalation policies.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_pagerduty as pagerduty

        example_user = pagerduty.User("exampleUser",
            email="125.greenholt.earline@graham.name",
            teams=[pagerduty_team["example"]["id"]])
        foo = pagerduty.EscalationPolicy("foo",
            num_loops=2,
            rules=[pagerduty.EscalationPolicyRuleArgs(
                escalation_delay_in_minutes=10,
                targets=[pagerduty.EscalationPolicyRuleTargetArgs(
                    type="user",
                    id=example_user.id,
                )],
            )])
        example_service = pagerduty.Service("exampleService",
            auto_resolve_timeout="14400",
            acknowledgement_timeout="600",
            escalation_policy=pagerduty_escalation_policy["example"]["id"],
            alert_creation="create_alerts_and_incidents")
        ```

        ## Import

        Services can be imported using the `id`, e.g.

        ```sh
        $ pulumi import pagerduty:index/service:Service main PLBP09X
        ```

        :param str resource_name: The name of the resource.
        :param ServiceArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Runtime dispatcher for the two overloads above: accepts either a ServiceArgs
        # bundle or loose keyword arguments, and forwards both to _internal_init.
        resource_args, opts = _utilities.get_resource_args_opts(ServiceArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       acknowledgement_timeout: Optional[pulumi.Input[str]] = None,
                       alert_creation: Optional[pulumi.Input[str]] = None,
                       alert_grouping: Optional[pulumi.Input[str]] = None,
                       alert_grouping_parameters: Optional[pulumi.Input[pulumi.InputType['ServiceAlertGroupingParametersArgs']]] = None,
                       alert_grouping_timeout: Optional[pulumi.Input[int]] = None,
                       auto_resolve_timeout: Optional[pulumi.Input[str]] = None,
                       description: Optional[pulumi.Input[str]] = None,
                       escalation_policy: Optional[pulumi.Input[str]] = None,
                       incident_urgency_rule: Optional[pulumi.Input[pulumi.InputType['ServiceIncidentUrgencyRuleArgs']]] = None,
                       name: Optional[pulumi.Input[str]] = None,
                       scheduled_actions: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ServiceScheduledActionArgs']]]]] = None,
                       support_hours: Optional[pulumi.Input[pulumi.InputType['ServiceSupportHoursArgs']]] = None,
                       __props__=None):
        # Fill in default resource options and the SDK version when not supplied.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        # opts.id set means "adopt an existing resource"; only then may __props__
        # (pre-populated state) be passed in.
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = ServiceArgs.__new__(ServiceArgs)

            __props__.__dict__["acknowledgement_timeout"] = acknowledgement_timeout
            __props__.__dict__["alert_creation"] = alert_creation
            __props__.__dict__["alert_grouping"] = alert_grouping
            __props__.__dict__["alert_grouping_parameters"] = alert_grouping_parameters
            __props__.__dict__["alert_grouping_timeout"] = alert_grouping_timeout
            __props__.__dict__["auto_resolve_timeout"] = auto_resolve_timeout
            # Provider-level default description.
            if description is None:
                description = 'Managed by Pulumi'
            __props__.__dict__["description"] = description
            # escalation_policy is required unless rehydrating from an existing URN.
            if escalation_policy is None and not opts.urn:
                raise TypeError("Missing required property 'escalation_policy'")
            __props__.__dict__["escalation_policy"] = escalation_policy
            __props__.__dict__["incident_urgency_rule"] = incident_urgency_rule
            __props__.__dict__["name"] = name
            __props__.__dict__["scheduled_actions"] = scheduled_actions
            __props__.__dict__["support_hours"] = support_hours
            # Output-only properties start as None and are resolved by the engine.
            __props__.__dict__["created_at"] = None
            __props__.__dict__["html_url"] = None
            __props__.__dict__["last_incident_timestamp"] = None
            __props__.__dict__["status"] = None
        super(Service, __self__).__init__(
            'pagerduty:index/service:Service',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            acknowledgement_timeout: Optional[pulumi.Input[str]] = None,
            alert_creation: Optional[pulumi.Input[str]] = None,
            alert_grouping: Optional[pulumi.Input[str]] = None,
            alert_grouping_parameters: Optional[pulumi.Input[pulumi.InputType['ServiceAlertGroupingParametersArgs']]] = None,
            alert_grouping_timeout: Optional[pulumi.Input[int]] = None,
            auto_resolve_timeout: Optional[pulumi.Input[str]] = None,
            created_at: Optional[pulumi.Input[str]] = None,
            description: Optional[pulumi.Input[str]] = None,
            escalation_policy: Optional[pulumi.Input[str]] = None,
            html_url: Optional[pulumi.Input[str]] = None,
            incident_urgency_rule: Optional[pulumi.Input[pulumi.InputType['ServiceIncidentUrgencyRuleArgs']]] = None,
            last_incident_timestamp: Optional[pulumi.Input[str]] = None,
            name: Optional[pulumi.Input[str]] = None,
            scheduled_actions: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ServiceScheduledActionArgs']]]]] = None,
            status: Optional[pulumi.Input[str]] = None,
            support_hours: Optional[pulumi.Input[pulumi.InputType['ServiceSupportHoursArgs']]] = None) -> 'Service':
        """
        Get an existing Service resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] acknowledgement_timeout: Time in seconds that an incident changes to the Triggered State after being Acknowledged. Disabled if set to the `"null"` string.
        :param pulumi.Input[str] alert_creation: Must be one of two values. PagerDuty receives events from your monitoring systems and can then create incidents in different ways. Value "create_incidents" is default: events will create an incident that cannot be merged. Value "create_alerts_and_incidents" is the alternative: events will create an alert and then add it to a new incident, these incidents can be merged. This option is recommended.
        :param pulumi.Input[str] alert_grouping: (Deprecated) Defines how alerts on this service will be automatically grouped into incidents. Note that the alert grouping features are available only on certain plans. If not set, each alert will create a separate incident; If value is set to `time`: All alerts within a specified duration will be grouped into the same incident. This duration is set in the `alert_grouping_timeout` setting (described below). Available on Standard, Enterprise, and Event Intelligence plans; If value is set to `intelligent` - Alerts will be intelligently grouped based on a machine learning model that looks at the alert summary, timing, and the history of grouped alerts. Available on Enterprise and Event Intelligence plan.
        :param pulumi.Input[pulumi.InputType['ServiceAlertGroupingParametersArgs']] alert_grouping_parameters: Defines how alerts on this service will be automatically grouped into incidents. Note that the alert grouping features are available only on certain plans. If not set, each alert will create a separate incident.
        :param pulumi.Input[int] alert_grouping_timeout: (Deprecated) The duration in minutes within which to automatically group incoming alerts. This setting applies only when `alert_grouping` is set to `time`. To continue grouping alerts until the incident is resolved, set this value to `0`.
        :param pulumi.Input[str] auto_resolve_timeout: Time in seconds that an incident is automatically resolved if left open for that long. Disabled if set to the `"null"` string.
        :param pulumi.Input[str] escalation_policy: The escalation policy used by this service.
        :param pulumi.Input[str] name: The name of the service.
        """
        # Merge the caller's options with the target id so the engine performs a lookup
        # rather than a create, then hand the supplied state through a fresh state bag.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _ServiceState.__new__(_ServiceState)

        __props__.__dict__["acknowledgement_timeout"] = acknowledgement_timeout
        __props__.__dict__["alert_creation"] = alert_creation
        __props__.__dict__["alert_grouping"] = alert_grouping
        __props__.__dict__["alert_grouping_parameters"] = alert_grouping_parameters
        __props__.__dict__["alert_grouping_timeout"] = alert_grouping_timeout
        __props__.__dict__["auto_resolve_timeout"] = auto_resolve_timeout
        __props__.__dict__["created_at"] = created_at
        __props__.__dict__["description"] = description
        __props__.__dict__["escalation_policy"] = escalation_policy
        __props__.__dict__["html_url"] = html_url
        __props__.__dict__["incident_urgency_rule"] = incident_urgency_rule
        __props__.__dict__["last_incident_timestamp"] = last_incident_timestamp
        __props__.__dict__["name"] = name
        __props__.__dict__["scheduled_actions"] = scheduled_actions
        __props__.__dict__["status"] = status
        __props__.__dict__["support_hours"] = support_hours
        return Service(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="acknowledgementTimeout")
    def acknowledgement_timeout(self) -> pulumi.Output[Optional[str]]:
        """
        Time in seconds that an incident changes to the Triggered State after being Acknowledged. Disabled if set to the `"null"` string.
        """
        return pulumi.get(self, "acknowledgement_timeout")

    @property
    @pulumi.getter(name="alertCreation")
    def alert_creation(self) -> pulumi.Output[Optional[str]]:
        """
        Must be one of two values. PagerDuty receives events from your monitoring systems and can then create incidents in different ways. Value "create_incidents" is default: events will create an incident that cannot be merged. Value "create_alerts_and_incidents" is the alternative: events will create an alert and then add it to a new incident, these incidents can be merged. This option is recommended.
        """
        return pulumi.get(self, "alert_creation")

    @property
    @pulumi.getter(name="alertGrouping")
    def alert_grouping(self) -> pulumi.Output[str]:
        """
        (Deprecated) Defines how alerts on this service will be automatically grouped into incidents. Note that the alert grouping features are available only on certain plans. If not set, each alert will create a separate incident; If value is set to `time`: All alerts within a specified duration will be grouped into the same incident. This duration is set in the `alert_grouping_timeout` setting (described below). Available on Standard, Enterprise, and Event Intelligence plans; If value is set to `intelligent` - Alerts will be intelligently grouped based on a machine learning model that looks at the alert summary, timing, and the history of grouped alerts. Available on Enterprise and Event Intelligence plan.
        """
        return pulumi.get(self, "alert_grouping")

    @property
    @pulumi.getter(name="alertGroupingParameters")
    def alert_grouping_parameters(self) -> pulumi.Output['outputs.ServiceAlertGroupingParameters']:
        """
        Defines how alerts on this service will be automatically grouped into incidents. Note that the alert grouping features are available only on certain plans. If not set, each alert will create a separate incident.
        """
        return pulumi.get(self, "alert_grouping_parameters")

    @property
    @pulumi.getter(name="alertGroupingTimeout")
    def alert_grouping_timeout(self) -> pulumi.Output[Optional[int]]:
        """
        (Deprecated) The duration in minutes within which to automatically group incoming alerts. This setting applies only when `alert_grouping` is set to `time`. To continue grouping alerts until the incident is resolved, set this value to `0`.
        """
        return pulumi.get(self, "alert_grouping_timeout")

    @property
    @pulumi.getter(name="autoResolveTimeout")
    def auto_resolve_timeout(self) -> pulumi.Output[Optional[str]]:
        """
        Time in seconds that an incident is automatically resolved if left open for that long. Disabled if set to the `"null"` string.
        """
        return pulumi.get(self, "auto_resolve_timeout")

    @property
    @pulumi.getter(name="createdAt")
    def created_at(self) -> pulumi.Output[str]:
        """Output-only: creation timestamp reported by the provider."""
        return pulumi.get(self, "created_at")

    @property
    @pulumi.getter
    def description(self) -> pulumi.Output[str]:
        """The service description (defaults to 'Managed by Pulumi')."""
        return pulumi.get(self, "description")

    @property
    @pulumi.getter(name="escalationPolicy")
    def escalation_policy(self) -> pulumi.Output[str]:
        """
        The escalation policy used by this service.
        """
        return pulumi.get(self, "escalation_policy")

    @property
    @pulumi.getter(name="htmlUrl")
    def html_url(self) -> pulumi.Output[str]:
        """Output-only: HTML URL reported by the provider."""
        return pulumi.get(self, "html_url")

    @property
    @pulumi.getter(name="incidentUrgencyRule")
    def incident_urgency_rule(self) -> pulumi.Output['outputs.ServiceIncidentUrgencyRule']:
        """The service's incident urgency rule."""
        return pulumi.get(self, "incident_urgency_rule")

    @property
    @pulumi.getter(name="lastIncidentTimestamp")
    def last_incident_timestamp(self) -> pulumi.Output[str]:
        """Output-only: timestamp of the last incident, reported by the provider."""
        return pulumi.get(self, "last_incident_timestamp")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        The name of the service.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="scheduledActions")
    def scheduled_actions(self) -> pulumi.Output[Optional[Sequence['outputs.ServiceScheduledAction']]]:
        """The service's scheduled actions."""
        return pulumi.get(self, "scheduled_actions")

    @property
    @pulumi.getter
    def status(self) -> pulumi.Output[str]:
        """Output-only: current service status reported by the provider."""
        return pulumi.get(self, "status")

    @property
    @pulumi.getter(name="supportHours")
    def support_hours(self) -> pulumi.Output[Optional['outputs.ServiceSupportHours']]:
        """The service's support hours configuration."""
        return pulumi.get(self, "support_hours")
| 59.436198
| 759
| 0.703135
| 5,470
| 45,647
| 5.670201
| 0.057587
| 0.059937
| 0.074123
| 0.053908
| 0.932551
| 0.921686
| 0.9065
| 0.899536
| 0.882158
| 0.855365
| 0
| 0.001218
| 0.208667
| 45,647
| 767
| 760
| 59.51369
| 0.857428
| 0.396193
| 0
| 0.796681
| 1
| 0
| 0.158279
| 0.088439
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1639
| false
| 0.002075
| 0.014523
| 0.041494
| 0.278008
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7af32d3d98e38527b899d0e507a3b64734e8d01e
| 146
|
py
|
Python
|
libs/passwd.py
|
archever/flask-web
|
cd120f64deec31fd1a87285372abaa22fc379b9f
|
[
"MIT"
] | null | null | null |
libs/passwd.py
|
archever/flask-web
|
cd120f64deec31fd1a87285372abaa22fc379b9f
|
[
"MIT"
] | null | null | null |
libs/passwd.py
|
archever/flask-web
|
cd120f64deec31fd1a87285372abaa22fc379b9f
|
[
"MIT"
] | null | null | null |
# coding=utf-8
from hashlib import sha1
from libs.encoding import b
def gen_passwd(passwd, salt):
    """Return the hex SHA-1 digest of ``passwd + salt``.

    NOTE(review): SHA-1 is cryptographically broken and fast to brute-force;
    real password storage should use a dedicated KDF (``hashlib.pbkdf2_hmac``,
    bcrypt/scrypt/argon2). Changing the algorithm here would invalidate all
    previously stored hashes, so this is flagged rather than replaced.
    """
    # `b` (from libs.encoding) coerces the concatenated value to bytes for hashlib.
    return sha1(b(passwd + salt)).hexdigest()
| 18.25
| 45
| 0.732877
| 23
| 146
| 4.608696
| 0.695652
| 0.188679
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02439
| 0.157534
| 146
| 7
| 46
| 20.857143
| 0.837398
| 0.082192
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0.5
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 0
|
0
| 7
|
bb0cd90fcd1d701f1b31634edbe2cc2019ce54d8
| 35,508
|
py
|
Python
|
py2df/reading/event_decorators.py
|
PgBiel/Py2DF
|
cbce77763e90b63e6824b4d3f506236fb9925a5c
|
[
"MIT"
] | 1
|
2021-06-02T00:07:28.000Z
|
2021-06-02T00:07:28.000Z
|
py2df/reading/event_decorators.py
|
jmyrick02/Py2DF
|
cbce77763e90b63e6824b4d3f506236fb9925a5c
|
[
"MIT"
] | null | null | null |
py2df/reading/event_decorators.py
|
jmyrick02/Py2DF
|
cbce77763e90b63e6824b4d3f506236fb9925a5c
|
[
"MIT"
] | null | null | null |
"""
Events and their decorators.
"""
import typing
from .reader import DFReader
from ..classes import FunctionHolder, Codeblock, JSONData
from ..enums import PlayerEventType, EntityEventType, BlockType
from ..utils import remove_u200b_from_doc
from ..constants import BLOCK_ID, EMPTY_ARGS
class PlayerEvent(FunctionHolder, Codeblock, JSONData):
    """
    Describes a Player Event on DiamondFire. Implements :class:`~py2df.classes.abc.Codeblock` and
    :class:`~py2df.classes.abc.FunctionHolder`.

    Check out this class's classmethods for the decorators.

    Example usage::

        @PlayerEvent.join
        def on_join(self):
            ...

    In that example, all DF-relevant code inside ``on_join`` will be executed whenever a player joins the plot.

    Attributes
    ----------\u200b
    block : :attr:`~py2df.enums.parameters.BlockType.PLAYER_EVENT`
        Constant class var; type of event.

    args : ``None``
        There are no arguments to be passed to an event, so this will always remain as None.

    action : :class:`~py2df.enums.events.PlayerEventType`
        The kind of Player Event this is.

    length : :class:`int`
        The space, in Minecraft blocks, that this codeblock occupies. For a Player Event, this is equal to ``2``.

    function : :class:`Callable`
        The function representing the line of code that is preceded by this Player Event block.
    """
    # Only these three are real per-instance attributes; everything else is a class constant.
    __slots__ = ("args", "action", "function")

    block: BlockType = BlockType.PLAYER_EVENT  # codeblock category: always a player event
    length: int = 2  # width in Minecraft blocks
    args: None
    action: PlayerEventType
    data: None = None  # player events carry no extra data
    function: typing.Optional[typing.Callable]
    sub_action: None = None  # player events have no sub-action
    target: None = None  # player events have no target
def __init__(
    self, action: PlayerEventType, func: typing.Optional[typing.Callable] = None,
    append_to_reader: bool = False
):
    """
    Initialize the Player Event.

    Parameters
    ----------
    action : :class:`~py2df.enums.events.PlayerEventType`
        The kind of Player Event this is.
    func : Optional[:class:`Callable`]
        The function that contains the code that will be executed when this event is triggered,
        if already known. Defaults to ``None`` (e.g. when reconstructing from JSON data).
    append_to_reader : :class:`bool`
        Whether or not should already add this event as one of the :class:`DFReader` singleton's function holders.
        Defaults to ``False``; this will always be ``True`` when using decorators.

    Raises
    ------
    :exc:`TypeError`
        If ``func`` is provided but is not callable.
    """
    self.action = PlayerEventType(action)
    # Bug fix: `func` is declared Optional with a default of None, yet the old check
    # (`if not callable(func)`) rejected None too — which made `from_json_data`
    # (constructing `cls(PlayerEventType(...))` with no function) always raise.
    # Only validate when a function was actually supplied.
    if func is not None and not callable(func):
        raise TypeError("'func' parameter must be a callable (preferably, a function).")
    self.function = func
    self.args = None
    if append_to_reader:
        DFReader().append_function(typing.cast(FunctionHolder, self))
def as_json_data(self) -> dict:
    """
    Build a JSON-serializable :class:`dict` describing this player event.

    Returns
    -------
    :class:`dict`
        A valid JSON-serializable dict.
    """
    # Same four keys the DiamondFire template format expects.
    return {
        "id": BLOCK_ID,
        "block": PlayerEvent.block.value,
        "args": EMPTY_ARGS,
        "action": self.action.value,
    }
@classmethod
def from_json_data(cls, data: dict) -> "PlayerEvent":
    """
    Obtains a :class:`PlayerEvent` instance from valid, **pre-parsed** JSON dict, with the following structure
    (at least the following key and value)::

        { "action": str }

    where ``str`` is a valid **value** (not attr) of :class:`~py2df.enums.events.PlayerEventType`.

    Parameters
    ----------
    data : :class:`dict`
        Already parsed JSON dict containing valid Player Event data.

    Returns
    -------
    :class:`PlayerEvent`
        The corresponding Player Event instance.

    Raises
    ------
    :exc:`TypeError`
        If the data dict provided is malformed (does not follow the given structure).
    """
    if (
        not isinstance(data, dict)
        # or "id" not in data  # not really required
        or "action" not in data
        or not isinstance(data["action"], str)  # idiomatic type check (was `type(...) == str`)
        or data["action"] not in PlayerEventType.__members__
        # NOTE(review): `__members__` is keyed by member *name*, while the docstring and the
        # `PlayerEventType(...)` call below expect a member *value*. If names and values ever
        # differ in the enum, this check is wrong (`_value2member_map_` would match the
        # documented contract) — confirm against the PlayerEventType definition.
    ):
        raise TypeError(
            # Fixed message: the old concatenation produced "...that is a" + "valid ..."
            # = "avalid" (missing space).
            "Malformed PlayerEvent parsed JSON data! Must be a dict with, at least, an 'action' str value that is a "
            "valid 'PlayerEventType' value."
        )
    return cls(PlayerEventType(data["action"]))
# region:PlayerEvent_methods
@classmethod
def break_block(cls, func: typing.Callable) -> "PlayerEvent":
    """Decorator: run *func*'s code whenever a player breaks a block.

    Parameters
    ----------
    func : :class:`Callable`
        Function holding the code executed when this event fires.

    Returns
    -------
    :class:`PlayerEvent`
        The registered :class:`PlayerEvent` instance.
    """
    return cls(action=PlayerEventType.BREAK_BLOCK, func=func, append_to_reader=True)
@classmethod
def break_item(cls, func: typing.Callable) -> "PlayerEvent":
    """Decorator: run *func*'s code whenever a player breaks an item.

    Parameters
    ----------
    func : :class:`Callable`
        Function holding the code executed when this event fires.

    Returns
    -------
    :class:`PlayerEvent`
        The registered :class:`PlayerEvent` instance.
    """
    return cls(action=PlayerEventType.BREAK_ITEM, func=func, append_to_reader=True)
@classmethod
def change_slot(cls, func: typing.Callable) -> "PlayerEvent":
    """Decorator: run *func*'s code whenever a player changes their hotbar slot.

    Parameters
    ----------
    func : :class:`Callable`
        Function holding the code executed when this event fires.

    Returns
    -------
    :class:`PlayerEvent`
        The registered :class:`PlayerEvent` instance.
    """
    return cls(action=PlayerEventType.CHANGE_SLOT, func=func, append_to_reader=True)
@classmethod
def click_entity(cls, func: typing.Callable) -> "PlayerEvent":
    """Decorator: run *func*'s code whenever a player right clicks an entity.

    Parameters
    ----------
    func : :class:`Callable`
        Function holding the code executed when this event fires.

    Returns
    -------
    :class:`PlayerEvent`
        The registered :class:`PlayerEvent` instance.
    """
    return cls(action=PlayerEventType.CLICK_ENTITY, func=func, append_to_reader=True)
@classmethod
def click_item(cls, func: typing.Callable) -> "PlayerEvent":
    """Decorator: run *func*'s code whenever a player clicks an item in an inventory menu.

    Parameters
    ----------
    func : :class:`Callable`
        Function holding the code executed when this event fires.

    Returns
    -------
    :class:`PlayerEvent`
        The registered :class:`PlayerEvent` instance.
    """
    return cls(action=PlayerEventType.CLICK_ITEM, func=func, append_to_reader=True)
@classmethod
def click_own_inv(cls, func: typing.Callable) -> "PlayerEvent":
    """Decorator: run *func*'s code whenever a player clicks an item inside their own inventory.

    Parameters
    ----------
    func : :class:`Callable`
        Function holding the code executed when this event fires.

    Returns
    -------
    :class:`PlayerEvent`
        The registered :class:`PlayerEvent` instance.
    """
    return cls(action=PlayerEventType.CLICK_OWN_INV, func=func, append_to_reader=True)
@classmethod
def click_player(cls, func: typing.Callable) -> "PlayerEvent":
    """Decorator: run *func*'s code whenever a player clicks another player.

    Parameters
    ----------
    func : :class:`Callable`
        Function holding the code executed when this event fires.

    Returns
    -------
    :class:`PlayerEvent`
        The registered :class:`PlayerEvent` instance.
    """
    return cls(action=PlayerEventType.CLICK_PLAYER, func=func, append_to_reader=True)
@classmethod
def close_inv(cls, func: typing.Callable) -> "PlayerEvent":
    """Decorator: run *func*'s code whenever a player closes an inventory.

    Parameters
    ----------
    func : :class:`Callable`
        Function holding the code executed when this event fires.

    Returns
    -------
    :class:`PlayerEvent`
        The registered :class:`PlayerEvent` instance.
    """
    return cls(action=PlayerEventType.CLOSE_INV, func=func, append_to_reader=True)
@classmethod
def command(cls, func: typing.Callable) -> "PlayerEvent":
    """Decorator: run *func*'s code whenever a player types a command on the plot.

    Parameters
    ----------
    func : :class:`Callable`
        Function holding the code executed when this event fires.

    Returns
    -------
    :class:`PlayerEvent`
        The registered :class:`PlayerEvent` instance.
    """
    return cls(action=PlayerEventType.COMMAND, func=func, append_to_reader=True)
@classmethod
def consume(cls, func: typing.Callable) -> "PlayerEvent":
    """Decorator: run *func*'s code whenever a player eats or drinks an item.

    Parameters
    ----------
    func : :class:`Callable`
        Function holding the code executed when this event fires.

    Returns
    -------
    :class:`PlayerEvent`
        The registered :class:`PlayerEvent` instance.
    """
    return cls(action=PlayerEventType.CONSUME, func=func, append_to_reader=True)
@classmethod
def damage_entity(cls, func: typing.Callable) -> "PlayerEvent":
    """Decorator: run *func*'s code whenever a player damages an entity.

    Parameters
    ----------
    func : :class:`Callable`
        Function holding the code executed when this event fires.

    Returns
    -------
    :class:`PlayerEvent`
        The registered :class:`PlayerEvent` instance.
    """
    return cls(action=PlayerEventType.DAMAGE_ENTITY, func=func, append_to_reader=True)
@classmethod
def death(cls, func: typing.Callable) -> "PlayerEvent":
    """Decorator: run *func*'s code whenever a player dies, not as a result of another player or entity.

    Parameters
    ----------
    func : :class:`Callable`
        Function holding the code executed when this event fires.

    Returns
    -------
    :class:`PlayerEvent`
        The registered :class:`PlayerEvent` instance.
    """
    return cls(action=PlayerEventType.DEATH, func=func, append_to_reader=True)
@classmethod
def dismount(cls, func: typing.Callable) -> "PlayerEvent":
    """Decorator: run *func*'s code whenever a player dismounts a vehicle or other entity.

    Parameters
    ----------
    func : :class:`Callable`
        Function holding the code executed when this event fires.

    Returns
    -------
    :class:`PlayerEvent`
        The registered :class:`PlayerEvent` instance.
    """
    return cls(action=PlayerEventType.DISMOUNT, func=func, append_to_reader=True)
@classmethod
def drop_item(cls, func: typing.Callable) -> "PlayerEvent":
    """Decorator: run *func*'s code whenever a player drops an item.

    Parameters
    ----------
    func : :class:`Callable`
        Function holding the code executed when this event fires.

    Returns
    -------
    :class:`PlayerEvent`
        The registered :class:`PlayerEvent` instance.
    """
    return cls(action=PlayerEventType.DROP_ITEM, func=func, append_to_reader=True)
@classmethod
def entity_dmg_player(cls, func: typing.Callable) -> "PlayerEvent":
    """Decorator: run *func*'s code whenever an entity damages a player.

    Parameters
    ----------
    func : :class:`Callable`
        Function holding the code executed when this event fires.

    Returns
    -------
    :class:`PlayerEvent`
        The registered :class:`PlayerEvent` instance.
    """
    return cls(action=PlayerEventType.ENTITY_DMG_PLAYER, func=func, append_to_reader=True)
@classmethod
def fall_damage(cls, func: typing.Callable) -> "PlayerEvent":
    """Executes code when a player takes fall damage. Decorator.

    Parameters
    ----------
    func : :class:`Callable`
        Function containing the code to be run when this event is triggered.

    Returns
    -------
    :class:`PlayerEvent`
        The corresponding :class:`PlayerEvent` instance object.
    """
    return cls(PlayerEventType.FALL_DAMAGE, func, append_to_reader=True)
@classmethod
def join(cls, func: typing.Callable) -> "PlayerEvent":
    """Decorator: run *func*'s code whenever a player joins the plot.

    Parameters
    ----------
    func : :class:`Callable`
        Function holding the code executed when this event fires.

    Returns
    -------
    :class:`PlayerEvent`
        The registered :class:`PlayerEvent` instance.
    """
    return cls(action=PlayerEventType.JOIN, func=func, append_to_reader=True)
@classmethod
def jump(cls, func: typing.Callable) -> "PlayerEvent":
    """Decorator: run *func*'s code whenever a player jumps.

    Parameters
    ----------
    func : :class:`Callable`
        Function holding the code executed when this event fires.

    Returns
    -------
    :class:`PlayerEvent`
        The registered :class:`PlayerEvent` instance.
    """
    return cls(action=PlayerEventType.JUMP, func=func, append_to_reader=True)
@classmethod
def kill_mob(cls, func: typing.Callable) -> "PlayerEvent":
    """Decorator: run *func*'s code whenever a player kills a mob.

    Parameters
    ----------
    func : :class:`Callable`
        Function holding the code executed when this event fires.

    Returns
    -------
    :class:`PlayerEvent`
        The registered :class:`PlayerEvent` instance.
    """
    return cls(action=PlayerEventType.KILL_MOB, func=func, append_to_reader=True)
@classmethod
def kill_player(cls, func: typing.Callable) -> "PlayerEvent":
    """Decorator: runs the decorated code whenever a player kills another player.

    Parameters
    ----------
    func : :class:`Callable`
        Function whose body holds the code to execute on this event.

    Returns
    -------
    :class:`PlayerEvent`
        The resulting :class:`PlayerEvent`, already registered with the reader.
    """
    kind = PlayerEventType.KILL_PLAYER
    return cls(kind, func, append_to_reader=True)
@classmethod
def left_click(cls, func: typing.Callable) -> "PlayerEvent":
    """Decorator: runs the decorated code whenever a player left clicks.

    Parameters
    ----------
    func : :class:`Callable`
        Function whose body holds the code to execute on this event.

    Returns
    -------
    :class:`PlayerEvent`
        The resulting :class:`PlayerEvent`, already registered with the reader.
    """
    kind = PlayerEventType.LEFT_CLICK
    return cls(kind, func, append_to_reader=True)
@classmethod
def mob_kill_player(cls, func: typing.Callable) -> "PlayerEvent":
    """Decorator: runs the decorated code whenever a mob kills a player.

    Parameters
    ----------
    func : :class:`Callable`
        Function whose body holds the code to execute on this event.

    Returns
    -------
    :class:`PlayerEvent`
        The resulting :class:`PlayerEvent`, already registered with the reader.
    """
    kind = PlayerEventType.MOB_KILL_PLAYER
    return cls(kind, func, append_to_reader=True)
@classmethod
def pickup_item(cls, func: typing.Callable) -> "PlayerEvent":
    """Decorator: runs the decorated code whenever a player picks up an item.

    Parameters
    ----------
    func : :class:`Callable`
        Function whose body holds the code to execute on this event.

    Returns
    -------
    :class:`PlayerEvent`
        The resulting :class:`PlayerEvent`, already registered with the reader.
    """
    kind = PlayerEventType.PICKUP_ITEM
    return cls(kind, func, append_to_reader=True)
@classmethod
def place_block(cls, func: typing.Callable) -> "PlayerEvent":
    """Decorator: runs the decorated code whenever a player places a block.

    Parameters
    ----------
    func : :class:`Callable`
        Function whose body holds the code to execute on this event.

    Returns
    -------
    :class:`PlayerEvent`
        The resulting :class:`PlayerEvent`, already registered with the reader.
    """
    kind = PlayerEventType.PLACE_BLOCK
    return cls(kind, func, append_to_reader=True)
@classmethod
def player_dmg_player(cls, func: typing.Callable) -> "PlayerEvent":
    """Decorator: runs the decorated code whenever a player damages another player.

    Parameters
    ----------
    func : :class:`Callable`
        Function whose body holds the code to execute on this event.

    Returns
    -------
    :class:`PlayerEvent`
        The resulting :class:`PlayerEvent`, already registered with the reader.
    """
    kind = PlayerEventType.PLAYER_DMG_PLAYER
    return cls(kind, func, append_to_reader=True)
@classmethod
def player_take_dmg(cls, func: typing.Callable) -> "PlayerEvent":
    """Decorator: runs the decorated code whenever a player takes damage.

    Parameters
    ----------
    func : :class:`Callable`
        Function whose body holds the code to execute on this event.

    Returns
    -------
    :class:`PlayerEvent`
        The resulting :class:`PlayerEvent`, already registered with the reader.
    """
    kind = PlayerEventType.PLAYER_TAKE_DMG
    return cls(kind, func, append_to_reader=True)
@classmethod
def proj_dmg_player(cls, func: typing.Callable) -> "PlayerEvent":
    """Decorator: runs the decorated code whenever a projectile damages a player.

    Parameters
    ----------
    func : :class:`Callable`
        Function whose body holds the code to execute on this event.

    Returns
    -------
    :class:`PlayerEvent`
        The resulting :class:`PlayerEvent`, already registered with the reader.
    """
    kind = PlayerEventType.PROJ_DMG_PLAYER
    return cls(kind, func, append_to_reader=True)
@classmethod
def proj_hit(cls, func: typing.Callable) -> "PlayerEvent":
    """Decorator: runs the decorated code whenever a projectile launched by a player
    hits a block, an entity, or another player.

    Parameters
    ----------
    func : :class:`Callable`
        Function whose body holds the code to execute on this event.

    Returns
    -------
    :class:`PlayerEvent`
        The resulting :class:`PlayerEvent`, already registered with the reader.
    """
    kind = PlayerEventType.PROJ_HIT
    return cls(kind, func, append_to_reader=True)
@classmethod
def quit(cls, func: typing.Callable) -> "PlayerEvent":
    """Decorator: runs the decorated code whenever a player leaves the plot.

    Parameters
    ----------
    func : :class:`Callable`
        Function whose body holds the code to execute on this event.

    Returns
    -------
    :class:`PlayerEvent`
        The resulting :class:`PlayerEvent`, already registered with the reader.
    """
    kind = PlayerEventType.QUIT
    return cls(kind, func, append_to_reader=True)
@classmethod
def respawn(cls, func: typing.Callable) -> "PlayerEvent":
    """Decorator: runs the decorated code whenever a player respawns.

    Parameters
    ----------
    func : :class:`Callable`
        Function whose body holds the code to execute on this event.

    Returns
    -------
    :class:`PlayerEvent`
        The resulting :class:`PlayerEvent`, already registered with the reader.
    """
    kind = PlayerEventType.RESPAWN
    return cls(kind, func, append_to_reader=True)
@classmethod
def right_click(cls, func: typing.Callable) -> "PlayerEvent":
    """Decorator: runs the decorated code whenever a player right clicks while
    looking at a block or holding an item.

    Parameters
    ----------
    func : :class:`Callable`
        Function whose body holds the code to execute on this event.

    Returns
    -------
    :class:`PlayerEvent`
        The resulting :class:`PlayerEvent`, already registered with the reader.
    """
    kind = PlayerEventType.RIGHT_CLICK
    return cls(kind, func, append_to_reader=True)
@classmethod
def riptide(cls, func: typing.Callable) -> "PlayerEvent":
    """Decorator: runs the decorated code whenever a player throws a riptide trident.

    Parameters
    ----------
    func : :class:`Callable`
        Function whose body holds the code to execute on this event.

    Returns
    -------
    :class:`PlayerEvent`
        The resulting :class:`PlayerEvent`, already registered with the reader.
    """
    kind = PlayerEventType.RIPTIDE
    return cls(kind, func, append_to_reader=True)
@classmethod
def shoot_bow(cls, func: typing.Callable) -> "PlayerEvent":
    """Decorator: runs the decorated code whenever a player fires an arrow with a bow.

    Parameters
    ----------
    func : :class:`Callable`
        Function whose body holds the code to execute on this event.

    Returns
    -------
    :class:`PlayerEvent`
        The resulting :class:`PlayerEvent`, already registered with the reader.
    """
    kind = PlayerEventType.SHOOT_BOW
    return cls(kind, func, append_to_reader=True)
@classmethod
def sneak(cls, func: typing.Callable) -> "PlayerEvent":
    """Decorator: runs the decorated code whenever a player sneaks.

    Parameters
    ----------
    func : :class:`Callable`
        Function whose body holds the code to execute on this event.

    Returns
    -------
    :class:`PlayerEvent`
        The resulting :class:`PlayerEvent`, already registered with the reader.
    """
    kind = PlayerEventType.SNEAK
    return cls(kind, func, append_to_reader=True)
@classmethod
def start_fly(cls, func: typing.Callable) -> "PlayerEvent":
    """Decorator: runs the decorated code whenever a player starts flying.

    Parameters
    ----------
    func : :class:`Callable`
        Function whose body holds the code to execute on this event.

    Returns
    -------
    :class:`PlayerEvent`
        The resulting :class:`PlayerEvent`, already registered with the reader.
    """
    kind = PlayerEventType.START_FLY
    return cls(kind, func, append_to_reader=True)
@classmethod
def start_sprint(cls, func: typing.Callable) -> "PlayerEvent":
    """Decorator: runs the decorated code whenever a player starts sprinting.

    Parameters
    ----------
    func : :class:`Callable`
        Function whose body holds the code to execute on this event.

    Returns
    -------
    :class:`PlayerEvent`
        The resulting :class:`PlayerEvent`, already registered with the reader.
    """
    kind = PlayerEventType.START_SPRINT
    return cls(kind, func, append_to_reader=True)
@classmethod
def stop_fly(cls, func: typing.Callable) -> "PlayerEvent":
    """Decorator: runs the decorated code whenever a player stops flying.

    Parameters
    ----------
    func : :class:`Callable`
        Function whose body holds the code to execute on this event.

    Returns
    -------
    :class:`PlayerEvent`
        The resulting :class:`PlayerEvent`, already registered with the reader.
    """
    kind = PlayerEventType.STOP_FLY
    return cls(kind, func, append_to_reader=True)
@classmethod
def stop_sprint(cls, func: typing.Callable) -> "PlayerEvent":
    """Decorator: runs the decorated code whenever a player stops sprinting.

    Parameters
    ----------
    func : :class:`Callable`
        Function whose body holds the code to execute on this event.

    Returns
    -------
    :class:`PlayerEvent`
        The resulting :class:`PlayerEvent`, already registered with the reader.
    """
    kind = PlayerEventType.STOP_SPRINT
    return cls(kind, func, append_to_reader=True)
@classmethod
def swap_hands(cls, func: typing.Callable) -> "PlayerEvent":
    """Decorator: runs the decorated code whenever a player swaps an item or items
    between their main hand and off hand.

    Parameters
    ----------
    func : :class:`Callable`
        Function whose body holds the code to execute on this event.

    Returns
    -------
    :class:`PlayerEvent`
        The resulting :class:`PlayerEvent`, already registered with the reader.
    """
    kind = PlayerEventType.SWAP_HANDS
    return cls(kind, func, append_to_reader=True)
@classmethod
def unsneak(cls, func: typing.Callable) -> "PlayerEvent":
    """Decorator: runs the decorated code whenever a player stops sneaking.

    Parameters
    ----------
    func : :class:`Callable`
        Function whose body holds the code to execute on this event.

    Returns
    -------
    :class:`PlayerEvent`
        The resulting :class:`PlayerEvent`, already registered with the reader.
    """
    kind = PlayerEventType.UNSNEAK
    return cls(kind, func, append_to_reader=True)
@classmethod
def walk(cls, func: typing.Callable) -> "PlayerEvent":
    """Decorator: runs the decorated code while a player is walking.

    Parameters
    ----------
    func : :class:`Callable`
        Function whose body holds the code to execute on this event.

    Returns
    -------
    :class:`PlayerEvent`
        The resulting :class:`PlayerEvent`, already registered with the reader.
    """
    kind = PlayerEventType.WALK
    return cls(kind, func, append_to_reader=True)
# endregion:PlayerEvent_methods
class EntityEvent(FunctionHolder):
    """
    Describes an Entity Event on DiamondFire. Implements :class:`~py2df.classes.abc.Codeblock` and
    :class:`~py2df.classes.abc.FunctionHolder`.

    Check out this class's classmethods for the decorators.

    Example usage::

        @EntityEvent.entity_death
        def on_entity_death(self):
            ...

    In that example, all DF-relevant code inside ``on_entity_death`` will be executed whenever an entity dies in the
    plot.

    Attributes
    ----------\u200b
    block : :attr:`~py2df.enums.parameters.BlockType.ENTITY_EVENT`
        Constant class var; type of event.
    args : ``None``
        There are no arguments to be passed to an event, so this will always remain as None.
    action : :class:`~py2df.enums.events.EntityEventType`
        The kind of Entity Event this is.
    length : :class:`int`
        The space, in Minecraft blocks, that this codeblock occupies. For an Entity Event, this is equal to ``2``.
    function : Optional[:class:`Callable`]
        The function representing the line of code that is preceded by this Entity Event block, or ``None``.
    """
    __slots__ = ("args", "action", "function")

    block: BlockType = BlockType.ENTITY_EVENT
    length: int = 2
    args: None
    action: EntityEventType
    data: None = None
    function: typing.Optional[typing.Callable]
    sub_action: None = None
    target: None = None

    def __init__(
        self, action: EntityEventType, func: typing.Optional[typing.Callable] = None,
        append_to_reader: bool = False
    ):
        """
        Initialize the Entity Event.

        Parameters
        ----------
        action : :class:`~py2df.enums.events.EntityEventType`
            The kind of Entity Event this is.
        func : Optional[:class:`Callable`]
            The function that contains the code that will be executed when this event is triggered, or
            ``None`` for an event without an attached function (e.g. one built by :meth:`from_json_data`).
        append_to_reader : :class:`bool`
            Whether or not should already add, on ``__init__``, this event as one of the :class:`DFReader`
            singleton's function holders. Defaults to ``False``; this will always be ``True`` when using decorators.

        Raises
        ------
        :exc:`TypeError`
            If ``func`` is given but is not callable.
        """
        self.action = EntityEventType(action)

        # BUGFIX: `func` is annotated Optional and defaults to None, and from_json_data()
        # constructs events without a function -- so only reject non-None non-callables.
        # (Previously, a None func always raised, making from_json_data unusable.)
        if func is not None and not callable(func):
            raise TypeError("'func' parameter must be a callable (preferably, a function).")

        self.function = func
        self.args = None

        if append_to_reader:
            # Register this event with the global DFReader singleton.
            DFReader().append_function(typing.cast(FunctionHolder, self))

    def as_json_data(self) -> dict:
        """
        Obtains a valid json-serializable :class:`dict` representing this entity event.

        Returns
        -------
        :class:`dict`
            A valid JSON-serializable dict.
        """
        return dict(
            id=BLOCK_ID,
            block=EntityEvent.block.value,
            args=EMPTY_ARGS,
            action=self.action.value
        )

    @classmethod
    def from_json_data(cls, data: dict) -> "EntityEvent":
        """
        Obtains a :class:`EntityEvent` instance from valid, **pre-parsed** JSON dict, with the following structure
        (at least the following key and value)::

            { "action": str }

        where ``str`` is a valid **value** (not attr) of :class:`~py2df.enums.events.EntityEventType`.

        Parameters
        ----------
        data : :class:`dict`
            Already parsed JSON dict containing valid Entity Event data.

        Returns
        -------
        :class:`EntityEvent`
            The corresponding Entity Event instance.

        Raises
        ------
        :exc:`TypeError`
            If the data dict provided is malformed (does not follow the given structure).
        """
        if (
            not isinstance(data, dict)
            # or "id" not in data  # not really required
            or "action" not in data
            or not isinstance(data["action"], str)  # BUGFIX: isinstance, not `type(x) == str`
        ):
            raise TypeError(
                "Malformed EntityEvent parsed JSON data! Must be a dict with, at least, an 'action' str value that"
                " is a valid 'EntityEventType' value."  # BUGFIX: was missing a space ("...is avalid...")
            )

        # BUGFIX: the old check tested `EntityEventType.__members__`, i.e. member *names*,
        # while construction below looks up by *value* (per the docstring). Validate by value.
        try:
            action = EntityEventType(data["action"])
        except ValueError:
            raise TypeError(
                "Malformed EntityEvent parsed JSON data! Must be a dict with, at least, an 'action' str value that"
                " is a valid 'EntityEventType' value."
            ) from None

        return cls(action)

    # region:EntityEvent_methods

    @classmethod
    def block_fall(cls, func: typing.Callable) -> "EntityEvent":
        """Executes code when a block affected by gravity turns into a falling block. Decorator.

        Parameters
        ----------
        func : :class:`Callable`
            Function containing the code to be run when this event is triggered.

        Returns
        -------
        :class:`EntityEvent`
            The corresponding :class:`EntityEvent` instance object.
        """
        return cls(EntityEventType.BLOCK_FALL, func, append_to_reader=True)

    @classmethod
    def entity_death(cls, func: typing.Callable) -> "EntityEvent":
        """Executes code when an entity dies by natural causes. Decorator.

        Parameters
        ----------
        func : :class:`Callable`
            Function containing the code to be run when this event is triggered.

        Returns
        -------
        :class:`EntityEvent`
            The corresponding :class:`EntityEvent` instance object.
        """
        return cls(EntityEventType.ENTITY_DEATH, func, append_to_reader=True)

    @classmethod
    def entity_dmg(cls, func: typing.Callable) -> "EntityEvent":
        """Executes code when an entity takes damage. Decorator.

        Parameters
        ----------
        func : :class:`Callable`
            Function containing the code to be run when this event is triggered.

        Returns
        -------
        :class:`EntityEvent`
            The corresponding :class:`EntityEvent` instance object.
        """
        return cls(EntityEventType.ENTITY_DMG, func, append_to_reader=True)

    @classmethod
    def entity_dmg_entity(cls, func: typing.Callable) -> "EntityEvent":
        """Executes code when an entity damages another entity. Decorator.

        Parameters
        ----------
        func : :class:`Callable`
            Function containing the code to be run when this event is triggered.

        Returns
        -------
        :class:`EntityEvent`
            The corresponding :class:`EntityEvent` instance object.
        """
        return cls(EntityEventType.ENTITY_DMG_ENTITY, func, append_to_reader=True)

    @classmethod
    def entity_kill_entity(cls, func: typing.Callable) -> "EntityEvent":
        """Executes code when an entity kills another entity. Decorator.

        Parameters
        ----------
        func : :class:`Callable`
            Function containing the code to be run when this event is triggered.

        Returns
        -------
        :class:`EntityEvent`
            The corresponding :class:`EntityEvent` instance object.
        """
        return cls(EntityEventType.ENTITY_KILL_ENTITY, func, append_to_reader=True)

    @classmethod
    def falling_block_land(cls, func: typing.Callable) -> "EntityEvent":
        """Executes code when a falling block lands on the ground. Decorator.

        Parameters
        ----------
        func : :class:`Callable`
            Function containing the code to be run when this event is triggered.

        Returns
        -------
        :class:`EntityEvent`
            The corresponding :class:`EntityEvent` instance object.
        """
        return cls(EntityEventType.FALLING_BLOCK_LAND, func, append_to_reader=True)

    @classmethod
    def proj_dmg_entity(cls, func: typing.Callable) -> "EntityEvent":
        """Executes code when a projectile damages an entity. Decorator.

        Parameters
        ----------
        func : :class:`Callable`
            Function containing the code to be run when this event is triggered.

        Returns
        -------
        :class:`EntityEvent`
            The corresponding :class:`EntityEvent` instance object.
        """
        return cls(EntityEventType.PROJ_DMG_ENTITY, func, append_to_reader=True)

    @classmethod
    def proj_kill_entity(cls, func: typing.Callable) -> "EntityEvent":
        """Executes code when a projectile kills an entity. Decorator.

        Parameters
        ----------
        func : :class:`Callable`
            Function containing the code to be run when this event is triggered.

        Returns
        -------
        :class:`EntityEvent`
            The corresponding :class:`EntityEvent` instance object.
        """
        return cls(EntityEventType.PROJ_KILL_ENTITY, func, append_to_reader=True)

    @classmethod
    def vehicle_damage(cls, func: typing.Callable) -> "EntityEvent":
        """Executes code when a vehicle entity (minecart or boat) is damaged. Decorator.

        Parameters
        ----------
        func : :class:`Callable`
            Function containing the code to be run when this event is triggered.

        Returns
        -------
        :class:`EntityEvent`
            The corresponding :class:`EntityEvent` instance object.
        """
        return cls(EntityEventType.VEHICLE_DAMAGE, func, append_to_reader=True)

    # endregion:EntityEvent_methods
# Strip the zero-width-space markers from both event classes' docstrings
# (presumably inserted to protect the reST section underlines -- confirm against the helper).
remove_u200b_from_doc((PlayerEvent, EntityEvent))
| 31.395225
| 120
| 0.601104
| 3,716
| 35,508
| 5.669268
| 0.074273
| 0.064556
| 0.037215
| 0.037025
| 0.902929
| 0.89429
| 0.890065
| 0.863293
| 0.817867
| 0.77657
| 0
| 0.001116
| 0.293596
| 35,508
| 1,130
| 121
| 31.423009
| 0.838775
| 0.527825
| 0
| 0.443515
| 0
| 0.008368
| 0.086337
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.23431
| false
| 0
| 0.025105
| 0
| 0.569038
| 0.008368
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
249f9dd441bbd92fd1ebb191eb0306f2966b1e43
| 9,845
|
py
|
Python
|
src/goose_lang/examples/nfs_spec/symtest/rfc1057/client.py
|
herbelin/perennial
|
49b044fa83b4df2dc23262571e79c1165006bdc8
|
[
"MIT"
] | 73
|
2019-09-24T14:50:57.000Z
|
2022-03-25T02:01:55.000Z
|
src/goose_lang/examples/nfs_spec/symtest/rfc1057/client.py
|
herbelin/perennial
|
49b044fa83b4df2dc23262571e79c1165006bdc8
|
[
"MIT"
] | 39
|
2020-01-31T19:08:09.000Z
|
2022-01-14T15:46:56.000Z
|
src/goose_lang/examples/nfs_spec/symtest/rfc1057/client.py
|
herbelin/perennial
|
49b044fa83b4df2dc23262571e79c1165006bdc8
|
[
"MIT"
] | 17
|
2020-01-22T14:49:13.000Z
|
2021-11-26T18:38:48.000Z
|
# Generated by rpcgen.py from /home/nickolai/proj/go-rpcgen/rfc1057/prot.x on Fri Dec 6 10:47:00 2019
import rpc
import const
import pack
class PMAP_PROG(object):
    # RPC client bindings for the portmapper program (PMAP_PROG, RFC 1057).
    # NOTE: generated by rpcgen.py from prot.x (see header comment above) --
    # do not hand-edit; regenerate from the .x source instead.
    # Each nested class is the same procedure set over one transport
    # (raw TCP, TCP, raw UDP, UDP); __getitem__ dispatches by version constant.

    class RawTCPPMAP_VERS(rpc.RawTCPClient):
        def __init__(self, host, port, **kwargs):
            # program/version are fixed for this binding; reject attempts to override them.
            if 'program' in kwargs or 'version' in kwargs:
                raise TypeError('Unexpected keyword argument')
            rpc.RawTCPClient.__init__(self, host, port, program=const.PMAP_PROG, version=const.PMAP_VERS, **kwargs)

        # void PMAPPROC_NULL(void)
        def PMAPPROC_NULL(self):
            procedure_id = 0
            self.call(procedure_id, '')
            return None

        # xbool PMAPPROC_SET(mapping)
        def PMAPPROC_SET(self, p0):
            procedure_id = 1
            packer = pack.protPacker()
            packer.pack_mapping(p0)
            res = self.call(procedure_id, packer.get_buffer())
            unpacker = pack.protUnpacker(res)
            res = unpacker.unpack_xbool()
            unpacker.done()
            return res

        # xbool PMAPPROC_UNSET(mapping)
        def PMAPPROC_UNSET(self, p0):
            procedure_id = 2
            packer = pack.protPacker()
            packer.pack_mapping(p0)
            res = self.call(procedure_id, packer.get_buffer())
            unpacker = pack.protUnpacker(res)
            res = unpacker.unpack_xbool()
            unpacker.done()
            return res

        # uint32 PMAPPROC_GETPORT(mapping)
        def PMAPPROC_GETPORT(self, p0):
            procedure_id = 3
            packer = pack.protPacker()
            packer.pack_mapping(p0)
            res = self.call(procedure_id, packer.get_buffer())
            unpacker = pack.protUnpacker(res)
            res = unpacker.unpack_uint32()
            unpacker.done()
            return res

        # pmaplist PMAPPROC_DUMP(void)
        def PMAPPROC_DUMP(self):
            procedure_id = 4
            res = self.call(procedure_id, '')
            unpacker = pack.protUnpacker(res)
            res = unpacker.unpack_pmaplist()
            unpacker.done()
            return res

        # call_result PMAPPROC_CALLIT(call_args)
        def PMAPPROC_CALLIT(self, p0):
            procedure_id = 5
            packer = pack.protPacker()
            packer.pack_call_args(p0)
            res = self.call(procedure_id, packer.get_buffer())
            unpacker = pack.protUnpacker(res)
            res = unpacker.unpack_call_result()
            unpacker.done()
            return res

    class TCPPMAP_VERS(rpc.TCPClient):
        def __init__(self, host, **kwargs):
            # program/version are fixed for this binding; reject attempts to override them.
            if 'program' in kwargs or 'version' in kwargs:
                raise TypeError('Unexpected keyword argument')
            rpc.TCPClient.__init__(self, host, program=const.PMAP_PROG, version=const.PMAP_VERS, **kwargs)

        # void PMAPPROC_NULL(void)
        def PMAPPROC_NULL(self):
            procedure_id = 0
            self.call(procedure_id, '')
            return None

        # xbool PMAPPROC_SET(mapping)
        def PMAPPROC_SET(self, p0):
            procedure_id = 1
            packer = pack.protPacker()
            packer.pack_mapping(p0)
            res = self.call(procedure_id, packer.get_buffer())
            unpacker = pack.protUnpacker(res)
            res = unpacker.unpack_xbool()
            unpacker.done()
            return res

        # xbool PMAPPROC_UNSET(mapping)
        def PMAPPROC_UNSET(self, p0):
            procedure_id = 2
            packer = pack.protPacker()
            packer.pack_mapping(p0)
            res = self.call(procedure_id, packer.get_buffer())
            unpacker = pack.protUnpacker(res)
            res = unpacker.unpack_xbool()
            unpacker.done()
            return res

        # uint32 PMAPPROC_GETPORT(mapping)
        def PMAPPROC_GETPORT(self, p0):
            procedure_id = 3
            packer = pack.protPacker()
            packer.pack_mapping(p0)
            res = self.call(procedure_id, packer.get_buffer())
            unpacker = pack.protUnpacker(res)
            res = unpacker.unpack_uint32()
            unpacker.done()
            return res

        # pmaplist PMAPPROC_DUMP(void)
        def PMAPPROC_DUMP(self):
            procedure_id = 4
            res = self.call(procedure_id, '')
            unpacker = pack.protUnpacker(res)
            res = unpacker.unpack_pmaplist()
            unpacker.done()
            return res

        # call_result PMAPPROC_CALLIT(call_args)
        def PMAPPROC_CALLIT(self, p0):
            procedure_id = 5
            packer = pack.protPacker()
            packer.pack_call_args(p0)
            res = self.call(procedure_id, packer.get_buffer())
            unpacker = pack.protUnpacker(res)
            res = unpacker.unpack_call_result()
            unpacker.done()
            return res

    class RawUDPPMAP_VERS(rpc.RawUDPClient):
        def __init__(self, host, port, **kwargs):
            # program/version are fixed for this binding; reject attempts to override them.
            if 'program' in kwargs or 'version' in kwargs:
                raise TypeError('Unexpected keyword argument')
            rpc.RawUDPClient.__init__(self, host, port, program=const.PMAP_PROG, version=const.PMAP_VERS, **kwargs)

        # void PMAPPROC_NULL(void)
        def PMAPPROC_NULL(self):
            procedure_id = 0
            self.call(procedure_id, '')
            return None

        # xbool PMAPPROC_SET(mapping)
        def PMAPPROC_SET(self, p0):
            procedure_id = 1
            packer = pack.protPacker()
            packer.pack_mapping(p0)
            res = self.call(procedure_id, packer.get_buffer())
            unpacker = pack.protUnpacker(res)
            res = unpacker.unpack_xbool()
            unpacker.done()
            return res

        # xbool PMAPPROC_UNSET(mapping)
        def PMAPPROC_UNSET(self, p0):
            procedure_id = 2
            packer = pack.protPacker()
            packer.pack_mapping(p0)
            res = self.call(procedure_id, packer.get_buffer())
            unpacker = pack.protUnpacker(res)
            res = unpacker.unpack_xbool()
            unpacker.done()
            return res

        # uint32 PMAPPROC_GETPORT(mapping)
        def PMAPPROC_GETPORT(self, p0):
            procedure_id = 3
            packer = pack.protPacker()
            packer.pack_mapping(p0)
            res = self.call(procedure_id, packer.get_buffer())
            unpacker = pack.protUnpacker(res)
            res = unpacker.unpack_uint32()
            unpacker.done()
            return res

        # pmaplist PMAPPROC_DUMP(void)
        def PMAPPROC_DUMP(self):
            procedure_id = 4
            res = self.call(procedure_id, '')
            unpacker = pack.protUnpacker(res)
            res = unpacker.unpack_pmaplist()
            unpacker.done()
            return res

        # call_result PMAPPROC_CALLIT(call_args)
        def PMAPPROC_CALLIT(self, p0):
            procedure_id = 5
            packer = pack.protPacker()
            packer.pack_call_args(p0)
            res = self.call(procedure_id, packer.get_buffer())
            unpacker = pack.protUnpacker(res)
            res = unpacker.unpack_call_result()
            unpacker.done()
            return res

    class UDPPMAP_VERS(rpc.UDPClient):
        def __init__(self, host, **kwargs):
            # program/version are fixed for this binding; reject attempts to override them.
            if 'program' in kwargs or 'version' in kwargs:
                raise TypeError('Unexpected keyword argument')
            rpc.UDPClient.__init__(self, host, program=const.PMAP_PROG, version=const.PMAP_VERS, **kwargs)

        # void PMAPPROC_NULL(void)
        def PMAPPROC_NULL(self):
            procedure_id = 0
            self.call(procedure_id, '')
            return None

        # xbool PMAPPROC_SET(mapping)
        def PMAPPROC_SET(self, p0):
            procedure_id = 1
            packer = pack.protPacker()
            packer.pack_mapping(p0)
            res = self.call(procedure_id, packer.get_buffer())
            unpacker = pack.protUnpacker(res)
            res = unpacker.unpack_xbool()
            unpacker.done()
            return res

        # xbool PMAPPROC_UNSET(mapping)
        def PMAPPROC_UNSET(self, p0):
            procedure_id = 2
            packer = pack.protPacker()
            packer.pack_mapping(p0)
            res = self.call(procedure_id, packer.get_buffer())
            unpacker = pack.protUnpacker(res)
            res = unpacker.unpack_xbool()
            unpacker.done()
            return res

        # uint32 PMAPPROC_GETPORT(mapping)
        def PMAPPROC_GETPORT(self, p0):
            procedure_id = 3
            packer = pack.protPacker()
            packer.pack_mapping(p0)
            res = self.call(procedure_id, packer.get_buffer())
            unpacker = pack.protUnpacker(res)
            res = unpacker.unpack_uint32()
            unpacker.done()
            return res

        # pmaplist PMAPPROC_DUMP(void)
        def PMAPPROC_DUMP(self):
            procedure_id = 4
            res = self.call(procedure_id, '')
            unpacker = pack.protUnpacker(res)
            res = unpacker.unpack_pmaplist()
            unpacker.done()
            return res

        # call_result PMAPPROC_CALLIT(call_args)
        def PMAPPROC_CALLIT(self, p0):
            procedure_id = 5
            packer = pack.protPacker()
            packer.pack_call_args(p0)
            res = self.call(procedure_id, packer.get_buffer())
            unpacker = pack.protUnpacker(res)
            res = unpacker.unpack_call_result()
            unpacker.done()
            return res

    def __getitem__(self, key):
        # Map a version constant to the matching transport-binding class on this object.
        d = {
            const.RawTCPPMAP_VERS : 'RawTCPPMAP_VERS',
            const.TCPPMAP_VERS : 'TCPPMAP_VERS',
            const.RawUDPPMAP_VERS : 'RawUDPPMAP_VERS',
            const.UDPPMAP_VERS : 'UDPPMAP_VERS'
        }
        return getattr(self, d[key])
# Rebind the class name to a singleton instance so version lookup via
# PMAP_PROG[version] (see __getitem__) works at module level.
PMAP_PROG = PMAP_PROG()
| 35.413669
| 115
| 0.567902
| 1,037
| 9,845
| 5.176471
| 0.088717
| 0.098361
| 0.076006
| 0.084948
| 0.915611
| 0.915611
| 0.915611
| 0.915611
| 0.915611
| 0.915611
| 0
| 0.013434
| 0.342204
| 9,845
| 277
| 116
| 35.541516
| 0.815473
| 0.084916
| 0
| 0.904977
| 1
| 0
| 0.024271
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.131222
| false
| 0
| 0.013575
| 0
| 0.280543
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
24b7dc248f850ce55c7761a239c7abc2acd09c7e
| 8,906
|
py
|
Python
|
dfirtrack_main/tests/tag/test_tag_views.py
|
blackhatethicalhacking/dfirtrack
|
9c2e13015291f2981d14d63c9683e7c447e91f3a
|
[
"MIT"
] | 4
|
2020-03-06T17:37:09.000Z
|
2020-03-17T07:50:55.000Z
|
dfirtrack_main/tests/tag/test_tag_views.py
|
blackhatethicalhacking/dfirtrack
|
9c2e13015291f2981d14d63c9683e7c447e91f3a
|
[
"MIT"
] | null | null | null |
dfirtrack_main/tests/tag/test_tag_views.py
|
blackhatethicalhacking/dfirtrack
|
9c2e13015291f2981d14d63c9683e7c447e91f3a
|
[
"MIT"
] | 1
|
2020-03-06T20:54:52.000Z
|
2020-03-06T20:54:52.000Z
|
from django.contrib.auth.models import User
from django.test import TestCase
from dfirtrack_main.models import Tag, Tagcolor
import urllib.parse
class TagViewTestCase(TestCase):
""" tag view tests """
@classmethod
def setUpTestData(cls):
# create object
tagcolor_1 = Tagcolor.objects.create(tagcolor_name='tag_1')
# create object
Tag.objects.create(tag_name='tag_1', tagcolor = tagcolor_1)
# create user
test_user = User.objects.create_user(username='testuser_tag', password='QVe1EH1Z5MshOW2GHS4b')
def test_tags_list_not_logged_in(self):
""" test list view """
# create url
destination = '/login/?next=' + urllib.parse.quote('/tags/', safe='')
# get response
response = self.client.get('/tags/', follow=True)
# compare
self.assertRedirects(response, destination, status_code=302, target_status_code=200)
def test_tags_list_logged_in(self):
""" test list view """
# login testuser
login = self.client.login(username='testuser_tag', password='QVe1EH1Z5MshOW2GHS4b')
# get response
response = self.client.get('/tags/')
# compare
self.assertEqual(response.status_code, 200)
def test_tags_list_template(self):
""" test list view """
# login testuser
login = self.client.login(username='testuser_tag', password='QVe1EH1Z5MshOW2GHS4b')
# get response
response = self.client.get('/tags/')
# compare
self.assertTemplateUsed(response, 'dfirtrack_main/tag/tags_list.html')
def test_tags_list_get_user_context(self):
""" test list view """
# login testuser
login = self.client.login(username='testuser_tag', password='QVe1EH1Z5MshOW2GHS4b')
# get response
response = self.client.get('/tags/')
# compare
self.assertEqual(str(response.context['user']), 'testuser_tag')
def test_tags_detail_not_logged_in(self):
""" test detail view """
# get object
tag_1 = Tag.objects.get(tag_name='tag_1')
# create url
destination = '/login/?next=' + urllib.parse.quote('/tags/' + str(tag_1.tag_id), safe='')
# get response
response = self.client.get('/tags/' + str(tag_1.tag_id), follow=True)
# compare
self.assertRedirects(response, destination, status_code=302, target_status_code=200)
def test_tags_detail_logged_in(self):
""" test detail view """
# get object
tag_1 = Tag.objects.get(tag_name='tag_1')
# login testuser
login = self.client.login(username='testuser_tag', password='QVe1EH1Z5MshOW2GHS4b')
# get response
response = self.client.get('/tags/' + str(tag_1.tag_id))
# compare
self.assertEqual(response.status_code, 200)
def test_tags_detail_template(self):
""" test detail view """
# get object
tag_1 = Tag.objects.get(tag_name='tag_1')
# login testuser
login = self.client.login(username='testuser_tag', password='QVe1EH1Z5MshOW2GHS4b')
# get response
response = self.client.get('/tags/' + str(tag_1.tag_id))
# compare
self.assertTemplateUsed(response, 'dfirtrack_main/tag/tags_detail.html')
def test_tags_detail_get_user_context(self):
""" test detail view """
# get object
tag_1 = Tag.objects.get(tag_name='tag_1')
# login testuser
login = self.client.login(username='testuser_tag', password='QVe1EH1Z5MshOW2GHS4b')
# get response
response = self.client.get('/tags/' + str(tag_1.tag_id))
# compare
self.assertEqual(str(response.context['user']), 'testuser_tag')
def test_tags_add_not_logged_in(self):
""" test add view """
# create url
destination = '/login/?next=' + urllib.parse.quote('/tags/add/', safe='')
# get response
response = self.client.get('/tags/add/', follow=True)
# compare
self.assertRedirects(response, destination, status_code=302, target_status_code=200)
def test_tags_add_logged_in(self):
""" test add view """
# login testuser
login = self.client.login(username='testuser_tag', password='QVe1EH1Z5MshOW2GHS4b')
# get response
response = self.client.get('/tags/add/')
# compare
self.assertEqual(response.status_code, 200)
def test_tags_add_template(self):
""" test add view """
# login testuser
login = self.client.login(username='testuser_tag', password='QVe1EH1Z5MshOW2GHS4b')
# get response
response = self.client.get('/tags/add/')
# compare
self.assertTemplateUsed(response, 'dfirtrack_main/tag/tags_add.html')
def test_tags_add_get_user_context(self):
""" test add view """
# login testuser
login = self.client.login(username='testuser_tag', password='QVe1EH1Z5MshOW2GHS4b')
# get response
response = self.client.get('/tags/add/')
# compare
self.assertEqual(str(response.context['user']), 'testuser_tag')
def test_tags_edit_not_logged_in(self):
""" test edit view """
# get object
tag_1 = Tag.objects.get(tag_name='tag_1')
# create url
destination = '/login/?next=' + urllib.parse.quote('/tags/' + str(tag_1.tag_id) + '/edit/', safe='')
# get response
response = self.client.get('/tags/' + str(tag_1.tag_id) + '/edit/', follow=True)
# compare
self.assertRedirects(response, destination, status_code=302, target_status_code=200)
def test_tags_edit_logged_in(self):
""" test edit view """
# get object
tag_1 = Tag.objects.get(tag_name='tag_1')
# login testuser
login = self.client.login(username='testuser_tag', password='QVe1EH1Z5MshOW2GHS4b')
# get response
response = self.client.get('/tags/' + str(tag_1.tag_id) + '/edit/')
# compare
self.assertEqual(response.status_code, 200)
def test_tags_edit_template(self):
""" test edit view """
# get object
tag_1 = Tag.objects.get(tag_name='tag_1')
# login testuser
login = self.client.login(username='testuser_tag', password='QVe1EH1Z5MshOW2GHS4b')
# get response
response = self.client.get('/tags/' + str(tag_1.tag_id) + '/edit/')
# compare
self.assertTemplateUsed(response, 'dfirtrack_main/tag/tags_edit.html')
def test_tags_edit_get_user_context(self):
    """Edit view exposes the logged-in user in the template context."""
    # get object
    tag_1 = Tag.objects.get(tag_name='tag_1')
    # log in testuser; the boolean return value is not needed, so it is not bound
    self.client.login(username='testuser_tag', password='QVe1EH1Z5MshOW2GHS4b')
    # get response
    response = self.client.get('/tags/' + str(tag_1.tag_id) + '/edit/')
    # context user must match the authenticated test user
    self.assertEqual(str(response.context['user']), 'testuser_tag')
def test_tags_delete_not_logged_in(self):
    """Delete view redirects anonymous users to the login page."""
    # fetch the tag created in setup
    tag_1 = Tag.objects.get(tag_name='tag_1')
    # build the delete path once and derive the expected login redirect from it
    path = '/tags/' + str(tag_1.tag_id) + '/delete/'
    destination = '/login/?next=' + urllib.parse.quote(path, safe='')
    # request the delete view without authenticating
    response = self.client.get(path, follow=True)
    # anonymous access must bounce to the login page with ?next= set
    self.assertRedirects(response, destination, status_code=302, target_status_code=200)
def test_tags_delete_logged_in(self):
    """Delete view returns HTTP 200 for an authenticated user."""
    # get object
    tag_1 = Tag.objects.get(tag_name='tag_1')
    # log in testuser; the boolean return value is not needed, so it is not bound
    self.client.login(username='testuser_tag', password='QVe1EH1Z5MshOW2GHS4b')
    # get response
    response = self.client.get('/tags/' + str(tag_1.tag_id) + '/delete/')
    # an authenticated user may open the delete confirmation page
    self.assertEqual(response.status_code, 200)
def test_tags_delete_template(self):
    """Delete view renders the expected template."""
    # get object
    tag_1 = Tag.objects.get(tag_name='tag_1')
    # log in testuser; the boolean return value is not needed, so it is not bound
    self.client.login(username='testuser_tag', password='QVe1EH1Z5MshOW2GHS4b')
    # get response
    response = self.client.get('/tags/' + str(tag_1.tag_id) + '/delete/')
    # the delete confirmation must be rendered with its dedicated template
    self.assertTemplateUsed(response, 'dfirtrack_main/tag/tags_delete.html')
def test_tags_delete_get_user_context(self):
    """Delete view exposes the logged-in user in the template context."""
    # get object
    tag_1 = Tag.objects.get(tag_name='tag_1')
    # log in testuser; the boolean return value is not needed, so it is not bound
    self.client.login(username='testuser_tag', password='QVe1EH1Z5MshOW2GHS4b')
    # get response
    response = self.client.get('/tags/' + str(tag_1.tag_id) + '/delete/')
    # context user must match the authenticated test user
    self.assertEqual(str(response.context['user']), 'testuser_tag')
| 36.801653
| 110
| 0.626432
| 1,050
| 8,906
| 5.112381
| 0.06381
| 0.030551
| 0.035209
| 0.085693
| 0.898845
| 0.874255
| 0.864009
| 0.853577
| 0.795268
| 0.789121
| 0
| 0.02493
| 0.243319
| 8,906
| 241
| 111
| 36.954357
| 0.771628
| 0.136986
| 0
| 0.558824
| 0
| 0
| 0.151637
| 0.022445
| 0
| 0
| 0
| 0
| 0.196078
| 1
| 0.205882
| false
| 0.156863
| 0.039216
| 0
| 0.254902
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
70072269bf5e3eb4d7bef74a9c24b14519c80777
| 144
|
py
|
Python
|
Python/tuple_set.py
|
mohammad716e/python_training
|
0654623c603c775ed2cbdc3919dc815891c8fdeb
|
[
"MIT"
] | null | null | null |
Python/tuple_set.py
|
mohammad716e/python_training
|
0654623c603c775ed2cbdc3919dc815891c8fdeb
|
[
"MIT"
] | null | null | null |
Python/tuple_set.py
|
mohammad716e/python_training
|
0654623c603c775ed2cbdc3919dc815891c8fdeb
|
[
"MIT"
] | null | null | null |
# 1
# tuples are immutable; the parentheses are optional when building one
t = (1, 2, 3)  # equivalent: t = 1, 2, 3
print(t)
# 2: sets keep only unique elements, so the duplicates collapse to {1, 2, 3}
q = {1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3}
print(q)
| 14.4
| 46
| 0.513889
| 40
| 144
| 1.85
| 0.325
| 0.162162
| 0.202703
| 0.216216
| 0.175676
| 0.094595
| 0
| 0
| 0
| 0
| 0
| 0.225225
| 0.229167
| 144
| 9
| 47
| 16
| 0.441441
| 0.402778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 0
| 0
| 1
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
707e95a9433e3e4925062999a7f7422b13d19196
| 130,537
|
py
|
Python
|
labware-library/src/labware-creator/protocolTemplates/CustomLabware_testprotocol.py
|
anuwrag/opentrons
|
28c8d76a19e367c6bd38f5290faaa32abf378715
|
[
"Apache-2.0"
] | 235
|
2017-10-27T20:37:27.000Z
|
2022-03-30T14:09:49.000Z
|
labware-library/src/labware-creator/protocolTemplates/CustomLabware_testprotocol.py
|
anuwrag/opentrons
|
28c8d76a19e367c6bd38f5290faaa32abf378715
|
[
"Apache-2.0"
] | 8,425
|
2017-10-26T15:25:43.000Z
|
2022-03-31T23:54:26.000Z
|
labware-library/src/labware-creator/protocolTemplates/CustomLabware_testprotocol.py
|
anuwrag/opentrons
|
28c8d76a19e367c6bd38f5290faaa32abf378715
|
[
"Apache-2.0"
] | 130
|
2017-11-09T21:02:37.000Z
|
2022-03-15T18:01:24.000Z
|
import json
from opentrons import protocol_api, types
from opentrons.types import Point
# deck slot where the labware under test will be loaded
TEST_LABWARE_SLOT = '5'
# motion-speed multipliers, expressed as a fraction of the robot's default speeds
RATE = 0.25 # % of default speeds
SLOWER_RATE = 0.1 # slower rate is very slow!
# pipette and tip-rack configuration used for this test run
PIPETTE_MOUNT = 'right'
PIPETTE_NAME = 'p20_single_gen2'
TIPRACK_SLOT = '11'
TIPRACK_LOADNAME = 'opentrons_96_filtertiprack_20ul'
# alternative configuration (multi-channel P300), kept for quick switching:
#PIPETTE_MOUNT = 'left'
#PIPETTE_NAME = 'p300_multi_gen2'
#TIPRACK_SLOT = '11'
#TIPRACK_LOADNAME = 'opentrons_96_tiprack_300ul'
LABWARE_DEF_JSON = """{
"ordering": [
[
"A1",
"B1",
"C1",
"D1",
"E1",
"F1",
"G1",
"H1",
"I1",
"J1",
"K1",
"L1",
"M1",
"N1",
"O1",
"P1"
],
[
"A2",
"B2",
"C2",
"D2",
"E2",
"F2",
"G2",
"H2",
"I2",
"J2",
"K2",
"L2",
"M2",
"N2",
"O2",
"P2"
],
[
"A3",
"B3",
"C3",
"D3",
"E3",
"F3",
"G3",
"H3",
"I3",
"J3",
"K3",
"L3",
"M3",
"N3",
"O3",
"P3"
],
[
"A4",
"B4",
"C4",
"D4",
"E4",
"F4",
"G4",
"H4",
"I4",
"J4",
"K4",
"L4",
"M4",
"N4",
"O4",
"P4"
],
[
"A5",
"B5",
"C5",
"D5",
"E5",
"F5",
"G5",
"H5",
"I5",
"J5",
"K5",
"L5",
"M5",
"N5",
"O5",
"P5"
],
[
"A6",
"B6",
"C6",
"D6",
"E6",
"F6",
"G6",
"H6",
"I6",
"J6",
"K6",
"L6",
"M6",
"N6",
"O6",
"P6"
],
[
"A7",
"B7",
"C7",
"D7",
"E7",
"F7",
"G7",
"H7",
"I7",
"J7",
"K7",
"L7",
"M7",
"N7",
"O7",
"P7"
],
[
"A8",
"B8",
"C8",
"D8",
"E8",
"F8",
"G8",
"H8",
"I8",
"J8",
"K8",
"L8",
"M8",
"N8",
"O8",
"P8"
],
[
"A9",
"B9",
"C9",
"D9",
"E9",
"F9",
"G9",
"H9",
"I9",
"J9",
"K9",
"L9",
"M9",
"N9",
"O9",
"P9"
],
[
"A10",
"B10",
"C10",
"D10",
"E10",
"F10",
"G10",
"H10",
"I10",
"J10",
"K10",
"L10",
"M10",
"N10",
"O10",
"P10"
],
[
"A11",
"B11",
"C11",
"D11",
"E11",
"F11",
"G11",
"H11",
"I11",
"J11",
"K11",
"L11",
"M11",
"N11",
"O11",
"P11"
],
[
"A12",
"B12",
"C12",
"D12",
"E12",
"F12",
"G12",
"H12",
"I12",
"J12",
"K12",
"L12",
"M12",
"N12",
"O12",
"P12"
],
[
"A13",
"B13",
"C13",
"D13",
"E13",
"F13",
"G13",
"H13",
"I13",
"J13",
"K13",
"L13",
"M13",
"N13",
"O13",
"P13"
],
[
"A14",
"B14",
"C14",
"D14",
"E14",
"F14",
"G14",
"H14",
"I14",
"J14",
"K14",
"L14",
"M14",
"N14",
"O14",
"P14"
],
[
"A15",
"B15",
"C15",
"D15",
"E15",
"F15",
"G15",
"H15",
"I15",
"J15",
"K15",
"L15",
"M15",
"N15",
"O15",
"P15"
],
[
"A16",
"B16",
"C16",
"D16",
"E16",
"F16",
"G16",
"H16",
"I16",
"J16",
"K16",
"L16",
"M16",
"N16",
"O16",
"P16"
],
[
"A17",
"B17",
"C17",
"D17",
"E17",
"F17",
"G17",
"H17",
"I17",
"J17",
"K17",
"L17",
"M17",
"N17",
"O17",
"P17"
],
[
"A18",
"B18",
"C18",
"D18",
"E18",
"F18",
"G18",
"H18",
"I18",
"J18",
"K18",
"L18",
"M18",
"N18",
"O18",
"P18"
],
[
"A19",
"B19",
"C19",
"D19",
"E19",
"F19",
"G19",
"H19",
"I19",
"J19",
"K19",
"L19",
"M19",
"N19",
"O19",
"P19"
],
[
"A20",
"B20",
"C20",
"D20",
"E20",
"F20",
"G20",
"H20",
"I20",
"J20",
"K20",
"L20",
"M20",
"N20",
"O20",
"P20"
],
[
"A21",
"B21",
"C21",
"D21",
"E21",
"F21",
"G21",
"H21",
"I21",
"J21",
"K21",
"L21",
"M21",
"N21",
"O21",
"P21"
],
[
"A22",
"B22",
"C22",
"D22",
"E22",
"F22",
"G22",
"H22",
"I22",
"J22",
"K22",
"L22",
"M22",
"N22",
"O22",
"P22"
],
[
"A23",
"B23",
"C23",
"D23",
"E23",
"F23",
"G23",
"H23",
"I23",
"J23",
"K23",
"L23",
"M23",
"N23",
"O23",
"P23"
],
[
"A24",
"B24",
"C24",
"D24",
"E24",
"F24",
"G24",
"H24",
"I24",
"J24",
"K24",
"L24",
"M24",
"N24",
"O24",
"P24"
]
],
"brand": {
"brand": "testing_LC",
"brandId": [
"1567987"
]
},
"metadata": {
"displayName": "Testing_LC 384 Well Plate 112 µL",
"displayCategory": "wellPlate",
"displayVolumeUnits": "µL",
"tags": []
},
"dimensions": {
"xDimension": 127.76,
"yDimension": 85.47,
"zDimension": 14.22
},
"wells": {
"A1": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 12.12,
"y": 76.49,
"z": 2.79
},
"B1": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 12.12,
"y": 71.99,
"z": 2.79
},
"C1": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 12.12,
"y": 67.49,
"z": 2.79
},
"D1": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 12.12,
"y": 62.99,
"z": 2.79
},
"E1": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 12.12,
"y": 58.49,
"z": 2.79
},
"F1": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 12.12,
"y": 53.99,
"z": 2.79
},
"G1": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 12.12,
"y": 49.49,
"z": 2.79
},
"H1": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 12.12,
"y": 44.99,
"z": 2.79
},
"I1": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 12.12,
"y": 40.49,
"z": 2.79
},
"J1": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 12.12,
"y": 35.99,
"z": 2.79
},
"K1": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 12.12,
"y": 31.49,
"z": 2.79
},
"L1": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 12.12,
"y": 26.99,
"z": 2.79
},
"M1": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 12.12,
"y": 22.49,
"z": 2.79
},
"N1": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 12.12,
"y": 17.99,
"z": 2.79
},
"O1": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 12.12,
"y": 13.49,
"z": 2.79
},
"P1": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 12.12,
"y": 8.99,
"z": 2.79
},
"A2": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 16.62,
"y": 76.49,
"z": 2.79
},
"B2": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 16.62,
"y": 71.99,
"z": 2.79
},
"C2": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 16.62,
"y": 67.49,
"z": 2.79
},
"D2": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 16.62,
"y": 62.99,
"z": 2.79
},
"E2": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 16.62,
"y": 58.49,
"z": 2.79
},
"F2": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 16.62,
"y": 53.99,
"z": 2.79
},
"G2": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 16.62,
"y": 49.49,
"z": 2.79
},
"H2": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 16.62,
"y": 44.99,
"z": 2.79
},
"I2": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 16.62,
"y": 40.49,
"z": 2.79
},
"J2": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 16.62,
"y": 35.99,
"z": 2.79
},
"K2": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 16.62,
"y": 31.49,
"z": 2.79
},
"L2": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 16.62,
"y": 26.99,
"z": 2.79
},
"M2": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 16.62,
"y": 22.49,
"z": 2.79
},
"N2": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 16.62,
"y": 17.99,
"z": 2.79
},
"O2": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 16.62,
"y": 13.49,
"z": 2.79
},
"P2": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 16.62,
"y": 8.99,
"z": 2.79
},
"A3": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 21.12,
"y": 76.49,
"z": 2.79
},
"B3": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 21.12,
"y": 71.99,
"z": 2.79
},
"C3": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 21.12,
"y": 67.49,
"z": 2.79
},
"D3": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 21.12,
"y": 62.99,
"z": 2.79
},
"E3": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 21.12,
"y": 58.49,
"z": 2.79
},
"F3": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 21.12,
"y": 53.99,
"z": 2.79
},
"G3": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 21.12,
"y": 49.49,
"z": 2.79
},
"H3": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 21.12,
"y": 44.99,
"z": 2.79
},
"I3": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 21.12,
"y": 40.49,
"z": 2.79
},
"J3": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 21.12,
"y": 35.99,
"z": 2.79
},
"K3": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 21.12,
"y": 31.49,
"z": 2.79
},
"L3": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 21.12,
"y": 26.99,
"z": 2.79
},
"M3": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 21.12,
"y": 22.49,
"z": 2.79
},
"N3": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 21.12,
"y": 17.99,
"z": 2.79
},
"O3": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 21.12,
"y": 13.49,
"z": 2.79
},
"P3": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 21.12,
"y": 8.99,
"z": 2.79
},
"A4": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 25.62,
"y": 76.49,
"z": 2.79
},
"B4": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 25.62,
"y": 71.99,
"z": 2.79
},
"C4": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 25.62,
"y": 67.49,
"z": 2.79
},
"D4": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 25.62,
"y": 62.99,
"z": 2.79
},
"E4": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 25.62,
"y": 58.49,
"z": 2.79
},
"F4": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 25.62,
"y": 53.99,
"z": 2.79
},
"G4": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 25.62,
"y": 49.49,
"z": 2.79
},
"H4": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 25.62,
"y": 44.99,
"z": 2.79
},
"I4": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 25.62,
"y": 40.49,
"z": 2.79
},
"J4": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 25.62,
"y": 35.99,
"z": 2.79
},
"K4": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 25.62,
"y": 31.49,
"z": 2.79
},
"L4": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 25.62,
"y": 26.99,
"z": 2.79
},
"M4": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 25.62,
"y": 22.49,
"z": 2.79
},
"N4": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 25.62,
"y": 17.99,
"z": 2.79
},
"O4": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 25.62,
"y": 13.49,
"z": 2.79
},
"P4": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 25.62,
"y": 8.99,
"z": 2.79
},
"A5": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 30.12,
"y": 76.49,
"z": 2.79
},
"B5": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 30.12,
"y": 71.99,
"z": 2.79
},
"C5": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 30.12,
"y": 67.49,
"z": 2.79
},
"D5": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 30.12,
"y": 62.99,
"z": 2.79
},
"E5": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 30.12,
"y": 58.49,
"z": 2.79
},
"F5": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 30.12,
"y": 53.99,
"z": 2.79
},
"G5": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 30.12,
"y": 49.49,
"z": 2.79
},
"H5": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 30.12,
"y": 44.99,
"z": 2.79
},
"I5": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 30.12,
"y": 40.49,
"z": 2.79
},
"J5": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 30.12,
"y": 35.99,
"z": 2.79
},
"K5": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 30.12,
"y": 31.49,
"z": 2.79
},
"L5": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 30.12,
"y": 26.99,
"z": 2.79
},
"M5": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 30.12,
"y": 22.49,
"z": 2.79
},
"N5": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 30.12,
"y": 17.99,
"z": 2.79
},
"O5": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 30.12,
"y": 13.49,
"z": 2.79
},
"P5": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 30.12,
"y": 8.99,
"z": 2.79
},
"A6": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 34.62,
"y": 76.49,
"z": 2.79
},
"B6": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 34.62,
"y": 71.99,
"z": 2.79
},
"C6": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 34.62,
"y": 67.49,
"z": 2.79
},
"D6": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 34.62,
"y": 62.99,
"z": 2.79
},
"E6": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 34.62,
"y": 58.49,
"z": 2.79
},
"F6": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 34.62,
"y": 53.99,
"z": 2.79
},
"G6": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 34.62,
"y": 49.49,
"z": 2.79
},
"H6": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 34.62,
"y": 44.99,
"z": 2.79
},
"I6": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 34.62,
"y": 40.49,
"z": 2.79
},
"J6": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 34.62,
"y": 35.99,
"z": 2.79
},
"K6": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 34.62,
"y": 31.49,
"z": 2.79
},
"L6": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 34.62,
"y": 26.99,
"z": 2.79
},
"M6": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 34.62,
"y": 22.49,
"z": 2.79
},
"N6": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 34.62,
"y": 17.99,
"z": 2.79
},
"O6": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 34.62,
"y": 13.49,
"z": 2.79
},
"P6": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 34.62,
"y": 8.99,
"z": 2.79
},
"A7": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 39.12,
"y": 76.49,
"z": 2.79
},
"B7": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 39.12,
"y": 71.99,
"z": 2.79
},
"C7": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 39.12,
"y": 67.49,
"z": 2.79
},
"D7": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 39.12,
"y": 62.99,
"z": 2.79
},
"E7": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 39.12,
"y": 58.49,
"z": 2.79
},
"F7": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 39.12,
"y": 53.99,
"z": 2.79
},
"G7": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 39.12,
"y": 49.49,
"z": 2.79
},
"H7": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 39.12,
"y": 44.99,
"z": 2.79
},
"I7": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 39.12,
"y": 40.49,
"z": 2.79
},
"J7": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 39.12,
"y": 35.99,
"z": 2.79
},
"K7": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 39.12,
"y": 31.49,
"z": 2.79
},
"L7": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 39.12,
"y": 26.99,
"z": 2.79
},
"M7": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 39.12,
"y": 22.49,
"z": 2.79
},
"N7": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 39.12,
"y": 17.99,
"z": 2.79
},
"O7": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 39.12,
"y": 13.49,
"z": 2.79
},
"P7": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 39.12,
"y": 8.99,
"z": 2.79
},
"A8": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 43.62,
"y": 76.49,
"z": 2.79
},
"B8": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 43.62,
"y": 71.99,
"z": 2.79
},
"C8": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 43.62,
"y": 67.49,
"z": 2.79
},
"D8": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 43.62,
"y": 62.99,
"z": 2.79
},
"E8": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 43.62,
"y": 58.49,
"z": 2.79
},
"F8": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 43.62,
"y": 53.99,
"z": 2.79
},
"G8": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 43.62,
"y": 49.49,
"z": 2.79
},
"H8": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 43.62,
"y": 44.99,
"z": 2.79
},
"I8": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 43.62,
"y": 40.49,
"z": 2.79
},
"J8": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 43.62,
"y": 35.99,
"z": 2.79
},
"K8": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 43.62,
"y": 31.49,
"z": 2.79
},
"L8": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 43.62,
"y": 26.99,
"z": 2.79
},
"M8": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 43.62,
"y": 22.49,
"z": 2.79
},
"N8": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 43.62,
"y": 17.99,
"z": 2.79
},
"O8": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 43.62,
"y": 13.49,
"z": 2.79
},
"P8": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 43.62,
"y": 8.99,
"z": 2.79
},
"A9": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 48.12,
"y": 76.49,
"z": 2.79
},
"B9": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 48.12,
"y": 71.99,
"z": 2.79
},
"C9": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 48.12,
"y": 67.49,
"z": 2.79
},
"D9": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 48.12,
"y": 62.99,
"z": 2.79
},
"E9": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 48.12,
"y": 58.49,
"z": 2.79
},
"F9": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 48.12,
"y": 53.99,
"z": 2.79
},
"G9": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 48.12,
"y": 49.49,
"z": 2.79
},
"H9": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 48.12,
"y": 44.99,
"z": 2.79
},
"I9": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 48.12,
"y": 40.49,
"z": 2.79
},
"J9": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 48.12,
"y": 35.99,
"z": 2.79
},
"K9": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 48.12,
"y": 31.49,
"z": 2.79
},
"L9": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 48.12,
"y": 26.99,
"z": 2.79
},
"M9": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 48.12,
"y": 22.49,
"z": 2.79
},
"N9": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 48.12,
"y": 17.99,
"z": 2.79
},
"O9": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 48.12,
"y": 13.49,
"z": 2.79
},
"P9": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 48.12,
"y": 8.99,
"z": 2.79
},
"A10": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 52.62,
"y": 76.49,
"z": 2.79
},
"B10": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 52.62,
"y": 71.99,
"z": 2.79
},
"C10": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 52.62,
"y": 67.49,
"z": 2.79
},
"D10": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 52.62,
"y": 62.99,
"z": 2.79
},
"E10": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 52.62,
"y": 58.49,
"z": 2.79
},
"F10": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 52.62,
"y": 53.99,
"z": 2.79
},
"G10": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 52.62,
"y": 49.49,
"z": 2.79
},
"H10": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 52.62,
"y": 44.99,
"z": 2.79
},
"I10": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 52.62,
"y": 40.49,
"z": 2.79
},
"J10": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 52.62,
"y": 35.99,
"z": 2.79
},
"K10": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 52.62,
"y": 31.49,
"z": 2.79
},
"L10": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 52.62,
"y": 26.99,
"z": 2.79
},
"M10": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 52.62,
"y": 22.49,
"z": 2.79
},
"N10": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 52.62,
"y": 17.99,
"z": 2.79
},
"O10": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 52.62,
"y": 13.49,
"z": 2.79
},
"P10": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 52.62,
"y": 8.99,
"z": 2.79
},
"A11": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 57.12,
"y": 76.49,
"z": 2.79
},
"B11": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 57.12,
"y": 71.99,
"z": 2.79
},
"C11": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 57.12,
"y": 67.49,
"z": 2.79
},
"D11": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 57.12,
"y": 62.99,
"z": 2.79
},
"E11": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 57.12,
"y": 58.49,
"z": 2.79
},
"F11": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 57.12,
"y": 53.99,
"z": 2.79
},
"G11": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 57.12,
"y": 49.49,
"z": 2.79
},
"H11": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 57.12,
"y": 44.99,
"z": 2.79
},
"I11": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 57.12,
"y": 40.49,
"z": 2.79
},
"J11": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 57.12,
"y": 35.99,
"z": 2.79
},
"K11": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 57.12,
"y": 31.49,
"z": 2.79
},
"L11": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 57.12,
"y": 26.99,
"z": 2.79
},
"M11": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 57.12,
"y": 22.49,
"z": 2.79
},
"N11": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 57.12,
"y": 17.99,
"z": 2.79
},
"O11": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 57.12,
"y": 13.49,
"z": 2.79
},
"P11": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 57.12,
"y": 8.99,
"z": 2.79
},
"A12": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 61.62,
"y": 76.49,
"z": 2.79
},
"B12": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 61.62,
"y": 71.99,
"z": 2.79
},
"C12": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 61.62,
"y": 67.49,
"z": 2.79
},
"D12": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 61.62,
"y": 62.99,
"z": 2.79
},
"E12": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 61.62,
"y": 58.49,
"z": 2.79
},
"F12": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 61.62,
"y": 53.99,
"z": 2.79
},
"G12": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 61.62,
"y": 49.49,
"z": 2.79
},
"H12": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 61.62,
"y": 44.99,
"z": 2.79
},
"I12": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 61.62,
"y": 40.49,
"z": 2.79
},
"J12": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 61.62,
"y": 35.99,
"z": 2.79
},
"K12": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 61.62,
"y": 31.49,
"z": 2.79
},
"L12": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 61.62,
"y": 26.99,
"z": 2.79
},
"M12": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 61.62,
"y": 22.49,
"z": 2.79
},
"N12": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 61.62,
"y": 17.99,
"z": 2.79
},
"O12": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 61.62,
"y": 13.49,
"z": 2.79
},
"P12": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 61.62,
"y": 8.99,
"z": 2.79
},
"A13": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 66.12,
"y": 76.49,
"z": 2.79
},
"B13": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 66.12,
"y": 71.99,
"z": 2.79
},
"C13": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 66.12,
"y": 67.49,
"z": 2.79
},
"D13": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 66.12,
"y": 62.99,
"z": 2.79
},
"E13": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 66.12,
"y": 58.49,
"z": 2.79
},
"F13": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 66.12,
"y": 53.99,
"z": 2.79
},
"G13": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 66.12,
"y": 49.49,
"z": 2.79
},
"H13": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 66.12,
"y": 44.99,
"z": 2.79
},
"I13": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 66.12,
"y": 40.49,
"z": 2.79
},
"J13": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 66.12,
"y": 35.99,
"z": 2.79
},
"K13": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 66.12,
"y": 31.49,
"z": 2.79
},
"L13": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 66.12,
"y": 26.99,
"z": 2.79
},
"M13": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 66.12,
"y": 22.49,
"z": 2.79
},
"N13": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 66.12,
"y": 17.99,
"z": 2.79
},
"O13": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 66.12,
"y": 13.49,
"z": 2.79
},
"P13": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 66.12,
"y": 8.99,
"z": 2.79
},
"A14": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 70.62,
"y": 76.49,
"z": 2.79
},
"B14": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 70.62,
"y": 71.99,
"z": 2.79
},
"C14": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 70.62,
"y": 67.49,
"z": 2.79
},
"D14": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 70.62,
"y": 62.99,
"z": 2.79
},
"E14": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 70.62,
"y": 58.49,
"z": 2.79
},
"F14": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 70.62,
"y": 53.99,
"z": 2.79
},
"G14": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 70.62,
"y": 49.49,
"z": 2.79
},
"H14": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 70.62,
"y": 44.99,
"z": 2.79
},
"I14": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 70.62,
"y": 40.49,
"z": 2.79
},
"J14": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 70.62,
"y": 35.99,
"z": 2.79
},
"K14": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 70.62,
"y": 31.49,
"z": 2.79
},
"L14": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 70.62,
"y": 26.99,
"z": 2.79
},
"M14": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 70.62,
"y": 22.49,
"z": 2.79
},
"N14": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 70.62,
"y": 17.99,
"z": 2.79
},
"O14": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 70.62,
"y": 13.49,
"z": 2.79
},
"P14": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 70.62,
"y": 8.99,
"z": 2.79
},
"A15": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 75.12,
"y": 76.49,
"z": 2.79
},
"B15": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 75.12,
"y": 71.99,
"z": 2.79
},
"C15": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 75.12,
"y": 67.49,
"z": 2.79
},
"D15": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 75.12,
"y": 62.99,
"z": 2.79
},
"E15": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 75.12,
"y": 58.49,
"z": 2.79
},
"F15": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 75.12,
"y": 53.99,
"z": 2.79
},
"G15": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 75.12,
"y": 49.49,
"z": 2.79
},
"H15": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 75.12,
"y": 44.99,
"z": 2.79
},
"I15": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 75.12,
"y": 40.49,
"z": 2.79
},
"J15": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 75.12,
"y": 35.99,
"z": 2.79
},
"K15": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 75.12,
"y": 31.49,
"z": 2.79
},
"L15": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 75.12,
"y": 26.99,
"z": 2.79
},
"M15": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 75.12,
"y": 22.49,
"z": 2.79
},
"N15": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 75.12,
"y": 17.99,
"z": 2.79
},
"O15": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 75.12,
"y": 13.49,
"z": 2.79
},
"P15": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 75.12,
"y": 8.99,
"z": 2.79
},
"A16": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 79.62,
"y": 76.49,
"z": 2.79
},
"B16": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 79.62,
"y": 71.99,
"z": 2.79
},
"C16": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 79.62,
"y": 67.49,
"z": 2.79
},
"D16": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 79.62,
"y": 62.99,
"z": 2.79
},
"E16": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 79.62,
"y": 58.49,
"z": 2.79
},
"F16": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 79.62,
"y": 53.99,
"z": 2.79
},
"G16": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 79.62,
"y": 49.49,
"z": 2.79
},
"H16": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 79.62,
"y": 44.99,
"z": 2.79
},
"I16": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 79.62,
"y": 40.49,
"z": 2.79
},
"J16": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 79.62,
"y": 35.99,
"z": 2.79
},
"K16": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 79.62,
"y": 31.49,
"z": 2.79
},
"L16": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 79.62,
"y": 26.99,
"z": 2.79
},
"M16": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 79.62,
"y": 22.49,
"z": 2.79
},
"N16": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 79.62,
"y": 17.99,
"z": 2.79
},
"O16": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 79.62,
"y": 13.49,
"z": 2.79
},
"P16": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 79.62,
"y": 8.99,
"z": 2.79
},
"A17": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 84.12,
"y": 76.49,
"z": 2.79
},
"B17": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 84.12,
"y": 71.99,
"z": 2.79
},
"C17": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 84.12,
"y": 67.49,
"z": 2.79
},
"D17": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 84.12,
"y": 62.99,
"z": 2.79
},
"E17": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 84.12,
"y": 58.49,
"z": 2.79
},
"F17": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 84.12,
"y": 53.99,
"z": 2.79
},
"G17": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 84.12,
"y": 49.49,
"z": 2.79
},
"H17": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 84.12,
"y": 44.99,
"z": 2.79
},
"I17": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 84.12,
"y": 40.49,
"z": 2.79
},
"J17": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 84.12,
"y": 35.99,
"z": 2.79
},
"K17": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 84.12,
"y": 31.49,
"z": 2.79
},
"L17": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 84.12,
"y": 26.99,
"z": 2.79
},
"M17": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 84.12,
"y": 22.49,
"z": 2.79
},
"N17": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 84.12,
"y": 17.99,
"z": 2.79
},
"O17": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 84.12,
"y": 13.49,
"z": 2.79
},
"P17": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 84.12,
"y": 8.99,
"z": 2.79
},
"A18": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 88.62,
"y": 76.49,
"z": 2.79
},
"B18": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 88.62,
"y": 71.99,
"z": 2.79
},
"C18": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 88.62,
"y": 67.49,
"z": 2.79
},
"D18": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 88.62,
"y": 62.99,
"z": 2.79
},
"E18": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 88.62,
"y": 58.49,
"z": 2.79
},
"F18": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 88.62,
"y": 53.99,
"z": 2.79
},
"G18": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 88.62,
"y": 49.49,
"z": 2.79
},
"H18": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 88.62,
"y": 44.99,
"z": 2.79
},
"I18": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 88.62,
"y": 40.49,
"z": 2.79
},
"J18": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 88.62,
"y": 35.99,
"z": 2.79
},
"K18": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 88.62,
"y": 31.49,
"z": 2.79
},
"L18": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 88.62,
"y": 26.99,
"z": 2.79
},
"M18": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 88.62,
"y": 22.49,
"z": 2.79
},
"N18": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 88.62,
"y": 17.99,
"z": 2.79
},
"O18": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 88.62,
"y": 13.49,
"z": 2.79
},
"P18": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 88.62,
"y": 8.99,
"z": 2.79
},
"A19": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 93.12,
"y": 76.49,
"z": 2.79
},
"B19": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 93.12,
"y": 71.99,
"z": 2.79
},
"C19": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 93.12,
"y": 67.49,
"z": 2.79
},
"D19": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 93.12,
"y": 62.99,
"z": 2.79
},
"E19": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 93.12,
"y": 58.49,
"z": 2.79
},
"F19": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 93.12,
"y": 53.99,
"z": 2.79
},
"G19": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 93.12,
"y": 49.49,
"z": 2.79
},
"H19": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 93.12,
"y": 44.99,
"z": 2.79
},
"I19": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 93.12,
"y": 40.49,
"z": 2.79
},
"J19": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 93.12,
"y": 35.99,
"z": 2.79
},
"K19": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 93.12,
"y": 31.49,
"z": 2.79
},
"L19": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 93.12,
"y": 26.99,
"z": 2.79
},
"M19": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 93.12,
"y": 22.49,
"z": 2.79
},
"N19": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 93.12,
"y": 17.99,
"z": 2.79
},
"O19": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 93.12,
"y": 13.49,
"z": 2.79
},
"P19": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 93.12,
"y": 8.99,
"z": 2.79
},
"A20": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 97.62,
"y": 76.49,
"z": 2.79
},
"B20": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 97.62,
"y": 71.99,
"z": 2.79
},
"C20": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 97.62,
"y": 67.49,
"z": 2.79
},
"D20": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 97.62,
"y": 62.99,
"z": 2.79
},
"E20": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 97.62,
"y": 58.49,
"z": 2.79
},
"F20": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 97.62,
"y": 53.99,
"z": 2.79
},
"G20": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 97.62,
"y": 49.49,
"z": 2.79
},
"H20": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 97.62,
"y": 44.99,
"z": 2.79
},
"I20": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 97.62,
"y": 40.49,
"z": 2.79
},
"J20": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 97.62,
"y": 35.99,
"z": 2.79
},
"K20": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 97.62,
"y": 31.49,
"z": 2.79
},
"L20": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 97.62,
"y": 26.99,
"z": 2.79
},
"M20": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 97.62,
"y": 22.49,
"z": 2.79
},
"N20": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 97.62,
"y": 17.99,
"z": 2.79
},
"O20": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 97.62,
"y": 13.49,
"z": 2.79
},
"P20": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 97.62,
"y": 8.99,
"z": 2.79
},
"A21": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 102.12,
"y": 76.49,
"z": 2.79
},
"B21": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 102.12,
"y": 71.99,
"z": 2.79
},
"C21": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 102.12,
"y": 67.49,
"z": 2.79
},
"D21": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 102.12,
"y": 62.99,
"z": 2.79
},
"E21": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 102.12,
"y": 58.49,
"z": 2.79
},
"F21": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 102.12,
"y": 53.99,
"z": 2.79
},
"G21": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 102.12,
"y": 49.49,
"z": 2.79
},
"H21": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 102.12,
"y": 44.99,
"z": 2.79
},
"I21": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 102.12,
"y": 40.49,
"z": 2.79
},
"J21": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 102.12,
"y": 35.99,
"z": 2.79
},
"K21": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 102.12,
"y": 31.49,
"z": 2.79
},
"L21": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 102.12,
"y": 26.99,
"z": 2.79
},
"M21": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 102.12,
"y": 22.49,
"z": 2.79
},
"N21": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 102.12,
"y": 17.99,
"z": 2.79
},
"O21": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 102.12,
"y": 13.49,
"z": 2.79
},
"P21": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 102.12,
"y": 8.99,
"z": 2.79
},
"A22": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 106.62,
"y": 76.49,
"z": 2.79
},
"B22": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 106.62,
"y": 71.99,
"z": 2.79
},
"C22": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 106.62,
"y": 67.49,
"z": 2.79
},
"D22": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 106.62,
"y": 62.99,
"z": 2.79
},
"E22": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 106.62,
"y": 58.49,
"z": 2.79
},
"F22": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 106.62,
"y": 53.99,
"z": 2.79
},
"G22": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 106.62,
"y": 49.49,
"z": 2.79
},
"H22": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 106.62,
"y": 44.99,
"z": 2.79
},
"I22": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 106.62,
"y": 40.49,
"z": 2.79
},
"J22": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 106.62,
"y": 35.99,
"z": 2.79
},
"K22": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 106.62,
"y": 31.49,
"z": 2.79
},
"L22": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 106.62,
"y": 26.99,
"z": 2.79
},
"M22": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 106.62,
"y": 22.49,
"z": 2.79
},
"N22": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 106.62,
"y": 17.99,
"z": 2.79
},
"O22": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 106.62,
"y": 13.49,
"z": 2.79
},
"P22": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 106.62,
"y": 8.99,
"z": 2.79
},
"A23": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 111.12,
"y": 76.49,
"z": 2.79
},
"B23": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 111.12,
"y": 71.99,
"z": 2.79
},
"C23": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 111.12,
"y": 67.49,
"z": 2.79
},
"D23": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 111.12,
"y": 62.99,
"z": 2.79
},
"E23": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 111.12,
"y": 58.49,
"z": 2.79
},
"F23": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 111.12,
"y": 53.99,
"z": 2.79
},
"G23": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 111.12,
"y": 49.49,
"z": 2.79
},
"H23": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 111.12,
"y": 44.99,
"z": 2.79
},
"I23": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 111.12,
"y": 40.49,
"z": 2.79
},
"J23": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 111.12,
"y": 35.99,
"z": 2.79
},
"K23": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 111.12,
"y": 31.49,
"z": 2.79
},
"L23": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 111.12,
"y": 26.99,
"z": 2.79
},
"M23": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 111.12,
"y": 22.49,
"z": 2.79
},
"N23": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 111.12,
"y": 17.99,
"z": 2.79
},
"O23": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 111.12,
"y": 13.49,
"z": 2.79
},
"P23": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 111.12,
"y": 8.99,
"z": 2.79
},
"A24": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 115.62,
"y": 76.49,
"z": 2.79
},
"B24": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 115.62,
"y": 71.99,
"z": 2.79
},
"C24": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 115.62,
"y": 67.49,
"z": 2.79
},
"D24": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 115.62,
"y": 62.99,
"z": 2.79
},
"E24": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 115.62,
"y": 58.49,
"z": 2.79
},
"F24": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 115.62,
"y": 53.99,
"z": 2.79
},
"G24": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 115.62,
"y": 49.49,
"z": 2.79
},
"H24": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 115.62,
"y": 44.99,
"z": 2.79
},
"I24": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 115.62,
"y": 40.49,
"z": 2.79
},
"J24": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 115.62,
"y": 35.99,
"z": 2.79
},
"K24": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 115.62,
"y": 31.49,
"z": 2.79
},
"L24": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 115.62,
"y": 26.99,
"z": 2.79
},
"M24": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 115.62,
"y": 22.49,
"z": 2.79
},
"N24": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 115.62,
"y": 17.99,
"z": 2.79
},
"O24": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 115.62,
"y": 13.49,
"z": 2.79
},
"P24": {
"depth": 11.43,
"totalLiquidVolume": 112,
"shape": "rectangular",
"xDimension": 3.63,
"yDimension": 3.63,
"x": 115.62,
"y": 8.99,
"z": 2.79
}
},
"groups": [
{
"metadata": {
"wellBottomShape": "flat"
},
"wells": [
"A1",
"B1",
"C1",
"D1",
"E1",
"F1",
"G1",
"H1",
"I1",
"J1",
"K1",
"L1",
"M1",
"N1",
"O1",
"P1",
"A2",
"B2",
"C2",
"D2",
"E2",
"F2",
"G2",
"H2",
"I2",
"J2",
"K2",
"L2",
"M2",
"N2",
"O2",
"P2",
"A3",
"B3",
"C3",
"D3",
"E3",
"F3",
"G3",
"H3",
"I3",
"J3",
"K3",
"L3",
"M3",
"N3",
"O3",
"P3",
"A4",
"B4",
"C4",
"D4",
"E4",
"F4",
"G4",
"H4",
"I4",
"J4",
"K4",
"L4",
"M4",
"N4",
"O4",
"P4",
"A5",
"B5",
"C5",
"D5",
"E5",
"F5",
"G5",
"H5",
"I5",
"J5",
"K5",
"L5",
"M5",
"N5",
"O5",
"P5",
"A6",
"B6",
"C6",
"D6",
"E6",
"F6",
"G6",
"H6",
"I6",
"J6",
"K6",
"L6",
"M6",
"N6",
"O6",
"P6",
"A7",
"B7",
"C7",
"D7",
"E7",
"F7",
"G7",
"H7",
"I7",
"J7",
"K7",
"L7",
"M7",
"N7",
"O7",
"P7",
"A8",
"B8",
"C8",
"D8",
"E8",
"F8",
"G8",
"H8",
"I8",
"J8",
"K8",
"L8",
"M8",
"N8",
"O8",
"P8",
"A9",
"B9",
"C9",
"D9",
"E9",
"F9",
"G9",
"H9",
"I9",
"J9",
"K9",
"L9",
"M9",
"N9",
"O9",
"P9",
"A10",
"B10",
"C10",
"D10",
"E10",
"F10",
"G10",
"H10",
"I10",
"J10",
"K10",
"L10",
"M10",
"N10",
"O10",
"P10",
"A11",
"B11",
"C11",
"D11",
"E11",
"F11",
"G11",
"H11",
"I11",
"J11",
"K11",
"L11",
"M11",
"N11",
"O11",
"P11",
"A12",
"B12",
"C12",
"D12",
"E12",
"F12",
"G12",
"H12",
"I12",
"J12",
"K12",
"L12",
"M12",
"N12",
"O12",
"P12",
"A13",
"B13",
"C13",
"D13",
"E13",
"F13",
"G13",
"H13",
"I13",
"J13",
"K13",
"L13",
"M13",
"N13",
"O13",
"P13",
"A14",
"B14",
"C14",
"D14",
"E14",
"F14",
"G14",
"H14",
"I14",
"J14",
"K14",
"L14",
"M14",
"N14",
"O14",
"P14",
"A15",
"B15",
"C15",
"D15",
"E15",
"F15",
"G15",
"H15",
"I15",
"J15",
"K15",
"L15",
"M15",
"N15",
"O15",
"P15",
"A16",
"B16",
"C16",
"D16",
"E16",
"F16",
"G16",
"H16",
"I16",
"J16",
"K16",
"L16",
"M16",
"N16",
"O16",
"P16",
"A17",
"B17",
"C17",
"D17",
"E17",
"F17",
"G17",
"H17",
"I17",
"J17",
"K17",
"L17",
"M17",
"N17",
"O17",
"P17",
"A18",
"B18",
"C18",
"D18",
"E18",
"F18",
"G18",
"H18",
"I18",
"J18",
"K18",
"L18",
"M18",
"N18",
"O18",
"P18",
"A19",
"B19",
"C19",
"D19",
"E19",
"F19",
"G19",
"H19",
"I19",
"J19",
"K19",
"L19",
"M19",
"N19",
"O19",
"P19",
"A20",
"B20",
"C20",
"D20",
"E20",
"F20",
"G20",
"H20",
"I20",
"J20",
"K20",
"L20",
"M20",
"N20",
"O20",
"P20",
"A21",
"B21",
"C21",
"D21",
"E21",
"F21",
"G21",
"H21",
"I21",
"J21",
"K21",
"L21",
"M21",
"N21",
"O21",
"P21",
"A22",
"B22",
"C22",
"D22",
"E22",
"F22",
"G22",
"H22",
"I22",
"J22",
"K22",
"L22",
"M22",
"N22",
"O22",
"P22",
"A23",
"B23",
"C23",
"D23",
"E23",
"F23",
"G23",
"H23",
"I23",
"J23",
"K23",
"L23",
"M23",
"N23",
"O23",
"P23",
"A24",
"B24",
"C24",
"D24",
"E24",
"F24",
"G24",
"H24",
"I24",
"J24",
"K24",
"L24",
"M24",
"N24",
"O24",
"P24"
]
}
],
"parameters": {
"format": "irregular",
"quirks": [],
"isTiprack": false,
"isMagneticModuleCompatible": false,
"loadName": "testinglc_384_wellplate_112ul"
},
"namespace": "custom_beta",
"version": 1,
"schemaVersion": 2,
"cornerOffsetFromSlot": {
"x": 0,
"y": 0,
"z": 0
}
}"""
# Parse the labware definition embedded above (as a JSON string) into a dict.
LABWARE_DEF = json.loads(LABWARE_DEF_JSON)
# Human-readable label for the loaded labware; falls back to a generic name
# when the definition carries no displayName.
LABWARE_LABEL = LABWARE_DEF.get('metadata', {}).get(
    'displayName', 'test labware')
# yDimension of well A1, compared against 71.2 below to decide whether a
# reservoir is wide enough for multichannel calibration.
# NOTE(review): despite the name, this is a single well's y size, not the
# labware footprint — confirm the >= 71.2 checks use the intended measurement.
LABWARE_DIMENSIONS = LABWARE_DEF.get('wells', {}).get('A1', {}).get('yDimension')
# Opentrons protocol metadata: minimum API level this protocol requires.
metadata = {'apiLevel': '2.0'}
def run(protocol: protocol_api.ProtocolContext):
    """Exercise the critical positions of the custom labware so its
    definition can be visually verified on the robot.

    Moves the pipette to the top, the four edges, and the bottom of the
    first (and, where applicable, the last) well or column, pausing after
    each move so the operator can inspect the tip position.
    """
    tiprack = protocol.load_labware(TIPRACK_LOADNAME, TIPRACK_SLOT)
    pipette = protocol.load_instrument(
        PIPETTE_NAME, PIPETTE_MOUNT, tip_racks=[tiprack])
    test_labware = protocol.load_labware_from_definition(
        LABWARE_DEF,
        TEST_LABWARE_SLOT,
        LABWARE_LABEL,
    )
    num_cols = len(LABWARE_DEF.get('ordering', [[]]))
    num_rows = len(LABWARE_DEF.get('ordering', [[]])[0])
    total = num_cols * num_rows
    pipette.pick_up_tip()

    def set_speeds(rate):
        """Scale the gantry axis speed limits by `rate` and propagate the
        resulting cap to every loaded instrument."""
        protocol.max_speeds.update({
            'X': (600 * rate),
            'Y': (400 * rate),
            'Z': (125 * rate),
            'A': (125 * rate),
        })
        speed_max = max(protocol.max_speeds.values())
        for instr in protocol.loaded_instruments.values():
            instr.default_speed = speed_max

    def plate_edges(well):
        """Edge probe points for a regular well: the four rim midpoints
        at the well top."""
        return [
            [well._from_center_cartesian(x=-1, y=0, z=1), 'left'],
            [well._from_center_cartesian(x=1, y=0, z=1), 'right'],
            [well._from_center_cartesian(x=0, y=-1, z=1), 'front'],
            [well._from_center_cartesian(x=0, y=1, z=1), 'back'],
        ]

    def reservoir_edges(well):
        """Edge probe points for a wide reservoir well: left/right taken at
        the back corners and 'front' at y=0.75 instead of the true rim —
        presumably to keep a multichannel clear of the walls (kept exactly
        as the original protocol had them)."""
        return [
            [well._from_center_cartesian(x=-1, y=1, z=1), 'left'],
            [well._from_center_cartesian(x=1, y=1, z=1), 'right'],
            [well._from_center_cartesian(x=0, y=0.75, z=1), 'front'],
            [well._from_center_cartesian(x=0, y=1, z=1), 'back'],
        ]

    def probe_well(well, edges_for):
        """Visit the top, each edge, and the bottom of `well`, pausing
        after every move; finish with a blow-out in the well."""
        set_speeds(RATE)
        pipette.move_to(well.top())
        protocol.pause("Moved to the top of the well")
        for edge_pos, edge_name in edges_for(well):
            set_speeds(RATE)
            edge_location = types.Location(point=edge_pos, labware=None)
            pipette.move_to(edge_location)
            protocol.pause(f'Moved to {edge_name} edge')
        set_speeds(RATE)
        pipette.move_to(well.bottom())
        protocol.pause("Moved to the bottom of the well")
        pipette.blow_out(well)

    set_speeds(RATE)
    pipette.home()
    # protocol.pause(f"Place your labware in Slot {TEST_LABWARE_SLOT}")

    single_channel_models = (
        'p20_single_gen2', 'p300_single_gen2', 'p1000_single_gen2',
        'p50_single', 'p10_single', 'p300_single', 'p1000_single',
    )
    if PIPETTE_NAME in single_channel_models:
        # Single channel: probe the first well, then the last when present.
        probe_well(test_labware.well('A1'), plate_edges)
        if total > 1:
            probe_well(test_labware.well(total - 1), plate_edges)
    elif total == 96 or total == 384:
        # Multichannel on a plate: probe the first and last columns.
        probe_well(test_labware.well('A1'), plate_edges)
        if total == 96:
            probe_well(test_labware.well(total - num_rows), plate_edges)
        else:
            # 384-well plates: the original protocol targets well index
            # total - num_rows + 1 ("well 369") for the last column —
            # presumably to suit the multichannel nozzle spacing; kept as-is.
            probe_well(test_labware.well(total - num_rows + 1), plate_edges)
    elif num_rows == 1 and total > 1 and LABWARE_DIMENSIONS >= 71.2:
        # Multi-well reservoirs (e.g. 12-well): probe first and last wells.
        probe_well(test_labware.well('A1'), reservoir_edges)
        probe_well(test_labware.well(-1), reservoir_edges)
    elif total == 1 and LABWARE_DIMENSIONS >= 71.2:
        # Single-well reservoirs.
        probe_well(test_labware.well('A1'), reservoir_edges)
    else:
        # Anything else cannot be calibrated with a multichannel pipette.
        protocol.pause(
            "labware is incompatible to calibrate with a multichannel pipette")

    set_speeds(1.0)
    pipette.return_tip()
| 26.180706
| 250
| 0.327532
| 11,212
| 130,537
| 3.780592
| 0.053514
| 0.054355
| 0.081533
| 0.235538
| 0.932906
| 0.927479
| 0.927479
| 0.853756
| 0.851727
| 0.850288
| 0
| 0.178561
| 0.510185
| 130,537
| 4,985
| 251
| 26.185958
| 0.484384
| 0.006297
| 0
| 0.807677
| 0
| 0
| 0.923654
| 0.001041
| 0
| 0
| 0
| 0
| 0
| 1
| 0.000406
| false
| 0
| 0.000609
| 0
| 0.001015
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
561ca7f68e2eb8605e2dc3cdf974da70fc0661f2
| 2,493
|
py
|
Python
|
srxgui/MainWindow/ElementIndexGenerator.py
|
NSLS-II-SRX/srxgui
|
ad720830629b2ea54f149d95987b9564f5351bcf
|
[
"BSD-3-Clause"
] | null | null | null |
srxgui/MainWindow/ElementIndexGenerator.py
|
NSLS-II-SRX/srxgui
|
ad720830629b2ea54f149d95987b9564f5351bcf
|
[
"BSD-3-Clause"
] | null | null | null |
srxgui/MainWindow/ElementIndexGenerator.py
|
NSLS-II-SRX/srxgui
|
ad720830629b2ea54f149d95987b9564f5351bcf
|
[
"BSD-3-Clause"
] | 3
|
2019-06-25T20:03:48.000Z
|
2019-09-06T19:38:44.000Z
|
#Fills in the Element Energy Index
import xraylib

# Shell constants paired with the labels used in the output files,
# in the order rows must appear for each element.
_SHELLS = (
    (xraylib.K_SHELL, 'K'),
    (xraylib.L1_SHELL, 'L1'),
    (xraylib.L2_SHELL, 'L2'),
    (xraylib.L3_SHELL, 'L3'),
)
_HEADER = 'Element\tEdge\tEdge Energy\n\n'
# Only edges inside this window (keV) are written out.
_MIN_KEV, _MAX_KEV = 4.5, 25

def _edge_row(symbol, label, energy_kev):
    """Format one tab-separated row; the energy is reported in eV,
    rounded to 1 decimal place."""
    return symbol + '\t' + label + '\t' + str(round(energy_kev * 1000, 1)) + '\n'

#Generating the Element Tab: rows grouped by element (K, L1, L2, L3 per element)
with open("elements.txt", "w") as out:
    out.write(_HEADER)
    for z in range(0, 103):
        symbol = xraylib.AtomicNumberToSymbol(z)
        for shell, label in _SHELLS:
            energy = xraylib.EdgeEnergy(z, shell)
            if _MIN_KEV <= energy <= _MAX_KEV:
                out.write(_edge_row(symbol, label, energy))

#Generating the Edge Tab: rows grouped by shell (all K rows, then L1, L2, L3)
with open("edges.txt", "w") as out:
    out.write(_HEADER)
    for shell, label in _SHELLS:
        for z in range(0, 103):
            symbol = xraylib.AtomicNumberToSymbol(z)
            energy = xraylib.EdgeEnergy(z, shell)
            if _MIN_KEV <= energy <= _MAX_KEV:
                out.write(_edge_row(symbol, label, energy))
| 42.982759
| 115
| 0.607702
| 372
| 2,493
| 4.008065
| 0.115591
| 0.273642
| 0.289738
| 0.402414
| 0.924212
| 0.924212
| 0.924212
| 0.924212
| 0.924212
| 0.924212
| 0
| 0.057711
| 0.193742
| 2,493
| 58
| 116
| 42.982759
| 0.68408
| 0.032892
| 0
| 0.930233
| 0
| 0
| 0.060191
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.023256
| 0
| 0.023256
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
5626e369d8c5cd08c588e32fd0f81ae0e34e20cf
| 245
|
py
|
Python
|
app/settings_prod.py
|
eocode/Queens
|
11a08a315dc76e7d2ddc9c742380dcfa9fd58e23
|
[
"BSD-3-Clause"
] | null | null | null |
app/settings_prod.py
|
eocode/Queens
|
11a08a315dc76e7d2ddc9c742380dcfa9fd58e23
|
[
"BSD-3-Clause"
] | null | null | null |
app/settings_prod.py
|
eocode/Queens
|
11a08a315dc76e7d2ddc9c742380dcfa9fd58e23
|
[
"BSD-3-Clause"
] | null | null | null |
"""
Settings for a production/development env
"""
from os import environ
SQLALCHEMY_DATABASE_URI = f"postgresql://{environ.get('DB_USER')}:{environ.get('DB_PASSWORD')}@{environ.get('DB_HOST')}:{environ.get('DB_PORT')}/{environ.get('DB_NAME')}"
| 35
| 170
| 0.726531
| 35
| 245
| 4.885714
| 0.628571
| 0.292398
| 0.350877
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.061224
| 245
| 6
| 171
| 40.833333
| 0.743478
| 0.167347
| 0
| 0
| 0
| 0.5
| 0.719388
| 0.719388
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.5
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
|
0
| 8
|
3b179df5b4b6bbf0a207ae40b23ac416d4f98011
| 4,924
|
py
|
Python
|
rankings/forms.py
|
dlanghorne0428/dancesport-tracker-projec
|
e55d91a4f03c26d6ee8c28846a809064adfdb158
|
[
"MIT"
] | null | null | null |
rankings/forms.py
|
dlanghorne0428/dancesport-tracker-projec
|
e55d91a4f03c26d6ee8c28846a809064adfdb158
|
[
"MIT"
] | 87
|
2020-04-15T22:29:03.000Z
|
2022-01-02T02:21:28.000Z
|
rankings/forms.py
|
dlanghorne0428/dancesport-tracker-projec
|
e55d91a4f03c26d6ee8c28846a809064adfdb158
|
[
"MIT"
] | null | null | null |
from django.db import models
from django.forms import ModelForm, ModelChoiceField
from .models import Dancer, Couple
class DancerForm(ModelForm):
    """ModelForm exposing the editable name and type fields of a Dancer."""
    class Meta:
        model = Dancer
        fields = ['name_first', 'name_middle', 'name_last', 'dancer_type']
class CoupleTypeForm(ModelForm):
    """ModelForm for selecting only the couple_type of a Couple."""
    class Meta:
        model = Couple
        fields = ['couple_type']
class CoupleForm(ModelForm):
    """ModelForm for creating/editing a Couple, with the dancer choice
    lists narrowed according to the couple type."""
    dancer_1 = ModelChoiceField(queryset=Dancer.objects.all())
    dancer_2 = ModelChoiceField(queryset=Dancer.objects.all())

    class Meta:
        model = Couple
        fields = ['dancer_1', 'dancer_2', 'couple_type']

    @staticmethod
    def _querysets_for(couple_type):
        """Return the (dancer_1, dancer_2) querysets appropriate for
        `couple_type`.

        Querysets are lazy, so building all three candidates up front
        costs nothing until one is evaluated.
        """
        pros = Dancer.objects.filter(dancer_type=Dancer.PRO)
        non_pros = Dancer.objects.exclude(dancer_type=Dancer.PRO)
        juniors = Dancer.objects.filter(dancer_type=Dancer.JUNIOR_AMATEUR)
        if couple_type == Couple.PRO_COUPLE:
            return pros, pros
        if couple_type == Couple.PRO_AM_COUPLE:
            return non_pros, pros
        if couple_type == Couple.AMATEUR_COUPLE:
            return non_pros, non_pros
        if couple_type == Couple.JR_PRO_AM_COUPLE:
            return juniors, pros
        # Fallback (original's else): junior amateur couple — both juniors.
        return juniors, juniors

    def __init__(self, couple_type=None, dancer_position = None, dancer_id = None, dancer_type = None, partner_id = None, **kwargs):
        """Restrict and pre-select the dancer fields.

        Args:
            couple_type: one of the Couple.*_COUPLE constants; when None the
                form keeps its default (unfiltered) querysets.
            dancer_position: 1 when `dancer_id` refers to dancer_1; any
                other value treats it as dancer_2.
            dancer_id: pk used to pre-select the known dancer.
            dancer_type: accepted for interface compatibility; unused here.
            partner_id: optional pk pre-selecting dancer_2 (position 1 only).
        """
        super(CoupleForm, self).__init__(**kwargs)
        if couple_type is None:
            return
        self.fields['couple_type'].initial = couple_type
        qs_1, qs_2 = self._querysets_for(couple_type)
        self.fields['dancer_1'].queryset = qs_1
        self.fields['dancer_2'].queryset = qs_2
        if dancer_position == 1:
            self.fields['dancer_1'].initial = dancer_id
            if partner_id is not None:
                self.fields['dancer_2'].initial = partner_id
        else:
            self.fields['dancer_2'].initial = dancer_id
| 57.255814
| 132
| 0.603168
| 560
| 4,924
| 5.071429
| 0.1
| 0.152113
| 0.197183
| 0.119718
| 0.789437
| 0.742958
| 0.742958
| 0.737676
| 0.737676
| 0.737676
| 0
| 0.012898
| 0.29143
| 4,924
| 85
| 133
| 57.929412
| 0.801089
| 0.025183
| 0
| 0.730769
| 0
| 0
| 0.077132
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.012821
| false
| 0.012821
| 0.038462
| 0
| 0.153846
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3b593e26552bea1dfc01b4c6daec744add43e24c
| 49
|
py
|
Python
|
cx_Freeze/samples/relimport/pkg1/sub1.py
|
lexa/cx_Freeze
|
f1f35d19e8e7e821733f86b4da7814c40be3bfd9
|
[
"PSF-2.0"
] | 358
|
2020-07-02T13:00:02.000Z
|
2022-03-29T10:03:57.000Z
|
cx_Freeze/samples/relimport/pkg1/sub1.py
|
lexa/cx_Freeze
|
f1f35d19e8e7e821733f86b4da7814c40be3bfd9
|
[
"PSF-2.0"
] | 372
|
2020-07-02T20:47:57.000Z
|
2022-03-31T19:35:05.000Z
|
cx_Freeze/samples/relimport/pkg1/sub1.py
|
lexa/cx_Freeze
|
f1f35d19e8e7e821733f86b4da7814c40be3bfd9
|
[
"PSF-2.0"
] | 78
|
2020-07-09T14:24:03.000Z
|
2022-03-22T19:06:52.000Z
|
print("importing pkg1.sub1")
from . import sub2
| 12.25
| 28
| 0.734694
| 7
| 49
| 5.142857
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071429
| 0.142857
| 49
| 3
| 29
| 16.333333
| 0.785714
| 0
| 0
| 0
| 0
| 0
| 0.387755
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0.5
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 7
|
8e5fde1895e89061fcba4d5813bb56be5057e9ab
| 75,724
|
py
|
Python
|
python3/madasterapi/api/material_api.py
|
Madaster/examples
|
bd2e8e464172e0d47cac8ed1672501a24ba624c3
|
[
"MIT"
] | 2
|
2021-04-13T12:19:26.000Z
|
2021-09-13T15:40:44.000Z
|
python3/madasterapi/api/material_api.py
|
Madaster/examples
|
bd2e8e464172e0d47cac8ed1672501a24ba624c3
|
[
"MIT"
] | null | null | null |
python3/madasterapi/api/material_api.py
|
Madaster/examples
|
bd2e8e464172e0d47cac8ed1672501a24ba624c3
|
[
"MIT"
] | null | null | null |
"""
Madaster Private API - Build: 8815
Welcome to the **Madaster Private API** endpoint. This endpoint can be used to interact with the Madaster Platform and its resources. This API does not fully cover all functionality of the platform yet, please see below for the available functions and what they can be used for. For detailed information about the platform and this API, please refer to the [Madaster Documentation](https://docs.madaster.com) or the [Madaster API Documentation](https://docs.madaster.com/api).<br/><br/>To access these resources, you need an authorization token. If you do not have one yet, see the chapter about Authorization in the [API documentation](https://docs.madaster.com/api). This token should be sent as a header with the name 'X-API-Key', which will authenticate the request with the token. The documentation below specifies which requests are available and which responses they might produce.<br/><br/>This API can be reached at the endpoint: **[https://api.madaster.com/](https://api.madaster.com/)** # noqa: E501
The version of the OpenAPI document: v3.0
Contact: service@madaster.com
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from madasterapi.api_client import ApiClient, Endpoint as _Endpoint
from madasterapi.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from madasterapi.model.matching_criterion import MatchingCriterion
from madasterapi.model.material_financial_settings import MaterialFinancialSettings
from madasterapi.model.material_financial_value import MaterialFinancialValue
from madasterapi.model.material_request import MaterialRequest
from madasterapi.model.material_response import MaterialResponse
class MaterialApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def __material_add_financial_value(
self,
database_id,
id,
**kwargs
):
"""Adds a financial value for a material # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.material_add_financial_value(database_id, id, async_req=True)
>>> result = thread.get()
Args:
database_id (str): The database identifier
id (str): The material identifier
Keyword Args:
material_financial_value (MaterialFinancialValue): [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
None
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['database_id'] = \
database_id
kwargs['id'] = \
id
return self.call_with_http_info(**kwargs)
self.material_add_financial_value = _Endpoint(
settings={
'response_type': None,
'auth': [
'ApiKeyAuth'
],
'endpoint_path': '/api/v3.0/material/{databaseId}/{id}/financialvalues',
'operation_id': 'material_add_financial_value',
'http_method': 'POST',
'servers': None,
},
params_map={
'all': [
'database_id',
'id',
'material_financial_value',
],
'required': [
'database_id',
'id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'database_id':
(str,),
'id':
(str,),
'material_financial_value':
(MaterialFinancialValue,),
},
'attribute_map': {
'database_id': 'databaseId',
'id': 'id',
},
'location_map': {
'database_id': 'path',
'id': 'path',
'material_financial_value': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [],
'content_type': [
'application/json',
'text/json',
'application/*+json'
]
},
api_client=api_client,
callable=__material_add_financial_value
)
def __material_add_match(
self,
database_id,
id,
**kwargs
):
"""Adds a matchingcriterion to a material/product # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.material_add_match(database_id, id, async_req=True)
>>> result = thread.get()
Args:
database_id (str): The database identifier
id (str): The material/product identifier
Keyword Args:
matching_criterion (MatchingCriterion): [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
MatchingCriterion
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['database_id'] = \
database_id
kwargs['id'] = \
id
return self.call_with_http_info(**kwargs)
self.material_add_match = _Endpoint(
settings={
'response_type': (MatchingCriterion,),
'auth': [
'ApiKeyAuth'
],
'endpoint_path': '/api/v3.0/material/{databaseId}/{id}/matches',
'operation_id': 'material_add_match',
'http_method': 'POST',
'servers': None,
},
params_map={
'all': [
'database_id',
'id',
'matching_criterion',
],
'required': [
'database_id',
'id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'database_id':
(str,),
'id':
(str,),
'matching_criterion':
(MatchingCriterion,),
},
'attribute_map': {
'database_id': 'databaseId',
'id': 'id',
},
'location_map': {
'database_id': 'path',
'id': 'path',
'matching_criterion': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json',
'text/json',
'application/*+json'
]
},
api_client=api_client,
callable=__material_add_match
)
def __material_add_material(
    self,
    database_id,
    **kwargs
):
    """Create a new material  # noqa: E501

    Synchronous by default; pass async_req=True for an async request:

    >>> thread = api.material_add_material(database_id, async_req=True)
    >>> result = thread.get()

    Args:
        database_id (str): The database identifier

    Keyword Args:
        material_request (MaterialRequest): [optional]
        _return_http_data_only (bool): body only, no status/headers. Default True.
        _preload_content (bool): if False, return raw urllib3.HTTPResponse. Default True.
        _request_timeout (float/tuple): total timeout or (connect, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server. Default True.
        _check_return_type (bool): type-check data received from the server. Default True.
        _host_index (int/None): server index; default read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        MaterialResponse, or the request thread when async_req is True.
    """
    # Fill in the standard request options without clobbering
    # caller-supplied values, then hand off to the shared HTTP machinery.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    kwargs['database_id'] = database_id
    return self.call_with_http_info(**kwargs)
self.material_add_material = _Endpoint(
    settings={
        'response_type': (MaterialResponse,),
        'auth': ['ApiKeyAuth'],
        'endpoint_path': '/api/v3.0/material/{databaseId}',
        'operation_id': 'material_add_material',
        'http_method': 'POST',
        'servers': None,
    },
    params_map={
        'all': ['database_id', 'material_request'],
        'required': ['database_id'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'database_id': (str,),
            'material_request': (MaterialRequest,),
        },
        'attribute_map': {'database_id': 'databaseId'},
        'location_map': {
            'database_id': 'path',
            'material_request': 'body',
        },
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['application/json'],
        'content_type': [
            'application/json',
            'text/json',
            'application/*+json',
        ],
    },
    api_client=api_client,
    callable=__material_add_material,
)
def __material_delete_financial_value(
    self,
    database_id,
    id,
    value_id,
    **kwargs
):
    """Delete a financial value for a material  # noqa: E501

    Synchronous by default; pass async_req=True for an async request:

    >>> thread = api.material_delete_financial_value(database_id, id, value_id, async_req=True)
    >>> result = thread.get()

    Args:
        database_id (str): The database identifier
        id (str): The material identifier
        value_id (str): The financial value identifier

    Keyword Args:
        _return_http_data_only (bool): body only, no status/headers. Default True.
        _preload_content (bool): if False, return raw urllib3.HTTPResponse. Default True.
        _request_timeout (float/tuple): total timeout or (connect, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server. Default True.
        _check_return_type (bool): type-check data received from the server. Default True.
        _host_index (int/None): server index; default read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        None, or the request thread when async_req is True.
    """
    # Fill in the standard request options without clobbering
    # caller-supplied values, then hand off to the shared HTTP machinery.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    kwargs['database_id'] = database_id
    kwargs['id'] = id
    kwargs['value_id'] = value_id
    return self.call_with_http_info(**kwargs)
self.material_delete_financial_value = _Endpoint(
    settings={
        'response_type': None,
        'auth': ['ApiKeyAuth'],
        'endpoint_path': '/api/v3.0/material/{databaseId}/{id}/financialvalues/{valueId}',
        'operation_id': 'material_delete_financial_value',
        'http_method': 'DELETE',
        'servers': None,
    },
    params_map={
        'all': ['database_id', 'id', 'value_id'],
        'required': ['database_id', 'id', 'value_id'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'database_id': (str,),
            'id': (str,),
            'value_id': (str,),
        },
        'attribute_map': {
            'database_id': 'databaseId',
            'id': 'id',
            'value_id': 'valueId',
        },
        'location_map': {
            'database_id': 'path',
            'id': 'path',
            'value_id': 'path',
        },
        'collection_format_map': {},
    },
    headers_map={
        'accept': [],
        'content_type': [],
    },
    api_client=api_client,
    callable=__material_delete_financial_value,
)
def __material_delete_match(
    self,
    database_id,
    id,
    match_id,
    **kwargs
):
    """Delete a matching criterion for a material/product  # noqa: E501

    Synchronous by default; pass async_req=True for an async request:

    >>> thread = api.material_delete_match(database_id, id, match_id, async_req=True)
    >>> result = thread.get()

    Args:
        database_id (str): The database identifier
        id (str): The material/product identifier
        match_id (str): The match identifier

    Keyword Args:
        _return_http_data_only (bool): body only, no status/headers. Default True.
        _preload_content (bool): if False, return raw urllib3.HTTPResponse. Default True.
        _request_timeout (float/tuple): total timeout or (connect, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server. Default True.
        _check_return_type (bool): type-check data received from the server. Default True.
        _host_index (int/None): server index; default read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        None, or the request thread when async_req is True.
    """
    # Fill in the standard request options without clobbering
    # caller-supplied values, then hand off to the shared HTTP machinery.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    kwargs['database_id'] = database_id
    kwargs['id'] = id
    kwargs['match_id'] = match_id
    return self.call_with_http_info(**kwargs)
self.material_delete_match = _Endpoint(
    settings={
        'response_type': None,
        'auth': ['ApiKeyAuth'],
        'endpoint_path': '/api/v3.0/material/{databaseId}/{id}/matches/{matchId}',
        'operation_id': 'material_delete_match',
        'http_method': 'DELETE',
        'servers': None,
    },
    params_map={
        'all': ['database_id', 'id', 'match_id'],
        'required': ['database_id', 'id', 'match_id'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'database_id': (str,),
            'id': (str,),
            'match_id': (str,),
        },
        'attribute_map': {
            'database_id': 'databaseId',
            'id': 'id',
            'match_id': 'matchId',
        },
        'location_map': {
            'database_id': 'path',
            'id': 'path',
            'match_id': 'path',
        },
        'collection_format_map': {},
    },
    headers_map={
        'accept': [],
        'content_type': [],
    },
    api_client=api_client,
    callable=__material_delete_match,
)
def __material_get_financial_settings_by_id(
    self,
    database_id,
    id,
    **kwargs
):
    """Get the financial settings for a material  # noqa: E501

    Synchronous by default; pass async_req=True for an async request:

    >>> thread = api.material_get_financial_settings_by_id(database_id, id, async_req=True)
    >>> result = thread.get()

    Args:
        database_id (str): The database identifier
        id (str): The material identifier

    Keyword Args:
        _return_http_data_only (bool): body only, no status/headers. Default True.
        _preload_content (bool): if False, return raw urllib3.HTTPResponse. Default True.
        _request_timeout (float/tuple): total timeout or (connect, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server. Default True.
        _check_return_type (bool): type-check data received from the server. Default True.
        _host_index (int/None): server index; default read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        MaterialFinancialSettings, or the request thread when async_req is True.
    """
    # Fill in the standard request options without clobbering
    # caller-supplied values, then hand off to the shared HTTP machinery.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    kwargs['database_id'] = database_id
    kwargs['id'] = id
    return self.call_with_http_info(**kwargs)
self.material_get_financial_settings_by_id = _Endpoint(
    settings={
        'response_type': (MaterialFinancialSettings,),
        'auth': ['ApiKeyAuth'],
        'endpoint_path': '/api/v3.0/material/{databaseId}/{id}/financial',
        'operation_id': 'material_get_financial_settings_by_id',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': ['database_id', 'id'],
        'required': ['database_id', 'id'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'database_id': (str,),
            'id': (str,),
        },
        'attribute_map': {'database_id': 'databaseId', 'id': 'id'},
        'location_map': {'database_id': 'path', 'id': 'path'},
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['application/json'],
        'content_type': [],
    },
    api_client=api_client,
    callable=__material_get_financial_settings_by_id,
)
def __material_get_financial_values(
    self,
    database_id,
    id,
    **kwargs
):
    """Get the financial values for a material in manual import mode.  # noqa: E501

    Synchronous by default; pass async_req=True for an async request:

    >>> thread = api.material_get_financial_values(database_id, id, async_req=True)
    >>> result = thread.get()

    Args:
        database_id (str): The database identifier
        id (str): The material identifier

    Keyword Args:
        _return_http_data_only (bool): body only, no status/headers. Default True.
        _preload_content (bool): if False, return raw urllib3.HTTPResponse. Default True.
        _request_timeout (float/tuple): total timeout or (connect, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server. Default True.
        _check_return_type (bool): type-check data received from the server. Default True.
        _host_index (int/None): server index; default read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        [MaterialFinancialValue], or the request thread when async_req is True.
    """
    # Fill in the standard request options without clobbering
    # caller-supplied values, then hand off to the shared HTTP machinery.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    kwargs['database_id'] = database_id
    kwargs['id'] = id
    return self.call_with_http_info(**kwargs)
self.material_get_financial_values = _Endpoint(
    settings={
        'response_type': ([MaterialFinancialValue],),
        'auth': ['ApiKeyAuth'],
        'endpoint_path': '/api/v3.0/material/{databaseId}/{id}/financialvalues',
        'operation_id': 'material_get_financial_values',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': ['database_id', 'id'],
        'required': ['database_id', 'id'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'database_id': (str,),
            'id': (str,),
        },
        'attribute_map': {'database_id': 'databaseId', 'id': 'id'},
        'location_map': {'database_id': 'path', 'id': 'path'},
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['application/json'],
        'content_type': [],
    },
    api_client=api_client,
    callable=__material_get_financial_values,
)
def __material_get_match_by_id(
    self,
    database_id,
    id,
    match_id,
    **kwargs
):
    """Get a specific matching criterion for a material/product  # noqa: E501

    Synchronous by default; pass async_req=True for an async request:

    >>> thread = api.material_get_match_by_id(database_id, id, match_id, async_req=True)
    >>> result = thread.get()

    Args:
        database_id (str): The database identifier
        id (str): The material/product identifier
        match_id (str): The match identifier

    Keyword Args:
        _return_http_data_only (bool): body only, no status/headers. Default True.
        _preload_content (bool): if False, return raw urllib3.HTTPResponse. Default True.
        _request_timeout (float/tuple): total timeout or (connect, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server. Default True.
        _check_return_type (bool): type-check data received from the server. Default True.
        _host_index (int/None): server index; default read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        [MatchingCriterion], or the request thread when async_req is True.
    """
    # Fill in the standard request options without clobbering
    # caller-supplied values, then hand off to the shared HTTP machinery.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    kwargs['database_id'] = database_id
    kwargs['id'] = id
    kwargs['match_id'] = match_id
    return self.call_with_http_info(**kwargs)
self.material_get_match_by_id = _Endpoint(
    settings={
        'response_type': ([MatchingCriterion],),
        'auth': ['ApiKeyAuth'],
        'endpoint_path': '/api/v3.0/material/{databaseId}/{id}/matches/{matchId}',
        'operation_id': 'material_get_match_by_id',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': ['database_id', 'id', 'match_id'],
        'required': ['database_id', 'id', 'match_id'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'database_id': (str,),
            'id': (str,),
            'match_id': (str,),
        },
        'attribute_map': {
            'database_id': 'databaseId',
            'id': 'id',
            'match_id': 'matchId',
        },
        'location_map': {
            'database_id': 'path',
            'id': 'path',
            'match_id': 'path',
        },
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['application/json'],
        'content_type': [],
    },
    api_client=api_client,
    callable=__material_get_match_by_id,
)
def __material_get_matches(
    self,
    database_id,
    id,
    **kwargs
):
    """Get the matching criteria for a material/product  # noqa: E501

    Synchronous by default; pass async_req=True for an async request:

    >>> thread = api.material_get_matches(database_id, id, async_req=True)
    >>> result = thread.get()

    Args:
        database_id (str): The database identifier
        id (str): The material/product identifier

    Keyword Args:
        _return_http_data_only (bool): body only, no status/headers. Default True.
        _preload_content (bool): if False, return raw urllib3.HTTPResponse. Default True.
        _request_timeout (float/tuple): total timeout or (connect, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server. Default True.
        _check_return_type (bool): type-check data received from the server. Default True.
        _host_index (int/None): server index; default read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        [MatchingCriterion], or the request thread when async_req is True.
    """
    # Fill in the standard request options without clobbering
    # caller-supplied values, then hand off to the shared HTTP machinery.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    kwargs['database_id'] = database_id
    kwargs['id'] = id
    return self.call_with_http_info(**kwargs)
self.material_get_matches = _Endpoint(
    settings={
        'response_type': ([MatchingCriterion],),
        'auth': ['ApiKeyAuth'],
        'endpoint_path': '/api/v3.0/material/{databaseId}/{id}/matches',
        'operation_id': 'material_get_matches',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': ['database_id', 'id'],
        'required': ['database_id', 'id'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'database_id': (str,),
            'id': (str,),
        },
        'attribute_map': {'database_id': 'databaseId', 'id': 'id'},
        'location_map': {'database_id': 'path', 'id': 'path'},
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['application/json'],
        'content_type': [],
    },
    api_client=api_client,
    callable=__material_get_matches,
)
def __material_get_material_by_id(
    self,
    database_id,
    id,
    **kwargs
):
    """Get a material by id  # noqa: E501

    Synchronous by default; pass async_req=True for an async request:

    >>> thread = api.material_get_material_by_id(database_id, id, async_req=True)
    >>> result = thread.get()

    Args:
        database_id (str): The database identifier
        id (str): The material identifier

    Keyword Args:
        _return_http_data_only (bool): body only, no status/headers. Default True.
        _preload_content (bool): if False, return raw urllib3.HTTPResponse. Default True.
        _request_timeout (float/tuple): total timeout or (connect, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server. Default True.
        _check_return_type (bool): type-check data received from the server. Default True.
        _host_index (int/None): server index; default read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        MaterialResponse, or the request thread when async_req is True.
    """
    # Fill in the standard request options without clobbering
    # caller-supplied values, then hand off to the shared HTTP machinery.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    kwargs['database_id'] = database_id
    kwargs['id'] = id
    return self.call_with_http_info(**kwargs)
self.material_get_material_by_id = _Endpoint(
    settings={
        'response_type': (MaterialResponse,),
        'auth': ['ApiKeyAuth'],
        'endpoint_path': '/api/v3.0/material/{databaseId}/{id}',
        'operation_id': 'material_get_material_by_id',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': ['database_id', 'id'],
        'required': ['database_id', 'id'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'database_id': (str,),
            'id': (str,),
        },
        'attribute_map': {'database_id': 'databaseId', 'id': 'id'},
        'location_map': {'database_id': 'path', 'id': 'path'},
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['application/json'],
        'content_type': [],
    },
    api_client=api_client,
    callable=__material_get_material_by_id,
)
def __material_get_materials(
    self,
    database_id,
    **kwargs
):
    """Get all materials from a database  # noqa: E501

    Synchronous by default; pass async_req=True for an async request:

    >>> thread = api.material_get_materials(database_id, async_req=True)
    >>> result = thread.get()

    Args:
        database_id (str): The database identifier

    Keyword Args:
        _return_http_data_only (bool): body only, no status/headers. Default True.
        _preload_content (bool): if False, return raw urllib3.HTTPResponse. Default True.
        _request_timeout (float/tuple): total timeout or (connect, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server. Default True.
        _check_return_type (bool): type-check data received from the server. Default True.
        _host_index (int/None): server index; default read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        [MaterialResponse], or the request thread when async_req is True.
    """
    # Fill in the standard request options without clobbering
    # caller-supplied values, then hand off to the shared HTTP machinery.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    kwargs['database_id'] = database_id
    return self.call_with_http_info(**kwargs)
self.material_get_materials = _Endpoint(
    settings={
        'response_type': ([MaterialResponse],),
        'auth': ['ApiKeyAuth'],
        'endpoint_path': '/api/v3.0/material/{databaseId}',
        'operation_id': 'material_get_materials',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': ['database_id'],
        'required': ['database_id'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'database_id': (str,),
        },
        'attribute_map': {'database_id': 'databaseId'},
        'location_map': {'database_id': 'path'},
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['application/json'],
        'content_type': [],
    },
    api_client=api_client,
    callable=__material_get_materials,
)
def __material_update_financial_settings_by_id(
    self,
    database_id,
    id,
    **kwargs
):
    """Update the financial settings for a material  # noqa: E501

    Synchronous by default; pass async_req=True for an async request:

    >>> thread = api.material_update_financial_settings_by_id(database_id, id, async_req=True)
    >>> result = thread.get()

    Args:
        database_id (str): The database identifier
        id (str): The material identifier

    Keyword Args:
        material_financial_settings (MaterialFinancialSettings): [optional]
        _return_http_data_only (bool): body only, no status/headers. Default True.
        _preload_content (bool): if False, return raw urllib3.HTTPResponse. Default True.
        _request_timeout (float/tuple): total timeout or (connect, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server. Default True.
        _check_return_type (bool): type-check data received from the server. Default True.
        _host_index (int/None): server index; default read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        MaterialFinancialSettings, or the request thread when async_req is True.
    """
    # Fill in the standard request options without clobbering
    # caller-supplied values, then hand off to the shared HTTP machinery.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    kwargs['database_id'] = database_id
    kwargs['id'] = id
    return self.call_with_http_info(**kwargs)
self.material_update_financial_settings_by_id = _Endpoint(
    settings={
        'response_type': (MaterialFinancialSettings,),
        'auth': ['ApiKeyAuth'],
        'endpoint_path': '/api/v3.0/material/{databaseId}/{id}/financial',
        'operation_id': 'material_update_financial_settings_by_id',
        'http_method': 'PUT',
        'servers': None,
    },
    params_map={
        'all': ['database_id', 'id', 'material_financial_settings'],
        'required': ['database_id', 'id'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'database_id': (str,),
            'id': (str,),
            'material_financial_settings': (MaterialFinancialSettings,),
        },
        'attribute_map': {'database_id': 'databaseId', 'id': 'id'},
        'location_map': {
            'database_id': 'path',
            'id': 'path',
            'material_financial_settings': 'body',
        },
        'collection_format_map': {},
    },
    headers_map={
        'accept': ['application/json'],
        'content_type': [
            'application/json',
            'text/json',
            'application/*+json',
        ],
    },
    api_client=api_client,
    callable=__material_update_financial_settings_by_id,
)
def __material_update_financial_value(
    self,
    database_id,
    id,
    value_id,
    **kwargs
):
    """Update a financial value for a material  # noqa: E501

    Synchronous by default; pass async_req=True for an async request:

    >>> thread = api.material_update_financial_value(database_id, id, value_id, async_req=True)
    >>> result = thread.get()

    Args:
        database_id (str): The database identifier
        id (str): The material identifier
        value_id (str): The financial value identifier

    Keyword Args:
        material_financial_value (MaterialFinancialValue): [optional]
        _return_http_data_only (bool): body only, no status/headers. Default True.
        _preload_content (bool): if False, return raw urllib3.HTTPResponse. Default True.
        _request_timeout (float/tuple): total timeout or (connect, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server. Default True.
        _check_return_type (bool): type-check data received from the server. Default True.
        _host_index (int/None): server index; default read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        None, or the request thread when async_req is True.
    """
    # Fill in the standard request options without clobbering
    # caller-supplied values, then hand off to the shared HTTP machinery.
    for option, default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_host_index', None),
    ):
        kwargs[option] = kwargs.get(option, default)
    kwargs['database_id'] = database_id
    kwargs['id'] = id
    kwargs['value_id'] = value_id
    return self.call_with_http_info(**kwargs)
self.material_update_financial_value = _Endpoint(
    settings={
        'response_type': None,
        'auth': ['ApiKeyAuth'],
        'endpoint_path': '/api/v3.0/material/{databaseId}/{id}/financialvalues/{valueId}',
        'operation_id': 'material_update_financial_value',
        'http_method': 'PUT',
        'servers': None,
    },
    params_map={
        'all': ['database_id', 'id', 'value_id', 'material_financial_value'],
        'required': ['database_id', 'id', 'value_id'],
        'nullable': [],
        'enum': [],
        'validation': [],
    },
    root_map={
        'validations': {},
        'allowed_values': {},
        'openapi_types': {
            'database_id': (str,),
            'id': (str,),
            'value_id': (str,),
            'material_financial_value': (MaterialFinancialValue,),
        },
        'attribute_map': {
            'database_id': 'databaseId',
            'id': 'id',
            'value_id': 'valueId',
        },
        'location_map': {
            'database_id': 'path',
            'id': 'path',
            'value_id': 'path',
            'material_financial_value': 'body',
        },
        'collection_format_map': {},
    },
    headers_map={
        'accept': [],
        'content_type': [
            'application/json',
            'text/json',
            'application/*+json',
        ],
    },
    api_client=api_client,
    callable=__material_update_financial_value,
)
def __material_update_match(
    self,
    database_id,
    id,
    match_id,
    **kwargs
):
    """Updates a matching criterion for a material/product  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.material_update_match(database_id, id, match_id, async_req=True)
    >>> result = thread.get()

    Args:
        database_id (str): The database identifier
        id (str): The material/product identifier
        match_id (str): The match identifier

    Keyword Args:
        matching_criterion (MatchingCriterion): [optional]
        _return_http_data_only (bool): response data without head status
            code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (float/tuple): timeout setting for this request. If one
            number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done on the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done on the data received from the server.
            Default is True.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        MatchingCriterion
            If the method is called asynchronously, returns the request
            thread.
    """
    # Fill in defaults for the generated-client control kwargs so that
    # _Endpoint.call_with_http_info always sees a complete set.
    kwargs['async_req'] = kwargs.get(
        'async_req', False
    )
    kwargs['_return_http_data_only'] = kwargs.get(
        '_return_http_data_only', True
    )
    kwargs['_preload_content'] = kwargs.get(
        '_preload_content', True
    )
    kwargs['_request_timeout'] = kwargs.get(
        '_request_timeout', None
    )
    kwargs['_check_input_type'] = kwargs.get(
        '_check_input_type', True
    )
    kwargs['_check_return_type'] = kwargs.get(
        '_check_return_type', True
    )
    kwargs['_host_index'] = kwargs.get('_host_index')
    # Positional path parameters are forwarded through kwargs by the
    # _Endpoint convention (matched against params_map/location_map).
    kwargs['database_id'] = \
        database_id
    kwargs['id'] = \
        id
    kwargs['match_id'] = \
        match_id
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for PUT .../matches/{matchId}: binds the
# __material_update_match wrapper above to the generated request machinery.
# Response is deserialized into a MatchingCriterion instance.
self.material_update_match = _Endpoint(
    settings={
        'response_type': (MatchingCriterion,),
        'auth': [
            'ApiKeyAuth'
        ],
        'endpoint_path': '/api/v3.0/material/{databaseId}/{id}/matches/{matchId}',
        'operation_id': 'material_update_match',
        'http_method': 'PUT',
        'servers': None,
    },
    params_map={
        'all': [
            'database_id',
            'id',
            'match_id',
            'matching_criterion',
        ],
        'required': [
            'database_id',
            'id',
            'match_id',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'database_id':
                (str,),
            'id':
                (str,),
            'match_id':
                (str,),
            'matching_criterion':
                (MatchingCriterion,),
        },
        'attribute_map': {
            'database_id': 'databaseId',
            'id': 'id',
            'match_id': 'matchId',
        },
        'location_map': {
            'database_id': 'path',
            'id': 'path',
            'match_id': 'path',
            'matching_criterion': 'body',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [
            'application/json',
            'text/json',
            'application/*+json'
        ]
    },
    api_client=api_client,
    callable=__material_update_match
)
def __material_update_material(
    self,
    database_id,
    id,
    **kwargs
):
    """Update an existing material  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.material_update_material(database_id, id, async_req=True)
    >>> result = thread.get()

    Args:
        database_id (str): The database identifier
        id (str): The material identifier

    Keyword Args:
        material_request (MaterialRequest): [optional]
        _return_http_data_only (bool): response data without head status
            code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (float/tuple): timeout setting for this request. If one
            number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done on the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done on the data received from the server.
            Default is True.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        MaterialResponse
            If the method is called asynchronously, returns the request
            thread.
    """
    # Fill in defaults for the generated-client control kwargs so that
    # _Endpoint.call_with_http_info always sees a complete set.
    kwargs['async_req'] = kwargs.get(
        'async_req', False
    )
    kwargs['_return_http_data_only'] = kwargs.get(
        '_return_http_data_only', True
    )
    kwargs['_preload_content'] = kwargs.get(
        '_preload_content', True
    )
    kwargs['_request_timeout'] = kwargs.get(
        '_request_timeout', None
    )
    kwargs['_check_input_type'] = kwargs.get(
        '_check_input_type', True
    )
    kwargs['_check_return_type'] = kwargs.get(
        '_check_return_type', True
    )
    kwargs['_host_index'] = kwargs.get('_host_index')
    # Positional path parameters are forwarded through kwargs by the
    # _Endpoint convention (matched against params_map/location_map).
    kwargs['database_id'] = \
        database_id
    kwargs['id'] = \
        id
    return self.call_with_http_info(**kwargs)
# Endpoint descriptor for PUT /api/v3.0/material/{databaseId}/{id}: binds
# the __material_update_material wrapper above to the generated request
# machinery.  Response is deserialized into a MaterialResponse instance.
self.material_update_material = _Endpoint(
    settings={
        'response_type': (MaterialResponse,),
        'auth': [
            'ApiKeyAuth'
        ],
        'endpoint_path': '/api/v3.0/material/{databaseId}/{id}',
        'operation_id': 'material_update_material',
        'http_method': 'PUT',
        'servers': None,
    },
    params_map={
        'all': [
            'database_id',
            'id',
            'material_request',
        ],
        'required': [
            'database_id',
            'id',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'database_id':
                (str,),
            'id':
                (str,),
            'material_request':
                (MaterialRequest,),
        },
        'attribute_map': {
            'database_id': 'databaseId',
            'id': 'id',
        },
        'location_map': {
            'database_id': 'path',
            'id': 'path',
            'material_request': 'body',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json'
        ],
        'content_type': [
            'application/json',
            'text/json',
            'application/*+json'
        ]
    },
    api_client=api_client,
    callable=__material_update_material
)
| 36.974609
| 1,015
| 0.452855
| 6,328
| 75,724
| 5.159608
| 0.040297
| 0.045942
| 0.02389
| 0.024809
| 0.93023
| 0.920551
| 0.915467
| 0.908637
| 0.903522
| 0.898254
| 0
| 0.002661
| 0.464001
| 75,724
| 2,047
| 1,016
| 36.992672
| 0.801764
| 0.319965
| 0
| 0.742414
| 0
| 0
| 0.22341
| 0.049695
| 0
| 0
| 0
| 0
| 0
| 1
| 0.011291
| false
| 0
| 0.006351
| 0
| 0.028934
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
8ec9c364db3655b400b619e63ab7b8973128ca52
| 7,073
|
py
|
Python
|
test/test_vep_filter.py
|
gantzgraf/vape
|
f939cb527d72d852cb0919a57332110c15c5fd4a
|
[
"MIT"
] | 4
|
2020-03-25T06:09:39.000Z
|
2021-03-23T11:22:00.000Z
|
test/test_vep_filter.py
|
gantzgraf/vape
|
f939cb527d72d852cb0919a57332110c15c5fd4a
|
[
"MIT"
] | 1
|
2020-10-02T14:50:30.000Z
|
2020-10-12T15:24:24.000Z
|
test/test_vep_filter.py
|
gantzgraf/vape
|
f939cb527d72d852cb0919a57332110c15c5fd4a
|
[
"MIT"
] | 1
|
2021-02-20T11:32:34.000Z
|
2021-02-20T11:32:34.000Z
|
from .utils import *
from vase.vep_filter import VepFilter
from vase.vcf_reader import VcfReader
# BCF fixture used by the in-silico (is_*) filter tests below; dir_path is
# provided by the star-import from .utils.
is_input = os.path.join(dir_path, 'test_data', 'ex5.bcf')
def test_csq():
    """Default consequence filtering (empty csq list) matches the baseline."""
    out_file = get_tmp_out()
    args = {'csq': [], 'output': out_file}
    got, want = run_args(args, out_file, 'test_csq')
    assert_equal(got, want)
    os.remove(out_file)
def test_impact():
    """Restricting to HIGH-impact consequences matches the baseline output."""
    out_file = get_tmp_out()
    args = {'impact': ["HIGH"], 'output': out_file}
    got, want = run_args(args, out_file, 'test_impact')
    assert_equal(got, want)
    os.remove(out_file)
def test_insilico_freq():
    """A 1% frequency cutoff on the in-silico input matches the baseline."""
    out_file = get_tmp_out()
    args = {
        'input': is_input,
        'output': out_file,
        'csq': [],
        'freq': 0.01,
    }
    got, want = run_args(args, out_file, 'test_insilico_freq')
    assert_equal(got, want)
    os.remove(out_file)
def test_insilico_pred():
    """Missense-prediction filtering: sift-only, Polyphen-only, and both.

    Runs three sub-cases against distinct expected-output fixtures
    ('test_insilico_pred_sift' / '..._polyphen' / '..._both'), each with its
    own temporary output file.
    """
    output = get_tmp_out()
    test_args = dict(
        input=is_input,
        output=output,
        csq=[],
        biotypes=['default', 'transcribed_processed_pseudogene'],
        missense_filters=['sift=deleterious'],
    )
    results, expected = run_args(test_args, output,
                                 'test_insilico_pred_sift')
    assert_equal(results, expected)
    os.remove(output)
    output = get_tmp_out()
    test_args = dict(
        input=is_input,
        output=output,
        csq=[],
        missense_filters=['Polyphen'])
    results, expected = run_args(test_args, output,
                                 'test_insilico_pred_polyphen')
    assert_equal(results, expected)
    # Fix: this temp file was previously leaked — `output` was rebound to a
    # new temp path without removing the polyphen sub-test's file.
    os.remove(output)
    output = get_tmp_out()
    test_args = dict(
        input=is_input,
        output=output,
        csq=[],
        missense_filters=['Polyphen', 'sift=deleterious'],
    )
    results, expected = run_args(test_args, output,
                                 'test_insilico_pred_both')
    assert_equal(results, expected)
    os.remove(output)
def test_insilico_freq_pred():
    """Combined 1% frequency cutoff plus sift=deleterious missense filter."""
    out_file = get_tmp_out()
    args = {
        'input': is_input,
        'output': out_file,
        'csq': [],
        'freq': 0.01,
        'missense_filters': ['sift=deleterious'],
    }
    got, want = run_args(args, out_file, 'test_insilico_freq_pred')
    assert_equal(got, want)
    os.remove(out_file)
def test_insilico_score():
    """Numeric missense-score threshold (SIFT_score=0.1) matches baseline."""
    out_file = get_tmp_out()
    args = {
        'input': is_input,
        'output': out_file,
        'csq': [],
        'missense_filters': ['SIFT_score=0.1'],
    }
    got, want = run_args(args, out_file, 'test_insilico_score')
    assert_equal(got, want)
    os.remove(out_file)
def test_insilico_unpredicted():
    """filter_unpredicted=True also drops records lacking a prediction."""
    out_file = get_tmp_out()
    args = {
        'input': is_input,
        'output': out_file,
        'csq': [],
        'filter_unpredicted': True,
        'missense_filters': ['SIFT_score=0.1'],
    }
    got, want = run_args(args, out_file, 'test_insilico_unpredicted')
    assert_equal(got, want)
    os.remove(out_file)
def test_insilico_keep_any_damaging():
    """keep_if_any_damaging=True retains records damaging under either tool."""
    out_file = get_tmp_out()
    args = {
        'input': is_input,
        'output': out_file,
        'csq': [],
        'keep_if_any_damaging': True,
        'missense_filters': ['Polyphen', 'sift=deleterious'],
    }
    got, want = run_args(args, out_file, 'test_insilico_keep_any_damaging')
    assert_equal(got, want)
    os.remove(out_file)
def test_canonical():
    """Canonical-only filtering: check per-record alt and CSQ filter flags."""
    vcf = VcfReader(is_input)
    vep_filter = VepFilter(vcf=vcf, csq=[], canonical=True)
    want_alts = [[False]] * 7
    want_csq = [
        [False, True, True, True],
        [False, True, True, True],
        [False, True, True, True],
        [False, False, True, False],
        [False, True, True, True],
        [False, True, True, True],
        [False, True, True, True],
    ]
    got_alts, got_csq = [], []
    for record in vcf:
        alt_flags, csq_flags = vep_filter.filter(record)
        got_csq.append(csq_flags)
        got_alts.append(alt_flags)
    assert_equal(want_csq, got_csq)
    assert_equal(want_alts, got_alts)
def test_flags():
    """filter_flagged_features only removes CSQs on flagged transcripts."""
    vcf = VcfReader(is_input)
    vep_filter = VepFilter(
        vcf=vcf,
        csq=['all'],
        biotypes=['all'],
        filter_flagged_features=True)
    want_alts = [[False]] * 7
    want_csq = [
        [False, False, False, False],
        [False, False, False, False],
        [False, False, False, False],
        [False, False, False, False],
        [False, False, False, False],
        [False, True, False, False],
        [False, True, False, False],
    ]
    got_alts, got_csq = [], []
    for record in vcf:
        alt_flags, csq_flags = vep_filter.filter(record)
        got_csq.append(csq_flags)
        got_alts.append(alt_flags)
    assert_equal(want_csq, got_csq)
    assert_equal(want_alts, got_alts)
def test_canonical_stop_gained():
    """stop_gained + canonical: last two records pass on canonical CSQs."""
    vcf = VcfReader(is_input)
    vep_filter = VepFilter(vcf=vcf, csq=['stop_gained'], canonical=True)
    want_alts = [[True]] * 5 + [[False]] * 2
    want_csq = (
        [[True, True, True, True]] * 5
        + [[False, True, True, True]] * 2
    )
    got_alts, got_csq = [], []
    for record in vcf:
        alt_flags, csq_flags = vep_filter.filter(record)
        got_csq.append(csq_flags)
        got_alts.append(alt_flags)
    assert_equal(want_csq, got_csq)
    assert_equal(want_alts, got_alts)
def test_canonical_lof():
    """Adding loftee=True re-filters the last record vs. test_canonical_stop_gained."""
    vcf = VcfReader(is_input)
    vep_filter = VepFilter(
        vcf=vcf,
        csq=['stop_gained'],
        loftee=True,
        canonical=True)
    want_alts = [[True]] * 5 + [[False]] + [[True]]
    want_csq = (
        [[True, True, True, True]] * 5
        + [[False, True, True, True]]
        + [[True, True, True, True]]
    )
    got_alts, got_csq = [], []
    for record in vcf:
        alt_flags, csq_flags = vep_filter.filter(record)
        got_csq.append(csq_flags)
        got_alts.append(alt_flags)
    assert_equal(want_csq, got_csq)
    assert_equal(want_alts, got_alts)
if __name__ == '__main__':
    # Allow running this test module directly via the nose test runner
    # (instead of through a pytest/nose invocation on the package).
    import nose
    nose.run(defaultTest=__name__)
| 29.594142
| 65
| 0.568076
| 796
| 7,073
| 4.756281
| 0.110553
| 0.126783
| 0.155309
| 0.160592
| 0.883518
| 0.87982
| 0.871104
| 0.845219
| 0.842842
| 0.841521
| 0
| 0.00332
| 0.318677
| 7,073
| 238
| 66
| 29.718487
| 0.78232
| 0
| 0
| 0.772512
| 0
| 0
| 0.046374
| 0.01838
| 0
| 0
| 0
| 0
| 0.085308
| 1
| 0.056872
| false
| 0
| 0.018957
| 0
| 0.075829
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
d9321118d0fd07ae26ba0f59cd6221c580bdabc8
| 209
|
py
|
Python
|
meta_policy_search/baselines/__init__.py
|
behzadhaghgoo/cml
|
e659c7ae10a52bbe1cbabf9d359aea43af19eb12
|
[
"MIT"
] | 210
|
2018-10-17T01:04:48.000Z
|
2022-03-09T16:17:06.000Z
|
meta_policy_search/baselines/__init__.py
|
Zhiwei-Z/PrompLimitTest
|
9d109f1a604125411a1e7894c3222cd50a0ec975
|
[
"MIT"
] | 13
|
2018-10-25T20:01:09.000Z
|
2022-01-24T13:11:24.000Z
|
meta_policy_search/baselines/__init__.py
|
Zhiwei-Z/PrompLimitTest
|
9d109f1a604125411a1e7894c3222cd50a0ec975
|
[
"MIT"
] | 55
|
2018-10-18T22:00:51.000Z
|
2021-11-24T00:06:31.000Z
|
from meta_policy_search.baselines.base import Baseline
from meta_policy_search.baselines.linear_baseline import LinearFeatureBaseline
from meta_policy_search.baselines.linear_baseline import LinearTimeBaseline
| 69.666667
| 78
| 0.91866
| 26
| 209
| 7.076923
| 0.423077
| 0.130435
| 0.228261
| 0.326087
| 0.690217
| 0.532609
| 0.532609
| 0.532609
| 0
| 0
| 0
| 0
| 0.052632
| 209
| 3
| 79
| 69.666667
| 0.929293
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d9365eb363e85183af0fd99f7f9591c54be924d7
| 3,220
|
py
|
Python
|
lib/SurfacingAlgorithms/huji-rich-Elad3DFast/analytic/sedov_taylor.py
|
GalaxyHunters/Vivid
|
f724e5671b650433d0c26319c86231bd3b246e4e
|
[
"BSD-3-Clause"
] | null | null | null |
lib/SurfacingAlgorithms/huji-rich-Elad3DFast/analytic/sedov_taylor.py
|
GalaxyHunters/Vivid
|
f724e5671b650433d0c26319c86231bd3b246e4e
|
[
"BSD-3-Clause"
] | 222
|
2018-07-25T18:13:57.000Z
|
2021-10-10T14:54:04.000Z
|
lib/SurfacingAlgorithms/huji-rich-Elad3DFast/analytic/sedov_taylor.py
|
GalaxyHunters/Vivid
|
f724e5671b650433d0c26319c86231bd3b246e4e
|
[
"BSD-3-Clause"
] | 2
|
2018-07-29T09:39:40.000Z
|
2018-08-25T19:17:49.000Z
|
def vtoz(v, w, g, n):
    """
    Calculates the dimensionless radius as a function of the
    dimensionless velocity (self-similar blast-wave solution;
    presumably Sedov-Taylor given the companion functions — TODO confirm).

    Input:
    v - Dimensionless velocity
    w - Ambient density power law index
    g - Adiabatic index
    n - Geometry (1 - plane, 2 - cylindrical, 3 - spherical)

    Returns:
    The dimensionless radius z corresponding to v.
    """
    # Closed-form single expression; kept verbatim (machine-generated from a
    # CAS by the look of it) — do not reformat term-by-term.
    return (4.**(1./(2. + n - w))*((-1. + g)/(1. + g))**((-1. + g)/(2. - n + g*(-2. + \
w)))*(2. + n - (2. + (-1. + g)*n)*v - w)**((g**2.*(n**2. + (-2. + w)**2.) + \
2.*(-2. + n)*(2.*n - w) + g*(-4. - 3.*n**2. - 2.*n*(-4. + w) + w**2.))/((2. + \
(-1. + g)*n)*(2. - n + g*(-2. + w))*(2. + n - w))))/(((1. + g)*v)**(2./(2. + \
n - w))*(-1. + g*v)**((-1. + g)/(2. - n + g*(-2. + w)))*(-((2. - 3.*n + w + \
g*(-2. + n + w))/(1. + g)))**((g**2.*(n**2. + (-2. + w)**2.) + 2.*(-2. + \
n)*(2.*n - w) + g*(-4. - 3.*n**2. - 2.*n*(-4. + w) + w**2.))/((2. + (-1. + \
g)*n)*(2. - n + g*(-2. + w))*(2. + n - w))))
def vtod(v, w, g, n):
    """
    Calculates the dimensionless density as a function of the
    dimensionless velocity (companion to vtoz/vtop).

    Input:
    v - Dimensionless velocity
    w - Ambient density power law index
    g - Adiabatic index
    n - Geometry (1 - plane, 2 - cylindrical, 3 - spherical)

    Returns:
    The dimensionless density corresponding to v.
    """
    # Closed-form single expression; kept verbatim — do not reformat.
    return (((-1 + g)/(1 + g))**(-1 + (n - g*w)/(2 - n + g*(-2 + w)) + (-2*n + \
w + g*w)/((-2 + g)*n + w))*(1 + g)**((2*w)/(2 + n - w))*(1 - \
v)**((2*n - (1 + g)*w)/((-2 + g)*n + w))*v**((2*w)/(2 + n - w))*(-1 + \
g*v)**((n - g*w)/(-2 + n - g*(-2 + w)))*(2 + n - (2 + (-1 + g)*n)*v - \
w)**((-(g**3*n*(n**2 + (-2 + w)**2)*w) + 2*(-2 + n)*(2*n**3 - \
n**2*(-4 + w) - 6*n*w + 2*w**2) + g*(-3*n**4 + n**3*(2 - 6*w) - \
2*w*(-4 + w**2) + 2*n*(-4 - 8*w + w**2) + n**2*(12 + 10*w + 3*w**2)) \
+ g**2*(n**4 - 2*(-2 + w)**2*w + n**3*(2 + 3*w) + n**2*(4 - 14*w + \
3*w**2) - n*(-8 + 4*w - 2*w**2 + w**3)))/((2 + (-1 + g)*n)*(2 + n - \
w)*(-((-2 + g)*n**2) + n*(-4 - 2*g*(-3 + w) + g**2*(-2 + w) - w) + (2 \
+ g*(-2 + w))*w))))/(4**(w/(2 + n - w))*(-((2 - 3*n + w + g*(-2 + n + \
w))/(1 + g)))**((-(g**3*n*(n**2 + (-2 + w)**2)*w) + 2*(-2 + \
n)*(2*n**3 - n**2*(-4 + w) - 6*n*w + 2*w**2) + g*(-3*n**4 + n**3*(2 - \
6*w) - 2*w*(-4 + w**2) + 2*n*(-4 - 8*w + w**2) + n**2*(12 + 10*w + \
3*w**2)) + g**2*(n**4 - 2*(-2 + w)**2*w + n**3*(2 + 3*w) + n**2*(4 - \
14*w + 3*w**2) - n*(-8 + 4*w - 2*w**2 + w**3)))/((2 + (-1 + g)*n)*(2 \
+ n - w)*(-((-2 + g)*n**2) + n*(-4 - 2*g*(-3 + w) + g**2*(-2 + w) - \
w) + (2 + g*(-2 + w))*w))))
def vtop(v, w, g, n):
    """
    Calculates the dimensionless pressure as a function of the
    dimensionless velocity (companion to vtoz/vtod).

    Input:
    v - Dimensionless velocity
    w - Ambient density power law index
    g - Adiabatic index
    n - Geometry (1 - plane, 2 - cylindrical, 3 - spherical)

    Returns:
    The dimensionless pressure corresponding to v.
    """
    # Closed-form single expression; kept verbatim — do not reformat.
    return ((1. + g)**((g*n*(2. - 3*n + w + g*(-2. + n + w)))/((2. + (-1. + \
g)*n)*((-2. + g)*n + w)))*(1. - 2./(1. + g))**((-2.*n + w + g*w)/((-2. + \
g)*n + w))*(1. - v)**((g*(n - w))/((-2. + g)*n + w))*v**((2.*n)/(2. + n - \
w))*(-2. + 3*n - w - g*(-2. + n + w))**((n*(g**2.*(n**2. + (-2. + w)**2.) + \
2.*(-2. + n)*(2.*n - w) + g*(-4 - 3*n**2. - 2.*n*(-4 + w) + w**2.)))/((2. + \
(-1. + g)*n)*(2. + n - w)*((-2. + g)*n + w))))/(2.**((-2. + n + w)/(2. + n \
- w))*(-1. + g)*(2. + n - (2. + (-1. + g)*n)*v - w)**((n*(g**2.*(n**2. + \
(-2. + w)**2.) + 2.*(-2. + n)*(2.*n - w) + g*(-4 - 3*n**2. - 2.*n*(-4 + w) + \
w**2.)))/((2. + (-1. + g)*n)*(2. + n - w)*((-2. + g)*n + w))))
| 47.352941
| 84
| 0.326708
| 668
| 3,220
| 1.57485
| 0.052395
| 0.11597
| 0.068441
| 0.038023
| 0.942015
| 0.923004
| 0.909696
| 0.805133
| 0.772814
| 0.772814
| 0
| 0.1146
| 0.238509
| 3,220
| 67
| 85
| 48.059701
| 0.314437
| 0.17205
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.078947
| false
| 0
| 0
| 0
| 0.157895
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7944f86eef9f54bcb82b822d92fe5b30b5bf84ba
| 164
|
py
|
Python
|
src/week_7/data/io.py
|
Rutafar/text-analytics-lecture
|
3cdf5cf5b0d2ca85343c259beade0054f4be6e3d
|
[
"MIT"
] | 2
|
2018-03-05T20:45:11.000Z
|
2019-03-17T02:59:57.000Z
|
src/week_7/data/io.py
|
Rutafar/text-analytics-lecture
|
3cdf5cf5b0d2ca85343c259beade0054f4be6e3d
|
[
"MIT"
] | null | null | null |
src/week_7/data/io.py
|
Rutafar/text-analytics-lecture
|
3cdf5cf5b0d2ca85343c259beade0054f4be6e3d
|
[
"MIT"
] | 8
|
2018-03-02T14:12:35.000Z
|
2018-04-21T10:31:25.000Z
|
from utils.utils import data_from_pickle
def load_tweets():
    """Load the cleaned Trump tweets pickle into the current session."""
    pickle_path = '02_cleaned/trump_tweets.pkl'
    return data_from_pickle(pickle_path)
| 27.333333
| 58
| 0.762195
| 24
| 164
| 4.916667
| 0.625
| 0.135593
| 0.237288
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014085
| 0.134146
| 164
| 6
| 58
| 27.333333
| 0.816901
| 0.20122
| 0
| 0
| 0
| 0
| 0.214286
| 0.214286
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
794a0906ad2331288671df2161069508cbb11720
| 32,856
|
py
|
Python
|
dlkit/abstract_osid/assessment_authoring/queries.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 2
|
2018-02-23T12:16:11.000Z
|
2020-10-08T17:54:24.000Z
|
dlkit/abstract_osid/assessment_authoring/queries.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 87
|
2017-04-21T18:57:15.000Z
|
2021-12-13T19:43:57.000Z
|
dlkit/abstract_osid/assessment_authoring/queries.py
|
UOC/dlkit
|
a9d265db67e81b9e0f405457464e762e2c03f769
|
[
"MIT"
] | 1
|
2018-03-01T16:44:25.000Z
|
2018-03-01T16:44:25.000Z
|
"""Implementations of assessment.authoring abstract base class queries."""
# pylint: disable=invalid-name
# Method names comply with OSID specification.
# pylint: disable=no-init
# Abstract classes do not define __init__.
# pylint: disable=too-few-public-methods
# Some interfaces are specified as 'markers' and include no methods.
# pylint: disable=too-many-public-methods
# Number of methods are defined in specification
# pylint: disable=too-many-ancestors
# Inheritance defined in specification
# pylint: disable=too-many-arguments
# Argument signature defined in specification.
# pylint: disable=duplicate-code
# All apparent duplicates have been inspected. They aren't.
import abc
class AssessmentPartQuery:
    """This is the query for searching assessment parts.

    Each method match request produces an ``AND`` term while multiple
    invocations of a method produces a nested ``OR``.

    """
    # NOTE(review): ``__metaclass__`` is Python 2 metaclass syntax; under
    # Python 3 it is ignored, so abstractness is not enforced there.
    # Kept as-is because the surrounding codebase uses this convention.
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def match_assessment_id(self, assessment_id, match):
        """Sets the assessment ``Id`` for this query.

        :param assessment_id: an assessment ``Id``
        :type assessment_id: ``osid.id.Id``
        :param match: ``true`` for a positive match, ``false`` for a negative match
        :type match: ``boolean``
        :raise: ``NullArgument`` -- ``assessment_id`` is ``null``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def clear_assessment_id_terms(self):
        """Clears all assessment ``Id`` terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    # ``del query.assessment_id_terms`` clears the terms (OSID property
    # idiom; this fdel-only pattern repeats for each term group below).
    assessment_id_terms = property(fdel=clear_assessment_id_terms)

    @abc.abstractmethod
    def supports_assessment_query(self):
        """Tests if an ``AssessmentQuery`` is available.

        :return: ``true`` if an assessment query is available, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def get_assessment_query(self):
        """Gets the query for an assessment.

        Multiple retrievals produce a nested ``OR`` term.

        :return: the assessment query
        :rtype: ``osid.assessment.AssessmentQuery``
        :raise: ``Unimplemented`` -- ``supports_assessment_query()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_assessment_query()`` is ``true``.*

        """
        return  # osid.assessment.AssessmentQuery

    assessment_query = property(fget=get_assessment_query)

    @abc.abstractmethod
    def clear_assessment_terms(self):
        """Clears all assessment terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    assessment_terms = property(fdel=clear_assessment_terms)

    @abc.abstractmethod
    def match_parent_assessment_part_id(self, assessment_part_id, match):
        """Sets the assessment part ``Id`` for this query.

        :param assessment_part_id: an assessment part ``Id``
        :type assessment_part_id: ``osid.id.Id``
        :param match: ``true`` for a positive match, ``false`` for a negative match
        :type match: ``boolean``
        :raise: ``NullArgument`` -- ``assessment_part_id`` is ``null``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def clear_parent_assessment_part_id_terms(self):
        """Clears all assessment part ``Id`` terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    parent_assessment_part_id_terms = property(fdel=clear_parent_assessment_part_id_terms)

    @abc.abstractmethod
    def supports_parent_assessment_part_query(self):
        """Tests if an ``AssessmentPartQuery`` is available.

        :return: ``true`` if an assessment part query is available, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def get_parent_assessment_part_query(self):
        """Gets the query for an assessment part.

        Multiple retrievals produce a nested ``OR`` term.

        :return: the assessment part query
        :rtype: ``osid.assessment.authoring.AssessmentPartQuery``
        :raise: ``Unimplemented`` -- ``supports_parent_assessment_part_query()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_parent_assessment_part_query()`` is ``true``.*

        """
        return  # osid.assessment.authoring.AssessmentPartQuery

    parent_assessment_part_query = property(fget=get_parent_assessment_part_query)

    @abc.abstractmethod
    def match_any_parent_assessment_part(self, match):
        """Matches assessment parts with any parent assessment part.

        :param match: ``true`` to match assessment parts with any parent, ``false`` to match assessment parts with no parents
        :type match: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def clear_parent_assessment_part_terms(self):
        """Clears all assessment part terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    parent_assessment_part_terms = property(fdel=clear_parent_assessment_part_terms)

    @abc.abstractmethod
    def match_section(self, match):
        """Matches assessment parts that are also used as sections.

        :param match: ``true`` for a positive match, ``false`` for a negative match
        :type match: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def clear_section_terms(self):
        """Clears all section terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    section_terms = property(fdel=clear_section_terms)

    @abc.abstractmethod
    def match_weight(self, low, high, match):
        """Matches assessment parts that fall in between the given weights inclusive.

        :param low: low end of range
        :type low: ``cardinal``
        :param high: high end of range
        :type high: ``cardinal``
        :param match: ``true`` for a positive match, ``false`` for a negative match
        :type match: ``boolean``
        :raise: ``InvalidArgument`` -- ``high`` is less than ``low``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def match_any_weight(self, match):
        """Matches assessment parts with any weight assigned.

        :param match: ``true`` to match assessment parts with any weight, ``false`` to match assessment parts with no weight
        :type match: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def clear_weight_terms(self):
        """Clears all weight terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    weight_terms = property(fdel=clear_weight_terms)

    @abc.abstractmethod
    def match_allocated_time(self, low, high, match):
        """Matches assessment parts whose allocated time falls in between the given times inclusive.

        :param low: low end of range
        :type low: ``osid.calendaring.Duration``
        :param high: high end of range
        :type high: ``osid.calendaring.Duration``
        :param match: ``true`` for a positive match, ``false`` for a negative match
        :type match: ``boolean``
        :raise: ``InvalidArgument`` -- ``high`` is less than ``low``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def match_any_allocated_time(self, match):
        """Matches assessment parts with any time assigned.

        :param match: ``true`` to match assessment parts with any allocated time, ``false`` to match assessment parts with no allocated time
        :type match: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def clear_allocated_time_terms(self):
        """Clears all allocated time terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    allocated_time_terms = property(fdel=clear_allocated_time_terms)

    @abc.abstractmethod
    def match_child_assessment_part_id(self, assessment_part_id, match):
        """Sets the assessment part ``Id`` for this query.

        :param assessment_part_id: an assessment part ``Id``
        :type assessment_part_id: ``osid.id.Id``
        :param match: ``true`` for a positive match, ``false`` for a negative match
        :type match: ``boolean``
        :raise: ``NullArgument`` -- ``assessment_part_id`` is ``null``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def clear_child_assessment_part_id_terms(self):
        """Clears all assessment part ``Id`` terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    child_assessment_part_id_terms = property(fdel=clear_child_assessment_part_id_terms)

    @abc.abstractmethod
    def supports_child_assessment_part_query(self):
        """Tests if an ``AssessmentPartQuery`` is available.

        :return: ``true`` if an assessment part query is available, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def get_child_assessment_part_query(self):
        """Gets the query for an assessment part.

        Multiple retrievals produce a nested ``OR`` term.

        :return: the assessment part query
        :rtype: ``osid.assessment.authoring.AssessmentPartQuery``
        :raise: ``Unimplemented`` -- ``supports_child_assessment_part_query()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_child_assessment_part_query()`` is ``true``.*

        """
        return  # osid.assessment.authoring.AssessmentPartQuery

    child_assessment_part_query = property(fget=get_child_assessment_part_query)

    @abc.abstractmethod
    def match_any_child_assessment_part(self, match):
        """Matches assessment parts with any child assessment part.

        :param match: ``true`` to match assessment parts with any children, ``false`` to match assessment parts with no children
        :type match: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def clear_child_assessment_part_terms(self):
        """Clears all assessment part terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    child_assessment_part_terms = property(fdel=clear_child_assessment_part_terms)

    @abc.abstractmethod
    def match_bank_id(self, bank_id, match):
        """Matches constrainers mapped to the bank.

        :param bank_id: the bank ``Id``
        :type bank_id: ``osid.id.Id``
        :param match: ``true`` for a positive match, ``false`` for a negative match
        :type match: ``boolean``
        :raise: ``NullArgument`` -- ``bank_id`` is ``null``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def clear_bank_id_terms(self):
        """Clears the bank ``Id`` query terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    bank_id_terms = property(fdel=clear_bank_id_terms)

    @abc.abstractmethod
    def supports_bank_query(self):
        """Tests if an ``BankQuery`` is available.

        :return: ``true`` if a bank query is available, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def get_bank_query(self):
        """Gets the query for a bank.

        Multiple retrievals produce a nested ``OR`` term.

        :return: the bank query
        :rtype: ``osid.assessment.BankQuery``
        :raise: ``Unimplemented`` -- ``supports_bank_query()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_bank_query()`` is ``true``.*

        """
        return  # osid.assessment.BankQuery

    bank_query = property(fget=get_bank_query)

    @abc.abstractmethod
    def clear_bank_terms(self):
        """Clears the bank query terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    bank_terms = property(fdel=clear_bank_terms)

    @abc.abstractmethod
    def get_assessment_part_query_record(self, assessment_part_record_type):
        """Gets the assessment part query record corresponding to the given ``AssessmentPart`` record ``Type``.

        Multiple retrievals produce a nested ``OR`` term.

        :param assessment_part_record_type: an assessment part record type
        :type assessment_part_record_type: ``osid.type.Type``
        :return: the assessment part query record
        :rtype: ``osid.assessment.authoring.records.AssessmentPartQueryRecord``
        :raise: ``NullArgument`` -- ``assessment_part_record_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unsupported`` -- ``has_record_type(assessment_part_record_type)`` is ``false``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.assessment.authoring.records.AssessmentPartQueryRecord
class SequenceRuleQuery:
    """This is the query for searching sequence rules.

    Each method match specifies a ``AND`` term while multiple
    invocations of the same method produce a nested ``OR``.

    """
    # NOTE(review): Python 2 style metaclass declaration (generated code);
    # this attribute is inert under Python 3.
    __metaclass__ = abc.ABCMeta

    # Each clear_*_terms method below also serves as the deleter for the
    # corresponding *_terms property.

    @abc.abstractmethod
    def match_assessment_part_id(self, assessment_part_id, match):
        """Sets the assessment part ``Id`` for this query.

        :param assessment_part_id: an assessment part ``Id``
        :type assessment_part_id: ``osid.id.Id``
        :param match: ``true`` for a positive match, ``false`` for a negative match
        :type match: ``boolean``
        :raise: ``NullArgument`` -- ``assessment_part_id`` is ``null``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def clear_assessment_part_id_terms(self):
        """Clears all assessment part ``Id`` terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    assessment_part_id_terms = property(fdel=clear_assessment_part_id_terms)

    @abc.abstractmethod
    def supports_assessment_part_query(self):
        """Tests if an ``AssessmentPartQuery`` is available.

        :return: ``true`` if an assessment part query is available, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def get_assessment_part_query(self):
        """Gets the query for an assessment part.

        Multiple retrievals produce a nested ``OR`` term.

        :return: the assessment part query
        :rtype: ``osid.assessment.authoring.AssessmentPartQuery``
        :raise: ``Unimplemented`` -- ``supports_assessment_part_query()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_assessment_part_query()`` is ``true``.*

        """
        return  # osid.assessment.authoring.AssessmentPartQuery

    assessment_part_query = property(fget=get_assessment_part_query)

    @abc.abstractmethod
    def clear_assessment_part_terms(self):
        """Clears all assessment part terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    assessment_part_terms = property(fdel=clear_assessment_part_terms)

    @abc.abstractmethod
    def match_next_assessment_part_id(self, assessment_part_id, match):
        """Sets the assessment part ``Id`` for this query.

        :param assessment_part_id: an assessment part ``Id``
        :type assessment_part_id: ``osid.id.Id``
        :param match: ``true`` for a positive match, ``false`` for a negative match
        :type match: ``boolean``
        :raise: ``NullArgument`` -- ``assessment_part_id`` is ``null``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def clear_next_assessment_part_id_terms(self):
        """Clears all assessment part ``Id`` terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    next_assessment_part_id_terms = property(fdel=clear_next_assessment_part_id_terms)

    @abc.abstractmethod
    def supports_next_assessment_part_query(self):
        """Tests if an ``AssessmentPartQuery`` is available.

        :return: ``true`` if an assessment part query is available, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def get_next_assessment_part_query(self):
        """Gets the query for an assessment part.

        Multiple retrievals produce a nested ``OR`` term.

        :return: the assessment part query
        :rtype: ``osid.assessment.authoring.AssessmentPartQuery``
        :raise: ``Unimplemented`` -- ``supports_next_assessment_part_query()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_next_assessment_part_query()`` is ``true``.*

        """
        return  # osid.assessment.authoring.AssessmentPartQuery

    next_assessment_part_query = property(fget=get_next_assessment_part_query)

    @abc.abstractmethod
    def clear_next_assessment_part_terms(self):
        """Clears all assessment part terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    next_assessment_part_terms = property(fdel=clear_next_assessment_part_terms)

    @abc.abstractmethod
    def match_minimum_score(self, low, high, match):
        """Matches minimum scores that fall in between the given scores inclusive.

        :param low: low end of range
        :type low: ``cardinal``
        :param high: high end of range
        :type high: ``cardinal``
        :param match: ``true`` for a positive match, ``false`` for a negative match
        :type match: ``boolean``
        :raise: ``InvalidArgument`` -- ``high`` is less than ``low``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def match_any_minimum_score(self, match):
        """Matches assessment parts with any minimum score assigned.

        :param match: ``true`` to match assessment parts with any minimum score, ``false`` to match assessment parts with no minimum score
        :type match: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def clear_minimum_score_terms(self):
        """Clears all minimum score terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    minimum_score_terms = property(fdel=clear_minimum_score_terms)

    @abc.abstractmethod
    def match_maximum_score(self, low, high, match):
        """Matches maximum scores that fall in between the given scores inclusive.

        :param low: low end of range
        :type low: ``cardinal``
        :param high: high end of range
        :type high: ``cardinal``
        :param match: ``true`` for a positive match, ``false`` for a negative match
        :type match: ``boolean``
        :raise: ``InvalidArgument`` -- ``high`` is less than ``low``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def match_any_maximum_score(self, match):
        """Matches assessment parts with any maximum score assigned.

        :param match: ``true`` to match assessment parts with any maximum score, ``false`` to match assessment parts with no maximum score
        :type match: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def clear_maximum_score_terms(self):
        """Clears all maximum score terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    maximum_score_terms = property(fdel=clear_maximum_score_terms)

    @abc.abstractmethod
    def match_cumulative(self, match):
        """Matches cumulative rules.

        :param match: ``true`` for a positive match, ``false`` for a negative match
        :type match: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def clear_cumulative_terms(self):
        """Clears all cumulative terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    cumulative_terms = property(fdel=clear_cumulative_terms)

    @abc.abstractmethod
    def match_applied_assessment_part_id(self, assessment_part_id, match):
        """Sets the assessment part ``Id`` for this query.

        :param assessment_part_id: an assessment part ``Id``
        :type assessment_part_id: ``osid.id.Id``
        :param match: ``true`` for a positive match, ``false`` for a negative match
        :type match: ``boolean``
        :raise: ``NullArgument`` -- ``assessment_part_id`` is ``null``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def clear_applied_assessment_part_id_terms(self):
        """Clears all assessment part ``Id`` terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    applied_assessment_part_id_terms = property(fdel=clear_applied_assessment_part_id_terms)

    @abc.abstractmethod
    def supports_applied_assessment_part_query(self):
        """Tests if an ``AssessmentPartQuery`` is available.

        :return: ``true`` if an assessment part query is available, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def get_applied_assessment_part_query(self):
        """Gets the query for an assessment part.

        Multiple retrievals produce a nested ``OR`` term.

        :return: the assessment part query
        :rtype: ``osid.assessment.authoring.AssessmentPartQuery``
        :raise: ``Unimplemented`` -- ``supports_applied_assessment_part_query()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_applied_assessment_part_query()`` is ``true``.*

        """
        return  # osid.assessment.authoring.AssessmentPartQuery

    applied_assessment_part_query = property(fget=get_applied_assessment_part_query)

    @abc.abstractmethod
    def match_any_applied_assessment_part(self, match):
        """Matches assessment parts with any applied assessment part.

        :param match: ``true`` to match assessment parts with any applied assessment part, ``false`` to match assessment parts with no applied assessment parts
        :type match: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def clear_applied_assessment_part_terms(self):
        """Clears all assessment part terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    applied_assessment_part_terms = property(fdel=clear_applied_assessment_part_terms)

    @abc.abstractmethod
    def match_bank_id(self, bank_id, match):
        """Matches constrainers mapped to the bank.

        :param bank_id: the bank ``Id``
        :type bank_id: ``osid.id.Id``
        :param match: ``true`` for a positive match, ``false`` for a negative match
        :type match: ``boolean``
        :raise: ``NullArgument`` -- ``bank_id`` is ``null``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def clear_bank_id_terms(self):
        """Clears the bank ``Id`` query terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    bank_id_terms = property(fdel=clear_bank_id_terms)

    @abc.abstractmethod
    def supports_bank_query(self):
        """Tests if an ``BankQuery`` is available.

        :return: ``true`` if a bank query is available, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def get_bank_query(self):
        """Gets the query for a bank.

        Multiple retrievals produce a nested ``OR`` term.

        :return: the bank query
        :rtype: ``osid.assessment.BankQuery``
        :raise: ``Unimplemented`` -- ``supports_bank_query()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_bank_query()`` is ``true``.*

        """
        return  # osid.assessment.BankQuery

    bank_query = property(fget=get_bank_query)

    @abc.abstractmethod
    def clear_bank_terms(self):
        """Clears the bank query terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    bank_terms = property(fdel=clear_bank_terms)

    @abc.abstractmethod
    def get_sequence_rule_query_record(self, sequence_rule_record_type):
        """Gets the sequence rule query record corresponding to the given ``SequenceRule`` record ``Type``.

        Multiple record retrievals produce a nested ``OR`` term.

        :param sequence_rule_record_type: a sequence rule record type
        :type sequence_rule_record_type: ``osid.type.Type``
        :return: the sequence rule query record
        :rtype: ``osid.assessment.authoring.records.SequenceRuleQueryRecord``
        :raise: ``NullArgument`` -- ``sequence_rule_record_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unsupported`` -- ``has_record_type(sequence_rule_record_type)`` is ``false``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.assessment.authoring.records.SequenceRuleQueryRecord
class SequenceRuleEnablerQuery:
    """This is the query for searching sequence rule enablers.

    Each method match specifies a ``AND`` term while multiple
    invocations of the same method produce a nested ``OR``.

    """
    # NOTE(review): Python 2 style metaclass declaration (generated code);
    # this attribute is inert under Python 3.
    __metaclass__ = abc.ABCMeta

    # Each clear_*_terms method below also serves as the deleter for the
    # corresponding *_terms property.

    @abc.abstractmethod
    def match_ruled_sequence_rule_id(self, sequence_rule_id, match):
        """Matches enablers mapped to the sequence rule.

        :param sequence_rule_id: the sequence rule ``Id``
        :type sequence_rule_id: ``osid.id.Id``
        :param match: ``true`` for a positive match, ``false`` for a negative match
        :type match: ``boolean``
        :raise: ``NullArgument`` -- ``sequence_rule_id`` is ``null``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def clear_ruled_sequence_rule_id_terms(self):
        """Clears the sequence rule ``Id`` query terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    ruled_sequence_rule_id_terms = property(fdel=clear_ruled_sequence_rule_id_terms)

    @abc.abstractmethod
    def supports_ruled_sequence_rule_query(self):
        """Tests if a ``SequenceRuleQuery`` is available.

        :return: ``true`` if a sequence rule query is available, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def get_ruled_sequence_rule_query(self):
        """Gets the query for a sequence rule.

        Multiple retrievals produce a nested ``OR`` term.

        :return: the sequence rule query
        :rtype: ``osid.assessment.authoring.SequenceRuleQuery``
        :raise: ``Unimplemented`` -- ``supports_ruled_sequence_rule_query()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_ruled_sequence_rule_query()`` is ``true``.*

        """
        return  # osid.assessment.authoring.SequenceRuleQuery

    ruled_sequence_rule_query = property(fget=get_ruled_sequence_rule_query)

    @abc.abstractmethod
    def match_any_ruled_sequence_rule(self, match):
        """Matches enablers mapped to any sequence rule.

        :param match: ``true`` for enablers mapped to any sequence rule, ``false`` to match enablers mapped to no sequence rules
        :type match: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def clear_ruled_sequence_rule_terms(self):
        """Clears the sequence rule query terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    ruled_sequence_rule_terms = property(fdel=clear_ruled_sequence_rule_terms)

    @abc.abstractmethod
    def match_bank_id(self, bank_id, match):
        """Matches enablers mapped to the bank.

        :param bank_id: the bank ``Id``
        :type bank_id: ``osid.id.Id``
        :param match: ``true`` for a positive match, ``false`` for a negative match
        :type match: ``boolean``
        :raise: ``NullArgument`` -- ``bank_id`` is ``null``

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    @abc.abstractmethod
    def clear_bank_id_terms(self):
        """Clears the bank ``Id`` query terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    bank_id_terms = property(fdel=clear_bank_id_terms)

    @abc.abstractmethod
    def supports_bank_query(self):
        """Tests if an ``BankQuery`` is available.

        :return: ``true`` if a bank query is available, ``false`` otherwise
        :rtype: ``boolean``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # boolean

    @abc.abstractmethod
    def get_bank_query(self):
        """Gets the query for a bank.

        Multiple retrievals produce a nested ``OR`` term.

        :return: the bank query
        :rtype: ``osid.assessment.BankQuery``
        :raise: ``Unimplemented`` -- ``supports_bank_query()`` is ``false``

        *compliance: optional -- This method must be implemented if
        ``supports_bank_query()`` is ``true``.*

        """
        return  # osid.assessment.BankQuery

    bank_query = property(fget=get_bank_query)

    @abc.abstractmethod
    def clear_bank_terms(self):
        """Clears the bank query terms.

        *compliance: mandatory -- This method must be implemented.*

        """
        pass

    bank_terms = property(fdel=clear_bank_terms)

    @abc.abstractmethod
    def get_sequence_rule_enabler_query_record(self, sequence_rule_enabler_record_type):
        """Gets the sequence rule enabler query record corresponding to the given ``SequenceRuleEnabler`` record ``Type``.

        Multiple record retrievals produce a nested ``OR`` term.

        :param sequence_rule_enabler_record_type: a sequence rule enabler record type
        :type sequence_rule_enabler_record_type: ``osid.type.Type``
        :return: the sequence rule enabler query record
        :rtype: ``osid.assessment.authoring.records.SequenceRuleEnablerQueryRecord``
        :raise: ``NullArgument`` -- ``sequence_rule_enabler_record_type`` is ``null``
        :raise: ``OperationFailed`` -- unable to complete request
        :raise: ``Unsupported`` -- ``has_record_type(sequence_rule_enabler_record_type)`` is ``false``

        *compliance: mandatory -- This method must be implemented.*

        """
        return  # osid.assessment.authoring.records.SequenceRuleEnablerQueryRecord
| 29.869091
| 159
| 0.648192
| 3,694
| 32,856
| 5.59556
| 0.052247
| 0.092114
| 0.070634
| 0.056507
| 0.901306
| 0.849976
| 0.795839
| 0.724625
| 0.694872
| 0.668457
| 0
| 0
| 0.2465
| 32,856
| 1,099
| 160
| 29.896269
| 0.834915
| 0.601595
| 0
| 0.660305
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.278626
| false
| 0.19084
| 0.003817
| 0
| 0.530534
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 8
|
799bcd7053a8d18cfbe4bf6f1135ba84cf85a976
| 225,032
|
py
|
Python
|
sdk/python/pulumi_alicloud/alb/outputs.py
|
pulumi/pulumi-alicloud
|
9c34d84b4588a7c885c6bec1f03b5016e5a41683
|
[
"ECL-2.0",
"Apache-2.0"
] | 42
|
2019-03-18T06:34:37.000Z
|
2022-03-24T07:08:57.000Z
|
sdk/python/pulumi_alicloud/alb/outputs.py
|
pulumi/pulumi-alicloud
|
9c34d84b4588a7c885c6bec1f03b5016e5a41683
|
[
"ECL-2.0",
"Apache-2.0"
] | 152
|
2019-04-15T21:03:44.000Z
|
2022-03-29T18:00:57.000Z
|
sdk/python/pulumi_alicloud/alb/outputs.py
|
pulumi/pulumi-alicloud
|
9c34d84b4588a7c885c6bec1f03b5016e5a41683
|
[
"ECL-2.0",
"Apache-2.0"
] | 3
|
2020-08-26T17:30:07.000Z
|
2021-07-05T01:37:45.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
# Public surface of this generated module: every ALB output/result type the
# provider can return.  Generated by tfgen -- do not edit the list by hand.
__all__ = [
    'AclAclEntry',
    'ListenerAccessLogTracingConfig',
    'ListenerAclConfig',
    'ListenerAclConfigAclRelation',
    'ListenerCertificate',
    'ListenerDefaultAction',
    'ListenerDefaultActionForwardGroupConfig',
    'ListenerDefaultActionForwardGroupConfigServerGroupTuple',
    'ListenerQuicConfig',
    'ListenerXforwardedForConfig',
    'LoadBalancerAccessLogConfig',
    'LoadBalancerLoadBalancerBillingConfig',
    'LoadBalancerModificationProtectionConfig',
    'LoadBalancerZoneMapping',
    'RuleRuleAction',
    'RuleRuleActionFixedResponseConfig',
    'RuleRuleActionForwardGroupConfig',
    'RuleRuleActionForwardGroupConfigServerGroupTuple',
    'RuleRuleActionInsertHeaderConfig',
    'RuleRuleActionRedirectConfig',
    'RuleRuleActionRewriteConfig',
    'RuleRuleCondition',
    'RuleRuleConditionCookieConfig',
    'RuleRuleConditionCookieConfigValue',
    'RuleRuleConditionHeaderConfig',
    'RuleRuleConditionHostConfig',
    'RuleRuleConditionMethodConfig',
    'RuleRuleConditionPathConfig',
    'RuleRuleConditionQueryStringConfig',
    'RuleRuleConditionQueryStringConfigValue',
    'ServerGroupHealthCheckConfig',
    'ServerGroupServer',
    'ServerGroupStickySessionConfig',
    'GetAclsAclResult',
    'GetAclsAclAclEntryResult',
    'GetHealthCheckTemplatesTemplateResult',
    'GetListenersListenerResult',
    'GetListenersListenerAccessLogTracingConfigResult',
    'GetListenersListenerAclConfigResult',
    'GetListenersListenerAclConfigAclRelationResult',
    'GetListenersListenerCertificateResult',
    'GetListenersListenerDefaultActionResult',
    'GetListenersListenerDefaultActionForwardGroupConfigResult',
    'GetListenersListenerDefaultActionForwardGroupConfigServerGroupTupleResult',
    'GetListenersListenerQuicConfigResult',
    'GetListenersListenerXforwardedForConfigResult',
    'GetLoadBalancersBalancerResult',
    'GetLoadBalancersBalancerAccessLogConfigResult',
    'GetLoadBalancersBalancerDeletionProtectionConfigResult',
    'GetLoadBalancersBalancerLoadBalancerBillingConfigResult',
    'GetLoadBalancersBalancerLoadBalancerOperationLockResult',
    'GetLoadBalancersBalancerModificationProtectionConfigResult',
    'GetLoadBalancersBalancerZoneMappingResult',
    'GetLoadBalancersBalancerZoneMappingLoadBalancerAddressResult',
    'GetRulesRuleResult',
    'GetRulesRuleRuleActionResult',
    'GetRulesRuleRuleActionFixedResponseConfigResult',
    'GetRulesRuleRuleActionForwardGroupConfigResult',
    'GetRulesRuleRuleActionForwardGroupConfigServerGroupTupleResult',
    'GetRulesRuleRuleActionInsertHeaderConfigResult',
    'GetRulesRuleRuleActionRedirectConfigResult',
    'GetRulesRuleRuleActionRewriteConfigResult',
    'GetRulesRuleRuleConditionResult',
    'GetRulesRuleRuleConditionCookieConfigResult',
    'GetRulesRuleRuleConditionCookieConfigValueResult',
    'GetRulesRuleRuleConditionHeaderConfigResult',
    'GetRulesRuleRuleConditionHostConfigResult',
    'GetRulesRuleRuleConditionMethodConfigResult',
    'GetRulesRuleRuleConditionPathConfigResult',
    'GetRulesRuleRuleConditionQueryStringConfigResult',
    'GetRulesRuleRuleConditionQueryStringConfigValueResult',
    'GetSecurityPoliciesPolicyResult',
    'GetServerGroupsGroupResult',
    'GetServerGroupsGroupHealthCheckConfigResult',
    'GetServerGroupsGroupServerResult',
    'GetServerGroupsGroupStickySessionConfigResult',
    'GetZonesZoneResult',
]
@pulumi.output_type
class AclAclEntry(dict):
    """A single entry (IP address plus metadata) of an ALB access control list."""

    def __init__(__self__, *,
                 description: Optional[str] = None,
                 entry: Optional[str] = None,
                 status: Optional[str] = None):
        """
        :param str description: The description of the ACL entry. The description must be 1 to 256 characters in length, and can contain letters, digits, hyphens (-), forward slashes (/), periods (.),and underscores (_). It can also contain Chinese characters.
        :param str entry: The IP address for the ACL entry.
        :param str status: The state of the ACL. Valid values:`Provisioning` , `Available` and `Configuring`. `Provisioning`: The ACL is being created. `Available`: The ACL is available. `Configuring`: The ACL is being configured.
        """
        # Only record the fields that were actually supplied.
        for prop_name, prop_value in (("description", description),
                                      ("entry", entry),
                                      ("status", status)):
            if prop_value is not None:
                pulumi.set(__self__, prop_name, prop_value)

    @property
    @pulumi.getter
    def description(self) -> Optional[str]:
        """The description of the ACL entry. The description must be 1 to 256 characters in length, and can contain letters, digits, hyphens (-), forward slashes (/), periods (.),and underscores (_). It can also contain Chinese characters."""
        return pulumi.get(self, "description")

    @property
    @pulumi.getter
    def entry(self) -> Optional[str]:
        """The IP address for the ACL entry."""
        return pulumi.get(self, "entry")

    @property
    @pulumi.getter
    def status(self) -> Optional[str]:
        """The state of the ACL. Valid values:`Provisioning` , `Available` and `Configuring`. `Provisioning`: The ACL is being created. `Available`: The ACL is available. `Configuring`: The ACL is being configured."""
        return pulumi.get(self, "status")
@pulumi.output_type
class ListenerAccessLogTracingConfig(dict):
    """Xtrace (access-log tracing) settings for an ALB listener."""

    @staticmethod
    def __key_warning(key: str):
        # Map a camelCase wire key to its snake_case property name, if known.
        suggest = {
            "tracingEnabled": "tracing_enabled",
            "tracingSample": "tracing_sample",
            "tracingType": "tracing_type",
        }.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in ListenerAccessLogTracingConfig. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        ListenerAccessLogTracingConfig.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        ListenerAccessLogTracingConfig.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 tracing_enabled: Optional[bool] = None,
                 tracing_sample: Optional[int] = None,
                 tracing_type: Optional[str] = None):
        """
        :param bool tracing_enabled: Xtrace Function. Value: `True` Or `False` . Default Value: `False`.
        :param int tracing_sample: Xtrace Sampling Rate. Value: `1` to `10000`.
        :param str tracing_type: Xtrace Type Value Is `Zipkin`.
        """
        # Only record the fields that were actually supplied.
        for prop_name, prop_value in (("tracing_enabled", tracing_enabled),
                                      ("tracing_sample", tracing_sample),
                                      ("tracing_type", tracing_type)):
            if prop_value is not None:
                pulumi.set(__self__, prop_name, prop_value)

    @property
    @pulumi.getter(name="tracingEnabled")
    def tracing_enabled(self) -> Optional[bool]:
        """Xtrace Function. Value: `True` Or `False` . Default Value: `False`."""
        return pulumi.get(self, "tracing_enabled")

    @property
    @pulumi.getter(name="tracingSample")
    def tracing_sample(self) -> Optional[int]:
        """Xtrace Sampling Rate. Value: `1` to `10000`."""
        return pulumi.get(self, "tracing_sample")

    @property
    @pulumi.getter(name="tracingType")
    def tracing_type(self) -> Optional[str]:
        """Xtrace Type Value Is `Zipkin`."""
        return pulumi.get(self, "tracing_type")
@pulumi.output_type
class ListenerAclConfig(dict):
    """Access-control configuration (ACL bindings and ACL type) for a listener."""

    @staticmethod
    def __key_warning(key: str):
        # Map a camelCase wire key to its snake_case property name, if known.
        suggest = {
            "aclRelations": "acl_relations",
            "aclType": "acl_type",
        }.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in ListenerAclConfig. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        ListenerAclConfig.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        ListenerAclConfig.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 acl_relations: Optional[Sequence['outputs.ListenerAclConfigAclRelation']] = None,
                 acl_type: Optional[str] = None):
        """
        :param Sequence['ListenerAclConfigAclRelationArgs'] acl_relations: The ACLs that are associated with the listener.
        :param str acl_type: The type of the ACL. Valid values: `White` Or `Black`. `White`: only requests from the IP addresses or CIDR blocks in the ACL are forwarded (an empty whitelist blocks all requests). `Black`: all requests from the IP addresses or CIDR blocks in the ACL are denied (an empty blacklist forwards all requests).
        """
        # Only record the fields that were actually supplied.
        for prop_name, prop_value in (("acl_relations", acl_relations),
                                      ("acl_type", acl_type)):
            if prop_value is not None:
                pulumi.set(__self__, prop_name, prop_value)

    @property
    @pulumi.getter(name="aclRelations")
    def acl_relations(self) -> Optional[Sequence['outputs.ListenerAclConfigAclRelation']]:
        """The ACLs that are associated with the listener."""
        return pulumi.get(self, "acl_relations")

    @property
    @pulumi.getter(name="aclType")
    def acl_type(self) -> Optional[str]:
        """The type of the ACL. Valid values: `White` Or `Black`. `White`: only requests from the IP addresses or CIDR blocks in the ACL are forwarded (an empty whitelist blocks all requests). `Black`: all requests from the IP addresses or CIDR blocks in the ACL are denied (an empty blacklist forwards all requests)."""
        return pulumi.get(self, "acl_type")
@pulumi.output_type
class ListenerAclConfigAclRelation(dict):
    """The association between a listener and one ACL, with its binding state."""

    @staticmethod
    def __key_warning(key: str):
        # Map a camelCase wire key to its snake_case property name, if known.
        suggest = {"aclId": "acl_id"}.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in ListenerAclConfigAclRelation. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        ListenerAclConfigAclRelation.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        ListenerAclConfigAclRelation.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 acl_id: Optional[str] = None,
                 status: Optional[str] = None):
        """
        :param str acl_id: Snooping Binding of the Access Policy Group ID List.
        :param str status: The state of the listener. Valid Values: `Running` Or `Stopped`. `Running`: The listener is running. `Stopped`: The listener is stopped.
        """
        # Only record the fields that were actually supplied.
        for prop_name, prop_value in (("acl_id", acl_id),
                                      ("status", status)):
            if prop_value is not None:
                pulumi.set(__self__, prop_name, prop_value)

    @property
    @pulumi.getter(name="aclId")
    def acl_id(self) -> Optional[str]:
        """Snooping Binding of the Access Policy Group ID List."""
        return pulumi.get(self, "acl_id")

    @property
    @pulumi.getter
    def status(self) -> Optional[str]:
        """The state of the listener. Valid Values: `Running` Or `Stopped`. `Running`: The listener is running. `Stopped`: The listener is stopped."""
        return pulumi.get(self, "status")
@pulumi.output_type
class ListenerCertificate(dict):
    """A server certificate attached to an ALB listener."""

    @staticmethod
    def __key_warning(key: str):
        # Map a camelCase wire key to its snake_case property name, if known.
        suggest = {"certificateId": "certificate_id"}.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in ListenerCertificate. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        ListenerCertificate.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        ListenerCertificate.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 certificate_id: Optional[str] = None):
        # The certificate id is optional; only record it when supplied.
        if certificate_id is not None:
            pulumi.set(__self__, "certificate_id", certificate_id)

    @property
    @pulumi.getter(name="certificateId")
    def certificate_id(self) -> Optional[str]:
        # Identifier of the attached certificate (camelCase key "certificateId" on the wire).
        return pulumi.get(self, "certificate_id")
@pulumi.output_type
class ListenerDefaultAction(dict):
    """The default action a listener applies to requests that match no rule."""

    @staticmethod
    def __key_warning(key: str):
        # Map a camelCase wire key to its snake_case property name, if known.
        suggest = {"forwardGroupConfig": "forward_group_config"}.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in ListenerDefaultAction. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        ListenerDefaultAction.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        ListenerDefaultAction.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 forward_group_config: 'outputs.ListenerDefaultActionForwardGroupConfig',
                 type: str):
        """
        :param 'ListenerDefaultActionForwardGroupConfigArgs' forward_group_config: The configurations of the actions. This parameter is required if Type is set to FowardGroup.
        :param str type: Action Type.
        """
        # Both fields are required, so they are always recorded.
        for prop_name, prop_value in (("forward_group_config", forward_group_config),
                                      ("type", type)):
            pulumi.set(__self__, prop_name, prop_value)

    @property
    @pulumi.getter(name="forwardGroupConfig")
    def forward_group_config(self) -> 'outputs.ListenerDefaultActionForwardGroupConfig':
        """The configurations of the actions. This parameter is required if Type is set to FowardGroup."""
        return pulumi.get(self, "forward_group_config")

    @property
    @pulumi.getter
    def type(self) -> str:
        """Action Type."""
        return pulumi.get(self, "type")
@pulumi.output_type
class ListenerDefaultActionForwardGroupConfig(dict):
    """Forward-group settings for a listener's default action."""

    @staticmethod
    def __key_warning(key: str):
        # Map a camelCase wire key to its snake_case property name, if known.
        suggest = {"serverGroupTuples": "server_group_tuples"}.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in ListenerDefaultActionForwardGroupConfig. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        ListenerDefaultActionForwardGroupConfig.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        ListenerDefaultActionForwardGroupConfig.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 server_group_tuples: Sequence['outputs.ListenerDefaultActionForwardGroupConfigServerGroupTuple']):
        """
        :param Sequence['ListenerDefaultActionForwardGroupConfigServerGroupTupleArgs'] server_group_tuples: The destination server group to which requests are forwarded.
        """
        # Required field: always recorded.
        pulumi.set(__self__, "server_group_tuples", server_group_tuples)

    @property
    @pulumi.getter(name="serverGroupTuples")
    def server_group_tuples(self) -> Sequence['outputs.ListenerDefaultActionForwardGroupConfigServerGroupTuple']:
        """The destination server group to which requests are forwarded."""
        return pulumi.get(self, "server_group_tuples")
@pulumi.output_type
class ListenerDefaultActionForwardGroupConfigServerGroupTuple(dict):
    """
    A single server-group entry of a listener default forward action.
    """
    @staticmethod
    def __key_warning(key: str):
        # Map the camelCase wire key to its snake_case property name.
        suggest = {
            "serverGroupId": "server_group_id",
        }.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in ListenerDefaultActionForwardGroupConfigServerGroupTuple. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        ListenerDefaultActionForwardGroupConfigServerGroupTuple.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        ListenerDefaultActionForwardGroupConfigServerGroupTuple.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 server_group_id: str):
        """
        :param str server_group_id: The ID of the destination server group to which requests are forwarded.
        """
        pulumi.set(__self__, "server_group_id", server_group_id)

    @property
    @pulumi.getter(name="serverGroupId")
    def server_group_id(self) -> str:
        """
        The ID of the destination server group to which requests are forwarded.
        """
        return pulumi.get(self, "server_group_id")
@pulumi.output_type
class ListenerQuicConfig(dict):
    """
    QUIC configuration of a listener, exposed as a dict-backed output type
    with snake_case property getters.
    """
    @staticmethod
    def __key_warning(key: str):
        # Map the camelCase wire keys to their snake_case property names.
        suggest = {
            "quicListenerId": "quic_listener_id",
            "quicUpgradeEnabled": "quic_upgrade_enabled",
        }.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in ListenerQuicConfig. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        ListenerQuicConfig.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        ListenerQuicConfig.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 quic_listener_id: Optional[str] = None,
                 quic_upgrade_enabled: Optional[bool] = None):
        """
        :param str quic_listener_id: There Is a Need to Correlate the QuIC Listener ID. The Https Listener, in Effect at the Time. quicupgradeenabled True When Required.
        :param bool quic_upgrade_enabled: Indicates Whether to Enable the QuIC Upgrade.
        """
        # Store only the fields that were actually supplied.
        for prop, value in (
            ("quic_listener_id", quic_listener_id),
            ("quic_upgrade_enabled", quic_upgrade_enabled),
        ):
            if value is not None:
                pulumi.set(__self__, prop, value)

    @property
    @pulumi.getter(name="quicListenerId")
    def quic_listener_id(self) -> Optional[str]:
        """
        There Is a Need to Correlate the QuIC Listener ID. The Https Listener, in Effect at the Time. quicupgradeenabled True When Required.
        """
        return pulumi.get(self, "quic_listener_id")

    @property
    @pulumi.getter(name="quicUpgradeEnabled")
    def quic_upgrade_enabled(self) -> Optional[bool]:
        """
        Indicates Whether to Enable the QuIC Upgrade.
        """
        return pulumi.get(self, "quic_upgrade_enabled")
@pulumi.output_type
class ListenerXforwardedForConfig(dict):
    """
    X-Forwarded-For header configuration of a listener, exposed as a
    dict-backed output type with snake_case property getters.
    """
    @staticmethod
    def __key_warning(key: str):
        # Map the camelCase wire keys to their snake_case property names.
        suggest = {
            "xforwardedforclientcertIssuerdnalias": "xforwardedforclientcert_issuerdnalias",
            "xforwardedforclientcertIssuerdnenabled": "xforwardedforclientcert_issuerdnenabled",
        }.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in ListenerXforwardedForConfig. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        ListenerXforwardedForConfig.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        ListenerXforwardedForConfig.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 xforwardedforclientcert_issuerdnalias: Optional[str] = None,
                 xforwardedforclientcert_issuerdnenabled: Optional[bool] = None,
                 xforwardedforclientcertclientverifyalias: Optional[str] = None,
                 xforwardedforclientcertclientverifyenabled: Optional[bool] = None,
                 xforwardedforclientcertfingerprintalias: Optional[str] = None,
                 xforwardedforclientcertfingerprintenabled: Optional[bool] = None,
                 xforwardedforclientcertsubjectdnalias: Optional[str] = None,
                 xforwardedforclientcertsubjectdnenabled: Optional[bool] = None,
                 xforwardedforclientsrcportenabled: Optional[bool] = None,
                 xforwardedforenabled: Optional[bool] = None,
                 xforwardedforprotoenabled: Optional[bool] = None,
                 xforwardedforslbidenabled: Optional[bool] = None,
                 xforwardedforslbportenabled: Optional[bool] = None):
        """
        :param str xforwardedforclientcert_issuerdnalias: The Custom Header Field Names Only When `xforwardedforclientcert_issuerdnenabled`, Which Evaluates to True When the Entry into Force of.
        :param bool xforwardedforclientcert_issuerdnenabled: Indicates Whether the `X-Forwarded-Clientcert-issuerdn` Header Field Is Used to Obtain Access to the Server Load Balancer Instance of the Client Certificate after the Manifests Are Signed, the Publisher Information.
        :param str xforwardedforclientcertclientverifyalias: The Custom Header Field Names Only When `xforwardedforclientcertclientverifyenabled` Has a Value of True, this Value Will Not Take Effect until.The name must be 1 to 40 characters in length, and can contain letters, hyphens (-), underscores (_), and digits.
        :param bool xforwardedforclientcertclientverifyenabled: Indicates Whether the `X-Forwarded-Clientcert-clientverify` Header Field Is Used to Obtain Access to the Server Load Balancer Instance of the Client Certificate to Verify the Results.
        :param str xforwardedforclientcertfingerprintalias: The Custom Header Field Names Only When `xforwardedforclientcertfingerprintenabled`, Which Evaluates to True When the Entry into Force of.The name must be 1 to 40 characters in length, and can contain letters, hyphens (-), underscores (_), and digits.
        :param bool xforwardedforclientcertfingerprintenabled: Indicates Whether the `X-Forwarded-Clientcert-fingerprint` Header Field Is Used to Obtain Access to the Server Load Balancer Instance of the Client Certificate Fingerprint Value.
        :param str xforwardedforclientcertsubjectdnalias: The name of the custom header. This parameter is valid only if `xforwardedforclientcertsubjectdnenabled` is set to true. The name must be 1 to 40 characters in length, and can contain letters, hyphens (-), underscores (_), and digits.
        :param bool xforwardedforclientcertsubjectdnenabled: Specifies whether to use the `X-Forwarded-Clientcert-subjectdn` header field to obtain information about the owner of the ALB client certificate. Valid values: true and false. Default value: false.
        :param bool xforwardedforclientsrcportenabled: Indicates Whether the X-Forwarded-Client-Port Header Field Is Used to Obtain Access to Server Load Balancer Instances to the Client, and Those of the Ports.
        :param bool xforwardedforenabled: Whether to Enable by X-Forwarded-For Header Field Is Used to Obtain the Client IP Addresses.
        :param bool xforwardedforprotoenabled: Indicates Whether the X-Forwarded-Proto Header Field Is Used to Obtain the Server Load Balancer Instance Snooping Protocols.
        :param bool xforwardedforslbidenabled: Indicates Whether the SLB-ID Header Field Is Used to Obtain the Load Balancing Instance Id.
        :param bool xforwardedforslbportenabled: Indicates Whether the X-Forwarded-Port Header Field Is Used to Obtain the Server Load Balancer Instance Listening Port.
        """
        # Store only the fields that were actually supplied, in declaration order.
        for prop, value in (
            ("xforwardedforclientcert_issuerdnalias", xforwardedforclientcert_issuerdnalias),
            ("xforwardedforclientcert_issuerdnenabled", xforwardedforclientcert_issuerdnenabled),
            ("xforwardedforclientcertclientverifyalias", xforwardedforclientcertclientverifyalias),
            ("xforwardedforclientcertclientverifyenabled", xforwardedforclientcertclientverifyenabled),
            ("xforwardedforclientcertfingerprintalias", xforwardedforclientcertfingerprintalias),
            ("xforwardedforclientcertfingerprintenabled", xforwardedforclientcertfingerprintenabled),
            ("xforwardedforclientcertsubjectdnalias", xforwardedforclientcertsubjectdnalias),
            ("xforwardedforclientcertsubjectdnenabled", xforwardedforclientcertsubjectdnenabled),
            ("xforwardedforclientsrcportenabled", xforwardedforclientsrcportenabled),
            ("xforwardedforenabled", xforwardedforenabled),
            ("xforwardedforprotoenabled", xforwardedforprotoenabled),
            ("xforwardedforslbidenabled", xforwardedforslbidenabled),
            ("xforwardedforslbportenabled", xforwardedforslbportenabled),
        ):
            if value is not None:
                pulumi.set(__self__, prop, value)

    @property
    @pulumi.getter(name="xforwardedforclientcertIssuerdnalias")
    def xforwardedforclientcert_issuerdnalias(self) -> Optional[str]:
        """
        The Custom Header Field Names Only When `xforwardedforclientcert_issuerdnenabled`, Which Evaluates to True When the Entry into Force of.
        """
        return pulumi.get(self, "xforwardedforclientcert_issuerdnalias")

    @property
    @pulumi.getter(name="xforwardedforclientcertIssuerdnenabled")
    def xforwardedforclientcert_issuerdnenabled(self) -> Optional[bool]:
        """
        Indicates Whether the `X-Forwarded-Clientcert-issuerdn` Header Field Is Used to Obtain Access to the Server Load Balancer Instance of the Client Certificate after the Manifests Are Signed, the Publisher Information.
        """
        return pulumi.get(self, "xforwardedforclientcert_issuerdnenabled")

    @property
    @pulumi.getter
    def xforwardedforclientcertclientverifyalias(self) -> Optional[str]:
        """
        The Custom Header Field Names Only When `xforwardedforclientcertclientverifyenabled` Has a Value of True, this Value Will Not Take Effect until.The name must be 1 to 40 characters in length, and can contain letters, hyphens (-), underscores (_), and digits.
        """
        return pulumi.get(self, "xforwardedforclientcertclientverifyalias")

    @property
    @pulumi.getter
    def xforwardedforclientcertclientverifyenabled(self) -> Optional[bool]:
        """
        Indicates Whether the `X-Forwarded-Clientcert-clientverify` Header Field Is Used to Obtain Access to the Server Load Balancer Instance of the Client Certificate to Verify the Results.
        """
        return pulumi.get(self, "xforwardedforclientcertclientverifyenabled")

    @property
    @pulumi.getter
    def xforwardedforclientcertfingerprintalias(self) -> Optional[str]:
        """
        The Custom Header Field Names Only When `xforwardedforclientcertfingerprintenabled`, Which Evaluates to True When the Entry into Force of.The name must be 1 to 40 characters in length, and can contain letters, hyphens (-), underscores (_), and digits.
        """
        return pulumi.get(self, "xforwardedforclientcertfingerprintalias")

    @property
    @pulumi.getter
    def xforwardedforclientcertfingerprintenabled(self) -> Optional[bool]:
        """
        Indicates Whether the `X-Forwarded-Clientcert-fingerprint` Header Field Is Used to Obtain Access to the Server Load Balancer Instance of the Client Certificate Fingerprint Value.
        """
        return pulumi.get(self, "xforwardedforclientcertfingerprintenabled")

    @property
    @pulumi.getter
    def xforwardedforclientcertsubjectdnalias(self) -> Optional[str]:
        """
        The name of the custom header. This parameter is valid only if `xforwardedforclientcertsubjectdnenabled` is set to true. The name must be 1 to 40 characters in length, and can contain letters, hyphens (-), underscores (_), and digits.
        """
        return pulumi.get(self, "xforwardedforclientcertsubjectdnalias")

    @property
    @pulumi.getter
    def xforwardedforclientcertsubjectdnenabled(self) -> Optional[bool]:
        """
        Specifies whether to use the `X-Forwarded-Clientcert-subjectdn` header field to obtain information about the owner of the ALB client certificate. Valid values: true and false. Default value: false.
        """
        return pulumi.get(self, "xforwardedforclientcertsubjectdnenabled")

    @property
    @pulumi.getter
    def xforwardedforclientsrcportenabled(self) -> Optional[bool]:
        """
        Indicates Whether the X-Forwarded-Client-Port Header Field Is Used to Obtain Access to Server Load Balancer Instances to the Client, and Those of the Ports.
        """
        return pulumi.get(self, "xforwardedforclientsrcportenabled")

    @property
    @pulumi.getter
    def xforwardedforenabled(self) -> Optional[bool]:
        """
        Whether to Enable by X-Forwarded-For Header Field Is Used to Obtain the Client IP Addresses.
        """
        return pulumi.get(self, "xforwardedforenabled")

    @property
    @pulumi.getter
    def xforwardedforprotoenabled(self) -> Optional[bool]:
        """
        Indicates Whether the X-Forwarded-Proto Header Field Is Used to Obtain the Server Load Balancer Instance Snooping Protocols.
        """
        return pulumi.get(self, "xforwardedforprotoenabled")

    @property
    @pulumi.getter
    def xforwardedforslbidenabled(self) -> Optional[bool]:
        """
        Indicates Whether the SLB-ID Header Field Is Used to Obtain the Load Balancing Instance Id.
        """
        return pulumi.get(self, "xforwardedforslbidenabled")

    @property
    @pulumi.getter
    def xforwardedforslbportenabled(self) -> Optional[bool]:
        """
        Indicates Whether the X-Forwarded-Port Header Field Is Used to Obtain the Server Load Balancer Instance Listening Port.
        """
        return pulumi.get(self, "xforwardedforslbportenabled")
@pulumi.output_type
class LoadBalancerAccessLogConfig(dict):
    """
    Access-log shipping configuration of a load balancer.
    """
    @staticmethod
    def __key_warning(key: str):
        # Map the camelCase wire keys to their snake_case property names.
        suggest = {
            "logProject": "log_project",
            "logStore": "log_store",
        }.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in LoadBalancerAccessLogConfig. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        LoadBalancerAccessLogConfig.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        LoadBalancerAccessLogConfig.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 log_project: Optional[str] = None,
                 log_store: Optional[str] = None):
        """
        :param str log_project: The log service that access logs are shipped to.
        :param str log_store: The log service that access logs are shipped to.
        """
        # Store only the fields that were actually supplied.
        for prop, value in (("log_project", log_project), ("log_store", log_store)):
            if value is not None:
                pulumi.set(__self__, prop, value)

    @property
    @pulumi.getter(name="logProject")
    def log_project(self) -> Optional[str]:
        """
        The log service that access logs are shipped to.
        """
        return pulumi.get(self, "log_project")

    @property
    @pulumi.getter(name="logStore")
    def log_store(self) -> Optional[str]:
        """
        The log service that access logs are shipped to.
        """
        return pulumi.get(self, "log_store")
@pulumi.output_type
class LoadBalancerLoadBalancerBillingConfig(dict):
    """
    Billing configuration of an ALB instance.
    """
    @staticmethod
    def __key_warning(key: str):
        # Map the camelCase wire key to its snake_case property name.
        suggest = {
            "payType": "pay_type",
        }.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in LoadBalancerLoadBalancerBillingConfig. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        LoadBalancerLoadBalancerBillingConfig.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        LoadBalancerLoadBalancerBillingConfig.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 pay_type: str):
        """
        :param str pay_type: The billing method of the ALB instance. Valid value: `PayAsYouGo`.
        """
        pulumi.set(__self__, "pay_type", pay_type)

    @property
    @pulumi.getter(name="payType")
    def pay_type(self) -> str:
        """
        The billing method of the ALB instance. Valid value: `PayAsYouGo`.
        """
        return pulumi.get(self, "pay_type")
@pulumi.output_type
class LoadBalancerModificationProtectionConfig(dict):
    """
    Modification-protection configuration of an ALB instance. Both fields use
    snake_case names, so no camelCase key-warning shim is generated.
    """
    def __init__(__self__, *,
                 reason: Optional[str] = None,
                 status: Optional[str] = None):
        """
        :param str reason: The reason for modification protection. This parameter must be 2 to 128 characters in length, and can contain letters, digits, periods, underscores, and hyphens. The reason must start with a letter. This parameter is required only if `ModificationProtectionStatus` is set to `ConsoleProtection`.
        :param str status: Specifies whether to enable the configuration read-only mode for the ALB instance. Valid values: `NonProtection` and `ConsoleProtection`.
        """
        # Store only the fields that were actually supplied.
        for prop, value in (("reason", reason), ("status", status)):
            if value is not None:
                pulumi.set(__self__, prop, value)

    @property
    @pulumi.getter
    def reason(self) -> Optional[str]:
        """
        The reason for modification protection. This parameter must be 2 to 128 characters in length, and can contain letters, digits, periods, underscores, and hyphens. The reason must start with a letter. This parameter is required only if `ModificationProtectionStatus` is set to `ConsoleProtection`.
        """
        return pulumi.get(self, "reason")

    @property
    @pulumi.getter
    def status(self) -> Optional[str]:
        """
        Specifies whether to enable the configuration read-only mode for the ALB instance. Valid values: `NonProtection` and `ConsoleProtection`.
        """
        return pulumi.get(self, "status")
@pulumi.output_type
class LoadBalancerZoneMapping(dict):
    """
    A zone/vSwitch pairing for an ALB instance.
    """
    @staticmethod
    def __key_warning(key: str):
        # Map the camelCase wire keys to their snake_case property names.
        suggest = {
            "vswitchId": "vswitch_id",
            "zoneId": "zone_id",
        }.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in LoadBalancerZoneMapping. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        LoadBalancerZoneMapping.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        LoadBalancerZoneMapping.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 vswitch_id: str,
                 zone_id: str):
        """
        :param str vswitch_id: The ID of the vSwitch that corresponds to the zone. Each zone can use only one vSwitch and subnet.
        :param str zone_id: The ID of the zone to which the ALB instance belongs.
        """
        for prop, value in (("vswitch_id", vswitch_id), ("zone_id", zone_id)):
            pulumi.set(__self__, prop, value)

    @property
    @pulumi.getter(name="vswitchId")
    def vswitch_id(self) -> str:
        """
        The ID of the vSwitch that corresponds to the zone. Each zone can use only one vSwitch and subnet.
        """
        return pulumi.get(self, "vswitch_id")

    @property
    @pulumi.getter(name="zoneId")
    def zone_id(self) -> str:
        """
        The ID of the zone to which the ALB instance belongs.
        """
        return pulumi.get(self, "zone_id")
@pulumi.output_type
class RuleRuleAction(dict):
    """
    A forwarding-rule action: an ordered action of a given type plus at most
    one type-specific configuration block.
    """
    @staticmethod
    def __key_warning(key: str):
        # Map the camelCase wire keys to their snake_case property names.
        suggest = {
            "fixedResponseConfig": "fixed_response_config",
            "forwardGroupConfig": "forward_group_config",
            "insertHeaderConfig": "insert_header_config",
            "redirectConfig": "redirect_config",
            "rewriteConfig": "rewrite_config",
        }.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in RuleRuleAction. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        RuleRuleAction.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        RuleRuleAction.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 order: int,
                 type: str,
                 fixed_response_config: Optional['outputs.RuleRuleActionFixedResponseConfig'] = None,
                 forward_group_config: Optional['outputs.RuleRuleActionForwardGroupConfig'] = None,
                 insert_header_config: Optional['outputs.RuleRuleActionInsertHeaderConfig'] = None,
                 redirect_config: Optional['outputs.RuleRuleActionRedirectConfig'] = None,
                 rewrite_config: Optional['outputs.RuleRuleActionRewriteConfig'] = None):
        """
        :param int order: The order of the forwarding rule actions. Valid values: 1 to 50000. The actions are performed in ascending order. You cannot leave this parameter empty. Each value must be unique.
        :param str type: The action. Valid values: `ForwardGroup`, `Redirect`, `FixedResponse`, `Rewrite`, `InsertHeader`. **Note:** The preceding actions can be classified into two types: `FinalType`: A forwarding rule can contain only one `FinalType` action, which is executed last. This type of action can contain only one `ForwardGroup`, `Redirect` or `FixedResponse` action. `ExtType`: A forwarding rule can contain one or more `ExtType` actions, which are executed before `FinalType` actions and need to coexist with the `FinalType` actions. This type of action can contain multiple `InsertHeader` actions or one `Rewrite` action.
        :param 'RuleRuleActionFixedResponseConfigArgs' fixed_response_config: The configuration of the fixed response. See the following `Block fixed_response_config`.
        :param 'RuleRuleActionForwardGroupConfigArgs' forward_group_config: The forward response action within ALB. See the following `Block forward_group_config`.
        :param 'RuleRuleActionInsertHeaderConfigArgs' insert_header_config: The configuration of the inserted header field. See the following `Block insert_header_config`.
        :param 'RuleRuleActionRedirectConfigArgs' redirect_config: The configuration of the external redirect action. See the following `Block redirect_config`.
        :param 'RuleRuleActionRewriteConfigArgs' rewrite_config: The redirect action within ALB. See the following `Block rewrite_config`.
        """
        # Required fields are always stored.
        pulumi.set(__self__, "order", order)
        pulumi.set(__self__, "type", type)
        # Optional per-type configuration blocks are stored only when supplied.
        for prop, value in (
            ("fixed_response_config", fixed_response_config),
            ("forward_group_config", forward_group_config),
            ("insert_header_config", insert_header_config),
            ("redirect_config", redirect_config),
            ("rewrite_config", rewrite_config),
        ):
            if value is not None:
                pulumi.set(__self__, prop, value)

    @property
    @pulumi.getter
    def order(self) -> int:
        """
        The order of the forwarding rule actions. Valid values: 1 to 50000. The actions are performed in ascending order. You cannot leave this parameter empty. Each value must be unique.
        """
        return pulumi.get(self, "order")

    @property
    @pulumi.getter
    def type(self) -> str:
        """
        The action. Valid values: `ForwardGroup`, `Redirect`, `FixedResponse`, `Rewrite`, `InsertHeader`. **Note:** The preceding actions can be classified into two types: `FinalType`: A forwarding rule can contain only one `FinalType` action, which is executed last. This type of action can contain only one `ForwardGroup`, `Redirect` or `FixedResponse` action. `ExtType`: A forwarding rule can contain one or more `ExtType` actions, which are executed before `FinalType` actions and need to coexist with the `FinalType` actions. This type of action can contain multiple `InsertHeader` actions or one `Rewrite` action.
        """
        return pulumi.get(self, "type")

    @property
    @pulumi.getter(name="fixedResponseConfig")
    def fixed_response_config(self) -> Optional['outputs.RuleRuleActionFixedResponseConfig']:
        """
        The configuration of the fixed response. See the following `Block fixed_response_config`.
        """
        return pulumi.get(self, "fixed_response_config")

    @property
    @pulumi.getter(name="forwardGroupConfig")
    def forward_group_config(self) -> Optional['outputs.RuleRuleActionForwardGroupConfig']:
        """
        The forward response action within ALB. See the following `Block forward_group_config`.
        """
        return pulumi.get(self, "forward_group_config")

    @property
    @pulumi.getter(name="insertHeaderConfig")
    def insert_header_config(self) -> Optional['outputs.RuleRuleActionInsertHeaderConfig']:
        """
        The configuration of the inserted header field. See the following `Block insert_header_config`.
        """
        return pulumi.get(self, "insert_header_config")

    @property
    @pulumi.getter(name="redirectConfig")
    def redirect_config(self) -> Optional['outputs.RuleRuleActionRedirectConfig']:
        """
        The configuration of the external redirect action. See the following `Block redirect_config`.
        """
        return pulumi.get(self, "redirect_config")

    @property
    @pulumi.getter(name="rewriteConfig")
    def rewrite_config(self) -> Optional['outputs.RuleRuleActionRewriteConfig']:
        """
        The redirect action within ALB. See the following `Block rewrite_config`.
        """
        return pulumi.get(self, "rewrite_config")
@pulumi.output_type
class RuleRuleActionFixedResponseConfig(dict):
    """
    Fixed-response action configuration of a forwarding rule.
    """
    @staticmethod
    def __key_warning(key: str):
        # Map the camelCase wire keys to their snake_case property names.
        suggest = {
            "contentType": "content_type",
            "httpCode": "http_code",
        }.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in RuleRuleActionFixedResponseConfig. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        RuleRuleActionFixedResponseConfig.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        RuleRuleActionFixedResponseConfig.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 content: str,
                 content_type: Optional[str] = None,
                 http_code: Optional[str] = None):
        """
        :param str content: The fixed response. The response cannot exceed 1 KB in size and can contain only ASCII characters.
        :param str content_type: The format of the fixed response. Valid values: `text/plain`, `text/css`, `text/html`, `application/javascript`, and `application/json`.
        :param str http_code: The HTTP status code of the response. The code must be an `HTTP_2xx`, `HTTP_4xx` or `HTTP_5xx.x` is a digit.
        """
        pulumi.set(__self__, "content", content)
        # Optional fields are stored only when supplied.
        for prop, value in (("content_type", content_type), ("http_code", http_code)):
            if value is not None:
                pulumi.set(__self__, prop, value)

    @property
    @pulumi.getter
    def content(self) -> str:
        """
        The fixed response. The response cannot exceed 1 KB in size and can contain only ASCII characters.
        """
        return pulumi.get(self, "content")

    @property
    @pulumi.getter(name="contentType")
    def content_type(self) -> Optional[str]:
        """
        The format of the fixed response. Valid values: `text/plain`, `text/css`, `text/html`, `application/javascript`, and `application/json`.
        """
        return pulumi.get(self, "content_type")

    @property
    @pulumi.getter(name="httpCode")
    def http_code(self) -> Optional[str]:
        """
        The HTTP status code of the response. The code must be an `HTTP_2xx`, `HTTP_4xx` or `HTTP_5xx.x` is a digit.
        """
        return pulumi.get(self, "http_code")
@pulumi.output_type
class RuleRuleActionForwardGroupConfig(dict):
    """
    Forward-group action configuration of a forwarding rule.
    """
    @staticmethod
    def __key_warning(key: str):
        # Map the camelCase wire key to its snake_case property name.
        suggest = {
            "serverGroupTuples": "server_group_tuples",
        }.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in RuleRuleActionForwardGroupConfig. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        RuleRuleActionForwardGroupConfig.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        RuleRuleActionForwardGroupConfig.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 server_group_tuples: Optional[Sequence['outputs.RuleRuleActionForwardGroupConfigServerGroupTuple']] = None):
        """
        :param Sequence['RuleRuleActionForwardGroupConfigServerGroupTupleArgs'] server_group_tuples: The destination server group to which requests are forwarded.
        """
        if server_group_tuples is not None:
            pulumi.set(__self__, "server_group_tuples", server_group_tuples)

    @property
    @pulumi.getter(name="serverGroupTuples")
    def server_group_tuples(self) -> Optional[Sequence['outputs.RuleRuleActionForwardGroupConfigServerGroupTuple']]:
        """
        The destination server group to which requests are forwarded.
        """
        return pulumi.get(self, "server_group_tuples")
@pulumi.output_type
class RuleRuleActionForwardGroupConfigServerGroupTuple(dict):
    """
    A single server-group entry of a forwarding rule's forward-group action.
    """
    @staticmethod
    def __key_warning(key: str):
        # Map the camelCase wire key to its snake_case property name.
        suggest = {
            "serverGroupId": "server_group_id",
        }.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in RuleRuleActionForwardGroupConfigServerGroupTuple. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        RuleRuleActionForwardGroupConfigServerGroupTuple.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default=None) -> Any:
        RuleRuleActionForwardGroupConfigServerGroupTuple.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 server_group_id: Optional[str] = None):
        """
        :param str server_group_id: The ID of the destination server group to which requests are forwarded.
        """
        if server_group_id is not None:
            pulumi.set(__self__, "server_group_id", server_group_id)

    @property
    @pulumi.getter(name="serverGroupId")
    def server_group_id(self) -> Optional[str]:
        """
        The ID of the destination server group to which requests are forwarded.
        """
        return pulumi.get(self, "server_group_id")
@pulumi.output_type
class RuleRuleActionInsertHeaderConfig(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "valueType":
suggest = "value_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in RuleRuleActionInsertHeaderConfig. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
RuleRuleActionInsertHeaderConfig.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
RuleRuleActionInsertHeaderConfig.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
key: Optional[str] = None,
value: Optional[str] = None,
value_type: Optional[str] = None):
"""
:param str key: The name of the inserted header field. The name must be 1 to 40 characters in length, and can contain letters, digits, underscores (_), and hyphens (-). You cannot use the same name in InsertHeader. Note You cannot use Cookie or Host in the name.
:param str value: The content of the inserted header field: If the ValueType parameter is set to SystemDefined, the following values are used: ClientSrcPort: the port of the client ClientSrcIp: the IP address of the client Protocol: the protocol used by client requests (HTTP or HTTPS) SLBId: the ID of the ALB instance SLBPort: the listener port of the ALB instance If the ValueType parameter is set to UserDefined: The header value must be 1 to 128 characters in length, and can contain lowercase letters, printable characters whose ASCII value is ch >= 32 && ch < 127, and wildcards such as asterisks (*) and question marks (?). The header value cannot start or end with a space. If the ValueType parameter is set to ReferenceHeader: The header value must be 1 to 128 characters in length, and can contain lowercase letters, digits, underscores (_), and hyphens (-). Valid values: `ClientSrcPort`, `ClientSrcIp`, `Protocol`, `SLBId`, `SLBPort`, `UserDefined`.
:param str value_type: Valid values: UserDefined: a custom value ReferenceHeader: uses a field of the user request header. SystemDefined: a system value.
"""
if key is not None:
pulumi.set(__self__, "key", key)
if value is not None:
pulumi.set(__self__, "value", value)
if value_type is not None:
pulumi.set(__self__, "value_type", value_type)
@property
@pulumi.getter
def key(self) -> Optional[str]:
"""
The name of the inserted header field. The name must be 1 to 40 characters in length, and can contain letters, digits, underscores (_), and hyphens (-). You cannot use the same name in InsertHeader. Note You cannot use Cookie or Host in the name.
"""
return pulumi.get(self, "key")
@property
@pulumi.getter
def value(self) -> Optional[str]:
"""
The content of the inserted header field: If the ValueType parameter is set to SystemDefined, the following values are used: ClientSrcPort: the port of the client ClientSrcIp: the IP address of the client Protocol: the protocol used by client requests (HTTP or HTTPS) SLBId: the ID of the ALB instance SLBPort: the listener port of the ALB instance If the ValueType parameter is set to UserDefined: The header value must be 1 to 128 characters in length, and can contain lowercase letters, printable characters whose ASCII value is ch >= 32 && ch < 127, and wildcards such as asterisks (*) and question marks (?). The header value cannot start or end with a space. If the ValueType parameter is set to ReferenceHeader: The header value must be 1 to 128 characters in length, and can contain lowercase letters, digits, underscores (_), and hyphens (-). Valid values: `ClientSrcPort`, `ClientSrcIp`, `Protocol`, `SLBId`, `SLBPort`, `UserDefined`.
"""
return pulumi.get(self, "value")
    @property
    @pulumi.getter(name="valueType")
    def value_type(self) -> Optional[str]:
        """
        How the header value is interpreted. Valid values: `UserDefined` (a
        custom value), `ReferenceHeader` (copies a field of the user request
        header), `SystemDefined` (a system value).
        """
        return pulumi.get(self, "value_type")
@pulumi.output_type
class RuleRuleActionRedirectConfig(dict):
    """
    The `redirect_config` block of a rule action: the target host, path, port,
    protocol and query string, and the HTTP status code used when matching
    requests are redirected.
    """
    @staticmethod
    def __key_warning(key: str):
        # Warn when a deprecated camelCase wire key is used instead of the
        # snake_case property name.
        suggest = None
        if key == "httpCode":
            suggest = "http_code"
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in RuleRuleActionRedirectConfig. Access the value via the '{suggest}' property getter instead.")
    def __getitem__(self, key: str) -> Any:
        RuleRuleActionRedirectConfig.__key_warning(key)
        return super().__getitem__(key)
    def get(self, key: str, default = None) -> Any:
        RuleRuleActionRedirectConfig.__key_warning(key)
        return super().get(key, default)
    def __init__(__self__, *,
                 host: Optional[str] = None,
                 http_code: Optional[str] = None,
                 path: Optional[str] = None,
                 port: Optional[int] = None,
                 protocol: Optional[str] = None,
                 query: Optional[str] = None):
        """
        :param str host: The host name of the destination to which requests are directed. The host name must meet the following rules: The host name must be 3 to 128 characters in length, and can contain letters, digits, hyphens (-), periods (.), asterisks (*), and question marks (?). The host name must contain at least one period (.), and cannot start or end with a period (.). The rightmost domain label can contain only letters, asterisks (*) and question marks (?) and cannot contain digits or hyphens (-). Other domain labels cannot start or end with a hyphen (-). You can include asterisks (*) and question marks (?) anywhere in a domain label. Default value: ${host}. You cannot use this value with other characters at the same time.
        :param str http_code: The HTTP status code of the response. The code must be `HTTP_2xx`, `HTTP_4xx` or `HTTP_5xx`, where `x` is a wildcard digit.
        :param str path: The path of the destination to which requests are directed. Valid values: The path must be 1 to 128 characters in length, and start with a forward slash (/). The path can contain letters, digits, asterisks (*), question marks (?) and the following special characters: $ - _ . + / & ~ @ :. It cannot contain the following special characters: " % # ; ! ( ) [ ] ^ , ”. The path is case-sensitive. Default value: ${path}. You can also reference ${host}, ${protocol}, and ${port}. Each variable can appear at most once. You can use the preceding variables at the same time, or use them with a valid string.
        :param int port: The port of the destination to which requests are redirected. Valid values: 1 to 63335 (so the upstream docs; TCP ports top out at 65535, so this is likely a typo — confirm). Default value: ${port}. You cannot use this value together with other characters at the same time.
        :param str protocol: The protocol of the requests to be redirected. Valid values: HTTP and HTTPS. Default value: ${protocol}. You cannot use this value together with other characters at the same time. Note HTTPS listeners can redirect only HTTPS requests.
        :param str query: The query string of the request to be redirected. The query string must be 1 to 128 characters in length, can contain letters and printable characters. It cannot contain the following special characters: # [ ] { } \ | < > &. Default value: ${query}. You can also reference ${host}, ${protocol}, and ${port}. Each variable can appear at most once. You can use the preceding variables at the same time, or use them together with a valid string.
        """
        if host is not None:
            pulumi.set(__self__, "host", host)
        if http_code is not None:
            pulumi.set(__self__, "http_code", http_code)
        if path is not None:
            pulumi.set(__self__, "path", path)
        if port is not None:
            pulumi.set(__self__, "port", port)
        if protocol is not None:
            pulumi.set(__self__, "protocol", protocol)
        if query is not None:
            pulumi.set(__self__, "query", query)
    @property
    @pulumi.getter
    def host(self) -> Optional[str]:
        """
        The host name of the destination to which requests are directed. The host name must meet the following rules: The host name must be 3 to 128 characters in length, and can contain letters, digits, hyphens (-), periods (.), asterisks (*), and question marks (?). The host name must contain at least one period (.), and cannot start or end with a period (.). The rightmost domain label can contain only letters, asterisks (*) and question marks (?) and cannot contain digits or hyphens (-). Other domain labels cannot start or end with a hyphen (-). You can include asterisks (*) and question marks (?) anywhere in a domain label. Default value: ${host}. You cannot use this value with other characters at the same time.
        """
        return pulumi.get(self, "host")
    @property
    @pulumi.getter(name="httpCode")
    def http_code(self) -> Optional[str]:
        """
        The HTTP status code of the response. The code must be `HTTP_2xx`,
        `HTTP_4xx` or `HTTP_5xx`, where `x` is a wildcard digit.
        """
        return pulumi.get(self, "http_code")
    @property
    @pulumi.getter
    def path(self) -> Optional[str]:
        """
        The path of the destination to which requests are directed. Valid values: The path must be 1 to 128 characters in length, and start with a forward slash (/). The path can contain letters, digits, asterisks (*), question marks (?) and the following special characters: $ - _ . + / & ~ @ :. It cannot contain the following special characters: " % # ; ! ( ) [ ] ^ , ”. The path is case-sensitive. Default value: ${path}. You can also reference ${host}, ${protocol}, and ${port}. Each variable can appear at most once. You can use the preceding variables at the same time, or use them with a valid string.
        """
        return pulumi.get(self, "path")
    @property
    @pulumi.getter
    def port(self) -> Optional[int]:
        """
        The port of the destination to which requests are redirected. Valid
        values: 1 to 63335 (so the upstream docs; likely a typo for 65535 —
        confirm). Default value: ${port}. You cannot use this value together
        with other characters at the same time.
        """
        return pulumi.get(self, "port")
    @property
    @pulumi.getter
    def protocol(self) -> Optional[str]:
        """
        The protocol of the requests to be redirected. Valid values: HTTP and HTTPS. Default value: ${protocol}. You cannot use this value together with other characters at the same time. Note HTTPS listeners can redirect only HTTPS requests.
        """
        return pulumi.get(self, "protocol")
    @property
    @pulumi.getter
    def query(self) -> Optional[str]:
        """
        The query string of the request to be redirected. The query string must be 1 to 128 characters in length, can contain letters and printable characters. It cannot contain the following special characters: # [ ] { } \ | < > &. Default value: ${query}. You can also reference ${host}, ${protocol}, and ${port}. Each variable can appear at most once. You can use the preceding variables at the same time, or use them together with a valid string.
        """
        return pulumi.get(self, "query")
@pulumi.output_type
class RuleRuleActionRewriteConfig(dict):
    """
    The `rewrite_config` block of a rule action: the host, path and query
    string that matching requests are rewritten to before being forwarded.
    """
    def __init__(__self__, *,
                 host: Optional[str] = None,
                 path: Optional[str] = None,
                 query: Optional[str] = None):
        """
        :param str host: The host name of the destination to which requests are directed. The host name must meet the following rules: The host name must be 3 to 128 characters in length, and can contain letters, digits, hyphens (-), periods (.), asterisks (*), and question marks (?). The host name must contain at least one period (.), and cannot start or end with a period (.). The rightmost domain label can contain only letters, asterisks (*) and question marks (?) and cannot contain digits or hyphens (-). Other domain labels cannot start or end with a hyphen (-). You can include asterisks (*) and question marks (?) anywhere in a domain label. Default value: ${host}. You cannot use this value with other characters at the same time.
        :param str path: The path of the destination to which requests are directed. Valid values: The path must be 1 to 128 characters in length, and start with a forward slash (/). The path can contain letters, digits, asterisks (*), question marks (?) and the following special characters: $ - _ . + / & ~ @ :. It cannot contain the following special characters: " % # ; ! ( ) [ ] ^ , ”. The path is case-sensitive. Default value: ${path}. You can also reference ${host}, ${protocol}, and ${port}. Each variable can appear at most once. You can use the preceding variables at the same time, or use them with a valid string.
        :param str query: The query string of the request to be rewritten. The query string must be 1 to 128 characters in length, can contain letters and printable characters. It cannot contain the following special characters: # [ ] { } \ | < > &. Default value: ${query}. You can also reference ${host}, ${protocol}, and ${port}. Each variable can appear at most once. You can use the preceding variables at the same time, or use them together with a valid string.
        """
        if host is not None:
            pulumi.set(__self__, "host", host)
        if path is not None:
            pulumi.set(__self__, "path", path)
        if query is not None:
            pulumi.set(__self__, "query", query)
    @property
    @pulumi.getter
    def host(self) -> Optional[str]:
        """
        The host name of the destination to which requests are directed. The host name must meet the following rules: The host name must be 3 to 128 characters in length, and can contain letters, digits, hyphens (-), periods (.), asterisks (*), and question marks (?). The host name must contain at least one period (.), and cannot start or end with a period (.). The rightmost domain label can contain only letters, asterisks (*) and question marks (?) and cannot contain digits or hyphens (-). Other domain labels cannot start or end with a hyphen (-). You can include asterisks (*) and question marks (?) anywhere in a domain label. Default value: ${host}. You cannot use this value with other characters at the same time.
        """
        return pulumi.get(self, "host")
    @property
    @pulumi.getter
    def path(self) -> Optional[str]:
        """
        The path of the destination to which requests are directed. Valid values: The path must be 1 to 128 characters in length, and start with a forward slash (/). The path can contain letters, digits, asterisks (*), question marks (?) and the following special characters: $ - _ . + / & ~ @ :. It cannot contain the following special characters: " % # ; ! ( ) [ ] ^ , ”. The path is case-sensitive. Default value: ${path}. You can also reference ${host}, ${protocol}, and ${port}. Each variable can appear at most once. You can use the preceding variables at the same time, or use them with a valid string.
        """
        return pulumi.get(self, "path")
    @property
    @pulumi.getter
    def query(self) -> Optional[str]:
        """
        The query string of the request to be rewritten. The query string must be 1 to 128 characters in length, can contain letters and printable characters. It cannot contain the following special characters: # [ ] { } \ | < > &. Default value: ${query}. You can also reference ${host}, ${protocol}, and ${port}. Each variable can appear at most once. You can use the preceding variables at the same time, or use them together with a valid string.
        """
        return pulumi.get(self, "query")
@pulumi.output_type
class RuleRuleCondition(dict):
    """
    One match condition of an ALB forwarding rule. `type` selects which of the
    optional `*_config` blocks applies (cookie, header, host, method, path or
    query string).
    """
    @staticmethod
    def __key_warning(key: str):
        # Warn when a deprecated camelCase wire key is used instead of the
        # snake_case property name.
        suggest = None
        if key == "cookieConfig":
            suggest = "cookie_config"
        elif key == "headerConfig":
            suggest = "header_config"
        elif key == "hostConfig":
            suggest = "host_config"
        elif key == "methodConfig":
            suggest = "method_config"
        elif key == "pathConfig":
            suggest = "path_config"
        elif key == "queryStringConfig":
            suggest = "query_string_config"
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in RuleRuleCondition. Access the value via the '{suggest}' property getter instead.")
    def __getitem__(self, key: str) -> Any:
        RuleRuleCondition.__key_warning(key)
        return super().__getitem__(key)
    def get(self, key: str, default = None) -> Any:
        RuleRuleCondition.__key_warning(key)
        return super().get(key, default)
    def __init__(__self__, *,
                 type: str,
                 cookie_config: Optional['outputs.RuleRuleConditionCookieConfig'] = None,
                 header_config: Optional['outputs.RuleRuleConditionHeaderConfig'] = None,
                 host_config: Optional['outputs.RuleRuleConditionHostConfig'] = None,
                 method_config: Optional['outputs.RuleRuleConditionMethodConfig'] = None,
                 path_config: Optional['outputs.RuleRuleConditionPathConfig'] = None,
                 query_string_config: Optional['outputs.RuleRuleConditionQueryStringConfig'] = None):
        """
        :param str type: The type of the forwarding rule condition, i.e. which of the `*_config` blocks applies. NOTE(review): the generated docstring described the *action* type here; judging from the config fields the condition type should be one of `Cookie`, `Header`, `Host`, `Method`, `Path` or `QueryString` — confirm against the provider schema.
        :param 'RuleRuleConditionCookieConfigArgs' cookie_config: The configuration of the cookie. See the following `Block cookie_config`.
        :param 'RuleRuleConditionHeaderConfigArgs' header_config: The configuration of the header field. See the following `Block header_config`.
        :param 'RuleRuleConditionHostConfigArgs' host_config: The configuration of the host field. See the following `Block host_config`.
        :param 'RuleRuleConditionMethodConfigArgs' method_config: The configuration of the request method. See the following `Block method_config`.
        :param 'RuleRuleConditionPathConfigArgs' path_config: The configuration of the path for the request to be forwarded. See the following `Block path_config`.
        :param 'RuleRuleConditionQueryStringConfigArgs' query_string_config: The configuration of the query string. See the following `Block query_string_config`.
        """
        pulumi.set(__self__, "type", type)
        if cookie_config is not None:
            pulumi.set(__self__, "cookie_config", cookie_config)
        if header_config is not None:
            pulumi.set(__self__, "header_config", header_config)
        if host_config is not None:
            pulumi.set(__self__, "host_config", host_config)
        if method_config is not None:
            pulumi.set(__self__, "method_config", method_config)
        if path_config is not None:
            pulumi.set(__self__, "path_config", path_config)
        if query_string_config is not None:
            pulumi.set(__self__, "query_string_config", query_string_config)
    @property
    @pulumi.getter
    def type(self) -> str:
        """
        The type of the forwarding rule condition, i.e. which of the `*_config`
        blocks applies. NOTE(review): the generated docstring described the
        *action* type here; judging from the config fields the condition type
        should be one of `Cookie`, `Header`, `Host`, `Method`, `Path` or
        `QueryString` — confirm against the provider schema.
        """
        return pulumi.get(self, "type")
    @property
    @pulumi.getter(name="cookieConfig")
    def cookie_config(self) -> Optional['outputs.RuleRuleConditionCookieConfig']:
        """
        The configuration of the cookie. See the following `Block cookie_config`.
        """
        return pulumi.get(self, "cookie_config")
    @property
    @pulumi.getter(name="headerConfig")
    def header_config(self) -> Optional['outputs.RuleRuleConditionHeaderConfig']:
        """
        The configuration of the header field. See the following `Block header_config`.
        """
        return pulumi.get(self, "header_config")
    @property
    @pulumi.getter(name="hostConfig")
    def host_config(self) -> Optional['outputs.RuleRuleConditionHostConfig']:
        """
        The configuration of the host field. See the following `Block host_config`.
        """
        return pulumi.get(self, "host_config")
    @property
    @pulumi.getter(name="methodConfig")
    def method_config(self) -> Optional['outputs.RuleRuleConditionMethodConfig']:
        """
        The configuration of the request method. See the following `Block method_config`.
        """
        return pulumi.get(self, "method_config")
    @property
    @pulumi.getter(name="pathConfig")
    def path_config(self) -> Optional['outputs.RuleRuleConditionPathConfig']:
        """
        The configuration of the path for the request to be forwarded. See the following `Block path_config`.
        """
        return pulumi.get(self, "path_config")
    @property
    @pulumi.getter(name="queryStringConfig")
    def query_string_config(self) -> Optional['outputs.RuleRuleConditionQueryStringConfig']:
        """
        The configuration of the query string. See the following `Block query_string_config`.
        """
        return pulumi.get(self, "query_string_config")
@pulumi.output_type
class RuleRuleConditionCookieConfig(dict):
    """
    The `cookie_config` block of a rule condition: the cookie key/value pairs
    the request must carry for the rule to match.
    """
    def __init__(__self__, *,
                 values: Optional[Sequence['outputs.RuleRuleConditionCookieConfigValue']] = None):
        """
        :param Sequence['RuleRuleConditionCookieConfigValueArgs'] values: The cookie key/value pairs to match. (NOTE(review): the generated doc read "The query string" — a copy-paste artifact; confirm against the provider docs.)
        """
        if values is not None:
            pulumi.set(__self__, "values", values)
    @property
    @pulumi.getter
    def values(self) -> Optional[Sequence['outputs.RuleRuleConditionCookieConfigValue']]:
        """
        The cookie key/value pairs to match. (NOTE(review): the generated doc
        read "The query string" — a copy-paste artifact; confirm against the
        provider docs.)
        """
        return pulumi.get(self, "values")
@pulumi.output_type
class RuleRuleConditionCookieConfigValue(dict):
    """
    A single cookie key/value pair inside a `cookie_config` condition.
    NOTE(review): the generated field docstrings below were copied from the
    InsertHeader action; the corrected descriptions are assumptions inferred
    from the class name — confirm against the provider docs.
    """
    def __init__(__self__, *,
                 key: Optional[str] = None,
                 value: Optional[str] = None):
        """
        :param str key: The name of the cookie to match (presumably — see class note).
        :param str value: The value of the cookie to match (presumably — see class note).
        """
        if key is not None:
            pulumi.set(__self__, "key", key)
        if value is not None:
            pulumi.set(__self__, "value", value)
    @property
    @pulumi.getter
    def key(self) -> Optional[str]:
        """
        The name of the cookie to match (presumably — see class note).
        """
        return pulumi.get(self, "key")
    @property
    @pulumi.getter
    def value(self) -> Optional[str]:
        """
        The value of the cookie to match (presumably — see class note).
        """
        return pulumi.get(self, "value")
@pulumi.output_type
class RuleRuleConditionHeaderConfig(dict):
    """
    The `header_config` block of a rule condition: a request header name and
    the values it must take for the rule to match.
    NOTE(review): the generated field docstrings below were copied from the
    InsertHeader action; the corrected descriptions are assumptions inferred
    from the class name — confirm against the provider docs.
    """
    def __init__(__self__, *,
                 key: Optional[str] = None,
                 values: Optional[Sequence[str]] = None):
        """
        :param str key: The name of the request header field to match (presumably — see class note).
        :param Sequence[str] values: The header values to match (presumably — see class note).
        """
        if key is not None:
            pulumi.set(__self__, "key", key)
        if values is not None:
            pulumi.set(__self__, "values", values)
    @property
    @pulumi.getter
    def key(self) -> Optional[str]:
        """
        The name of the request header field to match (presumably — see class note).
        """
        return pulumi.get(self, "key")
    @property
    @pulumi.getter
    def values(self) -> Optional[Sequence[str]]:
        """
        The header values to match (presumably — see class note).
        """
        return pulumi.get(self, "values")
@pulumi.output_type
class RuleRuleConditionHostConfig(dict):
    """
    The `host_config` block of a rule condition: the host names the request
    must target for the rule to match.
    """
    def __init__(__self__, *,
                 values: Optional[Sequence[str]] = None):
        """
        :param Sequence[str] values: The host names to match. (NOTE(review): the generated doc read "The query string" — a copy-paste artifact; confirm against the provider docs.)
        """
        if values is not None:
            pulumi.set(__self__, "values", values)
    @property
    @pulumi.getter
    def values(self) -> Optional[Sequence[str]]:
        """
        The host names to match. (NOTE(review): the generated doc read "The
        query string" — a copy-paste artifact; confirm against the provider
        docs.)
        """
        return pulumi.get(self, "values")
@pulumi.output_type
class RuleRuleConditionMethodConfig(dict):
    """
    The `method_config` block of a rule condition: the HTTP request methods
    for which the rule matches.
    """
    def __init__(__self__, *,
                 values: Optional[Sequence[str]] = None):
        """
        :param Sequence[str] values: The request methods to match. (NOTE(review): the generated doc read "The query string" — a copy-paste artifact; confirm against the provider docs.)
        """
        if values is not None:
            pulumi.set(__self__, "values", values)
    @property
    @pulumi.getter
    def values(self) -> Optional[Sequence[str]]:
        """
        The request methods to match. (NOTE(review): the generated doc read
        "The query string" — a copy-paste artifact; confirm against the
        provider docs.)
        """
        return pulumi.get(self, "values")
@pulumi.output_type
class RuleRuleConditionPathConfig(dict):
    """
    The `path_config` block of a rule condition: the request paths for which
    the rule matches.
    """
    def __init__(__self__, *,
                 values: Optional[Sequence[str]] = None):
        """
        :param Sequence[str] values: The request paths to match. (NOTE(review): the generated doc read "The query string" — a copy-paste artifact; confirm against the provider docs.)
        """
        if values is not None:
            pulumi.set(__self__, "values", values)
    @property
    @pulumi.getter
    def values(self) -> Optional[Sequence[str]]:
        """
        The request paths to match. (NOTE(review): the generated doc read "The
        query string" — a copy-paste artifact; confirm against the provider
        docs.)
        """
        return pulumi.get(self, "values")
@pulumi.output_type
class RuleRuleConditionQueryStringConfig(dict):
    """
    The `query_string_config` block of a rule condition: the query-string
    key/value pairs the request must carry for the rule to match.
    """
    def __init__(__self__, *,
                 values: Optional[Sequence['outputs.RuleRuleConditionQueryStringConfigValue']] = None):
        """
        :param Sequence['RuleRuleConditionQueryStringConfigValueArgs'] values: The query string key/value pairs to match.
        """
        if values is not None:
            pulumi.set(__self__, "values", values)
    @property
    @pulumi.getter
    def values(self) -> Optional[Sequence['outputs.RuleRuleConditionQueryStringConfigValue']]:
        """
        The query string key/value pairs to match.
        """
        return pulumi.get(self, "values")
@pulumi.output_type
class RuleRuleConditionQueryStringConfigValue(dict):
    """
    A single query-string key/value pair inside a `query_string_config`
    condition.
    NOTE(review): the generated field docstrings below were copied from the
    InsertHeader action; the corrected descriptions are assumptions inferred
    from the class name — confirm against the provider docs.
    """
    def __init__(__self__, *,
                 key: Optional[str] = None,
                 value: Optional[str] = None):
        """
        :param str key: The name of the query-string parameter to match (presumably — see class note).
        :param str value: The value of the query-string parameter to match (presumably — see class note).
        """
        if key is not None:
            pulumi.set(__self__, "key", key)
        if value is not None:
            pulumi.set(__self__, "value", value)
    @property
    @pulumi.getter
    def key(self) -> Optional[str]:
        """
        The name of the query-string parameter to match (presumably — see class note).
        """
        return pulumi.get(self, "key")
    @property
    @pulumi.getter
    def value(self) -> Optional[str]:
        """
        The value of the query-string parameter to match (presumably — see class note).
        """
        return pulumi.get(self, "value")
@pulumi.output_type
class ServerGroupHealthCheckConfig(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "healthCheckCodes":
suggest = "health_check_codes"
elif key == "healthCheckConnectPort":
suggest = "health_check_connect_port"
elif key == "healthCheckEnabled":
suggest = "health_check_enabled"
elif key == "healthCheckHost":
suggest = "health_check_host"
elif key == "healthCheckHttpVersion":
suggest = "health_check_http_version"
elif key == "healthCheckInterval":
suggest = "health_check_interval"
elif key == "healthCheckMethod":
suggest = "health_check_method"
elif key == "healthCheckPath":
suggest = "health_check_path"
elif key == "healthCheckProtocol":
suggest = "health_check_protocol"
elif key == "healthCheckTimeout":
suggest = "health_check_timeout"
elif key == "healthyThreshold":
suggest = "healthy_threshold"
elif key == "unhealthyThreshold":
suggest = "unhealthy_threshold"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ServerGroupHealthCheckConfig. Access the value via the '{suggest}' property getter instead.")
    def __getitem__(self, key: str) -> Any:
        """dict index access that first warns if *key* is a deprecated camelCase spelling."""
        ServerGroupHealthCheckConfig.__key_warning(key)
        return super().__getitem__(key)
    def get(self, key: str, default = None) -> Any:
        """dict.get that first warns if *key* is a deprecated camelCase spelling."""
        ServerGroupHealthCheckConfig.__key_warning(key)
        return super().get(key, default)
    def __init__(__self__, *,
                 health_check_codes: Optional[Sequence[str]] = None,
                 health_check_connect_port: Optional[int] = None,
                 health_check_enabled: Optional[bool] = None,
                 health_check_host: Optional[str] = None,
                 health_check_http_version: Optional[str] = None,
                 health_check_interval: Optional[int] = None,
                 health_check_method: Optional[str] = None,
                 health_check_path: Optional[str] = None,
                 health_check_protocol: Optional[str] = None,
                 health_check_timeout: Optional[int] = None,
                 healthy_threshold: Optional[int] = None,
                 unhealthy_threshold: Optional[int] = None):
        """
        Health-check settings of an ALB server group. All fields are optional;
        unset fields are simply not stored.

        :param Sequence[str] health_check_codes: The status code for a successful health check. Multiple status codes can be specified as a
               list. Valid values: `http_2xx`, `http_3xx`, `http_4xx`, and `http_5xx`. Default value: `http_2xx`. **NOTE:** This
               parameter takes effect only when the `health_check_protocol` parameter is set to `HTTP`.
        :param int health_check_connect_port: The port of the backend server that is used for health checks. Valid values: `0`
               to `65535`. Default value: `0`. A value of 0 indicates that the backend server's own port is used for health checks.
        :param bool health_check_enabled: Indicates whether health checks are enabled. Valid values: `true`, `false`. Default
               value: `true`.
        :param str health_check_host: The domain name that is used for health checks.
        :param str health_check_http_version: HTTP protocol version. Valid values: `HTTP1.0` and `HTTP1.1`. Default value: `HTTP1.1`
               . **NOTE:** This parameter takes effect only when the `health_check_protocol` parameter is set to `HTTP`.
        :param int health_check_interval: The time interval between two consecutive health checks. Unit: seconds. Valid values: `1`
               to `50`. Default value: `2`.
        :param str health_check_method: Health check method. Valid values: `GET` and `HEAD`. Default: `GET`. **NOTE:** This parameter
               takes effect only when the `health_check_protocol` parameter is set to `HTTP`.
        :param str health_check_path: The forwarding rule path of health checks. **NOTE:** This parameter takes effect only when
               the `health_check_protocol` parameter is set to `HTTP`.
        :param str health_check_protocol: Health check protocol. Valid values: `HTTP` and `TCP`.
        :param int health_check_timeout: The timeout period of a health check response. If a backend Elastic Compute Service (ECS)
               instance does not send an expected response within the specified period of time, the ECS instance is considered
               unhealthy. Unit: seconds. Valid values: 1 to 300. Default value: 5. **NOTE:** If the value of the `health_check_timeout`
               parameter is smaller than that of the `health_check_interval` parameter, the value of the `health_check_timeout` parameter
               is ignored and the value of the `health_check_interval` parameter is regarded as the timeout period.
        :param int healthy_threshold: The number of health checks that an unhealthy backend server must pass consecutively before it
               is declared healthy. In this case, the health check state is changed from fail to success. Valid values: 2 to 10.
               Default value: 3.
        :param int unhealthy_threshold: The number of consecutive health checks that a healthy backend server must consecutively fail
               before it is declared unhealthy. In this case, the health check state is changed from success to fail. Valid
               values: `2` to `10`. Default value: `3`.
        """
        if health_check_codes is not None:
            pulumi.set(__self__, "health_check_codes", health_check_codes)
        if health_check_connect_port is not None:
            pulumi.set(__self__, "health_check_connect_port", health_check_connect_port)
        if health_check_enabled is not None:
            pulumi.set(__self__, "health_check_enabled", health_check_enabled)
        if health_check_host is not None:
            pulumi.set(__self__, "health_check_host", health_check_host)
        if health_check_http_version is not None:
            pulumi.set(__self__, "health_check_http_version", health_check_http_version)
        if health_check_interval is not None:
            pulumi.set(__self__, "health_check_interval", health_check_interval)
        if health_check_method is not None:
            pulumi.set(__self__, "health_check_method", health_check_method)
        if health_check_path is not None:
            pulumi.set(__self__, "health_check_path", health_check_path)
        if health_check_protocol is not None:
            pulumi.set(__self__, "health_check_protocol", health_check_protocol)
        if health_check_timeout is not None:
            pulumi.set(__self__, "health_check_timeout", health_check_timeout)
        if healthy_threshold is not None:
            pulumi.set(__self__, "healthy_threshold", healthy_threshold)
        if unhealthy_threshold is not None:
            pulumi.set(__self__, "unhealthy_threshold", unhealthy_threshold)
    @property
    @pulumi.getter(name="healthCheckCodes")
    def health_check_codes(self) -> Optional[Sequence[str]]:
        """
        The status codes counted as a successful health check. Multiple status
        codes can be specified as a list. Valid values: `http_2xx`, `http_3xx`,
        `http_4xx`, and `http_5xx`. Default value: `http_2xx`. **NOTE:** takes
        effect only when `health_check_protocol` is `HTTP`.
        """
        return pulumi.get(self, "health_check_codes")
    @property
    @pulumi.getter(name="healthCheckConnectPort")
    def health_check_connect_port(self) -> Optional[int]:
        """
        The port of the backend server that is used for health checks. Valid
        values: `0` to `65535`. Default value: `0`, which means the backend
        server's own port is used for health checks.
        """
        return pulumi.get(self, "health_check_connect_port")
@property
@pulumi.getter(name="healthCheckEnabled")
def health_check_enabled(self) -> Optional[bool]:
"""
Indicates whether health checks are enabled. Valid values: `true`, `false`. Default
value: `true`.
"""
return pulumi.get(self, "health_check_enabled")
@property
@pulumi.getter(name="healthCheckHost")
def health_check_host(self) -> Optional[str]:
"""
The domain name that is used for health checks.
"""
return pulumi.get(self, "health_check_host")
@property
@pulumi.getter(name="healthCheckHttpVersion")
def health_check_http_version(self) -> Optional[str]:
"""
HTTP protocol version. Valid values: `HTTP1.0` and `HTTP1.1`. Default value: `HTTP1.1`
. **NOTE:** This parameter exists if the `HealthCheckProtocol` parameter is set to `HTTP`.
"""
return pulumi.get(self, "health_check_http_version")
@property
@pulumi.getter(name="healthCheckInterval")
def health_check_interval(self) -> Optional[int]:
"""
The time interval between two consecutive health checks. Unit: seconds. Valid values: `1`
to `50`. Default value: `2`.
"""
return pulumi.get(self, "health_check_interval")
@property
@pulumi.getter(name="healthCheckMethod")
def health_check_method(self) -> Optional[str]:
"""
Health check method. Valid values: `GET` and `HEAD`. Default: `GET`. **NOTE:** This parameter
exists if the `HealthCheckProtocol` parameter is set to `HTTP`.
"""
return pulumi.get(self, "health_check_method")
@property
@pulumi.getter(name="healthCheckPath")
def health_check_path(self) -> Optional[str]:
"""
The forwarding rule path of health checks. **NOTE:** This parameter exists if
the `HealthCheckProtocol` parameter is set to `HTTP`.
"""
return pulumi.get(self, "health_check_path")
@property
@pulumi.getter(name="healthCheckProtocol")
def health_check_protocol(self) -> Optional[str]:
"""
Health check protocol. Valid values: `HTTP` and `TCP`.
"""
return pulumi.get(self, "health_check_protocol")
@property
@pulumi.getter(name="healthCheckTimeout")
def health_check_timeout(self) -> Optional[int]:
"""
The timeout period of a health check response. If a backend Elastic Compute Service (ECS)
instance does not send an expected response within the specified period of time, the ECS instance is considered
unhealthy. Unit: seconds. Valid values: 1 to 300. Default value: 5. **NOTE:** If the value of the `HealthCHeckTimeout`
parameter is smaller than that of the `HealthCheckInterval` parameter, the value of the `HealthCHeckTimeout` parameter
is ignored and the value of the `HealthCheckInterval` parameter is regarded as the timeout period.
"""
return pulumi.get(self, "health_check_timeout")
@property
@pulumi.getter(name="healthyThreshold")
def healthy_threshold(self) -> Optional[int]:
"""
The number of health checks that an unhealthy backend server must pass consecutively before it
is declared healthy. In this case, the health check state is changed from fail to success. Valid values: 2 to 10.
Default value: 3.
"""
return pulumi.get(self, "healthy_threshold")
@property
@pulumi.getter(name="unhealthyThreshold")
def unhealthy_threshold(self) -> Optional[int]:
"""
The number of consecutive health checks that a healthy backend server must consecutively fail
before it is declared unhealthy. In this case, the health check state is changed from success to fail. Valid
values: `2` to `10`. Default value: `3`.
"""
return pulumi.get(self, "unhealthy_threshold")
@pulumi.output_type
class ServerGroupServer(dict):
    @staticmethod
    def __key_warning(key: str):
        # camelCase dict keys a user might probe, mapped to the snake_case
        # property getters this output type actually exposes.
        camel_to_snake = {
            "serverId": "server_id",
            "serverIp": "server_ip",
            "serverType": "server_type",
        }
        suggest = camel_to_snake.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in ServerGroupServer. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        ServerGroupServer.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        ServerGroupServer.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 description: Optional[str] = None,
                 port: Optional[int] = None,
                 server_id: Optional[str] = None,
                 server_ip: Optional[str] = None,
                 server_type: Optional[str] = None,
                 status: Optional[str] = None,
                 weight: Optional[int] = None):
        """
        :param str description: The description of the server.
        :param int port: The port used by the server. Valid values: `1` to `65535`.
        :param str server_id: The ID of the ECS instance, ENI instance or ECI instance.
        :param str server_ip: The IP address of the ENI instance when it is in the inclusive ENI mode.
        :param str server_type: The type of the server. Valid values: `Ecs`, `Eni` and `Eci`.
        :param str status: The status of the resource.
        :param int weight: The weight of the server, `0` to `100` (default `100`). A weight of `0` means no requests are forwarded to the server.
        """
        # Record only the fields the caller actually supplied.
        fields = {
            "description": description,
            "port": port,
            "server_id": server_id,
            "server_ip": server_ip,
            "server_type": server_type,
            "status": status,
            "weight": weight,
        }
        for name, value in fields.items():
            if value is not None:
                pulumi.set(__self__, name, value)

    @property
    @pulumi.getter
    def description(self) -> Optional[str]:
        """
        The description of the server.
        """
        return pulumi.get(self, "description")

    @property
    @pulumi.getter
    def port(self) -> Optional[int]:
        """
        The port used by the server. Valid values: `1` to `65535`.
        """
        return pulumi.get(self, "port")

    @property
    @pulumi.getter(name="serverId")
    def server_id(self) -> Optional[str]:
        """
        The ID of the ECS instance, ENI instance or ECI instance.
        """
        return pulumi.get(self, "server_id")

    @property
    @pulumi.getter(name="serverIp")
    def server_ip(self) -> Optional[str]:
        """
        The IP address of the ENI instance when it is in the inclusive ENI
        mode.
        """
        return pulumi.get(self, "server_ip")

    @property
    @pulumi.getter(name="serverType")
    def server_type(self) -> Optional[str]:
        """
        The type of the server. Valid values: `Ecs`, `Eni` and `Eci`.
        """
        return pulumi.get(self, "server_type")

    @property
    @pulumi.getter
    def status(self) -> Optional[str]:
        """
        The status of the resource.
        """
        return pulumi.get(self, "status")

    @property
    @pulumi.getter
    def weight(self) -> Optional[int]:
        """
        The weight of the server, `0` to `100` (default `100`). A weight of
        `0` means no requests are forwarded to the server.
        """
        return pulumi.get(self, "weight")
@pulumi.output_type
class ServerGroupStickySessionConfig(dict):
    @staticmethod
    def __key_warning(key: str):
        # camelCase dict keys a user might probe, mapped to the snake_case
        # property getters this output type actually exposes.
        camel_to_snake = {
            "cookieTimeout": "cookie_timeout",
            "stickySessionEnabled": "sticky_session_enabled",
            "stickySessionType": "sticky_session_type",
        }
        suggest = camel_to_snake.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in ServerGroupStickySessionConfig. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        ServerGroupStickySessionConfig.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        ServerGroupStickySessionConfig.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 cookie: Optional[str] = None,
                 cookie_timeout: Optional[int] = None,
                 sticky_session_enabled: Optional[bool] = None,
                 sticky_session_type: Optional[str] = None):
        """
        :param str cookie: The cookie configured on the server. **NOTE:** Exists only when the `StickySession` parameter is `On` and `StickySessionType` is `server`.
        :param int cookie_timeout: The timeout period of a cookie, in seconds. Valid values: `1` to `86400` (default `1000`).
        :param bool sticky_session_enabled: Whether sticky sessions are enabled: `true` or `false` (default `false`). **NOTE:** Exists only when the `StickySession` parameter is `On`.
        :param str sticky_session_type: How the cookie is handled: `Server` or `Insert`.
        """
        # Record only the fields the caller actually supplied.
        fields = {
            "cookie": cookie,
            "cookie_timeout": cookie_timeout,
            "sticky_session_enabled": sticky_session_enabled,
            "sticky_session_type": sticky_session_type,
        }
        for name, value in fields.items():
            if value is not None:
                pulumi.set(__self__, name, value)

    @property
    @pulumi.getter
    def cookie(self) -> Optional[str]:
        """
        The cookie configured on the server. **NOTE:** Exists only when the
        `StickySession` parameter is `On` and `StickySessionType` is
        `server`.
        """
        return pulumi.get(self, "cookie")

    @property
    @pulumi.getter(name="cookieTimeout")
    def cookie_timeout(self) -> Optional[int]:
        """
        The timeout period of a cookie, in seconds. Valid values: `1` to
        `86400` (default `1000`).
        """
        return pulumi.get(self, "cookie_timeout")

    @property
    @pulumi.getter(name="stickySessionEnabled")
    def sticky_session_enabled(self) -> Optional[bool]:
        """
        Whether sticky sessions are enabled: `true` or `false` (default
        `false`). **NOTE:** Exists only when the `StickySession` parameter
        is `On`.
        """
        return pulumi.get(self, "sticky_session_enabled")

    @property
    @pulumi.getter(name="stickySessionType")
    def sticky_session_type(self) -> Optional[str]:
        """
        How the cookie is handled: `Server` or `Insert`.
        """
        return pulumi.get(self, "sticky_session_type")
@pulumi.output_type
class GetAclsAclResult(dict):
    def __init__(__self__, *,
                 acl_entries: Sequence['outputs.GetAclsAclAclEntryResult'],
                 acl_id: str,
                 acl_name: str,
                 address_ip_version: str,
                 id: str,
                 resource_group_id: str,
                 status: str):
        """
        :param Sequence['GetAclsAclAclEntryArgs'] acl_entries: ACL Entries.
        :param str acl_id: Access Control Policy ID.
        :param str acl_name: The ACL Name.
        :param str address_ip_version: Address Protocol Version.
        :param str id: The ID of the Acl.
        :param str resource_group_id: The ID of the resource group the ACL belongs to.
        :param str status: The state of the ACL: `Provisioning` (being created), `Available` (ready for use) or `Configuring` (being configured).
        """
        # Every field of a data-source result is required, so set each one
        # unconditionally, in declaration order.
        for name, value in (
            ("acl_entries", acl_entries),
            ("acl_id", acl_id),
            ("acl_name", acl_name),
            ("address_ip_version", address_ip_version),
            ("id", id),
            ("resource_group_id", resource_group_id),
            ("status", status),
        ):
            pulumi.set(__self__, name, value)

    @property
    @pulumi.getter(name="aclEntries")
    def acl_entries(self) -> Sequence['outputs.GetAclsAclAclEntryResult']:
        """
        ACL Entries.
        """
        return pulumi.get(self, "acl_entries")

    @property
    @pulumi.getter(name="aclId")
    def acl_id(self) -> str:
        """
        Access Control Policy ID.
        """
        return pulumi.get(self, "acl_id")

    @property
    @pulumi.getter(name="aclName")
    def acl_name(self) -> str:
        """
        The ACL Name.
        """
        return pulumi.get(self, "acl_name")

    @property
    @pulumi.getter(name="addressIpVersion")
    def address_ip_version(self) -> str:
        """
        Address Protocol Version.
        """
        return pulumi.get(self, "address_ip_version")

    @property
    @pulumi.getter
    def id(self) -> str:
        """
        The ID of the Acl.
        """
        return pulumi.get(self, "id")

    @property
    @pulumi.getter(name="resourceGroupId")
    def resource_group_id(self) -> str:
        """
        The ID of the resource group the ACL belongs to.
        """
        return pulumi.get(self, "resource_group_id")

    @property
    @pulumi.getter
    def status(self) -> str:
        """
        The state of the ACL: `Provisioning` (being created), `Available`
        (ready for use) or `Configuring` (being configured).
        """
        return pulumi.get(self, "status")
@pulumi.output_type
class GetAclsAclAclEntryResult(dict):
    def __init__(__self__, *,
                 description: str,
                 entry: str,
                 status: str):
        """
        :param str description: Description of the ACL entry: 1 to 256 characters; letters, digits, hyphens (-), forward slashes (/), periods (.), underscores (_) and Chinese characters are supported.
        :param str entry: The entry value — presumably an IP address or CIDR block; not documented upstream, confirm against the provider schema.
        :param str status: The state of the ACL: `Provisioning` (being created), `Available` (ready for use) or `Configuring` (being configured).
        """
        # Every field is required; set all of them unconditionally.
        for name, value in (
            ("description", description),
            ("entry", entry),
            ("status", status),
        ):
            pulumi.set(__self__, name, value)

    @property
    @pulumi.getter
    def description(self) -> str:
        """
        Description of the ACL entry: 1 to 256 characters; letters, digits,
        hyphens (-), forward slashes (/), periods (.), underscores (_) and
        Chinese characters are supported.
        """
        return pulumi.get(self, "description")

    @property
    @pulumi.getter
    def entry(self) -> str:
        """
        The entry value — presumably an IP address or CIDR block; not
        documented upstream, confirm against the provider schema.
        """
        return pulumi.get(self, "entry")

    @property
    @pulumi.getter
    def status(self) -> str:
        """
        The state of the ACL: `Provisioning` (being created), `Available`
        (ready for use) or `Configuring` (being configured).
        """
        return pulumi.get(self, "status")
@pulumi.output_type
class GetHealthCheckTemplatesTemplateResult(dict):
    def __init__(__self__, *,
                 health_check_codes: Sequence[str],
                 health_check_connect_port: int,
                 health_check_host: str,
                 health_check_http_version: str,
                 health_check_interval: int,
                 health_check_method: str,
                 health_check_path: str,
                 health_check_protocol: str,
                 health_check_template_id: str,
                 health_check_template_name: str,
                 health_check_timeout: int,
                 healthy_threshold: int,
                 id: str,
                 unhealthy_threshold: int):
        """
        :param Sequence[str] health_check_codes: The HTTP status codes that indicate a successful health check.
        :param int health_check_connect_port: Port used for health checks, `0` to `65535` (default ` 0`, meaning the backend server's port is used).
        :param str health_check_host: Domain name used for health checks (default `$SERVER_IP`); 1 to 80 characters.
        :param str health_check_http_version: HTTP protocol version: `HTTP1.0` or `HTTP1.1` (default `HTTP1.1`).
        :param int health_check_interval: Seconds between two consecutive health checks, `1` to `50` (default `2`).
        :param str health_check_method: Health check method: `GET` or `HEAD` (default `HEAD`).
        :param str health_check_path: URL used for health checks; 1 to 80 characters, may contain letters, digits, hyphens (-), forward slashes (/), periods (.), percent signs (%), question marks (?), number signs (#), ampersands (&) and the extended characters ` _ ; ~ ! ( )* [ ] @ $ ^ : ' , +. The URL must start with a forward slash (/)`.
        :param str health_check_protocol: Protocol used for health checks: HTTP or TCP (default HTTP).
        :param str health_check_template_id: The ID of the resource.
        :param str health_check_template_name: Template name, 2 to 128 characters starting with a letter; letters, digits, periods (.), underscores (_) and hyphens (-) allowed.
        :param int health_check_timeout: Seconds to wait for a health-check response before the check fails; `1` to `300` (default `5`). An ECS instance that does not answer in time fails the check.
        :param int healthy_threshold: Consecutive passes needed before an unhealthy backend server is declared healthy (fail to success); `2` to `10` (default `3`).
        :param str id: The ID of the Health Check Template.
        :param int unhealthy_threshold: Consecutive failures needed before a healthy backend server is declared unhealthy (success to fail); `2` to `10` (default `3`).
        """
        # Every field of a data-source result is required, so set each one
        # unconditionally, in declaration order.
        for name, value in (
            ("health_check_codes", health_check_codes),
            ("health_check_connect_port", health_check_connect_port),
            ("health_check_host", health_check_host),
            ("health_check_http_version", health_check_http_version),
            ("health_check_interval", health_check_interval),
            ("health_check_method", health_check_method),
            ("health_check_path", health_check_path),
            ("health_check_protocol", health_check_protocol),
            ("health_check_template_id", health_check_template_id),
            ("health_check_template_name", health_check_template_name),
            ("health_check_timeout", health_check_timeout),
            ("healthy_threshold", healthy_threshold),
            ("id", id),
            ("unhealthy_threshold", unhealthy_threshold),
        ):
            pulumi.set(__self__, name, value)

    @property
    @pulumi.getter(name="healthCheckCodes")
    def health_check_codes(self) -> Sequence[str]:
        """
        The HTTP status codes that indicate a successful health check.
        """
        return pulumi.get(self, "health_check_codes")

    @property
    @pulumi.getter(name="healthCheckConnectPort")
    def health_check_connect_port(self) -> int:
        """
        Port used for health checks, `0` to `65535` (default ` 0`, meaning
        the backend server's port is used).
        """
        return pulumi.get(self, "health_check_connect_port")

    @property
    @pulumi.getter(name="healthCheckHost")
    def health_check_host(self) -> str:
        """
        Domain name used for health checks (default `$SERVER_IP`); 1 to 80
        characters.
        """
        return pulumi.get(self, "health_check_host")

    @property
    @pulumi.getter(name="healthCheckHttpVersion")
    def health_check_http_version(self) -> str:
        """
        HTTP protocol version: `HTTP1.0` or `HTTP1.1` (default `HTTP1.1`).
        """
        return pulumi.get(self, "health_check_http_version")

    @property
    @pulumi.getter(name="healthCheckInterval")
    def health_check_interval(self) -> int:
        """
        Seconds between two consecutive health checks, `1` to `50`
        (default `2`).
        """
        return pulumi.get(self, "health_check_interval")

    @property
    @pulumi.getter(name="healthCheckMethod")
    def health_check_method(self) -> str:
        """
        Health check method: `GET` or `HEAD` (default `HEAD`).
        """
        return pulumi.get(self, "health_check_method")

    @property
    @pulumi.getter(name="healthCheckPath")
    def health_check_path(self) -> str:
        """
        URL used for health checks; 1 to 80 characters, may contain letters,
        digits, hyphens (-), forward slashes (/), periods (.), percent signs
        (%), question marks (?), number signs (#), ampersands (&) and the
        extended characters ` _ ; ~ ! ( )* [ ] @ $ ^ : ' , +. The URL must
        start with a forward slash (/)`.
        """
        return pulumi.get(self, "health_check_path")

    @property
    @pulumi.getter(name="healthCheckProtocol")
    def health_check_protocol(self) -> str:
        """
        Protocol used for health checks: HTTP or TCP (default HTTP).
        """
        return pulumi.get(self, "health_check_protocol")

    @property
    @pulumi.getter(name="healthCheckTemplateId")
    def health_check_template_id(self) -> str:
        """
        The ID of the resource.
        """
        return pulumi.get(self, "health_check_template_id")

    @property
    @pulumi.getter(name="healthCheckTemplateName")
    def health_check_template_name(self) -> str:
        """
        Template name, 2 to 128 characters starting with a letter; letters,
        digits, periods (.), underscores (_) and hyphens (-) allowed.
        """
        return pulumi.get(self, "health_check_template_name")

    @property
    @pulumi.getter(name="healthCheckTimeout")
    def health_check_timeout(self) -> int:
        """
        Seconds to wait for a health-check response before the check fails;
        `1` to `300` (default `5`). An ECS instance that does not answer in
        time fails the check.
        """
        return pulumi.get(self, "health_check_timeout")

    @property
    @pulumi.getter(name="healthyThreshold")
    def healthy_threshold(self) -> int:
        """
        Consecutive passes needed before an unhealthy backend server is
        declared healthy (fail to success); `2` to `10` (default `3`).
        """
        return pulumi.get(self, "healthy_threshold")

    @property
    @pulumi.getter
    def id(self) -> str:
        """
        The ID of the Health Check Template.
        """
        return pulumi.get(self, "id")

    @property
    @pulumi.getter(name="unhealthyThreshold")
    def unhealthy_threshold(self) -> int:
        """
        Consecutive failures needed before a healthy backend server is
        declared unhealthy (success to fail); `2` to `10` (default `3`).
        """
        return pulumi.get(self, "unhealthy_threshold")
@pulumi.output_type
class GetListenersListenerResult(dict):
    def __init__(__self__, *,
                 access_log_record_customized_headers_enabled: bool,
                 access_log_tracing_configs: Sequence['outputs.GetListenersListenerAccessLogTracingConfigResult'],
                 acl_configs: Sequence['outputs.GetListenersListenerAclConfigResult'],
                 certificates: Sequence['outputs.GetListenersListenerCertificateResult'],
                 default_actions: Sequence['outputs.GetListenersListenerDefaultActionResult'],
                 gzip_enabled: bool,
                 http2_enabled: bool,
                 id: str,
                 idle_timeout: int,
                 listener_description: str,
                 listener_id: str,
                 listener_port: int,
                 listener_protocol: str,
                 load_balancer_id: str,
                 max_results: str,
                 next_token: str,
                 quic_configs: Sequence['outputs.GetListenersListenerQuicConfigResult'],
                 request_timeout: int,
                 security_policy_id: str,
                 status: str,
                 xforwarded_for_configs: Sequence['outputs.GetListenersListenerXforwardedForConfigResult']):
        """
        :param bool access_log_record_customized_headers_enabled: Whether the access log carries custom header fields: true or false (default false).
        :param Sequence['GetListenersListenerAccessLogTracingConfigArgs'] access_log_tracing_configs: Xtrace configuration information.
        :param Sequence['GetListenersListenerAclConfigArgs'] acl_configs: Configurations of the access control lists (ACLs).
        :param Sequence['GetListenersListenerCertificateArgs'] certificates: Certificate.
        :param Sequence['GetListenersListenerDefaultActionArgs'] default_actions: The default rule action list.
        :param bool gzip_enabled: Whether Gzip compression is enabled for specific file types: `True` or `False` (default `True`).
        :param bool http2_enabled: Whether HTTP/2 features are enabled: `True` or `False` (default `True`).
        :param str id: The ID of the Listener.
        :param int idle_timeout: Connection idle timeout, `1` to `60` seconds.
        :param str listener_description: Description of the listener, 2 to 256 characters.
        :param str listener_id: Resource-level ID of the listener resource.
        :param int listener_port: Front-end port used by the ALB instance: `1~65535`.
        :param str listener_protocol: Listener protocol: `HTTP`, `HTTPS` or `QUIC`.
        :param str load_balancer_id: The ALB instance ID.
        :param str max_results: Maximum number of records returned by this request.
        :param str next_token: Pagination position of the current call; null means the data has been read to the end.
        :param Sequence['GetListenersListenerQuicConfigArgs'] quic_configs: Configuration associated with QUIC listening.
        :param int request_timeout: Request timeout, `1` to `180` seconds (default 60). If the backend server has not answered within it, ALB gives up waiting and returns HTTP 504 to the client.
        :param str security_policy_id: Security policy.
        :param str status: Association status between the ACL and the listener: `Associating` (being associated), `Associated` (associated) or `Dissociating` (being disassociated).
        :param Sequence['GetListenersListenerXforwardedForConfigArgs'] xforwarded_for_configs: X-Forwarded-For related attribute configuration.
        """
        # Every field of a data-source result is required, so set each one
        # unconditionally, in declaration order.
        for name, value in (
            ("access_log_record_customized_headers_enabled", access_log_record_customized_headers_enabled),
            ("access_log_tracing_configs", access_log_tracing_configs),
            ("acl_configs", acl_configs),
            ("certificates", certificates),
            ("default_actions", default_actions),
            ("gzip_enabled", gzip_enabled),
            ("http2_enabled", http2_enabled),
            ("id", id),
            ("idle_timeout", idle_timeout),
            ("listener_description", listener_description),
            ("listener_id", listener_id),
            ("listener_port", listener_port),
            ("listener_protocol", listener_protocol),
            ("load_balancer_id", load_balancer_id),
            ("max_results", max_results),
            ("next_token", next_token),
            ("quic_configs", quic_configs),
            ("request_timeout", request_timeout),
            ("security_policy_id", security_policy_id),
            ("status", status),
            ("xforwarded_for_configs", xforwarded_for_configs),
        ):
            pulumi.set(__self__, name, value)

    @property
    @pulumi.getter(name="accessLogRecordCustomizedHeadersEnabled")
    def access_log_record_customized_headers_enabled(self) -> bool:
        """
        Whether the access log carries custom header fields: true or false
        (default false).
        """
        return pulumi.get(self, "access_log_record_customized_headers_enabled")

    @property
    @pulumi.getter(name="accessLogTracingConfigs")
    def access_log_tracing_configs(self) -> Sequence['outputs.GetListenersListenerAccessLogTracingConfigResult']:
        """
        Xtrace configuration information.
        """
        return pulumi.get(self, "access_log_tracing_configs")

    @property
    @pulumi.getter(name="aclConfigs")
    def acl_configs(self) -> Sequence['outputs.GetListenersListenerAclConfigResult']:
        """
        Configurations of the access control lists (ACLs).
        """
        return pulumi.get(self, "acl_configs")

    @property
    @pulumi.getter
    def certificates(self) -> Sequence['outputs.GetListenersListenerCertificateResult']:
        """
        Certificate.
        """
        return pulumi.get(self, "certificates")

    @property
    @pulumi.getter(name="defaultActions")
    def default_actions(self) -> Sequence['outputs.GetListenersListenerDefaultActionResult']:
        """
        The default rule action list.
        """
        return pulumi.get(self, "default_actions")

    @property
    @pulumi.getter(name="gzipEnabled")
    def gzip_enabled(self) -> bool:
        """
        Whether Gzip compression is enabled for specific file types: `True`
        or `False` (default `True`).
        """
        return pulumi.get(self, "gzip_enabled")

    @property
    @pulumi.getter(name="http2Enabled")
    def http2_enabled(self) -> bool:
        """
        Whether HTTP/2 features are enabled: `True` or `False` (default
        `True`).
        """
        return pulumi.get(self, "http2_enabled")

    @property
    @pulumi.getter
    def id(self) -> str:
        """
        The ID of the Listener.
        """
        return pulumi.get(self, "id")

    @property
    @pulumi.getter(name="idleTimeout")
    def idle_timeout(self) -> int:
        """
        Connection idle timeout, `1` to `60` seconds.
        """
        return pulumi.get(self, "idle_timeout")

    @property
    @pulumi.getter(name="listenerDescription")
    def listener_description(self) -> str:
        """
        Description of the listener, 2 to 256 characters.
        """
        return pulumi.get(self, "listener_description")

    @property
    @pulumi.getter(name="listenerId")
    def listener_id(self) -> str:
        """
        Resource-level ID of the listener resource.
        """
        return pulumi.get(self, "listener_id")

    @property
    @pulumi.getter(name="listenerPort")
    def listener_port(self) -> int:
        """
        Front-end port used by the ALB instance: `1~65535`.
        """
        return pulumi.get(self, "listener_port")

    @property
    @pulumi.getter(name="listenerProtocol")
    def listener_protocol(self) -> str:
        """
        Listener protocol: `HTTP`, `HTTPS` or `QUIC`.
        """
        return pulumi.get(self, "listener_protocol")

    @property
    @pulumi.getter(name="loadBalancerId")
    def load_balancer_id(self) -> str:
        """
        The ALB instance ID.
        """
        return pulumi.get(self, "load_balancer_id")

    @property
    @pulumi.getter(name="maxResults")
    def max_results(self) -> str:
        """
        Maximum number of records returned by this request.
        """
        return pulumi.get(self, "max_results")

    @property
    @pulumi.getter(name="nextToken")
    def next_token(self) -> str:
        """
        Pagination position of the current call; null means the data has
        been read to the end.
        """
        return pulumi.get(self, "next_token")

    @property
    @pulumi.getter(name="quicConfigs")
    def quic_configs(self) -> Sequence['outputs.GetListenersListenerQuicConfigResult']:
        """
        Configuration associated with QUIC listening.
        """
        return pulumi.get(self, "quic_configs")

    @property
    @pulumi.getter(name="requestTimeout")
    def request_timeout(self) -> int:
        """
        Request timeout, `1` to `180` seconds (default 60). If the backend
        server has not answered within it, ALB gives up waiting and returns
        HTTP 504 to the client.
        """
        return pulumi.get(self, "request_timeout")

    @property
    @pulumi.getter(name="securityPolicyId")
    def security_policy_id(self) -> str:
        """
        Security policy.
        """
        return pulumi.get(self, "security_policy_id")

    @property
    @pulumi.getter
    def status(self) -> str:
        """
        Association status between the ACL and the listener: `Associating`
        (being associated), `Associated` (associated) or `Dissociating`
        (being disassociated).
        """
        return pulumi.get(self, "status")

    @property
    @pulumi.getter(name="xforwardedForConfigs")
    def xforwarded_for_configs(self) -> Sequence['outputs.GetListenersListenerXforwardedForConfigResult']:
        """
        X-Forwarded-For related attribute configuration.
        """
        return pulumi.get(self, "xforwarded_for_configs")
@pulumi.output_type
class GetListenersListenerAccessLogTracingConfigResult(dict):
    def __init__(__self__, *,
                 tracing_enabled: bool,
                 tracing_sample: int,
                 tracing_type: str):
        """
        :param bool tracing_enabled: Whether the Xtrace function is enabled: True or False (default False).
        :param int tracing_sample: Xtrace sampling rate, **1~10000**.
        :param str tracing_type: Xtrace type; the value is **Zipkin**.
        """
        # Every field is required; set all of them unconditionally.
        for name, value in (
            ("tracing_enabled", tracing_enabled),
            ("tracing_sample", tracing_sample),
            ("tracing_type", tracing_type),
        ):
            pulumi.set(__self__, name, value)

    @property
    @pulumi.getter(name="tracingEnabled")
    def tracing_enabled(self) -> bool:
        """
        Whether the Xtrace function is enabled: True or False (default
        False).
        """
        return pulumi.get(self, "tracing_enabled")

    @property
    @pulumi.getter(name="tracingSample")
    def tracing_sample(self) -> int:
        """
        Xtrace sampling rate, **1~10000**.
        """
        return pulumi.get(self, "tracing_sample")

    @property
    @pulumi.getter(name="tracingType")
    def tracing_type(self) -> str:
        """
        Xtrace type; the value is **Zipkin**.
        """
        return pulumi.get(self, "tracing_type")
@pulumi.output_type
class GetListenersListenerAclConfigResult(dict):
    """Access control list (ACL) configuration of a listener."""

    def __init__(__self__, *,
                 acl_relations: Sequence['outputs.GetListenersListenerAclConfigAclRelationResult'],
                 acl_type: str):
        """
        :param Sequence['GetListenersListenerAclConfigAclRelationArgs'] acl_relations: The ACLs that are associated with the listener.
        :param str acl_type: The type of the ACL. Valid values: `White` or `Black`. With `White`, the ACL is a whitelist: only requests from the IP addresses or CIDR blocks in the ACL are forwarded, which suits scenarios where only specific IP addresses may access an application (an improperly configured whitelist is risky, and a whitelist enabled without any IP addresses makes the ALB listener forward no requests at all). With `Black`, the ACL is a blacklist: all requests from the IP addresses or CIDR blocks in the ACL are denied, which is used to block specific IP addresses (a blacklist enabled on an ACL containing no IP addresses makes the ALB listener forward all requests).
        """
        for _key, _value in (
            ("acl_relations", acl_relations),
            ("acl_type", acl_type),
        ):
            pulumi.set(__self__, _key, _value)

    @property
    @pulumi.getter(name="aclRelations")
    def acl_relations(self) -> Sequence['outputs.GetListenersListenerAclConfigAclRelationResult']:
        """The ACLs that are associated with the listener."""
        return pulumi.get(self, "acl_relations")

    @property
    @pulumi.getter(name="aclType")
    def acl_type(self) -> str:
        """
        The type of the ACL. Valid values: `White` or `Black`. With `White`, the ACL is a whitelist: only requests from the IP addresses or CIDR blocks in the ACL are forwarded, which suits scenarios where only specific IP addresses may access an application (an improperly configured whitelist is risky, and a whitelist enabled without any IP addresses makes the ALB listener forward no requests at all). With `Black`, the ACL is a blacklist: all requests from the IP addresses or CIDR blocks in the ACL are denied, which is used to block specific IP addresses (a blacklist enabled on an ACL containing no IP addresses makes the ALB listener forward all requests).
        """
        return pulumi.get(self, "acl_type")
@pulumi.output_type
class GetListenersListenerAclConfigAclRelationResult(dict):
    """A single ACL-to-listener association."""

    def __init__(__self__, *,
                 acl_id: str,
                 status: str):
        """
        :param str acl_id: The ID of the ACL (access policy group) bound to the listener.
        :param str status: The association status between the ACL and the listener. Valid values: `Associating` (the ACL is being associated with the listener), `Associated` (the ACL is associated with the listener) or `Dissociating` (the ACL is being disassociated from the listener).
        """
        for _key, _value in (("acl_id", acl_id), ("status", status)):
            pulumi.set(__self__, _key, _value)

    @property
    @pulumi.getter(name="aclId")
    def acl_id(self) -> str:
        """The ID of the ACL (access policy group) bound to the listener."""
        return pulumi.get(self, "acl_id")

    @property
    @pulumi.getter
    def status(self) -> str:
        """
        The association status between the ACL and the listener. Valid values: `Associating` (the ACL is being associated with the listener), `Associated` (the ACL is associated with the listener) or `Dissociating` (the ACL is being disassociated from the listener).
        """
        return pulumi.get(self, "status")
@pulumi.output_type
class GetListenersListenerCertificateResult(dict):
    """A certificate entry attached to the listener."""

    def __init__(__self__, *,
                 certificate_id: str):
        """
        :param str certificate_id: The ID of the certificate.
        """
        pulumi.set(__self__, "certificate_id", certificate_id)

    @property
    @pulumi.getter(name="certificateId")
    def certificate_id(self) -> str:
        """The ID of the certificate."""
        return pulumi.get(self, "certificate_id")
@pulumi.output_type
class GetListenersListenerDefaultActionResult(dict):
    """The default action performed by a listener."""

    def __init__(__self__, *,
                 forward_group_configs: Sequence['outputs.GetListenersListenerDefaultActionForwardGroupConfigResult'],
                 type: str):
        """
        :param Sequence['GetListenersListenerDefaultActionForwardGroupConfigArgs'] forward_group_configs: The configuration of the forwarding rule action. Required when the `type` parameter is set to `ForwardGroup`.
        :param str type: The action type. The value `ForwardGroup` indicates that requests are forwarded to multiple vServer groups.
        """
        for _key, _value in (
            ("forward_group_configs", forward_group_configs),
            ("type", type),
        ):
            pulumi.set(__self__, _key, _value)

    @property
    @pulumi.getter(name="forwardGroupConfigs")
    def forward_group_configs(self) -> Sequence['outputs.GetListenersListenerDefaultActionForwardGroupConfigResult']:
        """The configuration of the forwarding rule action. Required when the `type` parameter is set to `ForwardGroup`."""
        return pulumi.get(self, "forward_group_configs")

    @property
    @pulumi.getter
    def type(self) -> str:
        """The action type. The value `ForwardGroup` indicates that requests are forwarded to multiple vServer groups."""
        return pulumi.get(self, "type")
@pulumi.output_type
class GetListenersListenerDefaultActionForwardGroupConfigResult(dict):
    """Forward-group configuration of a listener's default action."""

    def __init__(__self__, *,
                 server_group_tuples: Sequence['outputs.GetListenersListenerDefaultActionForwardGroupConfigServerGroupTupleResult']):
        """
        :param Sequence['GetListenersListenerDefaultActionForwardGroupConfigServerGroupTupleArgs'] server_group_tuples: The destination server group to which requests are forwarded.
        """
        pulumi.set(__self__, "server_group_tuples", server_group_tuples)

    @property
    @pulumi.getter(name="serverGroupTuples")
    def server_group_tuples(self) -> Sequence['outputs.GetListenersListenerDefaultActionForwardGroupConfigServerGroupTupleResult']:
        """The destination server group to which requests are forwarded."""
        return pulumi.get(self, "server_group_tuples")
@pulumi.output_type
class GetListenersListenerDefaultActionForwardGroupConfigServerGroupTupleResult(dict):
    """One destination server-group entry of a forward-group action."""

    def __init__(__self__, *,
                 server_group_id: str):
        """
        :param str server_group_id: The ID of the destination server group to which requests are forwarded.
        """
        pulumi.set(__self__, "server_group_id", server_group_id)

    @property
    @pulumi.getter(name="serverGroupId")
    def server_group_id(self) -> str:
        """The ID of the destination server group to which requests are forwarded."""
        return pulumi.get(self, "server_group_id")
@pulumi.output_type
class GetListenersListenerQuicConfigResult(dict):
    """QUIC upgrade configuration of a listener."""

    def __init__(__self__, *,
                 quic_listener_id: str,
                 quic_upgrade_enabled: bool):
        """
        :param str quic_listener_id: The ID of the associated QUIC listener. Required when `quic_upgrade_enabled` is true; only HTTPS listeners support this parameter.
        :param bool quic_upgrade_enabled: Indicates whether QUIC upgrade is enabled. Valid values: true and false. Default value: false.
        """
        for _key, _value in (
            ("quic_listener_id", quic_listener_id),
            ("quic_upgrade_enabled", quic_upgrade_enabled),
        ):
            pulumi.set(__self__, _key, _value)

    @property
    @pulumi.getter(name="quicListenerId")
    def quic_listener_id(self) -> str:
        """The ID of the associated QUIC listener. Required when `quic_upgrade_enabled` is true; only HTTPS listeners support this parameter."""
        return pulumi.get(self, "quic_listener_id")

    @property
    @pulumi.getter(name="quicUpgradeEnabled")
    def quic_upgrade_enabled(self) -> bool:
        """Indicates whether QUIC upgrade is enabled. Valid values: true and false. Default value: false."""
        return pulumi.get(self, "quic_upgrade_enabled")
@pulumi.output_type
class GetListenersListenerXforwardedForConfigResult(dict):
    """X-Forwarded-* header configuration reported for an ALB listener."""
    def __init__(__self__, *,
                 xforwardedforclientcert_issuerdnalias: str,
                 xforwardedforclientcert_issuerdnenabled: bool,
                 xforwardedforclientcertclientverifyalias: str,
                 xforwardedforclientcertclientverifyenabled: bool,
                 xforwardedforclientcertfingerprintalias: str,
                 xforwardedforclientcertfingerprintenabled: bool,
                 xforwardedforclientcertsubjectdnalias: str,
                 xforwardedforclientcertsubjectdnenabled: bool,
                 xforwardedforclientsrcportenabled: bool,
                 xforwardedforenabled: bool,
                 xforwardedforprotoenabled: bool,
                 xforwardedforslbidenabled: bool,
                 xforwardedforslbportenabled: bool):
        """
        :param str xforwardedforclientcert_issuerdnalias: The name of the custom header. Takes effect only when `xforwardedforclientcert_issuerdnenabled` is set to true.
        :param bool xforwardedforclientcert_issuerdnenabled: Indicates whether the `X-Forwarded-Clientcert-issuerdn` header field is used to retrieve the issuer information of the client certificate attached to the load balancer instance.
        :param str xforwardedforclientcertclientverifyalias: The name of the custom header. Takes effect only when `xforwardedforclientcertclientverifyenabled` is set to true. The name must be 1 to 40 characters in length, and can contain letters, hyphens (-), underscores (_), and digits.
        :param bool xforwardedforclientcertclientverifyenabled: Indicates whether the `X-Forwarded-Clientcert-clientverify` header field is used to retrieve the verification result of the client certificate.
        :param str xforwardedforclientcertfingerprintalias: The name of the custom header. Takes effect only when `xforwardedforclientcertfingerprintenabled` is set to true. The name must be 1 to 40 characters in length, and can contain letters, hyphens (-), underscores (_), and digits.
        :param bool xforwardedforclientcertfingerprintenabled: Indicates whether the `X-Forwarded-Clientcert-fingerprint` header field is used to retrieve the fingerprint of the client certificate.
        :param str xforwardedforclientcertsubjectdnalias: The name of the custom header. This parameter is valid only if `xforwardedforclientcertsubjectdnenabled` is set to true. The name must be 1 to 40 characters in length, and can contain letters, hyphens (-), underscores (_), and digits.
        :param bool xforwardedforclientcertsubjectdnenabled: Specifies whether to use the `X-Forwarded-Clientcert-subjectdn` header field to obtain information about the owner of the ALB client certificate. Valid values: true and false. Default value: false.
        :param bool xforwardedforclientsrcportenabled: Indicates whether the X-Forwarded-Client-Port header field is used to retrieve the client's port.
        :param bool xforwardedforenabled: Indicates whether the X-Forwarded-For header field is used to obtain the real IP address of the client. Valid values: true and false. Default value: true.
        :param bool xforwardedforprotoenabled: Indicates whether the X-Forwarded-Proto header field is used to retrieve the listener protocol of the load balancer instance.
        :param bool xforwardedforslbidenabled: Indicates whether the SLB-ID header field is used to obtain the ID of the ALB instance. Valid values: true and false. Default value: false.
        :param bool xforwardedforslbportenabled: Indicates whether the X-Forwarded-Port header field is used to retrieve the listening port of the load balancer instance.
        """
        pulumi.set(__self__, "xforwardedforclientcert_issuerdnalias", xforwardedforclientcert_issuerdnalias)
        pulumi.set(__self__, "xforwardedforclientcert_issuerdnenabled", xforwardedforclientcert_issuerdnenabled)
        pulumi.set(__self__, "xforwardedforclientcertclientverifyalias", xforwardedforclientcertclientverifyalias)
        pulumi.set(__self__, "xforwardedforclientcertclientverifyenabled", xforwardedforclientcertclientverifyenabled)
        pulumi.set(__self__, "xforwardedforclientcertfingerprintalias", xforwardedforclientcertfingerprintalias)
        pulumi.set(__self__, "xforwardedforclientcertfingerprintenabled", xforwardedforclientcertfingerprintenabled)
        pulumi.set(__self__, "xforwardedforclientcertsubjectdnalias", xforwardedforclientcertsubjectdnalias)
        pulumi.set(__self__, "xforwardedforclientcertsubjectdnenabled", xforwardedforclientcertsubjectdnenabled)
        pulumi.set(__self__, "xforwardedforclientsrcportenabled", xforwardedforclientsrcportenabled)
        pulumi.set(__self__, "xforwardedforenabled", xforwardedforenabled)
        pulumi.set(__self__, "xforwardedforprotoenabled", xforwardedforprotoenabled)
        pulumi.set(__self__, "xforwardedforslbidenabled", xforwardedforslbidenabled)
        pulumi.set(__self__, "xforwardedforslbportenabled", xforwardedforslbportenabled)
    @property
    @pulumi.getter(name="xforwardedforclientcertIssuerdnalias")
    def xforwardedforclientcert_issuerdnalias(self) -> str:
        """
        The name of the custom header. Takes effect only when `xforwardedforclientcert_issuerdnenabled` is set to true.
        """
        return pulumi.get(self, "xforwardedforclientcert_issuerdnalias")
    @property
    @pulumi.getter(name="xforwardedforclientcertIssuerdnenabled")
    def xforwardedforclientcert_issuerdnenabled(self) -> bool:
        """
        Indicates whether the `X-Forwarded-Clientcert-issuerdn` header field is used to retrieve the issuer information of the client certificate attached to the load balancer instance.
        """
        return pulumi.get(self, "xforwardedforclientcert_issuerdnenabled")
    @property
    @pulumi.getter
    def xforwardedforclientcertclientverifyalias(self) -> str:
        """
        The name of the custom header. Takes effect only when `xforwardedforclientcertclientverifyenabled` is set to true. The name must be 1 to 40 characters in length, and can contain letters, hyphens (-), underscores (_), and digits.
        """
        return pulumi.get(self, "xforwardedforclientcertclientverifyalias")
    @property
    @pulumi.getter
    def xforwardedforclientcertclientverifyenabled(self) -> bool:
        """
        Indicates whether the `X-Forwarded-Clientcert-clientverify` header field is used to retrieve the verification result of the client certificate.
        """
        return pulumi.get(self, "xforwardedforclientcertclientverifyenabled")
    @property
    @pulumi.getter
    def xforwardedforclientcertfingerprintalias(self) -> str:
        """
        The name of the custom header. Takes effect only when `xforwardedforclientcertfingerprintenabled` is set to true. The name must be 1 to 40 characters in length, and can contain letters, hyphens (-), underscores (_), and digits.
        """
        return pulumi.get(self, "xforwardedforclientcertfingerprintalias")
    @property
    @pulumi.getter
    def xforwardedforclientcertfingerprintenabled(self) -> bool:
        """
        Indicates whether the `X-Forwarded-Clientcert-fingerprint` header field is used to retrieve the fingerprint of the client certificate.
        """
        return pulumi.get(self, "xforwardedforclientcertfingerprintenabled")
    @property
    @pulumi.getter
    def xforwardedforclientcertsubjectdnalias(self) -> str:
        """
        The name of the custom header. This parameter is valid only if `xforwardedforclientcertsubjectdnenabled` is set to true. The name must be 1 to 40 characters in length, and can contain letters, hyphens (-), underscores (_), and digits.
        """
        return pulumi.get(self, "xforwardedforclientcertsubjectdnalias")
    @property
    @pulumi.getter
    def xforwardedforclientcertsubjectdnenabled(self) -> bool:
        """
        Specifies whether to use the `X-Forwarded-Clientcert-subjectdn` header field to obtain information about the owner of the ALB client certificate. Valid values: true and false. Default value: false.
        """
        return pulumi.get(self, "xforwardedforclientcertsubjectdnenabled")
    @property
    @pulumi.getter
    def xforwardedforclientsrcportenabled(self) -> bool:
        """
        Indicates whether the X-Forwarded-Client-Port header field is used to retrieve the client's port.
        """
        return pulumi.get(self, "xforwardedforclientsrcportenabled")
    @property
    @pulumi.getter
    def xforwardedforenabled(self) -> bool:
        """
        Indicates whether the X-Forwarded-For header field is used to obtain the real IP address of the client. Valid values: true and false. Default value: true.
        """
        return pulumi.get(self, "xforwardedforenabled")
    @property
    @pulumi.getter
    def xforwardedforprotoenabled(self) -> bool:
        """
        Indicates whether the X-Forwarded-Proto header field is used to retrieve the listener protocol of the load balancer instance.
        """
        return pulumi.get(self, "xforwardedforprotoenabled")
    @property
    @pulumi.getter
    def xforwardedforslbidenabled(self) -> bool:
        """
        Indicates whether the SLB-ID header field is used to obtain the ID of the ALB instance. Valid values: true and false. Default value: false.
        """
        return pulumi.get(self, "xforwardedforslbidenabled")
    @property
    @pulumi.getter
    def xforwardedforslbportenabled(self) -> bool:
        """
        Indicates whether the X-Forwarded-Port header field is used to retrieve the listening port of the load balancer instance.
        """
        return pulumi.get(self, "xforwardedforslbportenabled")
@pulumi.output_type
class GetLoadBalancersBalancerResult(dict):
    """A single ALB load balancer entry returned by the GetLoadBalancers data source."""
    def __init__(__self__, *,
                 access_log_configs: Sequence['outputs.GetLoadBalancersBalancerAccessLogConfigResult'],
                 address_allocated_mode: str,
                 address_type: str,
                 bandwidth_package_id: str,
                 create_time: str,
                 deletion_protection_configs: Sequence['outputs.GetLoadBalancersBalancerDeletionProtectionConfigResult'],
                 dns_name: str,
                 id: str,
                 load_balancer_billing_configs: Sequence['outputs.GetLoadBalancersBalancerLoadBalancerBillingConfigResult'],
                 load_balancer_bussiness_status: str,
                 load_balancer_edition: str,
                 load_balancer_id: str,
                 load_balancer_name: str,
                 load_balancer_operation_locks: Sequence['outputs.GetLoadBalancersBalancerLoadBalancerOperationLockResult'],
                 modification_protection_configs: Sequence['outputs.GetLoadBalancersBalancerModificationProtectionConfigResult'],
                 resource_group_id: str,
                 status: str,
                 tags: Mapping[str, Any],
                 vpc_id: str,
                 zone_mappings: Sequence['outputs.GetLoadBalancersBalancerZoneMappingResult']):
        """
        :param Sequence['GetLoadBalancersBalancerAccessLogConfigArgs'] access_log_configs: The Access Logging Configuration Structure.
        :param str address_allocated_mode: The method in which IP addresses are assigned. Valid values: Fixed: The ALB instance
               uses a fixed IP address. Dynamic (default): An IP address is dynamically assigned to each zone of the ALB
               instance.
        :param str address_type: The type of IP address that the ALB instance uses to provide services.
        :param str bandwidth_package_id: The ID of the EIP bandwidth plan which is associated with an ALB instance that uses a
               public IP address.
        :param str create_time: The creation time of the resource.
        :param Sequence['GetLoadBalancersBalancerDeletionProtectionConfigArgs'] deletion_protection_configs: Remove the Protection Configuration.
        :param str dns_name: DNS Domain Name.
        :param str id: The ID of the Load Balancer.
        :param Sequence['GetLoadBalancersBalancerLoadBalancerBillingConfigArgs'] load_balancer_billing_configs: The configuration of the billing method.
        :param str load_balancer_bussiness_status: The business status of the load balancer. Valid Values: `Abnormal` and `Normal`.
        :param str load_balancer_edition: The edition of the ALB instance.
        :param str load_balancer_id: The first ID of the resource.
        :param str load_balancer_name: The name of the resource.
        :param Sequence['GetLoadBalancersBalancerLoadBalancerOperationLockArgs'] load_balancer_operation_locks: The Load Balancing Operations Lock Configuration.
        :param Sequence['GetLoadBalancersBalancerModificationProtectionConfigArgs'] modification_protection_configs: Modify the Protection Configuration.
        :param str resource_group_id: The ID of the resource group.
        :param str status: The load balancer status. Valid values: `Active`, `Configuring`, `CreateFailed`, `Inactive` and `Provisioning`.
        :param Mapping[str, Any] tags: The tag of the resource.
        :param str vpc_id: The ID of the virtual private cloud (VPC) where the ALB instance is deployed.
        :param Sequence['GetLoadBalancersBalancerZoneMappingArgs'] zone_mappings: The zones and vSwitches. You must specify at least two zones.
        """
        pulumi.set(__self__, "access_log_configs", access_log_configs)
        pulumi.set(__self__, "address_allocated_mode", address_allocated_mode)
        pulumi.set(__self__, "address_type", address_type)
        pulumi.set(__self__, "bandwidth_package_id", bandwidth_package_id)
        pulumi.set(__self__, "create_time", create_time)
        pulumi.set(__self__, "deletion_protection_configs", deletion_protection_configs)
        pulumi.set(__self__, "dns_name", dns_name)
        pulumi.set(__self__, "id", id)
        pulumi.set(__self__, "load_balancer_billing_configs", load_balancer_billing_configs)
        # NOTE: "bussiness" (sic) is the upstream API field name and must be preserved.
        pulumi.set(__self__, "load_balancer_bussiness_status", load_balancer_bussiness_status)
        pulumi.set(__self__, "load_balancer_edition", load_balancer_edition)
        pulumi.set(__self__, "load_balancer_id", load_balancer_id)
        pulumi.set(__self__, "load_balancer_name", load_balancer_name)
        pulumi.set(__self__, "load_balancer_operation_locks", load_balancer_operation_locks)
        pulumi.set(__self__, "modification_protection_configs", modification_protection_configs)
        pulumi.set(__self__, "resource_group_id", resource_group_id)
        pulumi.set(__self__, "status", status)
        pulumi.set(__self__, "tags", tags)
        pulumi.set(__self__, "vpc_id", vpc_id)
        pulumi.set(__self__, "zone_mappings", zone_mappings)
    @property
    @pulumi.getter(name="accessLogConfigs")
    def access_log_configs(self) -> Sequence['outputs.GetLoadBalancersBalancerAccessLogConfigResult']:
        """
        The Access Logging Configuration Structure.
        """
        return pulumi.get(self, "access_log_configs")
    @property
    @pulumi.getter(name="addressAllocatedMode")
    def address_allocated_mode(self) -> str:
        """
        The method in which IP addresses are assigned. Valid values: Fixed: The ALB instance
        uses a fixed IP address. Dynamic (default): An IP address is dynamically assigned to each zone of the ALB
        instance.
        """
        return pulumi.get(self, "address_allocated_mode")
    @property
    @pulumi.getter(name="addressType")
    def address_type(self) -> str:
        """
        The type of IP address that the ALB instance uses to provide services.
        """
        return pulumi.get(self, "address_type")
    @property
    @pulumi.getter(name="bandwidthPackageId")
    def bandwidth_package_id(self) -> str:
        """
        The ID of the EIP bandwidth plan which is associated with an ALB instance that uses a
        public IP address.
        """
        return pulumi.get(self, "bandwidth_package_id")
    @property
    @pulumi.getter(name="createTime")
    def create_time(self) -> str:
        """
        The creation time of the resource.
        """
        return pulumi.get(self, "create_time")
    @property
    @pulumi.getter(name="deletionProtectionConfigs")
    def deletion_protection_configs(self) -> Sequence['outputs.GetLoadBalancersBalancerDeletionProtectionConfigResult']:
        """
        Remove the Protection Configuration.
        """
        return pulumi.get(self, "deletion_protection_configs")
    @property
    @pulumi.getter(name="dnsName")
    def dns_name(self) -> str:
        """
        DNS Domain Name.
        """
        return pulumi.get(self, "dns_name")
    @property
    @pulumi.getter
    def id(self) -> str:
        """
        The ID of the Load Balancer.
        """
        return pulumi.get(self, "id")
    @property
    @pulumi.getter(name="loadBalancerBillingConfigs")
    def load_balancer_billing_configs(self) -> Sequence['outputs.GetLoadBalancersBalancerLoadBalancerBillingConfigResult']:
        """
        The configuration of the billing method.
        """
        return pulumi.get(self, "load_balancer_billing_configs")
    @property
    @pulumi.getter(name="loadBalancerBussinessStatus")
    def load_balancer_bussiness_status(self) -> str:
        """
        The business status of the load balancer. Valid Values: `Abnormal` and `Normal`.
        """
        return pulumi.get(self, "load_balancer_bussiness_status")
    @property
    @pulumi.getter(name="loadBalancerEdition")
    def load_balancer_edition(self) -> str:
        """
        The edition of the ALB instance.
        """
        return pulumi.get(self, "load_balancer_edition")
    @property
    @pulumi.getter(name="loadBalancerId")
    def load_balancer_id(self) -> str:
        """
        The first ID of the resource.
        """
        return pulumi.get(self, "load_balancer_id")
    @property
    @pulumi.getter(name="loadBalancerName")
    def load_balancer_name(self) -> str:
        """
        The name of the resource.
        """
        return pulumi.get(self, "load_balancer_name")
    @property
    @pulumi.getter(name="loadBalancerOperationLocks")
    def load_balancer_operation_locks(self) -> Sequence['outputs.GetLoadBalancersBalancerLoadBalancerOperationLockResult']:
        """
        The Load Balancing Operations Lock Configuration.
        """
        return pulumi.get(self, "load_balancer_operation_locks")
    @property
    @pulumi.getter(name="modificationProtectionConfigs")
    def modification_protection_configs(self) -> Sequence['outputs.GetLoadBalancersBalancerModificationProtectionConfigResult']:
        """
        Modify the Protection Configuration.
        """
        return pulumi.get(self, "modification_protection_configs")
    @property
    @pulumi.getter(name="resourceGroupId")
    def resource_group_id(self) -> str:
        """
        The ID of the resource group.
        """
        return pulumi.get(self, "resource_group_id")
    @property
    @pulumi.getter
    def status(self) -> str:
        """
        The load balancer status. Valid values: `Active`, `Configuring`, `CreateFailed`, `Inactive` and `Provisioning`.
        """
        return pulumi.get(self, "status")
    @property
    @pulumi.getter
    def tags(self) -> Mapping[str, Any]:
        """
        The tag of the resource.
        """
        return pulumi.get(self, "tags")
    @property
    @pulumi.getter(name="vpcId")
    def vpc_id(self) -> str:
        """
        The ID of the virtual private cloud (VPC) where the ALB instance is deployed.
        """
        return pulumi.get(self, "vpc_id")
    @property
    @pulumi.getter(name="zoneMappings")
    def zone_mappings(self) -> Sequence['outputs.GetLoadBalancersBalancerZoneMappingResult']:
        """
        The zones and vSwitches. You must specify at least two zones.
        """
        return pulumi.get(self, "zone_mappings")
@pulumi.output_type
class GetLoadBalancersBalancerAccessLogConfigResult(dict):
    """Access-log shipping configuration of an ALB instance."""

    def __init__(__self__, *,
                 log_project: str,
                 log_store: str):
        """
        :param str log_project: The log service that access logs are shipped to.
        :param str log_store: The logstore that access logs are shipped to.
        """
        for _key, _value in (
            ("log_project", log_project),
            ("log_store", log_store),
        ):
            pulumi.set(__self__, _key, _value)

    @property
    @pulumi.getter(name="logProject")
    def log_project(self) -> str:
        """The log service that access logs are shipped to."""
        return pulumi.get(self, "log_project")

    @property
    @pulumi.getter(name="logStore")
    def log_store(self) -> str:
        """The logstore that access logs are shipped to."""
        return pulumi.get(self, "log_store")
@pulumi.output_type
class GetLoadBalancersBalancerDeletionProtectionConfigResult(dict):
    """Deletion-protection settings of an ALB instance."""

    def __init__(__self__, *,
                 enabled: bool,
                 enabled_time: str):
        """
        :param bool enabled: The deletion protection status.
        :param str enabled_time: The time when deletion protection was turned on, in Greenwich Mean Time, in the yyyy-MM-ddTHH:mm:ssZ format.
        """
        for _key, _value in (
            ("enabled", enabled),
            ("enabled_time", enabled_time),
        ):
            pulumi.set(__self__, _key, _value)

    @property
    @pulumi.getter
    def enabled(self) -> bool:
        """The deletion protection status."""
        return pulumi.get(self, "enabled")

    @property
    @pulumi.getter(name="enabledTime")
    def enabled_time(self) -> str:
        """The time when deletion protection was turned on, in Greenwich Mean Time, in the yyyy-MM-ddTHH:mm:ssZ format."""
        return pulumi.get(self, "enabled_time")
@pulumi.output_type
class GetLoadBalancersBalancerLoadBalancerBillingConfigResult(dict):
    """Billing configuration of an ALB instance."""

    def __init__(__self__, *,
                 pay_type: str):
        """
        :param str pay_type: The billing method of the ALB instance. Valid value: `PayAsYouGo`.
        """
        pulumi.set(__self__, "pay_type", pay_type)

    @property
    @pulumi.getter(name="payType")
    def pay_type(self) -> str:
        """The billing method of the ALB instance. Valid value: `PayAsYouGo`."""
        return pulumi.get(self, "pay_type")
@pulumi.output_type
class GetLoadBalancersBalancerLoadBalancerOperationLockResult(dict):
    """An operation lock placed on an ALB instance."""

    def __init__(__self__, *,
                 lock_reason: str,
                 lock_type: str):
        """
        :param str lock_reason: The reason for the lock; per the upstream docs this takes effect when the load balancer business status is in an exception state.
        :param str lock_type: The type of the lock. Valid Values: `securitylocked`,`relatedresourcelocked`, `financiallocked`, and `residuallocked`.
        """
        for _key, _value in (
            ("lock_reason", lock_reason),
            ("lock_type", lock_type),
        ):
            pulumi.set(__self__, _key, _value)

    @property
    @pulumi.getter(name="lockReason")
    def lock_reason(self) -> str:
        """The reason for the lock; per the upstream docs this takes effect when the load balancer business status is in an exception state."""
        return pulumi.get(self, "lock_reason")

    @property
    @pulumi.getter(name="lockType")
    def lock_type(self) -> str:
        """The type of the lock. Valid Values: `securitylocked`,`relatedresourcelocked`, `financiallocked`, and `residuallocked`."""
        return pulumi.get(self, "lock_type")
@pulumi.output_type
class GetLoadBalancersBalancerModificationProtectionConfigResult(dict):
    """Modification-protection settings of an ALB instance."""

    def __init__(__self__, *,
                 reason: str,
                 status: str):
        """
        :param str reason: The reason for modification protection. It must be 2 to 128 characters in length, start with a letter, and can contain letters, digits, periods, underscores, and hyphens. Required only if `ModificationProtectionStatus` is set to `ConsoleProtection`.
        :param str status: The load balancer status. Valid values: `Active`, `Configuring`, `CreateFailed`, `Inactive` and `Provisioning`.
        """
        for _key, _value in (("reason", reason), ("status", status)):
            pulumi.set(__self__, _key, _value)

    @property
    @pulumi.getter
    def reason(self) -> str:
        """The reason for modification protection. It must be 2 to 128 characters in length, start with a letter, and can contain letters, digits, periods, underscores, and hyphens. Required only if `ModificationProtectionStatus` is set to `ConsoleProtection`."""
        return pulumi.get(self, "reason")

    @property
    @pulumi.getter
    def status(self) -> str:
        """The load balancer status. Valid values: `Active`, `Configuring`, `CreateFailed`, `Inactive` and `Provisioning`."""
        return pulumi.get(self, "status")
@pulumi.output_type
class GetLoadBalancersBalancerZoneMappingResult(dict):
    """A zone/vSwitch mapping of an ALB instance."""

    def __init__(__self__, *,
                 load_balancer_addresses: Sequence['outputs.GetLoadBalancersBalancerZoneMappingLoadBalancerAddressResult'],
                 vswitch_id: str,
                 zone_id: str):
        """
        :param Sequence['GetLoadBalancersBalancerZoneMappingLoadBalancerAddressArgs'] load_balancer_addresses: The load balancer address entries for this zone.
        :param str vswitch_id: The ID of the vSwitch that corresponds to the zone. Each zone can use only one vSwitch and subnet.
        :param str zone_id: The ID of the zone to which the ALB instance belongs.
        """
        for _key, _value in (
            ("load_balancer_addresses", load_balancer_addresses),
            ("vswitch_id", vswitch_id),
            ("zone_id", zone_id),
        ):
            pulumi.set(__self__, _key, _value)

    @property
    @pulumi.getter(name="loadBalancerAddresses")
    def load_balancer_addresses(self) -> Sequence['outputs.GetLoadBalancersBalancerZoneMappingLoadBalancerAddressResult']:
        """The load balancer address entries for this zone."""
        return pulumi.get(self, "load_balancer_addresses")

    @property
    @pulumi.getter(name="vswitchId")
    def vswitch_id(self) -> str:
        """The ID of the vSwitch that corresponds to the zone. Each zone can use only one vSwitch and subnet."""
        return pulumi.get(self, "vswitch_id")

    @property
    @pulumi.getter(name="zoneId")
    def zone_id(self) -> str:
        """The ID of the zone to which the ALB instance belongs."""
        return pulumi.get(self, "zone_id")
@pulumi.output_type
class GetLoadBalancersBalancerZoneMappingLoadBalancerAddressResult(dict):
    """A single address entry of a zone mapping."""

    def __init__(__self__, *,
                 address: str):
        """
        :param str address: The address value.
        """
        pulumi.set(__self__, "address", address)

    @property
    @pulumi.getter
    def address(self) -> str:
        """The address value."""
        return pulumi.get(self, "address")
@pulumi.output_type
class GetRulesRuleResult(dict):
    """A single forwarding rule entry returned by the GetRules data source."""

    def __init__(__self__, *,
                 id: str,
                 listener_id: str,
                 load_balancer_id: str,
                 priority: int,
                 rule_actions: Sequence['outputs.GetRulesRuleRuleActionResult'],
                 rule_conditions: Sequence['outputs.GetRulesRuleRuleConditionResult'],
                 rule_id: str,
                 rule_name: str,
                 status: str):
        """
        :param str id: The ID of the Rule.
        :param str listener_id: The ID of the listener to which the forwarding rule belongs.
        :param str load_balancer_id: The ID of the Application Load Balancer (ALB) instance to which the forwarding rule belongs.
        :param int priority: The priority of the rule, from 1 to 10000; a smaller value means a higher priority. Note: the priority of each rule within the same listener must be unique.
        :param Sequence['GetRulesRuleRuleActionArgs'] rule_actions: The actions of the forwarding rules.
        :param Sequence['GetRulesRuleRuleConditionArgs'] rule_conditions: The conditions of the forwarding rule.
        :param str rule_id: The first ID of the resource.
        :param str rule_name: The name of the forwarding rule; 2 to 128 characters, starting with a letter, allowing letters, digits, periods (.), underscores (_), and hyphens (-).
        :param str status: The status of the resource.
        """
        for _key, _value in (
            ("id", id),
            ("listener_id", listener_id),
            ("load_balancer_id", load_balancer_id),
            ("priority", priority),
            ("rule_actions", rule_actions),
            ("rule_conditions", rule_conditions),
            ("rule_id", rule_id),
            ("rule_name", rule_name),
            ("status", status),
        ):
            pulumi.set(__self__, _key, _value)

    @property
    @pulumi.getter
    def id(self) -> str:
        """The ID of the Rule."""
        return pulumi.get(self, "id")

    @property
    @pulumi.getter(name="listenerId")
    def listener_id(self) -> str:
        """The ID of the listener to which the forwarding rule belongs."""
        return pulumi.get(self, "listener_id")

    @property
    @pulumi.getter(name="loadBalancerId")
    def load_balancer_id(self) -> str:
        """The ID of the Application Load Balancer (ALB) instance to which the forwarding rule belongs."""
        return pulumi.get(self, "load_balancer_id")

    @property
    @pulumi.getter
    def priority(self) -> int:
        """The priority of the rule, from 1 to 10000; a smaller value means a higher priority. Note: the priority of each rule within the same listener must be unique."""
        return pulumi.get(self, "priority")

    @property
    @pulumi.getter(name="ruleActions")
    def rule_actions(self) -> Sequence['outputs.GetRulesRuleRuleActionResult']:
        """The actions of the forwarding rules."""
        return pulumi.get(self, "rule_actions")

    @property
    @pulumi.getter(name="ruleConditions")
    def rule_conditions(self) -> Sequence['outputs.GetRulesRuleRuleConditionResult']:
        """The conditions of the forwarding rule."""
        return pulumi.get(self, "rule_conditions")

    @property
    @pulumi.getter(name="ruleId")
    def rule_id(self) -> str:
        """The first ID of the resource."""
        return pulumi.get(self, "rule_id")

    @property
    @pulumi.getter(name="ruleName")
    def rule_name(self) -> str:
        """The name of the forwarding rule; 2 to 128 characters, starting with a letter, allowing letters, digits, periods (.), underscores (_), and hyphens (-)."""
        return pulumi.get(self, "rule_name")

    @property
    @pulumi.getter
    def status(self) -> str:
        """The status of the resource."""
        return pulumi.get(self, "status")
@pulumi.output_type
class GetRulesRuleRuleActionResult(dict):
    """One action of an ALB forwarding rule; only the config list matching `type` is populated."""

    def __init__(__self__, *,
                 fixed_response_configs: Sequence['outputs.GetRulesRuleRuleActionFixedResponseConfigResult'],
                 forward_group_configs: Sequence['outputs.GetRulesRuleRuleActionForwardGroupConfigResult'],
                 insert_header_configs: Sequence['outputs.GetRulesRuleRuleActionInsertHeaderConfigResult'],
                 order: int,
                 redirect_configs: Sequence['outputs.GetRulesRuleRuleActionRedirectConfigResult'],
                 rewrite_configs: Sequence['outputs.GetRulesRuleRuleActionRewriteConfigResult'],
                 type: str):
        """
        :param Sequence['GetRulesRuleRuleActionFixedResponseConfigArgs'] fixed_response_configs: The configuration of the fixed response.
        :param Sequence['GetRulesRuleRuleActionForwardGroupConfigArgs'] forward_group_configs: The configurations of the destination server groups.
        :param Sequence['GetRulesRuleRuleActionInsertHeaderConfigArgs'] insert_header_configs: The configuration of the inserted header field.
        :param int order: The order of the forwarding rule actions. Valid values:1 to 50000. The actions are performed in ascending order. You cannot leave this parameter empty. Each value must be unique.
        :param Sequence['GetRulesRuleRuleActionRedirectConfigArgs'] redirect_configs: The configuration of the external redirect action.
        :param Sequence['GetRulesRuleRuleActionRewriteConfigArgs'] rewrite_configs: The redirect action within ALB.
        :param str type: The type of the forwarding rule.
        """
        pulumi.set(__self__, "fixed_response_configs", fixed_response_configs)
        pulumi.set(__self__, "forward_group_configs", forward_group_configs)
        pulumi.set(__self__, "insert_header_configs", insert_header_configs)
        pulumi.set(__self__, "order", order)
        pulumi.set(__self__, "redirect_configs", redirect_configs)
        pulumi.set(__self__, "rewrite_configs", rewrite_configs)
        pulumi.set(__self__, "type", type)

    @property
    @pulumi.getter(name="fixedResponseConfigs")
    def fixed_response_configs(self) -> Sequence['outputs.GetRulesRuleRuleActionFixedResponseConfigResult']:
        """
        The configuration of the fixed response.
        """
        return pulumi.get(self, "fixed_response_configs")

    @property
    @pulumi.getter(name="forwardGroupConfigs")
    def forward_group_configs(self) -> Sequence['outputs.GetRulesRuleRuleActionForwardGroupConfigResult']:
        """
        The configurations of the destination server groups.
        """
        return pulumi.get(self, "forward_group_configs")

    @property
    @pulumi.getter(name="insertHeaderConfigs")
    def insert_header_configs(self) -> Sequence['outputs.GetRulesRuleRuleActionInsertHeaderConfigResult']:
        """
        The configuration of the inserted header field.
        """
        return pulumi.get(self, "insert_header_configs")

    @property
    @pulumi.getter
    def order(self) -> int:
        """
        The order of the forwarding rule actions. Valid values:1 to 50000. The actions are performed in ascending order. You cannot leave this parameter empty. Each value must be unique.
        """
        return pulumi.get(self, "order")

    @property
    @pulumi.getter(name="redirectConfigs")
    def redirect_configs(self) -> Sequence['outputs.GetRulesRuleRuleActionRedirectConfigResult']:
        """
        The configuration of the external redirect action.
        """
        return pulumi.get(self, "redirect_configs")

    @property
    @pulumi.getter(name="rewriteConfigs")
    def rewrite_configs(self) -> Sequence['outputs.GetRulesRuleRuleActionRewriteConfigResult']:
        """
        The redirect action within ALB.
        """
        return pulumi.get(self, "rewrite_configs")

    @property
    @pulumi.getter
    def type(self) -> str:
        """
        The type of the forwarding rule.
        """
        return pulumi.get(self, "type")
@pulumi.output_type
class GetRulesRuleRuleActionFixedResponseConfigResult(dict):
    """Configuration of a fixed-response action of an ALB forwarding rule."""

    def __init__(__self__, *,
                 content: str,
                 content_type: str,
                 http_code: str):
        """
        :param str content: The fixed response. The response cannot exceed 1 KB in size and can contain only ASCII characters.
        :param str content_type: The format of the fixed response. Valid values: text/plain, text/css, text/html, application/javascript, and application/json.
        :param str http_code: The HTTP status code of the fixed response.
            NOTE(review): upstream docs reuse the redirect description here
            ("Valid values:301, 302, 303, 307, and 308"); for a fixed response
            these look copy-pasted — confirm against the ALB API reference.
        """
        pulumi.set(__self__, "content", content)
        pulumi.set(__self__, "content_type", content_type)
        pulumi.set(__self__, "http_code", http_code)

    @property
    @pulumi.getter
    def content(self) -> str:
        """
        The fixed response. The response cannot exceed 1 KB in size and can contain only ASCII characters.
        """
        return pulumi.get(self, "content")

    @property
    @pulumi.getter(name="contentType")
    def content_type(self) -> str:
        """
        The format of the fixed response. Valid values: text/plain, text/css, text/html, application/javascript, and application/json.
        """
        return pulumi.get(self, "content_type")

    @property
    @pulumi.getter(name="httpCode")
    def http_code(self) -> str:
        """
        The HTTP status code of the fixed response. NOTE(review): upstream docs
        describe redirect codes (301-308) here, likely copy-pasted — verify.
        """
        return pulumi.get(self, "http_code")
@pulumi.output_type
class GetRulesRuleRuleActionForwardGroupConfigResult(dict):
    """Configuration of a forward-to-server-group action of an ALB forwarding rule."""

    def __init__(__self__, *,
                 server_group_tuples: Sequence['outputs.GetRulesRuleRuleActionForwardGroupConfigServerGroupTupleResult']):
        """
        :param Sequence['GetRulesRuleRuleActionForwardGroupConfigServerGroupTupleArgs'] server_group_tuples: The destination server group to which requests are forwarded.
        """
        pulumi.set(__self__, "server_group_tuples", server_group_tuples)

    @property
    @pulumi.getter(name="serverGroupTuples")
    def server_group_tuples(self) -> Sequence['outputs.GetRulesRuleRuleActionForwardGroupConfigServerGroupTupleResult']:
        """
        The destination server group to which requests are forwarded.
        """
        return pulumi.get(self, "server_group_tuples")
@pulumi.output_type
class GetRulesRuleRuleActionForwardGroupConfigServerGroupTupleResult(dict):
    """One destination server group entry of a forward-group action."""

    def __init__(__self__, *,
                 server_group_id: str):
        """
        :param str server_group_id: The ID of the destination server group to which requests are forwarded.
        """
        pulumi.set(__self__, "server_group_id", server_group_id)

    @property
    @pulumi.getter(name="serverGroupId")
    def server_group_id(self) -> str:
        """
        The ID of the destination server group to which requests are forwarded.
        """
        return pulumi.get(self, "server_group_id")
@pulumi.output_type
class GetRulesRuleRuleActionInsertHeaderConfigResult(dict):
    """Configuration of an insert-header action of an ALB forwarding rule."""

    def __init__(__self__, *,
                 key: str,
                 value: str,
                 value_type: str):
        """
        :param str key: The key of the header field. The key must be 1 to 40 characters in length, and can contain letters, digits, hyphens (-) and underscores (_). The key does not support Cookie or Host.
        :param str value: The value must be 1 to 128 characters in length, and can contain lowercase letters, printable characters, asterisks (*), and question marks (?). The value cannot contain spaces or the following special characters: # [ ] { } \ | < > &.
        :param str value_type: Valid values: UserDefined: a custom value ReferenceHeader: uses a field of the user request header. SystemDefined: a system value.
        """
        pulumi.set(__self__, "key", key)
        pulumi.set(__self__, "value", value)
        pulumi.set(__self__, "value_type", value_type)

    @property
    @pulumi.getter
    def key(self) -> str:
        """
        The key of the header field. The key must be 1 to 40 characters in length, and can contain letters, digits, hyphens (-) and underscores (_). The key does not support Cookie or Host.
        """
        return pulumi.get(self, "key")

    @property
    @pulumi.getter
    def value(self) -> str:
        """
        The value must be 1 to 128 characters in length, and can contain lowercase letters, printable characters, asterisks (*), and question marks (?). The value cannot contain spaces or the following special characters: # [ ] { } \ | < > &.
        """
        return pulumi.get(self, "value")

    @property
    @pulumi.getter(name="valueType")
    def value_type(self) -> str:
        """
        Valid values: UserDefined: a custom value ReferenceHeader: uses a field of the user request header. SystemDefined: a system value.
        """
        return pulumi.get(self, "value_type")
@pulumi.output_type
class GetRulesRuleRuleActionRedirectConfigResult(dict):
    """Configuration of an external-redirect action of an ALB forwarding rule."""

    def __init__(__self__, *,
                 host: str,
                 http_code: str,
                 path: str,
                 port: str,
                 protocol: str,
                 query: str):
        """
        :param str host: The host name of the destination to which requests are redirected within ALB. Valid values: The host name must be 3 to 128 characters in length, and can contain letters, digits, hyphens (-), periods (.), asterisks (*), and question marks (?). The host name must contain at least one period (.), and cannot start or end with a period (.). The rightmost domain label can contain only letters, asterisks (*) and question marks (?) and cannot contain digits or hyphens (-). Other domain labels cannot start or end with a hyphen (-). You can include asterisks (*) and question marks (?) anywhere in a domain label. Default value: ${host}. You cannot use this value with other characters at the same time.
        :param str http_code: The redirect method. Valid values:301, 302, 303, 307, and 308.
        :param str path: The path to which requests are to be redirected within ALB. Valid values: The path must be 1 to 128 characters in length, and start with a forward slash (/). The path can contain letters, digits, asterisks (*), question marks (?)and the following special characters: $ - _ . + / & ~ @ :. It cannot contain the following special characters: " % # ; ! ( ) [ ] ^ , ”. The path is case-sensitive. Default value: ${path}. This value can be used only once. You can use it with a valid string.
        :param str port: The port of the destination to which requests are redirected. Valid values: 1 to 63335. Default value: ${port}. You cannot use this value together with other characters at the same time.
        :param str protocol: The protocol of the requests to be redirected. Valid values: HTTP and HTTPS. Default value: ${protocol}. You cannot use this value together with other characters at the same time. Note HTTPS listeners can redirect only HTTPS requests.
        :param str query: The query string of the request to be redirected within ALB. The query string must be 1 to 128 characters in length, can contain letters and printable characters. It cannot contain the following special characters: # [ ] { } \ | < > &. Default value: ${query}. This value can be used only once. You can use it with a valid string.
        """
        pulumi.set(__self__, "host", host)
        pulumi.set(__self__, "http_code", http_code)
        pulumi.set(__self__, "path", path)
        pulumi.set(__self__, "port", port)
        pulumi.set(__self__, "protocol", protocol)
        pulumi.set(__self__, "query", query)

    @property
    @pulumi.getter
    def host(self) -> str:
        """
        The host name of the destination to which requests are redirected within ALB. Valid values: The host name must be 3 to 128 characters in length, and can contain letters, digits, hyphens (-), periods (.), asterisks (*), and question marks (?). The host name must contain at least one period (.), and cannot start or end with a period (.). The rightmost domain label can contain only letters, asterisks (*) and question marks (?) and cannot contain digits or hyphens (-). Other domain labels cannot start or end with a hyphen (-). You can include asterisks (*) and question marks (?) anywhere in a domain label. Default value: ${host}. You cannot use this value with other characters at the same time.
        """
        return pulumi.get(self, "host")

    @property
    @pulumi.getter(name="httpCode")
    def http_code(self) -> str:
        """
        The redirect method. Valid values:301, 302, 303, 307, and 308.
        """
        return pulumi.get(self, "http_code")

    @property
    @pulumi.getter
    def path(self) -> str:
        """
        The path to which requests are to be redirected within ALB. Valid values: The path must be 1 to 128 characters in length, and start with a forward slash (/). The path can contain letters, digits, asterisks (*), question marks (?)and the following special characters: $ - _ . + / & ~ @ :. It cannot contain the following special characters: " % # ; ! ( ) [ ] ^ , ”. The path is case-sensitive. Default value: ${path}. This value can be used only once. You can use it with a valid string.
        """
        return pulumi.get(self, "path")

    @property
    @pulumi.getter
    def port(self) -> str:
        """
        The port of the destination to which requests are redirected. Valid values: 1 to 63335. Default value: ${port}. You cannot use this value together with other characters at the same time.
        """
        return pulumi.get(self, "port")

    @property
    @pulumi.getter
    def protocol(self) -> str:
        """
        The protocol of the requests to be redirected. Valid values: HTTP and HTTPS. Default value: ${protocol}. You cannot use this value together with other characters at the same time. Note HTTPS listeners can redirect only HTTPS requests.
        """
        return pulumi.get(self, "protocol")

    @property
    @pulumi.getter
    def query(self) -> str:
        """
        The query string of the request to be redirected within ALB. The query string must be 1 to 128 characters in length, can contain letters and printable characters. It cannot contain the following special characters: # [ ] { } \ | < > &. Default value: ${query}. This value can be used only once. You can use it with a valid string.
        """
        return pulumi.get(self, "query")
@pulumi.output_type
class GetRulesRuleRuleActionRewriteConfigResult(dict):
    """Configuration of a rewrite action (internal redirect) of an ALB forwarding rule."""

    def __init__(__self__, *,
                 host: str,
                 path: str,
                 query: str):
        """
        :param str host: The host name of the destination to which requests are redirected within ALB. Valid values: The host name must be 3 to 128 characters in length, and can contain letters, digits, hyphens (-), periods (.), asterisks (*), and question marks (?). The host name must contain at least one period (.), and cannot start or end with a period (.). The rightmost domain label can contain only letters, asterisks (*) and question marks (?) and cannot contain digits or hyphens (-). Other domain labels cannot start or end with a hyphen (-). You can include asterisks (*) and question marks (?) anywhere in a domain label. Default value: ${host}. You cannot use this value with other characters at the same time.
        :param str path: The path to which requests are to be redirected within ALB. Valid values: The path must be 1 to 128 characters in length, and start with a forward slash (/). The path can contain letters, digits, asterisks (*), question marks (?)and the following special characters: $ - _ . + / & ~ @ :. It cannot contain the following special characters: " % # ; ! ( ) [ ] ^ , ”. The path is case-sensitive. Default value: ${path}. This value can be used only once. You can use it with a valid string.
        :param str query: The query string of the request to be redirected within ALB. The query string must be 1 to 128 characters in length, can contain letters and printable characters. It cannot contain the following special characters: # [ ] { } \ | < > &. Default value: ${query}. This value can be used only once. You can use it with a valid string.
        """
        pulumi.set(__self__, "host", host)
        pulumi.set(__self__, "path", path)
        pulumi.set(__self__, "query", query)

    @property
    @pulumi.getter
    def host(self) -> str:
        """
        The host name of the destination to which requests are redirected within ALB. Valid values: The host name must be 3 to 128 characters in length, and can contain letters, digits, hyphens (-), periods (.), asterisks (*), and question marks (?). The host name must contain at least one period (.), and cannot start or end with a period (.). The rightmost domain label can contain only letters, asterisks (*) and question marks (?) and cannot contain digits or hyphens (-). Other domain labels cannot start or end with a hyphen (-). You can include asterisks (*) and question marks (?) anywhere in a domain label. Default value: ${host}. You cannot use this value with other characters at the same time.
        """
        return pulumi.get(self, "host")

    @property
    @pulumi.getter
    def path(self) -> str:
        """
        The path to which requests are to be redirected within ALB. Valid values: The path must be 1 to 128 characters in length, and start with a forward slash (/). The path can contain letters, digits, asterisks (*), question marks (?)and the following special characters: $ - _ . + / & ~ @ :. It cannot contain the following special characters: " % # ; ! ( ) [ ] ^ , ”. The path is case-sensitive. Default value: ${path}. This value can be used only once. You can use it with a valid string.
        """
        return pulumi.get(self, "path")

    @property
    @pulumi.getter
    def query(self) -> str:
        """
        The query string of the request to be redirected within ALB. The query string must be 1 to 128 characters in length, can contain letters and printable characters. It cannot contain the following special characters: # [ ] { } \ | < > &. Default value: ${query}. This value can be used only once. You can use it with a valid string.
        """
        return pulumi.get(self, "query")
@pulumi.output_type
class GetRulesRuleRuleConditionResult(dict):
    """One match condition of an ALB forwarding rule; only the config list matching `type` is populated."""

    def __init__(__self__, *,
                 cookie_configs: Sequence['outputs.GetRulesRuleRuleConditionCookieConfigResult'],
                 header_configs: Sequence['outputs.GetRulesRuleRuleConditionHeaderConfigResult'],
                 host_configs: Sequence['outputs.GetRulesRuleRuleConditionHostConfigResult'],
                 method_configs: Sequence['outputs.GetRulesRuleRuleConditionMethodConfigResult'],
                 path_configs: Sequence['outputs.GetRulesRuleRuleConditionPathConfigResult'],
                 query_string_configs: Sequence['outputs.GetRulesRuleRuleConditionQueryStringConfigResult'],
                 type: str):
        """
        :param Sequence['GetRulesRuleRuleConditionCookieConfigArgs'] cookie_configs: The configuration of the cookie.
        :param Sequence['GetRulesRuleRuleConditionHeaderConfigArgs'] header_configs: The configuration of the header field.
        :param Sequence['GetRulesRuleRuleConditionHostConfigArgs'] host_configs: The configuration of the host.
        :param Sequence['GetRulesRuleRuleConditionMethodConfigArgs'] method_configs: The configuration of the request method.
        :param Sequence['GetRulesRuleRuleConditionPathConfigArgs'] path_configs: The configuration of the path for the request to be forwarded.
        :param Sequence['GetRulesRuleRuleConditionQueryStringConfigArgs'] query_string_configs: The configuration of the query string.
        :param str type: The type of the forwarding rule.
        """
        pulumi.set(__self__, "cookie_configs", cookie_configs)
        pulumi.set(__self__, "header_configs", header_configs)
        pulumi.set(__self__, "host_configs", host_configs)
        pulumi.set(__self__, "method_configs", method_configs)
        pulumi.set(__self__, "path_configs", path_configs)
        pulumi.set(__self__, "query_string_configs", query_string_configs)
        pulumi.set(__self__, "type", type)

    @property
    @pulumi.getter(name="cookieConfigs")
    def cookie_configs(self) -> Sequence['outputs.GetRulesRuleRuleConditionCookieConfigResult']:
        """
        The configuration of the cookie.
        """
        return pulumi.get(self, "cookie_configs")

    @property
    @pulumi.getter(name="headerConfigs")
    def header_configs(self) -> Sequence['outputs.GetRulesRuleRuleConditionHeaderConfigResult']:
        """
        The configuration of the header field.
        """
        return pulumi.get(self, "header_configs")

    @property
    @pulumi.getter(name="hostConfigs")
    def host_configs(self) -> Sequence['outputs.GetRulesRuleRuleConditionHostConfigResult']:
        """
        The configuration of the host.
        """
        return pulumi.get(self, "host_configs")

    @property
    @pulumi.getter(name="methodConfigs")
    def method_configs(self) -> Sequence['outputs.GetRulesRuleRuleConditionMethodConfigResult']:
        """
        The configuration of the request method.
        """
        return pulumi.get(self, "method_configs")

    @property
    @pulumi.getter(name="pathConfigs")
    def path_configs(self) -> Sequence['outputs.GetRulesRuleRuleConditionPathConfigResult']:
        """
        The configuration of the path for the request to be forwarded.
        """
        return pulumi.get(self, "path_configs")

    @property
    @pulumi.getter(name="queryStringConfigs")
    def query_string_configs(self) -> Sequence['outputs.GetRulesRuleRuleConditionQueryStringConfigResult']:
        """
        The configuration of the query string.
        """
        return pulumi.get(self, "query_string_configs")

    @property
    @pulumi.getter
    def type(self) -> str:
        """
        The type of the forwarding rule.
        """
        return pulumi.get(self, "type")
@pulumi.output_type
class GetRulesRuleRuleConditionCookieConfigResult(dict):
    """Cookie-based match condition of an ALB forwarding rule."""

    def __init__(__self__, *,
                 values: Sequence['outputs.GetRulesRuleRuleConditionCookieConfigValueResult']):
        """
        :param Sequence['GetRulesRuleRuleConditionCookieConfigValueArgs'] values: The cookie key/value pairs this condition matches.
            NOTE(review): upstream docs copy-paste the path-condition description
            here; presumably these are cookie entries — verify against provider docs.
        """
        pulumi.set(__self__, "values", values)

    @property
    @pulumi.getter
    def values(self) -> Sequence['outputs.GetRulesRuleRuleConditionCookieConfigValueResult']:
        """
        The cookie key/value pairs this condition matches. NOTE(review): upstream
        docs reuse the path-condition description here — verify.
        """
        return pulumi.get(self, "values")
@pulumi.output_type
class GetRulesRuleRuleConditionCookieConfigValueResult(dict):
    """One key/value pair of a cookie-based match condition."""

    def __init__(__self__, *,
                 key: str,
                 value: str):
        """
        :param str key: The key of the header field. The key must be 1 to 40 characters in length, and can contain letters, digits, hyphens (-) and underscores (_). The key does not support Cookie or Host.
        :param str value: The value must be 1 to 128 characters in length, and can contain lowercase letters, printable characters, asterisks (*), and question marks (?). The value cannot contain spaces or the following special characters: # [ ] { } \ | < > &.
        """
        # NOTE(review): the param docs above appear copied from the header-insert
        # config; in a cookie condition these are presumably the cookie name and
        # cookie value — confirm against the ALB API reference.
        pulumi.set(__self__, "key", key)
        pulumi.set(__self__, "value", value)

    @property
    @pulumi.getter
    def key(self) -> str:
        """
        The key of the header field. The key must be 1 to 40 characters in length, and can contain letters, digits, hyphens (-) and underscores (_). The key does not support Cookie or Host.
        """
        return pulumi.get(self, "key")

    @property
    @pulumi.getter
    def value(self) -> str:
        """
        The value must be 1 to 128 characters in length, and can contain lowercase letters, printable characters, asterisks (*), and question marks (?). The value cannot contain spaces or the following special characters: # [ ] { } \ | < > &.
        """
        return pulumi.get(self, "value")
@pulumi.output_type
class GetRulesRuleRuleConditionHeaderConfigResult(dict):
    """Header-based match condition of an ALB forwarding rule."""

    def __init__(__self__, *,
                 key: str,
                 values: Sequence[str]):
        """
        :param str key: The key of the header field. The key must be 1 to 40 characters in length, and can contain letters, digits, hyphens (-) and underscores (_). The key does not support Cookie or Host.
        :param Sequence[str] values: The header values this condition matches.
            NOTE(review): upstream docs copy-paste the path-condition description
            here — verify against provider docs.
        """
        pulumi.set(__self__, "key", key)
        pulumi.set(__self__, "values", values)

    @property
    @pulumi.getter
    def key(self) -> str:
        """
        The key of the header field. The key must be 1 to 40 characters in length, and can contain letters, digits, hyphens (-) and underscores (_). The key does not support Cookie or Host.
        """
        return pulumi.get(self, "key")

    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """
        The header values this condition matches. NOTE(review): upstream docs
        reuse the path-condition description here — verify.
        """
        return pulumi.get(self, "values")
@pulumi.output_type
class GetRulesRuleRuleConditionHostConfigResult(dict):
    """Host-based match condition of an ALB forwarding rule."""

    def __init__(__self__, *,
                 values: Sequence[str]):
        """
        :param Sequence[str] values: The host names this condition matches.
            NOTE(review): upstream docs copy-paste the path-condition description
            here — verify against provider docs.
        """
        pulumi.set(__self__, "values", values)

    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """
        The host names this condition matches. NOTE(review): upstream docs reuse
        the path-condition description here — verify.
        """
        return pulumi.get(self, "values")
@pulumi.output_type
class GetRulesRuleRuleConditionMethodConfigResult(dict):
    """Request-method match condition of an ALB forwarding rule."""

    def __init__(__self__, *,
                 values: Sequence[str]):
        """
        :param Sequence[str] values: The HTTP request methods this condition matches.
            NOTE(review): upstream docs copy-paste the path-condition description
            here — verify against provider docs.
        """
        pulumi.set(__self__, "values", values)

    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """
        The HTTP request methods this condition matches. NOTE(review): upstream
        docs reuse the path-condition description here — verify.
        """
        return pulumi.get(self, "values")
@pulumi.output_type
class GetRulesRuleRuleConditionPathConfigResult(dict):
    """Path-based match condition of an ALB forwarding rule."""

    def __init__(__self__, *,
                 values: Sequence[str]):
        """
        :param Sequence[str] values: The path of the request to be forwarded. The path must be 1 to 128 characters in length and must start with a forward slash (/). The path can contain letters, digits, and the following special characters: $ - _ . + / & ~ @ :. It cannot contain the following special characters: " % # ; ! ( ) [ ] ^ , ". The value is case-sensitive, and can contain asterisks (*) and question marks (?).
        """
        pulumi.set(__self__, "values", values)

    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """
        The path of the request to be forwarded. The path must be 1 to 128 characters in length and must start with a forward slash (/). The path can contain letters, digits, and the following special characters: $ - _ . + / & ~ @ :. It cannot contain the following special characters: " % # ; ! ( ) [ ] ^ , ". The value is case-sensitive, and can contain asterisks (*) and question marks (?).
        """
        return pulumi.get(self, "values")
@pulumi.output_type
class GetRulesRuleRuleConditionQueryStringConfigResult(dict):
    """Query-string match condition of an ALB forwarding rule."""

    def __init__(__self__, *,
                 values: Sequence['outputs.GetRulesRuleRuleConditionQueryStringConfigValueResult']):
        """
        :param Sequence['GetRulesRuleRuleConditionQueryStringConfigValueArgs'] values: The query-string key/value pairs this condition matches.
            NOTE(review): upstream docs copy-paste the path-condition description
            here — verify against provider docs.
        """
        pulumi.set(__self__, "values", values)

    @property
    @pulumi.getter
    def values(self) -> Sequence['outputs.GetRulesRuleRuleConditionQueryStringConfigValueResult']:
        """
        The query-string key/value pairs this condition matches. NOTE(review):
        upstream docs reuse the path-condition description here — verify.
        """
        return pulumi.get(self, "values")
@pulumi.output_type
class GetRulesRuleRuleConditionQueryStringConfigValueResult(dict):
    """One key/value pair of a query-string match condition."""

    def __init__(__self__, *,
                 key: str,
                 value: str):
        """
        :param str key: The key of the header field. The key must be 1 to 40 characters in length, and can contain letters, digits, hyphens (-) and underscores (_). The key does not support Cookie or Host.
        :param str value: The value must be 1 to 128 characters in length, and can contain lowercase letters, printable characters, asterisks (*), and question marks (?). The value cannot contain spaces or the following special characters: # [ ] { } \ | < > &.
        """
        # NOTE(review): the param docs above appear copied from the header config;
        # in a query-string condition these are presumably the query parameter
        # name and value — confirm against the ALB API reference.
        pulumi.set(__self__, "key", key)
        pulumi.set(__self__, "value", value)

    @property
    @pulumi.getter
    def key(self) -> str:
        """
        The key of the header field. The key must be 1 to 40 characters in length, and can contain letters, digits, hyphens (-) and underscores (_). The key does not support Cookie or Host.
        """
        return pulumi.get(self, "key")

    @property
    @pulumi.getter
    def value(self) -> str:
        """
        The value must be 1 to 128 characters in length, and can contain lowercase letters, printable characters, asterisks (*), and question marks (?). The value cannot contain spaces or the following special characters: # [ ] { } \ | < > &.
        """
        return pulumi.get(self, "value")
@pulumi.output_type
class GetSecurityPoliciesPolicyResult(dict):
    """One ALB security (TLS) policy as returned by the `getSecurityPolicies` data source."""

    def __init__(__self__, *,
                 ciphers: Sequence[str],
                 id: str,
                 resource_group_id: str,
                 security_policy_id: str,
                 security_policy_name: str,
                 status: str,
                 tls_versions: Sequence[str]):
        """
        :param Sequence[str] ciphers: The supported cipher suites, which are determined by the TLS protocol version.
        :param str id: The ID of the Security Policy.
        :param str resource_group_id: The ID of the resource group.
        :param str security_policy_id: The first ID of the resource.
        :param str security_policy_name: The name of the resource. The name must be 2 to 128 characters in length and must start with a letter. It can contain digits, periods (.), underscores (_), and hyphens (-).
        :param str status: The status of the resource.
        :param Sequence[str] tls_versions: The TLS protocol versions that are supported. Valid values: TLSv1.0, TLSv1.1, TLSv1.2 and TLSv1.3.
        """
        pulumi.set(__self__, "ciphers", ciphers)
        pulumi.set(__self__, "id", id)
        pulumi.set(__self__, "resource_group_id", resource_group_id)
        pulumi.set(__self__, "security_policy_id", security_policy_id)
        pulumi.set(__self__, "security_policy_name", security_policy_name)
        pulumi.set(__self__, "status", status)
        pulumi.set(__self__, "tls_versions", tls_versions)

    @property
    @pulumi.getter
    def ciphers(self) -> Sequence[str]:
        """
        The supported cipher suites, which are determined by the TLS protocol version.
        """
        return pulumi.get(self, "ciphers")

    @property
    @pulumi.getter
    def id(self) -> str:
        """
        The ID of the Security Policy.
        """
        return pulumi.get(self, "id")

    @property
    @pulumi.getter(name="resourceGroupId")
    def resource_group_id(self) -> str:
        """
        The ID of the resource group.
        """
        return pulumi.get(self, "resource_group_id")

    @property
    @pulumi.getter(name="securityPolicyId")
    def security_policy_id(self) -> str:
        """
        The first ID of the resource.
        """
        return pulumi.get(self, "security_policy_id")

    @property
    @pulumi.getter(name="securityPolicyName")
    def security_policy_name(self) -> str:
        """
        The name of the resource. The name must be 2 to 128 characters in length and must start with a letter. It can contain digits, periods (.), underscores (_), and hyphens (-).
        """
        return pulumi.get(self, "security_policy_name")

    @property
    @pulumi.getter
    def status(self) -> str:
        """
        The status of the resource.
        """
        return pulumi.get(self, "status")

    @property
    @pulumi.getter(name="tlsVersions")
    def tls_versions(self) -> Sequence[str]:
        """
        The TLS protocol versions that are supported. Valid values: TLSv1.0, TLSv1.1, TLSv1.2 and TLSv1.3.
        """
        return pulumi.get(self, "tls_versions")
@pulumi.output_type
class GetServerGroupsGroupResult(dict):
    """One ALB server group entry as returned by the ServerGroups data source."""

    def __init__(__self__, *,
                 health_check_configs: Sequence['outputs.GetServerGroupsGroupHealthCheckConfigResult'],
                 id: str,
                 protocol: str,
                 scheduler: str,
                 server_group_id: str,
                 server_group_name: str,
                 servers: Sequence['outputs.GetServerGroupsGroupServerResult'],
                 status: str,
                 sticky_session_configs: Sequence['outputs.GetServerGroupsGroupStickySessionConfigResult'],
                 vpc_id: str):
        """
        :param Sequence['GetServerGroupsGroupHealthCheckConfigArgs'] health_check_configs: The health check configuration.
        :param str id: The ID of the Server Group.
        :param str protocol: The server protocol. Valid values: `HTTP` and `HTTPS`. Default value: `HTTP`.
        :param str scheduler: The scheduling algorithm. Valid values: `Wrr`, `Wlc` and `Sch`.
        :param str server_group_id: The first ID of the resource.
        :param str server_group_name: The name of the resource.
        :param Sequence['GetServerGroupsGroupServerArgs'] servers: The backend servers.
        :param str status: The status of the resource. Valid values: `Provisioning`, `Available` and `Configuring`.
        :param Sequence['GetServerGroupsGroupStickySessionConfigArgs'] sticky_session_configs: The sticky-session configuration.
        :param str vpc_id: The ID of the VPC that you want to access.
        """
        # Route every field through pulumi.set so the camelCase wire names
        # declared on the getters below are honoured.
        for attr_name, attr_value in (
                ("health_check_configs", health_check_configs),
                ("id", id),
                ("protocol", protocol),
                ("scheduler", scheduler),
                ("server_group_id", server_group_id),
                ("server_group_name", server_group_name),
                ("servers", servers),
                ("status", status),
                ("sticky_session_configs", sticky_session_configs),
                ("vpc_id", vpc_id),
        ):
            pulumi.set(__self__, attr_name, attr_value)

    @property
    @pulumi.getter(name="healthCheckConfigs")
    def health_check_configs(self) -> Sequence['outputs.GetServerGroupsGroupHealthCheckConfigResult']:
        """The health check configuration."""
        return pulumi.get(self, "health_check_configs")

    @property
    @pulumi.getter
    def id(self) -> str:
        """The ID of the Server Group."""
        return pulumi.get(self, "id")

    @property
    @pulumi.getter
    def protocol(self) -> str:
        """The server protocol. Valid values: `HTTP` and `HTTPS`. Default value: `HTTP`."""
        return pulumi.get(self, "protocol")

    @property
    @pulumi.getter
    def scheduler(self) -> str:
        """The scheduling algorithm. Valid values: `Wrr`, `Wlc` and `Sch`."""
        return pulumi.get(self, "scheduler")

    @property
    @pulumi.getter(name="serverGroupId")
    def server_group_id(self) -> str:
        """The first ID of the resource."""
        return pulumi.get(self, "server_group_id")

    @property
    @pulumi.getter(name="serverGroupName")
    def server_group_name(self) -> str:
        """The name of the resource."""
        return pulumi.get(self, "server_group_name")

    @property
    @pulumi.getter
    def servers(self) -> Sequence['outputs.GetServerGroupsGroupServerResult']:
        """The backend servers."""
        return pulumi.get(self, "servers")

    @property
    @pulumi.getter
    def status(self) -> str:
        """The status of the resource. Valid values: `Provisioning`, `Available` and `Configuring`."""
        return pulumi.get(self, "status")

    @property
    @pulumi.getter(name="stickySessionConfigs")
    def sticky_session_configs(self) -> Sequence['outputs.GetServerGroupsGroupStickySessionConfigResult']:
        """The sticky-session configuration."""
        return pulumi.get(self, "sticky_session_configs")

    @property
    @pulumi.getter(name="vpcId")
    def vpc_id(self) -> str:
        """The ID of the VPC that you want to access."""
        return pulumi.get(self, "vpc_id")
@pulumi.output_type
class GetServerGroupsGroupHealthCheckConfigResult(dict):
    """Health check configuration of a server group."""

    def __init__(__self__, *,
                 health_check_codes: Sequence[str],
                 health_check_connect_port: int,
                 health_check_enabled: bool,
                 health_check_host: str,
                 health_check_http_version: str,
                 health_check_interval: int,
                 health_check_method: str,
                 health_check_path: str,
                 health_check_protocol: str,
                 health_check_timeout: int,
                 healthy_threshold: int,
                 unhealthy_threshold: int):
        """
        :param Sequence[str] health_check_codes: Status codes that count as a successful check. Valid values: `http_2xx`, `http_3xx`, `http_4xx`, `http_5xx`; default `http_2xx`. Only present when `HealthCheckProtocol` is `HTTP`.
        :param int health_check_connect_port: Backend port used for health checks, `0` to `65535`; default `0` (use the backend server port).
        :param bool health_check_enabled: Whether health checks are enabled. Valid values: `true`, `false`; default `true`.
        :param str health_check_host: Domain name used for health checks.
        :param str health_check_http_version: HTTP version used for checks. Valid values: `HTTP1.0`, `HTTP1.1`; default `HTTP1.1`. Only present when `HealthCheckProtocol` is `HTTP`.
        :param int health_check_interval: Seconds between consecutive checks, `1` to `50`; default `2`.
        :param str health_check_method: HTTP method used for checks. Valid values: `GET`, `HEAD`; default `GET`. Only present when `HealthCheckProtocol` is `HTTP`.
        :param str health_check_path: Request path used for checks. Only present when `HealthCheckProtocol` is `HTTP`.
        :param str health_check_protocol: Check protocol. Valid values: `HTTP` and `TCP`.
        :param int health_check_timeout: Seconds to wait for a check response before the backend is considered unhealthy, `1` to `300`; default `5`. If smaller than `HealthCheckInterval`, the interval is used as the timeout instead.
        :param int healthy_threshold: Consecutive successes needed to flip an unhealthy backend to healthy, `2` to `10`; default `3`.
        :param int unhealthy_threshold: Consecutive failures needed to flip a healthy backend to unhealthy, `2` to `10`; default `3`.
        """
        # Route every field through pulumi.set so the camelCase wire names
        # declared on the getters below are honoured.
        for attr_name, attr_value in (
                ("health_check_codes", health_check_codes),
                ("health_check_connect_port", health_check_connect_port),
                ("health_check_enabled", health_check_enabled),
                ("health_check_host", health_check_host),
                ("health_check_http_version", health_check_http_version),
                ("health_check_interval", health_check_interval),
                ("health_check_method", health_check_method),
                ("health_check_path", health_check_path),
                ("health_check_protocol", health_check_protocol),
                ("health_check_timeout", health_check_timeout),
                ("healthy_threshold", healthy_threshold),
                ("unhealthy_threshold", unhealthy_threshold),
        ):
            pulumi.set(__self__, attr_name, attr_value)

    @property
    @pulumi.getter(name="healthCheckCodes")
    def health_check_codes(self) -> Sequence[str]:
        """Status codes that count as a successful check (`http_2xx`..`http_5xx`; default `http_2xx`; HTTP-only)."""
        return pulumi.get(self, "health_check_codes")

    @property
    @pulumi.getter(name="healthCheckConnectPort")
    def health_check_connect_port(self) -> int:
        """Backend port used for health checks, `0` to `65535`; `0` (the default) means the backend server port."""
        return pulumi.get(self, "health_check_connect_port")

    @property
    @pulumi.getter(name="healthCheckEnabled")
    def health_check_enabled(self) -> bool:
        """Whether health checks are enabled (`true`/`false`; default `true`)."""
        return pulumi.get(self, "health_check_enabled")

    @property
    @pulumi.getter(name="healthCheckHost")
    def health_check_host(self) -> str:
        """Domain name used for health checks."""
        return pulumi.get(self, "health_check_host")

    @property
    @pulumi.getter(name="healthCheckHttpVersion")
    def health_check_http_version(self) -> str:
        """HTTP version used for checks (`HTTP1.0`/`HTTP1.1`; default `HTTP1.1`; HTTP-only)."""
        return pulumi.get(self, "health_check_http_version")

    @property
    @pulumi.getter(name="healthCheckInterval")
    def health_check_interval(self) -> int:
        """Seconds between consecutive checks, `1` to `50`; default `2`."""
        return pulumi.get(self, "health_check_interval")

    @property
    @pulumi.getter(name="healthCheckMethod")
    def health_check_method(self) -> str:
        """HTTP method used for checks (`GET`/`HEAD`; default `GET`; HTTP-only)."""
        return pulumi.get(self, "health_check_method")

    @property
    @pulumi.getter(name="healthCheckPath")
    def health_check_path(self) -> str:
        """Request path used for checks (HTTP-only)."""
        return pulumi.get(self, "health_check_path")

    @property
    @pulumi.getter(name="healthCheckProtocol")
    def health_check_protocol(self) -> str:
        """Check protocol. Valid values: `HTTP` and `TCP`."""
        return pulumi.get(self, "health_check_protocol")

    @property
    @pulumi.getter(name="healthCheckTimeout")
    def health_check_timeout(self) -> int:
        """Seconds to wait for a check response, `1` to `300`; default `5`. Values below `HealthCheckInterval` are ignored in favour of the interval."""
        return pulumi.get(self, "health_check_timeout")

    @property
    @pulumi.getter(name="healthyThreshold")
    def healthy_threshold(self) -> int:
        """Consecutive successes needed to flip an unhealthy backend to healthy, `2` to `10`; default `3`."""
        return pulumi.get(self, "healthy_threshold")

    @property
    @pulumi.getter(name="unhealthyThreshold")
    def unhealthy_threshold(self) -> int:
        """Consecutive failures needed to flip a healthy backend to unhealthy, `2` to `10`; default `3`."""
        return pulumi.get(self, "unhealthy_threshold")
@pulumi.output_type
class GetServerGroupsGroupServerResult(dict):
    """One backend server attached to a server group."""

    def __init__(__self__, *,
                 description: str,
                 port: int,
                 server_id: str,
                 server_ip: str,
                 server_type: str,
                 status: str,
                 weight: int):
        """
        :param str description: The description of the server.
        :param int port: The port used by the server. Valid values: `1` to `65535`.
        :param str server_id: The ID of the ECS, ENI or ECI instance.
        :param str server_ip: The IP address of the ENI instance when it is in the inclusive ENI mode.
        :param str server_type: The type of the server. Valid values: `Ecs`, `Eni` and `Eci`.
        :param str status: The status of the resource. Valid values: `Provisioning`, `Available` and `Configuring`.
        :param int weight: The server weight, `0` to `100`; default `100`. A weight of `0` means no requests are forwarded to the server.
        """
        # Route every field through pulumi.set so the camelCase wire names
        # declared on the getters below are honoured.
        for attr_name, attr_value in (
                ("description", description),
                ("port", port),
                ("server_id", server_id),
                ("server_ip", server_ip),
                ("server_type", server_type),
                ("status", status),
                ("weight", weight),
        ):
            pulumi.set(__self__, attr_name, attr_value)

    @property
    @pulumi.getter
    def description(self) -> str:
        """The description of the server."""
        return pulumi.get(self, "description")

    @property
    @pulumi.getter
    def port(self) -> int:
        """The port used by the server. Valid values: `1` to `65535`."""
        return pulumi.get(self, "port")

    @property
    @pulumi.getter(name="serverId")
    def server_id(self) -> str:
        """The ID of the ECS, ENI or ECI instance."""
        return pulumi.get(self, "server_id")

    @property
    @pulumi.getter(name="serverIp")
    def server_ip(self) -> str:
        """The IP address of the ENI instance when it is in the inclusive ENI mode."""
        return pulumi.get(self, "server_ip")

    @property
    @pulumi.getter(name="serverType")
    def server_type(self) -> str:
        """The type of the server. Valid values: `Ecs`, `Eni` and `Eci`."""
        return pulumi.get(self, "server_type")

    @property
    @pulumi.getter
    def status(self) -> str:
        """The status of the resource. Valid values: `Provisioning`, `Available` and `Configuring`."""
        return pulumi.get(self, "status")

    @property
    @pulumi.getter
    def weight(self) -> int:
        """The server weight, `0` to `100`; default `100`. A weight of `0` disables forwarding to the server."""
        return pulumi.get(self, "weight")
@pulumi.output_type
class GetServerGroupsGroupStickySessionConfigResult(dict):
    """Sticky-session configuration of a server group."""

    def __init__(__self__, *,
                 cookie: str,
                 cookie_timeout: int,
                 sticky_session_enabled: bool,
                 sticky_session_type: str):
        """
        :param str cookie: The cookie configured on the server. Only present when `StickySession` is `On` and `StickySessionType` is `server`.
        :param int cookie_timeout: Cookie timeout in seconds, `1` to `86400`; default `1000`.
        :param bool sticky_session_enabled: Whether sticky sessions are enabled (`true`/`false`; default `false`). Only present when `StickySession` is `On`.
        :param str sticky_session_type: How the cookie is handled. Valid values: `Server` and `Insert`.
        """
        # Route every field through pulumi.set so the camelCase wire names
        # declared on the getters below are honoured.
        for attr_name, attr_value in (
                ("cookie", cookie),
                ("cookie_timeout", cookie_timeout),
                ("sticky_session_enabled", sticky_session_enabled),
                ("sticky_session_type", sticky_session_type),
        ):
            pulumi.set(__self__, attr_name, attr_value)

    @property
    @pulumi.getter
    def cookie(self) -> str:
        """The cookie configured on the server (only when `StickySession` is `On` and `StickySessionType` is `server`)."""
        return pulumi.get(self, "cookie")

    @property
    @pulumi.getter(name="cookieTimeout")
    def cookie_timeout(self) -> int:
        """Cookie timeout in seconds, `1` to `86400`; default `1000`."""
        return pulumi.get(self, "cookie_timeout")

    @property
    @pulumi.getter(name="stickySessionEnabled")
    def sticky_session_enabled(self) -> bool:
        """Whether sticky sessions are enabled (`true`/`false`; default `false`; only when `StickySession` is `On`)."""
        return pulumi.get(self, "sticky_session_enabled")

    @property
    @pulumi.getter(name="stickySessionType")
    def sticky_session_type(self) -> str:
        """How the cookie is handled. Valid values: `Server` and `Insert`."""
        return pulumi.get(self, "sticky_session_type")
@pulumi.output_type
class GetZonesZoneResult(dict):
    """One availability zone entry as returned by the Zones data source."""

    def __init__(__self__, *,
                 id: str,
                 local_name: str,
                 zone_id: str):
        """
        :param str id: The ID of the zone.
        :param str local_name: The local name.
        :param str zone_id: The zone ID.
        """
        # Route every field through pulumi.set so the camelCase wire names
        # declared on the getters below are honoured.
        for attr_name, attr_value in (
                ("id", id),
                ("local_name", local_name),
                ("zone_id", zone_id),
        ):
            pulumi.set(__self__, attr_name, attr_value)

    @property
    @pulumi.getter
    def id(self) -> str:
        """The ID of the zone."""
        return pulumi.get(self, "id")

    @property
    @pulumi.getter(name="localName")
    def local_name(self) -> str:
        """The local name."""
        return pulumi.get(self, "local_name")

    @property
    @pulumi.getter(name="zoneId")
    def zone_id(self) -> str:
        """The zone ID."""
        return pulumi.get(self, "zone_id")
| 50.107326
| 973
| 0.673406
| 25,988
| 225,032
| 5.667654
| 0.033092
| 0.012221
| 0.026566
| 0.038828
| 0.810321
| 0.768343
| 0.747398
| 0.71669
| 0.704157
| 0.689445
| 0
| 0.005388
| 0.234624
| 225,032
| 4,490
| 974
| 50.118486
| 0.849792
| 0.44223
| 0
| 0.639485
| 1
| 0.008584
| 0.20354
| 0.109414
| 0
| 0
| 0
| 0
| 0
| 1
| 0.173234
| false
| 0
| 0.002341
| 0.001951
| 0.340226
| 0.007023
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
79cb96fd1c1d8651ead21055db59b1b822047296
| 12,982
|
py
|
Python
|
src/aggregators.py
|
microsoft/TextGNN
|
a3810bdd651274dff57b7d1e3243ed3b294bd240
|
[
"MIT"
] | 11
|
2021-02-18T13:58:52.000Z
|
2022-02-12T18:07:58.000Z
|
src/aggregators.py
|
microsoft/TextGNN
|
a3810bdd651274dff57b7d1e3243ed3b294bd240
|
[
"MIT"
] | 1
|
2021-07-23T03:33:08.000Z
|
2021-07-27T22:38:22.000Z
|
src/aggregators.py
|
microsoft/TextGNN
|
a3810bdd651274dff57b7d1e3243ed3b294bd240
|
[
"MIT"
] | 7
|
2021-02-05T02:35:43.000Z
|
2021-12-15T05:22:07.000Z
|
## GraphSage aggregators
import logging
import numpy as np
import tensorflow as tf
from transformers.modeling_tf_utils import shape_list
logger = logging.getLogger(__name__)
class MeanAggregator(tf.keras.layers.Layer):
    """GraphSAGE mean aggregator.

    Averages the neighbour vectors over the neighbourhood axis, projects the
    self and neighbour features with separate weight matrices, and either adds
    or concatenates the two projections (per ``config.agg_concat``).
    """

    def __init__(self, config, out_dim, activation='relu', identity_act=False, **kwargs):
        super().__init__(**kwargs)
        self.dropout = tf.keras.layers.Dropout(config.agg_dropout)
        self.concat = config.agg_concat
        self.add_bias = config.agg_bias
        self.out_dim = out_dim
        self.identity_act = identity_act
        self.self_weights = tf.keras.layers.Dense(
            out_dim, use_bias=False, kernel_initializer='glorot_uniform', name="self_weight"
        )
        self.neigh_weights = tf.keras.layers.Dense(
            out_dim, use_bias=False, kernel_initializer='glorot_uniform', name="neigh_weights"
        )
        if not identity_act:
            self.act = tf.keras.layers.Activation(activation)

    def build(self, input_shape):
        # Bias spans the concatenated output when agg_concat is enabled.
        if self.add_bias:
            with tf.name_scope('bias'):
                self.bias = self.add_weight(
                    "weight",
                    shape=[self.out_dim * 2 if self.concat else self.out_dim],
                    initializer='zeros',
                )
        super().build(input_shape)

    def call(self, inputs, training=False):
        node_vecs, neigh_vecs = inputs
        neigh_vecs = self.dropout(neigh_vecs, training=training)
        node_vecs = self.dropout(node_vecs, training=training)
        pooled = tf.reduce_mean(neigh_vecs, axis=1)
        h_neigh = self.neigh_weights(pooled)
        h_self = self.self_weights(node_vecs)
        if self.concat:
            out = tf.concat([h_self, h_neigh], axis=1)
        else:
            out = tf.add_n([h_self, h_neigh])
        if self.add_bias:
            out = out + self.bias
        return out if self.identity_act else self.act(out)
class GCNAggregator(tf.keras.layers.Layer):
    """GCN-style aggregator.

    Averages the self vector together with the neighbour vectors and applies
    a single shared projection (with optional bias and activation).
    """

    def __init__(self, config, out_dim, activation='relu', identity_act=False, **kwargs):
        super().__init__(**kwargs)
        self.dropout = tf.keras.layers.Dropout(config.agg_dropout)
        self.out_dim = out_dim
        # identity_act means a purely linear projection (activation=None,
        # which is also the Dense default).
        self.neigh_weights = tf.keras.layers.Dense(
            out_dim,
            use_bias=config.agg_bias,
            activation=None if identity_act else activation,
            kernel_initializer='glorot_uniform',
            name="neigh_weights",
        )

    def call(self, inputs, training=False):
        node_vecs, neigh_vecs = inputs
        neigh_vecs = self.dropout(neigh_vecs, training=training)
        node_vecs = self.dropout(node_vecs, training=training)
        # Treat the node itself as one extra neighbour before averaging.
        stacked = tf.concat([neigh_vecs, tf.expand_dims(node_vecs, axis=1)], axis=1)
        return self.neigh_weights(tf.reduce_mean(stacked, axis=1))
class MaxPoolingAggregator(tf.keras.layers.Layer):
    """GraphSAGE max-pooling aggregator.

    Pushes each neighbour vector through a one-layer MLP, max-pools over the
    neighbourhood axis, then combines the pooled projection with the projected
    self vector (added, or concatenated when ``config.agg_concat`` is set).
    """

    def __init__(self, config, out_dim, activation='relu', identity_act=False, **kwargs):
        super().__init__(**kwargs)
        self.dropout = tf.keras.layers.Dropout(config.agg_dropout)
        self.concat = config.agg_concat
        self.add_bias = config.agg_bias
        self.out_dim = out_dim
        self.identity_act = identity_act
        self.hidden_dim = 512 if config.agg_model_size == 'small' else 1024
        self.mlp_layers = [
            tf.keras.layers.Dense(
                self.hidden_dim, activation='relu', kernel_initializer='glorot_uniform',
                kernel_regularizer=tf.keras.regularizers.l2(config.weight_decay), name="neigh_mlp"
            ),
        ]
        self.self_weights = tf.keras.layers.Dense(
            out_dim, use_bias=False, kernel_initializer='glorot_uniform', name="self_weight"
        )
        self.neigh_weights = tf.keras.layers.Dense(
            out_dim, use_bias=False, kernel_initializer='glorot_uniform', name="neigh_weights"
        )
        if not identity_act:
            self.act = tf.keras.layers.Activation(activation)

    def build(self, input_shape):
        # Bias spans the concatenated output when agg_concat is enabled.
        if self.add_bias:
            with tf.name_scope('bias'):
                self.bias = self.add_weight(
                    "weight",
                    shape=[self.out_dim * 2 if self.concat else self.out_dim],
                    initializer='zeros',
                )
        super().build(input_shape)

    def call(self, inputs, training=False):
        node_vecs, neigh_vecs = inputs
        for mlp in self.mlp_layers:
            neigh_vecs = mlp(self.dropout(neigh_vecs, training=training))
        pooled = tf.reduce_max(neigh_vecs, axis=1)
        h_neigh = self.neigh_weights(pooled)
        h_self = self.self_weights(node_vecs)
        if self.concat:
            out = tf.concat([h_self, h_neigh], axis=1)
        else:
            out = tf.add_n([h_self, h_neigh])
        if self.add_bias:
            out = out + self.bias
        return out if self.identity_act else self.act(out)
class MeanPoolingAggregator(tf.keras.layers.Layer):
    """GraphSAGE mean-pooling aggregator.

    Identical to the max-pooling variant except that the MLP-transformed
    neighbour vectors are mean-pooled over the neighbourhood axis.
    """

    def __init__(self, config, out_dim, activation='relu', identity_act=False, **kwargs):
        super().__init__(**kwargs)
        self.dropout = tf.keras.layers.Dropout(config.agg_dropout)
        self.concat = config.agg_concat
        self.add_bias = config.agg_bias
        self.out_dim = out_dim
        self.identity_act = identity_act
        self.hidden_dim = 512 if config.agg_model_size == 'small' else 1024
        self.mlp_layers = [
            tf.keras.layers.Dense(
                self.hidden_dim, activation='relu', kernel_initializer='glorot_uniform',
                kernel_regularizer=tf.keras.regularizers.l2(config.weight_decay), name="neigh_mlp"
            ),
        ]
        self.self_weights = tf.keras.layers.Dense(
            out_dim, use_bias=False, kernel_initializer='glorot_uniform', name="self_weight"
        )
        self.neigh_weights = tf.keras.layers.Dense(
            out_dim, use_bias=False, kernel_initializer='glorot_uniform', name="neigh_weights"
        )
        if not identity_act:
            self.act = tf.keras.layers.Activation(activation)

    def build(self, input_shape):
        # Bias spans the concatenated output when agg_concat is enabled.
        if self.add_bias:
            with tf.name_scope('bias'):
                self.bias = self.add_weight(
                    "weight",
                    shape=[self.out_dim * 2 if self.concat else self.out_dim],
                    initializer='zeros',
                )
        super().build(input_shape)

    def call(self, inputs, training=False):
        node_vecs, neigh_vecs = inputs
        for mlp in self.mlp_layers:
            neigh_vecs = mlp(self.dropout(neigh_vecs, training=training))
        pooled = tf.reduce_mean(neigh_vecs, axis=1)
        h_neigh = self.neigh_weights(pooled)
        h_self = self.self_weights(node_vecs)
        if self.concat:
            out = tf.concat([h_self, h_neigh], axis=1)
        else:
            out = tf.add_n([h_self, h_neigh])
        if self.add_bias:
            out = out + self.bias
        return out if self.identity_act else self.act(out)
class TwoMaxLayerPoolingAggregator(tf.keras.layers.Layer):
    """GraphSAGE max-pooling aggregator with a two-layer MLP.

    Neighbour vectors pass through two dense ReLU layers, are max-pooled over
    the neighbourhood axis, and the pooled projection is combined with the
    projected self vector (added, or concatenated when ``config.agg_concat``
    is set).
    """

    def __init__(self, config, out_dim, activation='relu', identity_act=False, **kwargs):
        super().__init__(**kwargs)
        self.dropout = tf.keras.layers.Dropout(config.agg_dropout)
        self.concat = config.agg_concat
        self.add_bias = config.agg_bias
        self.out_dim = out_dim
        self.identity_act = identity_act
        # BUG FIX: the original assigned `hidden_dim_1` twice (the second
        # assignment was clearly meant to be `hidden_dim_2`) and then
        # referenced the non-existent attributes `hidden_dim1`/`hidden_dim2`,
        # so instantiating this class raised AttributeError.
        self.hidden_dim_1 = 512 if config.agg_model_size == 'small' else 1024
        self.hidden_dim_2 = 256 if config.agg_model_size == 'small' else 512
        self.mlp_layers = []
        self.mlp_layers.append(tf.keras.layers.Dense(
            self.hidden_dim_1, activation='relu', kernel_initializer='glorot_uniform',
            kernel_regularizer=tf.keras.regularizers.l2(config.weight_decay), name="neigh_mlp_1"
        ))
        self.mlp_layers.append(tf.keras.layers.Dense(
            self.hidden_dim_2, activation='relu', kernel_initializer='glorot_uniform',
            kernel_regularizer=tf.keras.regularizers.l2(config.weight_decay), name="neigh_mlp_2"
        ))
        self.self_weights = tf.keras.layers.Dense(
            self.out_dim, use_bias=False, kernel_initializer='glorot_uniform', name="self_weight"
        )
        self.neigh_weights = tf.keras.layers.Dense(
            self.out_dim, use_bias=False, kernel_initializer='glorot_uniform', name="neigh_weights"
        )
        if not self.identity_act:
            self.act = tf.keras.layers.Activation(activation)

    def build(self, input_shape):
        # Bias spans the concatenated output when agg_concat is enabled.
        if self.add_bias:
            with tf.name_scope('bias'):
                self.bias = self.add_weight(
                    "weight",
                    shape=[self.out_dim * 2 if self.concat else self.out_dim],
                    initializer='zeros',
                )
        super().build(input_shape)

    def call(self, inputs, training=False):
        self_vecs, neigh_vecs = inputs
        # Dropout is re-applied before each MLP layer, matching the sibling
        # pooling aggregators.
        for layer in self.mlp_layers:
            neigh_vecs = self.dropout(neigh_vecs, training=training)
            neigh_vecs = layer(neigh_vecs)
        neigh_vecs = tf.reduce_max(neigh_vecs, axis=1)
        from_neighs = self.neigh_weights(neigh_vecs)
        from_self = self.self_weights(self_vecs)
        if not self.concat:
            output = tf.add_n([from_self, from_neighs])
        else:
            output = tf.concat([from_self, from_neighs], axis=1)
        if self.add_bias:
            output += self.bias
        if self.identity_act:
            return output
        return self.act(output)
class SeqAggregator(tf.keras.layers.Layer):
    """GraphSAGE LSTM (sequence) aggregator.

    Runs the neighbour vectors through an LSTM — masking padded (all-zero)
    neighbour rows — and combines the final LSTM state with the projected
    self vector (added, or concatenated when ``config.agg_concat`` is set).
    """

    def __init__(self, config, out_dim, activation='relu', identity_act=False, **kwargs):
        super().__init__(**kwargs)
        self.dropout = tf.keras.layers.Dropout(config.agg_dropout)
        self.concat = config.agg_concat
        self.add_bias = config.agg_bias
        self.out_dim = out_dim
        self.identity_act = identity_act
        self.hidden_dim = 128 if config.agg_model_size == 'small' else 256
        self.lstm = tf.keras.layers.LSTM(self.hidden_dim)
        self.self_weights = tf.keras.layers.Dense(
            self.out_dim, use_bias=False, kernel_initializer='glorot_uniform', name="self_weight"
        )
        self.neigh_weights = tf.keras.layers.Dense(
            self.out_dim, use_bias=False, kernel_initializer='glorot_uniform', name="neigh_weights"
        )
        if not self.identity_act:
            self.act = tf.keras.layers.Activation(activation)

    def build(self, input_shape):
        # Bias spans the concatenated output when agg_concat is enabled.
        if self.add_bias:
            with tf.name_scope('bias'):
                self.bias = self.add_weight(
                    "weight",
                    shape=[self.out_dim * 2 if self.concat else self.out_dim],
                    initializer='zeros',
                )
        super().build(input_shape)

    def call(self, inputs, training=False):
        self_vecs, neigh_vecs = inputs
        # BUG FIX: the original built the mask from an undefined name `x`
        # (NameError at runtime); the padding mask must be derived from the
        # neighbour vectors. A row is "real" iff any feature is non-zero.
        mask = tf.cast(tf.sign(tf.reduce_max(tf.abs(neigh_vecs), axis=2)), dtype=tf.bool)
        batch_size = shape_list(mask)[0]
        # Force the first timestep on so the LSTM always sees >= 1 step.
        mask = tf.concat([tf.constant(np.ones([batch_size, 1]), dtype=tf.bool), mask[:, 1:]], axis=1)
        rnn_outputs = self.lstm(inputs=neigh_vecs, mask=mask)
        from_neighs = self.neigh_weights(rnn_outputs)
        from_self = self.self_weights(self_vecs)
        if not self.concat:
            output = tf.add_n([from_self, from_neighs])
        else:
            output = tf.concat([from_self, from_neighs], axis=1)
        if self.add_bias:
            output += self.bias
        if self.identity_act:
            return output
        return self.act(output)
class NodePredict(tf.keras.layers.Layer):
    """Linear classification head producing per-node logits.

    NOTE(review): dropout is applied to the logits, i.e. AFTER the dense
    layer — unusual ordering; presumably intentional, worth confirming.
    """

    def __init__(self, config, **kwargs):
        super().__init__(**kwargs)
        self.dense = tf.keras.layers.Dense(
            config.num_classes, kernel_initializer='glorot_uniform', name="dense"
        )
        self.dropout = tf.keras.layers.Dropout(config.agg_dropout)

    def call(self, inputs, training=False):
        logits = self.dense(inputs)
        return self.dropout(logits, training=training)
# Registry mapping config keys to aggregator layer classes.
aggregators = dict(
    gcn=GCNAggregator,
    mean=MeanAggregator,
    meanpool=MeanPoolingAggregator,
    maxpool=MaxPoolingAggregator,
    twomaxpool=TwoMaxLayerPoolingAggregator,
    seq=SeqAggregator,
    nodepred=NodePredict,
)
def get(aggregator):
    """Look up an aggregator class by registry key; returns None if unknown."""
    return aggregators.get(aggregator)
| 35.373297
| 115
| 0.627638
| 1,622
| 12,982
| 4.767571
| 0.077682
| 0.037114
| 0.062201
| 0.039571
| 0.868227
| 0.855683
| 0.851804
| 0.832407
| 0.827881
| 0.821803
| 0
| 0.006819
| 0.265753
| 12,982
| 367
| 116
| 35.373297
| 0.804448
| 0.001618
| 0
| 0.751799
| 0
| 0
| 0.047226
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.071942
| false
| 0
| 0.014388
| 0.003597
| 0.140288
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8dd73f669ad99399a87b7829850c10714b048615
| 8,549
|
py
|
Python
|
dh.py
|
bct/esession-testsuite
|
b239dba44fc4403731152eed79372883fd76d483
|
[
"MIT"
] | 2
|
2016-03-09T07:45:16.000Z
|
2016-05-08T10:27:35.000Z
|
dh.py
|
bct/esession-testsuite
|
b239dba44fc4403731152eed79372883fd76d483
|
[
"MIT"
] | null | null | null |
dh.py
|
bct/esession-testsuite
|
b239dba44fc4403731152eed79372883fd76d483
|
[
"MIT"
] | null | null | null |
import string
# This file defines a number of constants; specifically, large primes suitable for
# use with the Diffie-Hellman key exchange.
#
# These constants have been obtained from RFC2409 and RFC3526.
# Generator values for the Diffie-Hellman groups below, indexed by group
# number (index 0 is a placeholder so generators[n] lines up with group n).
# Every group published in RFC 2409 / RFC 3526 uses g = 2; entries are
# None where this module carries no constants for that group number.
generators = [
    None,           # no group 0
    2, 2,           # groups 1-2
    None, None,
    2,              # group 5
    None, None, None, None,
    None, None, None, None,
    2, 2, 2, 2, 2,  # groups 14-18
]
# MODP primes from RFC 2409 (groups 1-2) and RFC 3526 (groups 5, 14-18),
# indexed by Diffie-Hellman group number (index 0 is a placeholder).
# Entries are None for group numbers with no prime listed here.
#
# BUGFIX: the group 16 constant previously read "86FFFB7DC" (nine hex
# digits — a stray 'F'), which made the "4096-bit" prime 4100 bits long
# and numerically wrong.  Corrected to "86FFB7DC" per RFC 3526 section 5;
# groups 17 and 18 below already carried the correct word.
hex_primes = [ None,
    # group 1 (768 bits)
    '''FFFFFFFF FFFFFFFF C90FDAA2 2168C234 C4C6628B 80DC1CD1
    29024E08 8A67CC74 020BBEA6 3B139B22 514A0879 8E3404DD
    EF9519B3 CD3A431B 302B0A6D F25F1437 4FE1356D 6D51C245
    E485B576 625E7EC6 F44C42E9 A63A3620 FFFFFFFF FFFFFFFF''',
    # group 2 (1024 bits)
    '''FFFFFFFF FFFFFFFF C90FDAA2 2168C234 C4C6628B 80DC1CD1
    29024E08 8A67CC74 020BBEA6 3B139B22 514A0879 8E3404DD
    EF9519B3 CD3A431B 302B0A6D F25F1437 4FE1356D 6D51C245
    E485B576 625E7EC6 F44C42E9 A637ED6B 0BFF5CB6 F406B7ED
    EE386BFB 5A899FA5 AE9F2411 7C4B1FE6 49286651 ECE65381
    FFFFFFFF FFFFFFFF''',
    # XXX how do I obtain these?
    None,
    None,
    # group 5 (1536 bits)
    '''FFFFFFFF FFFFFFFF C90FDAA2 2168C234 C4C6628B 80DC1CD1
    29024E08 8A67CC74 020BBEA6 3B139B22 514A0879 8E3404DD
    EF9519B3 CD3A431B 302B0A6D F25F1437 4FE1356D 6D51C245
    E485B576 625E7EC6 F44C42E9 A637ED6B 0BFF5CB6 F406B7ED
    EE386BFB 5A899FA5 AE9F2411 7C4B1FE6 49286651 ECE45B3D
    C2007CB8 A163BF05 98DA4836 1C55D39A 69163FA8 FD24CF5F
    83655D23 DCA3AD96 1C62F356 208552BB 9ED52907 7096966D
    670C354E 4ABC9804 F1746C08 CA237327 FFFFFFFF FFFFFFFF''',
    None,
    None,
    None,
    None,
    None,
    None,
    None,
    None,
    # group 14 (2048 bits)
    '''FFFFFFFF FFFFFFFF C90FDAA2 2168C234 C4C6628B 80DC1CD1
    29024E08 8A67CC74 020BBEA6 3B139B22 514A0879 8E3404DD
    EF9519B3 CD3A431B 302B0A6D F25F1437 4FE1356D 6D51C245
    E485B576 625E7EC6 F44C42E9 A637ED6B 0BFF5CB6 F406B7ED
    EE386BFB 5A899FA5 AE9F2411 7C4B1FE6 49286651 ECE45B3D
    C2007CB8 A163BF05 98DA4836 1C55D39A 69163FA8 FD24CF5F
    83655D23 DCA3AD96 1C62F356 208552BB 9ED52907 7096966D
    670C354E 4ABC9804 F1746C08 CA18217C 32905E46 2E36CE3B
    E39E772C 180E8603 9B2783A2 EC07A28F B5C55DF0 6F4C52C9
    DE2BCBF6 95581718 3995497C EA956AE5 15D22618 98FA0510
    15728E5A 8AACAA68 FFFFFFFF FFFFFFFF''',
    # group 15 (3072 bits)
    '''FFFFFFFF FFFFFFFF C90FDAA2 2168C234 C4C6628B 80DC1CD1
    29024E08 8A67CC74 020BBEA6 3B139B22 514A0879 8E3404DD
    EF9519B3 CD3A431B 302B0A6D F25F1437 4FE1356D 6D51C245
    E485B576 625E7EC6 F44C42E9 A637ED6B 0BFF5CB6 F406B7ED
    EE386BFB 5A899FA5 AE9F2411 7C4B1FE6 49286651 ECE45B3D
    C2007CB8 A163BF05 98DA4836 1C55D39A 69163FA8 FD24CF5F
    83655D23 DCA3AD96 1C62F356 208552BB 9ED52907 7096966D
    670C354E 4ABC9804 F1746C08 CA18217C 32905E46 2E36CE3B
    E39E772C 180E8603 9B2783A2 EC07A28F B5C55DF0 6F4C52C9
    DE2BCBF6 95581718 3995497C EA956AE5 15D22618 98FA0510
    15728E5A 8AAAC42D AD33170D 04507A33 A85521AB DF1CBA64
    ECFB8504 58DBEF0A 8AEA7157 5D060C7D B3970F85 A6E1E4C7
    ABF5AE8C DB0933D7 1E8C94E0 4A25619D CEE3D226 1AD2EE6B
    F12FFA06 D98A0864 D8760273 3EC86A64 521F2B18 177B200C
    BBE11757 7A615D6C 770988C0 BAD946E2 08E24FA0 74E5AB31
    43DB5BFC E0FD108E 4B82D120 A93AD2CA FFFFFFFF FFFFFFFF''',
    # group 16 (4096 bits)
    '''FFFFFFFF FFFFFFFF C90FDAA2 2168C234 C4C6628B 80DC1CD1
    29024E08 8A67CC74 020BBEA6 3B139B22 514A0879 8E3404DD
    EF9519B3 CD3A431B 302B0A6D F25F1437 4FE1356D 6D51C245
    E485B576 625E7EC6 F44C42E9 A637ED6B 0BFF5CB6 F406B7ED
    EE386BFB 5A899FA5 AE9F2411 7C4B1FE6 49286651 ECE45B3D
    C2007CB8 A163BF05 98DA4836 1C55D39A 69163FA8 FD24CF5F
    83655D23 DCA3AD96 1C62F356 208552BB 9ED52907 7096966D
    670C354E 4ABC9804 F1746C08 CA18217C 32905E46 2E36CE3B
    E39E772C 180E8603 9B2783A2 EC07A28F B5C55DF0 6F4C52C9
    DE2BCBF6 95581718 3995497C EA956AE5 15D22618 98FA0510
    15728E5A 8AAAC42D AD33170D 04507A33 A85521AB DF1CBA64
    ECFB8504 58DBEF0A 8AEA7157 5D060C7D B3970F85 A6E1E4C7
    ABF5AE8C DB0933D7 1E8C94E0 4A25619D CEE3D226 1AD2EE6B
    F12FFA06 D98A0864 D8760273 3EC86A64 521F2B18 177B200C
    BBE11757 7A615D6C 770988C0 BAD946E2 08E24FA0 74E5AB31
    43DB5BFC E0FD108E 4B82D120 A9210801 1A723C12 A787E6D7
    88719A10 BDBA5B26 99C32718 6AF4E23C 1A946834 B6150BDA
    2583E9CA 2AD44CE8 DBBBC2DB 04DE8EF9 2E8EFC14 1FBECAA6
    287C5947 4E6BC05D 99B2964F A090C3A2 233BA186 515BE7ED
    1F612970 CEE2D7AF B81BDD76 2170481C D0069127 D5B05AA9
    93B4EA98 8D8FDDC1 86FFB7DC 90A6C08F 4DF435C9 34063199
    FFFFFFFF FFFFFFFF''',
    # group 17 (6144 bits)
    '''FFFFFFFF FFFFFFFF C90FDAA2 2168C234 C4C6628B 80DC1CD1 29024E08
    8A67CC74 020BBEA6 3B139B22 514A0879 8E3404DD EF9519B3 CD3A431B
    302B0A6D F25F1437 4FE1356D 6D51C245 E485B576 625E7EC6 F44C42E9
    A637ED6B 0BFF5CB6 F406B7ED EE386BFB 5A899FA5 AE9F2411 7C4B1FE6
    49286651 ECE45B3D C2007CB8 A163BF05 98DA4836 1C55D39A 69163FA8
    FD24CF5F 83655D23 DCA3AD96 1C62F356 208552BB 9ED52907 7096966D
    670C354E 4ABC9804 F1746C08 CA18217C 32905E46 2E36CE3B E39E772C
    180E8603 9B2783A2 EC07A28F B5C55DF0 6F4C52C9 DE2BCBF6 95581718
    3995497C EA956AE5 15D22618 98FA0510 15728E5A 8AAAC42D AD33170D
    04507A33 A85521AB DF1CBA64 ECFB8504 58DBEF0A 8AEA7157 5D060C7D
    B3970F85 A6E1E4C7 ABF5AE8C DB0933D7 1E8C94E0 4A25619D CEE3D226
    1AD2EE6B F12FFA06 D98A0864 D8760273 3EC86A64 521F2B18 177B200C
    BBE11757 7A615D6C 770988C0 BAD946E2 08E24FA0 74E5AB31 43DB5BFC
    E0FD108E 4B82D120 A9210801 1A723C12 A787E6D7 88719A10 BDBA5B26
    99C32718 6AF4E23C 1A946834 B6150BDA 2583E9CA 2AD44CE8 DBBBC2DB
    04DE8EF9 2E8EFC14 1FBECAA6 287C5947 4E6BC05D 99B2964F A090C3A2
    233BA186 515BE7ED 1F612970 CEE2D7AF B81BDD76 2170481C D0069127
    D5B05AA9 93B4EA98 8D8FDDC1 86FFB7DC 90A6C08F 4DF435C9 34028492
    36C3FAB4 D27C7026 C1D4DCB2 602646DE C9751E76 3DBA37BD F8FF9406
    AD9E530E E5DB382F 413001AE B06A53ED 9027D831 179727B0 865A8918
    DA3EDBEB CF9B14ED 44CE6CBA CED4BB1B DB7F1447 E6CC254B 33205151
    2BD7AF42 6FB8F401 378CD2BF 5983CA01 C64B92EC F032EA15 D1721D03
    F482D7CE 6E74FEF6 D55E702F 46980C82 B5A84031 900B1C9E 59E7C97F
    BEC7E8F3 23A97A7E 36CC88BE 0F1D45B7 FF585AC5 4BD407B2 2B4154AA
    CC8F6D7E BF48E1D8 14CC5ED2 0F8037E0 A79715EE F29BE328 06A1D58B
    B7C5DA76 F550AA3D 8A1FBFF0 EB19CCB1 A313D55C DA56C9EC 2EF29632
    387FE8D7 6E3C0468 043E8F66 3F4860EE 12BF2D5B 0B7474D6 E694F91E
    6DCC4024 FFFFFFFF FFFFFFFF''',
    # group 18 (8192 bits)
    '''FFFFFFFF FFFFFFFF C90FDAA2 2168C234 C4C6628B 80DC1CD1
    29024E08 8A67CC74 020BBEA6 3B139B22 514A0879 8E3404DD
    EF9519B3 CD3A431B 302B0A6D F25F1437 4FE1356D 6D51C245
    E485B576 625E7EC6 F44C42E9 A637ED6B 0BFF5CB6 F406B7ED
    EE386BFB 5A899FA5 AE9F2411 7C4B1FE6 49286651 ECE45B3D
    C2007CB8 A163BF05 98DA4836 1C55D39A 69163FA8 FD24CF5F
    83655D23 DCA3AD96 1C62F356 208552BB 9ED52907 7096966D
    670C354E 4ABC9804 F1746C08 CA18217C 32905E46 2E36CE3B
    E39E772C 180E8603 9B2783A2 EC07A28F B5C55DF0 6F4C52C9
    DE2BCBF6 95581718 3995497C EA956AE5 15D22618 98FA0510
    15728E5A 8AAAC42D AD33170D 04507A33 A85521AB DF1CBA64
    ECFB8504 58DBEF0A 8AEA7157 5D060C7D B3970F85 A6E1E4C7
    ABF5AE8C DB0933D7 1E8C94E0 4A25619D CEE3D226 1AD2EE6B
    F12FFA06 D98A0864 D8760273 3EC86A64 521F2B18 177B200C
    BBE11757 7A615D6C 770988C0 BAD946E2 08E24FA0 74E5AB31
    43DB5BFC E0FD108E 4B82D120 A9210801 1A723C12 A787E6D7
    88719A10 BDBA5B26 99C32718 6AF4E23C 1A946834 B6150BDA
    2583E9CA 2AD44CE8 DBBBC2DB 04DE8EF9 2E8EFC14 1FBECAA6
    287C5947 4E6BC05D 99B2964F A090C3A2 233BA186 515BE7ED
    1F612970 CEE2D7AF B81BDD76 2170481C D0069127 D5B05AA9
    93B4EA98 8D8FDDC1 86FFB7DC 90A6C08F 4DF435C9 34028492
    36C3FAB4 D27C7026 C1D4DCB2 602646DE C9751E76 3DBA37BD
    F8FF9406 AD9E530E E5DB382F 413001AE B06A53ED 9027D831
    179727B0 865A8918 DA3EDBEB CF9B14ED 44CE6CBA CED4BB1B
    DB7F1447 E6CC254B 33205151 2BD7AF42 6FB8F401 378CD2BF
    5983CA01 C64B92EC F032EA15 D1721D03 F482D7CE 6E74FEF6
    D55E702F 46980C82 B5A84031 900B1C9E 59E7C97F BEC7E8F3
    23A97A7E 36CC88BE 0F1D45B7 FF585AC5 4BD407B2 2B4154AA
    CC8F6D7E BF48E1D8 14CC5ED2 0F8037E0 A79715EE F29BE328
    06A1D58B B7C5DA76 F550AA3D 8A1FBFF0 EB19CCB1 A313D55C
    DA56C9EC 2EF29632 387FE8D7 6E3C0468 043E8F66 3F4860EE
    12BF2D5B 0B7474D6 E694F91E 6DBE1159 74A3926F 12FEE5E4
    38777CB6 A932DF8C D8BEC4D0 73B931BA 3BC832B6 8D9DD300
    741FA7BF 8AFC47ED 2576F693 6BA42466 3AAB639C 5AE4F568
    3423B474 2BF1C978 238F16CB E39D652D E3FDB8BE FC848AD9
    22222E04 A4037C07 13EB57A8 1A23F0C7 3473FC64 6CEA306B
    4BCBC886 2F8385DD FA9D4B7F A2C087E8 79683303 ED5BDD3A
    062B3CF5 B3A278A6 6D2A13F8 3F44F82D DF310EE0 74AB6A36
    4597E899 A0255DC1 64F31CC5 0846851D F9AB4819 5DED7EA1
    B1D510BD 7EE74D73 FAF36BC3 1ECFA268 359046F4 EB879F92
    4009438B 481C6CD7 889A002E D5EE382B C9190DA6 FC026E47
    9558E447 5677E9AA 9E3050E2 765694DF C81F56E8 80B96E71
    60C980DD 98EDD3DF FFFFFFFF FFFFFFFF'''
]
all_ascii = ''.join(map(chr, range(256)))
def hex_to_decimal(stripee):
    """Parse a whitespace-padded hexadecimal string into an integer.

    Returns None when *stripee* is falsy (covers the None placeholders
    and empty strings in ``hex_primes``).

    The original implementation used the two-argument Python 2 form of
    ``str.translate`` to delete whitespace; ``str.split``/``join`` has
    the same effect and works on both Python 2 and Python 3.
    """
    if not stripee:
        return None
    return int(''.join(stripee.split()), 16)
primes = map(hex_to_decimal, hex_primes)
| 41.100962
| 82
| 0.849339
| 966
| 8,549
| 7.508282
| 0.339545
| 0.017648
| 0.019854
| 0.02206
| 0.834965
| 0.834965
| 0.834965
| 0.834965
| 0.826141
| 0.826141
| 0
| 0.550676
| 0.134402
| 8,549
| 207
| 83
| 41.299517
| 0.429459
| 0.036963
| 0
| 0.608696
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.021739
| false
| 0
| 0.021739
| 0
| 0.086957
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
8ddc1083fbf01d5530ecbac6dfa16d45f5467d02
| 3,505
|
py
|
Python
|
diracnets/diracconv.py
|
mikimaus78/ml_monorepo
|
b2c2627ff0e86e27f6829170d0dac168d8e5783b
|
[
"BSD-3-Clause"
] | 51
|
2019-02-01T19:43:37.000Z
|
2022-03-16T09:07:03.000Z
|
diracnets/diracconv.py
|
mikimaus78/ml_monorepo
|
b2c2627ff0e86e27f6829170d0dac168d8e5783b
|
[
"BSD-3-Clause"
] | 2
|
2019-02-23T18:54:22.000Z
|
2019-11-09T01:30:32.000Z
|
diracnets/diracconv.py
|
mikimaus78/ml_monorepo
|
b2c2627ff0e86e27f6829170d0dac168d8e5783b
|
[
"BSD-3-Clause"
] | 35
|
2019-02-08T02:00:31.000Z
|
2022-03-01T23:17:00.000Z
|
import torch
from torch import nn
import torch.nn.functional as F
from torch.nn.init import dirac_
def normalize(w):
    """L2-normalize each output filter of `w`, preserving its shape.

    The tensor is viewed as (out_channels, -1), each row is scaled to
    unit Euclidean norm, and the result is reshaped back.
    """
    flat = w.view(w.shape[0], -1)
    return F.normalize(flat).view_as(w)
class DiracConv(nn.Module):
    """Mixin implementing the Dirac weight parametrization.

    The effective convolution weight is ``alpha * delta + beta * W_hat``,
    where ``delta`` is a Dirac-initialized (identity) kernel, ``W_hat``
    is the L2-normalized learned weight, and ``alpha``/``beta`` are
    learnable per-output-channel scalars.
    """

    def init_params(self, out_channels):
        # Per-channel scales for the identity branch (alpha, starts at 1)
        # and the learned branch (beta, starts at 0.1).
        self.alpha = nn.Parameter(torch.full((out_channels,), 1.0))
        self.beta = nn.Parameter(torch.full((out_channels,), 0.1))
        # Non-learnable identity kernel: convolving with `delta` returns
        # the input unchanged (torch.nn.init.dirac_).
        self.register_buffer('delta', dirac_(self.weight.data.clone()))
        assert self.delta.shape == self.weight.shape
        # View shape that broadcasts the per-channel scalars over the
        # remaining weight dimensions.
        self.v = (-1,) + (1,) * (self.weight.dim() - 1)

    def transform_weight(self):
        scale_id = self.alpha.view(*self.v)
        scale_w = self.beta.view(*self.v)
        return scale_id * self.delta + scale_w * normalize(self.weight)
class DiracConv1d(nn.Conv1d, DiracConv):
    r"""Dirac parametrized convolutional layer.

    Works the same way as `nn.Conv1d`, but has an additional weight
    parametrization:

    :math:`\alpha\delta + \beta W`,

    where:
        :math:`\alpha` and :math:`\beta` are learnable scalars,
        :math:`\delta` is such a tensor so that `F.conv1d(x, delta) = x`,
        i.e. the Kronecker delta,
        `W` is the weight tensor.

    It is the user's responsibility to set the correct padding. Only
    stride=1 is supported.
    """

    def __init__(self, in_channels, out_channels, kernel_size, padding=0, dilation=1, bias=True):
        super().__init__(in_channels, out_channels, kernel_size, stride=1, padding=padding, dilation=dilation, bias=bias)
        self.init_params(out_channels)

    def forward(self, input):
        # Convolve with the Dirac-parametrized weight instead of self.weight.
        return F.conv1d(input, self.transform_weight(), self.bias, self.stride, self.padding, self.dilation)
class DiracConv2d(nn.Conv2d, DiracConv):
    r"""Dirac parametrized convolutional layer.

    Works the same way as `nn.Conv2d`, but has an additional weight
    parametrization:

    :math:`\alpha\delta + \beta W`,

    where:
        :math:`\alpha` and :math:`\beta` are learnable scalars,
        :math:`\delta` is such a tensor so that `F.conv2d(x, delta) = x`,
        i.e. the Kronecker delta,
        `W` is the weight tensor.

    It is the user's responsibility to set the correct padding. Only
    stride=1 is supported.
    """

    def __init__(self, in_channels, out_channels, kernel_size, padding=0, dilation=1, bias=True):
        super().__init__(in_channels, out_channels, kernel_size, stride=1, padding=padding, dilation=dilation, bias=bias)
        self.init_params(out_channels)

    def forward(self, input):
        # Convolve with the Dirac-parametrized weight instead of self.weight.
        return F.conv2d(input, self.transform_weight(), self.bias, self.stride, self.padding, self.dilation)
class DiracConv3d(nn.Conv3d, DiracConv):
    r"""Dirac parametrized convolutional layer.

    Works the same way as `nn.Conv3d`, but has an additional weight
    parametrization:

    :math:`\alpha\delta + \beta W`,

    where:
        :math:`\alpha` and :math:`\beta` are learnable scalars,
        :math:`\delta` is such a tensor so that `F.conv3d(x, delta) = x`,
        i.e. the Kronecker delta,
        `W` is the weight tensor.

    It is the user's responsibility to set the correct padding. Only
    stride=1 is supported.
    """

    def __init__(self, in_channels, out_channels, kernel_size, padding=0, dilation=1, bias=True):
        super().__init__(in_channels, out_channels, kernel_size, stride=1, padding=padding, dilation=dilation, bias=bias)
        self.init_params(out_channels)

    def forward(self, input):
        # Convolve with the Dirac-parametrized weight instead of self.weight.
        return F.conv3d(input, self.transform_weight(), self.bias, self.stride, self.padding, self.dilation)
| 39.382022
| 121
| 0.678174
| 483
| 3,505
| 4.792961
| 0.194617
| 0.057019
| 0.033693
| 0.054428
| 0.764579
| 0.764579
| 0.764579
| 0.732613
| 0.732613
| 0.732613
| 0
| 0.012442
| 0.197432
| 3,505
| 88
| 122
| 39.829545
| 0.810523
| 0.371469
| 0
| 0.363636
| 0
| 0
| 0.002428
| 0
| 0
| 0
| 0
| 0
| 0.030303
| 1
| 0.272727
| false
| 0
| 0.121212
| 0.121212
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
5c18be482489c57c922494dd9fb6db51a145cc34
| 84,195
|
py
|
Python
|
tests/elichika_typing/EspNet_test.py
|
take-cheeze/chainer-compiler
|
b3914fa4938a2715057e4dbbe3e95c09d798e135
|
[
"MIT"
] | null | null | null |
tests/elichika_typing/EspNet_test.py
|
take-cheeze/chainer-compiler
|
b3914fa4938a2715057e4dbbe3e95c09d798e135
|
[
"MIT"
] | null | null | null |
tests/elichika_typing/EspNet_test.py
|
take-cheeze/chainer-compiler
|
b3914fa4938a2715057e4dbbe3e95c09d798e135
|
[
"MIT"
] | null | null | null |
import chainer
import numpy as np
import unittest
from chainer_compiler.elichika.testtools import generate_id2type_from_forward
from chainer_compiler.elichika.testtools import type_inference_tools
from testcases.elichika_tests.utils import sequence_utils
from testcases.elichika_tests.model.EspNet_AttDot import AttDot
from testcases.elichika_tests.model.EspNet_AttLoc import AttLoc
from testcases.elichika_tests.model.EspNet_BLSTM import BLSTM
from testcases.elichika_tests.model.EspNet_Decoder import Decoder
from testcases.elichika_tests.model.EspNet_E2E import E2E, test_recipe
from testcases.elichika_tests.model.EspNet_VGG2L import VGG2L
from testcases.elichika_tests.model.StatelessLSTM import StatelessLSTM
def gen_AttDot_model():
    """Build an AttDot attention module plus random forward arguments."""
    type_inference_tools.reset_state()
    eprojs, dunits, att_dim = 3, 4, 5
    batch_size, sequence_length, num_vocabs = 3, 4, 10
    # Same construction order as before so RNG consumption is unchanged.
    model = AttDot(eprojs, dunits, att_dim)
    labels, ilens = sequence_utils.gen_random_sequence(
        batch_size, sequence_length, num_vocabs)
    xs = [np.random.rand(l, eprojs).astype(np.float32) for l in ilens]
    return model, (xs, None, None)
def gen_AttLoc_model():
    """Build an AttLoc attention module plus random forward arguments."""
    type_inference_tools.reset_state()
    eprojs, dunits, att_dim = 3, 4, 5
    batch_size, sequence_length, num_vocabs = 3, 4, 10
    aconv_chans, aconv_filts = 7, 6
    labels, ilens = sequence_utils.gen_random_sequence(
        batch_size, sequence_length, num_vocabs)
    xs = [np.random.rand(l, eprojs).astype(dtype=np.float32) for l in ilens]
    model = AttLoc(eprojs, dunits, att_dim, aconv_chans, aconv_filts)
    return model, (xs, None, None)
def gen_StatelessLSTM_model():
    """Build a StatelessLSTM plus random (c, h, x) forward arguments."""
    type_inference_tools.reset_state()
    batch_size, in_size, out_size = 3, 7, 4
    # Cell state, hidden state and input, drawn in this exact order.
    c = chainer.Variable(np.random.rand(batch_size, out_size).astype(np.float32))
    h = chainer.Variable(np.random.rand(batch_size, out_size).astype(np.float32))
    x = chainer.Variable(np.random.rand(batch_size, in_size).astype(np.float32))
    model = StatelessLSTM(in_size, out_size)
    return model, (c, h, x)
def gen_VGG2L_model():
    """Build a single-channel VGG2L plus random forward arguments."""
    type_inference_tools.reset_state()
    idim = 5
    # NOTE(review): elayers/cdim/hdim are unused here — VGG2L only takes
    # the input channel count; kept for parity with the sibling factories.
    elayers = 2
    cdim = 3
    hdim = 7
    batch_size, sequence_length, num_vocabs = 3, 4, 10
    labels, ilens = sequence_utils.gen_random_sequence(
        batch_size, sequence_length, num_vocabs)
    xs = [np.random.rand(l, idim).astype(dtype=np.float32) for l in ilens]
    model = VGG2L(1)
    return model, (xs, ilens)
def gen_BLSTM_model():
    """Build a BLSTM encoder plus random forward arguments."""
    type_inference_tools.reset_state()
    idim, elayers, cdim, hdim = 5, 2, 3, 7
    batch_size, sequence_length, num_vocabs = 3, 4, 10
    # Model is built before the random sequences, matching the original
    # RNG consumption order.
    model = BLSTM(idim, elayers, cdim, hdim, 0)
    labels, ilens = sequence_utils.gen_random_sequence(
        batch_size, sequence_length, num_vocabs)
    xs = [np.random.rand(l, idim).astype(dtype=np.float32) for l in ilens]
    return model, (xs, ilens)
def gen_Decoder_model():
    """Build an attention Decoder plus random (hs, ys) forward arguments."""
    type_inference_tools.reset_state()
    eprojs, dunits, att_dim = 3, 4, 5
    batch_size, sequence_length, num_vocabs = 3, 4, 10
    dlayers = 2
    odim = 11
    # Start/end-of-sequence tokens share the last vocabulary index.
    sos = eos = odim - 1
    # NOTE(review): aconv_chans/aconv_filts are unused by this Decoder
    # construction; kept for parity with the sibling factories.
    aconv_chans, aconv_filts = 7, 6
    labels, ilens = sequence_utils.gen_random_sequence(
        batch_size, sequence_length, num_vocabs)
    hs = [np.random.rand(l, eprojs).astype(dtype=np.float32) for l in ilens]
    ys, ilens = sequence_utils.gen_random_sequence(
        batch_size, sequence_length, odim)
    model = Decoder(eprojs, odim, dlayers, dunits, sos, eos, att_dim)
    return model, (hs, ys)
def gen_E2E_model():
    """Build an E2E model from the shared test recipe."""
    type_inference_tools.reset_state()
    dims, forward_args = test_recipe()
    idim, odim, args = dims
    model = E2E(idim, odim, args, nobias=True)
    return model, forward_args
class TestEspNet(unittest.TestCase):
    def test_AttDot(self):
        """Snapshot test of inferred AST-node types for AttDot.forward.

        Runs elichika type inference over a forward pass and pins the
        inferred type string for each AST node id (the trailing comment
        on each line names the corresponding AST node and source line).
        """
        model, forward_args = gen_AttDot_model()
        id2type = generate_id2type_from_forward(model, forward_args)
        # === BEGIN ASSERTIONS for AttDot ===
        self.assertEqual(str(id2type[1]), "class AttDot -> [ndarray(float32, (4, 3)), ndarray(float32, (3, 3)), ndarray(float32, (3, 3))] -> NoneType -> NoneType -> (Variable(float32, (3, 3)), Variable(float32, (3, 4)))")	# FunctionDef forward (line 1)
        self.assertEqual(str(id2type[11]), "NoneType")	# Expr
        self.assertEqual(str(id2type[12]), "string")	# Constant "..." (line 8)
        self.assertEqual(str(id2type[13]), "NoneType")	# Assign
        self.assertEqual(str(id2type[14]), "float")	# Name scaling (line 10)
        self.assertEqual(str(id2type[16]), "float")	# Constant 2.0 (line 10)
        self.assertEqual(str(id2type[17]), "NoneType")	# Assign
        self.assertEqual(str(id2type[18]), "int")	# Name batch (line 11)
        self.assertEqual(str(id2type[20]), "int")	# Call len(enc_hs) (line 11)
        self.assertEqual(str(id2type[21]), "[ndarray(float32, (4, 3)), ndarray(float32, (3, 3)), ndarray(float32, (3, 3))] -> int")	# Name len (line 11)
        self.assertEqual(str(id2type[23]), "[ndarray(float32, (4, 3)), ndarray(float32, (3, 3)), ndarray(float32, (3, 3))]")	# Name enc_hs (line 11)
        self.assertEqual(str(id2type[25]), "NoneType")	# If
        self.assertEqual(str(id2type[26]), "bool")	# Compare (line 14)
        self.assertEqual(str(id2type[27]), "NoneType")	# Attribute self.enc_h (line 14)
        self.assertEqual(str(id2type[28]), "class AttDot")	# Name self (line 14)
        self.assertEqual(str(id2type[32]), "NoneType")	# Constant None (line 14)
        self.assertEqual(str(id2type[33]), "NoneType")	# Assign
        self.assertEqual(str(id2type[34]), "Variable(float32, (3, 4, 3))")	# Attribute self.enc_h (line 15)
        self.assertEqual(str(id2type[35]), "class AttDot")	# Name self (line 15)
        self.assertEqual(str(id2type[38]), "Variable(float32, (3, 4, 3))")	# Call F.pad_sequence(enc_hs) (line 15)
        self.assertEqual(str(id2type[39]), "[ndarray(float32, (4, 3)), ndarray(float32, (3, 3)), ndarray(float32, (3, 3))] -> Variable(float32, (3, 4, 3))")	# Attribute F.pad_sequence (line 15)
        self.assertEqual(str(id2type[43]), "[ndarray(float32, (4, 3)), ndarray(float32, (3, 3)), ndarray(float32, (3, 3))]")	# Name enc_hs (line 15)
        self.assertEqual(str(id2type[45]), "NoneType")	# If
        self.assertEqual(str(id2type[46]), "bool")	# Compare (line 17)
        self.assertEqual(str(id2type[47]), "NoneType")	# Attribute self.pre_compute_enc_h (line 17)
        self.assertEqual(str(id2type[48]), "class AttDot")	# Name self (line 17)
        self.assertEqual(str(id2type[52]), "NoneType")	# Constant None (line 17)
        self.assertEqual(str(id2type[53]), "NoneType")	# Assign
        self.assertEqual(str(id2type[54]), "int")	# Attribute self.h_length (line 18)
        self.assertEqual(str(id2type[55]), "class AttDot")	# Name self (line 18)
        self.assertEqual(str(id2type[58]), "int")	# Subscript self.enc_h.shape[1] (line 18)
        self.assertEqual(str(id2type[59]), "(int, int, int)")	# Attribute self.enc_h.shape (line 18)
        self.assertEqual(str(id2type[60]), "Variable(float32, (3, 4, 3))")	# Attribute self.enc_h (line 18)
        self.assertEqual(str(id2type[61]), "class AttDot")	# Name self (line 18)
        self.assertEqual(str(id2type[66]), "int")	# Constant 1 (line 18)
        self.assertEqual(str(id2type[68]), "NoneType")	# Assign
        self.assertEqual(str(id2type[69]), "Variable(float32, (3, 4, 5))")	# Attribute self.pre_compute_enc_h (line 20)
        self.assertEqual(str(id2type[70]), "class AttDot")	# Name self (line 20)
        self.assertEqual(str(id2type[73]), "Variable(float32, (3, 4, 5))")	# Call F.tanh(linear_tensor(self.mlp_enc, self.enc_h)) (line 20)
        self.assertEqual(str(id2type[74]), "Variable(float32, (3, 4, 5)) -> Variable(float32, (3, 4, 5))")	# Attribute F.tanh (line 20)
        self.assertEqual(str(id2type[78]), "Variable(float32, (3, 4, 5))")	# Call linear_tensor(self.mlp_enc, self.enc_h) (line 21)
        self.assertEqual(str(id2type[79]), "class Linear -> Variable(float32, (3, 4, 3)) -> Variable(float32, (3, 4, 5))")	# Name linear_tensor (line 21)
        self.assertEqual(str(id2type[81]), "class Linear")	# Attribute self.mlp_enc (line 21)
        self.assertEqual(str(id2type[82]), "class AttDot")	# Name self (line 21)
        self.assertEqual(str(id2type[85]), "Variable(float32, (3, 4, 3))")	# Attribute self.enc_h (line 21)
        self.assertEqual(str(id2type[86]), "class AttDot")	# Name self (line 21)
        self.assertEqual(str(id2type[89]), "NoneType")	# If
        self.assertEqual(str(id2type[90]), "bool")	# Compare (line 23)
        self.assertEqual(str(id2type[91]), "NoneType")	# Name dec_z (line 23)
        self.assertEqual(str(id2type[94]), "NoneType")	# Constant None (line 23)
        self.assertEqual(str(id2type[95]), "NoneType")	# Assign
        self.assertEqual(str(id2type[96]), "Variable(float32, (3, 4))")	# Name dec_z (line 24)
        self.assertEqual(str(id2type[98]), "Variable(float32, (3, 4))")	# Call chainer.Variable(self.xp.zeros((batch, self.dunits), dtype=np.float32)) (line 24)
        self.assertEqual(str(id2type[99]), "ndarray(float32, (3, 4)) -> Variable(float32, (3, 4))")	# Attribute chainer.Variable (line 24)
        self.assertEqual(str(id2type[103]), "ndarray(float32, (3, 4))")	# Call self.xp.zeros((batch, self.dunits), dtype=np.float32) (line 24)
        self.assertEqual(str(id2type[104]), "(int, int) -> ndarray(float32, (3, 4))")	# Attribute self.xp.zeros (line 24)
        self.assertEqual(str(id2type[105]), "class module")	# Attribute self.xp (line 24)
        self.assertEqual(str(id2type[106]), "class AttDot")	# Name self (line 24)
        self.assertEqual(str(id2type[110]), "(int, int)")	# Tuple (batch, self.dunits) (line 25)
        self.assertEqual(str(id2type[111]), "int")	# Name batch (line 25)
        self.assertEqual(str(id2type[113]), "int")	# Attribute self.dunits (line 25)
        self.assertEqual(str(id2type[114]), "class AttDot")	# Name self (line 25)
        self.assertEqual(str(id2type[119]), "dtype(float32)")	# Attribute np.float32 (line 25)
        self.assertEqual(str(id2type[123]), "NoneType")	# Assign
        self.assertEqual(str(id2type[124]), "a15 (from line 27)")	# Name dec_z (line 27)
        self.assertEqual(str(id2type[126]), "a15 (from line 27)")	# Call F.reshape(dec_z, (batch, self.dunits)) (line 27)
        self.assertEqual(str(id2type[127]), "a11 -> (int, int) -> a15 (from line 27)")	# Attribute F.reshape (line 27)
        self.assertEqual(str(id2type[131]), "a11")	# Name dec_z (line 27)
        self.assertEqual(str(id2type[133]), "(int, int)")	# Tuple (batch, self.dunits) (line 27)
        self.assertEqual(str(id2type[134]), "int")	# Name batch (line 27)
        self.assertEqual(str(id2type[136]), "int")	# Attribute self.dunits (line 27)
        self.assertEqual(str(id2type[137]), "class AttDot")	# Name self (line 27)
        self.assertEqual(str(id2type[141]), "NoneType")	# Assign
        self.assertEqual(str(id2type[142]), "Variable(float32, (3, 4, 5))")	# Name u (line 30)
        self.assertEqual(str(id2type[144]), "Variable(float32, (3, 4, 5))")	# Call F.broadcast_to(F.expand_dims(F.tanh(self.mlp_dec(dec_z)), 1), self.pre_compute_enc_h.shape) (line 30)
        self.assertEqual(str(id2type[145]), "Variable(float32, (3, 1, 5)) -> (int, int, int) -> Variable(float32, (3, 4, 5))")	# Attribute F.broadcast_to (line 30)
        self.assertEqual(str(id2type[149]), "Variable(float32, (3, 1, 5))")	# Call F.expand_dims(F.tanh(self.mlp_dec(dec_z)), 1) (line 30)
        self.assertEqual(str(id2type[150]), "Variable(float32, (3, 5)) -> int -> Variable(float32, (3, 1, 5))")	# Attribute F.expand_dims (line 30)
        self.assertEqual(str(id2type[154]), "Variable(float32, (3, 5))")	# Call F.tanh(self.mlp_dec(dec_z)) (line 30)
        self.assertEqual(str(id2type[155]), "Variable(float32, (3, 5)) -> Variable(float32, (3, 5))")	# Attribute F.tanh (line 30)
        self.assertEqual(str(id2type[159]), "Variable(float32, (3, 5))")	# Call self.mlp_dec(dec_z) (line 30)
        self.assertEqual(str(id2type[160]), "Variable(float32, (3, 4)) -> Variable(float32, (3, 5))")	# Attribute self.mlp_dec (line 30)
        self.assertEqual(str(id2type[161]), "class AttDot")	# Name self (line 30)
        self.assertEqual(str(id2type[164]), "Variable(float32, (3, 4))")	# Name dec_z (line 30)
        self.assertEqual(str(id2type[166]), "int")	# Constant 1 (line 30)
        self.assertEqual(str(id2type[167]), "(int, int, int)")	# Attribute self.pre_compute_enc_h.shape (line 31)
        self.assertEqual(str(id2type[168]), "Variable(float32, (3, 4, 5))")	# Attribute self.pre_compute_enc_h (line 31)
        self.assertEqual(str(id2type[169]), "class AttDot")	# Name self (line 31)
        self.assertEqual(str(id2type[173]), "NoneType")	# Assign
        self.assertEqual(str(id2type[174]), "Variable(float32, (3, 4))")	# Name e (line 32)
        self.assertEqual(str(id2type[176]), "Variable(float32, (3, 4))")	# Call F.sum(self.pre_compute_enc_h * u, axis=2) (line 32)
        self.assertEqual(str(id2type[177]), "Variable(float32, (3, 4, 5)) -> Variable(float32, (3, 4))")	# Attribute F.sum (line 32)
        self.assertEqual(str(id2type[181]), "Variable(float32, (3, 4, 5))")	# BinOp self.pre_compute_enc_h * u (line 32)
        self.assertEqual(str(id2type[182]), "Variable(float32, (3, 4, 5))")	# Attribute self.pre_compute_enc_h (line 32)
        self.assertEqual(str(id2type[183]), "class AttDot")	# Name self (line 32)
        self.assertEqual(str(id2type[186]), "Variable(float32, (3, 4, 5)) -> Variable(float32, (3, 4, 5)) -> Variable(float32, (3, 4, 5))")	# Mult
        self.assertEqual(str(id2type[187]), "Variable(float32, (3, 4, 5))")	# Name u (line 32)
        self.assertEqual(str(id2type[190]), "int")	# Constant 2 (line 32)
        self.assertEqual(str(id2type[191]), "NoneType")	# Assign
        self.assertEqual(str(id2type[192]), "Variable(float32, (3, 4))")	# Name w (line 36)
        self.assertEqual(str(id2type[194]), "Variable(float32, (3, 4))")	# Call F.softmax(scaling * e) (line 36)
        self.assertEqual(str(id2type[195]), "Variable(float32, (3, 4)) -> Variable(float32, (3, 4))")	# Attribute F.softmax (line 36)
        self.assertEqual(str(id2type[199]), "Variable(float32, (3, 4))")	# BinOp scaling * e (line 36)
        self.assertEqual(str(id2type[200]), "float")	# Name scaling (line 36)
        self.assertEqual(str(id2type[202]), "float -> Variable(float32, (3, 4)) -> Variable(float32, (3, 4))")	# Mult
        self.assertEqual(str(id2type[203]), "Variable(float32, (3, 4))")	# Name e (line 36)
        self.assertEqual(str(id2type[205]), "NoneType")	# Assign
        self.assertEqual(str(id2type[206]), "Variable(float32, (3, 3))")	# Name c (line 39)
        self.assertEqual(str(id2type[208]), "Variable(float32, (3, 3))")	# Call F.sum(self.enc_h * F.broadcast_to(F.expand_dims(w, 2), self.enc_h.shape), axis=1) (line 39)
        self.assertEqual(str(id2type[209]), "Variable(float32, (3, 4, 3)) -> Variable(float32, (3, 3))")	# Attribute F.sum (line 39)
        self.assertEqual(str(id2type[213]), "Variable(float32, (3, 4, 3))")	# BinOp self.enc_h * F.broadcast_to(F.expand_dims(w, 2), self.enc_h.shape) (line 39)
        self.assertEqual(str(id2type[214]), "Variable(float32, (3, 4, 3))")	# Attribute self.enc_h (line 39)
        self.assertEqual(str(id2type[215]), "class AttDot")	# Name self (line 39)
        self.assertEqual(str(id2type[218]), "Variable(float32, (3, 4, 3)) -> Variable(float32, (3, 4, 3)) -> Variable(float32, (3, 4, 3))")	# Mult
        self.assertEqual(str(id2type[219]), "Variable(float32, (3, 4, 3))")	# Call F.broadcast_to(F.expand_dims(w, 2), self.enc_h.shape) (line 39)
        self.assertEqual(str(id2type[220]), "Variable(float32, (3, 4, 1)) -> (int, int, int) -> Variable(float32, (3, 4, 3))")	# Attribute F.broadcast_to (line 39)
        self.assertEqual(str(id2type[224]), "Variable(float32, (3, 4, 1))")	# Call F.expand_dims(w, 2) (line 39)
        self.assertEqual(str(id2type[225]), "Variable(float32, (3, 4)) -> int -> Variable(float32, (3, 4, 1))")	# Attribute F.expand_dims (line 39)
        self.assertEqual(str(id2type[229]), "Variable(float32, (3, 4))")	# Name w (line 39)
        self.assertEqual(str(id2type[231]), "int")	# Constant 2 (line 39)
        self.assertEqual(str(id2type[232]), "(int, int, int)")	# Attribute self.enc_h.shape (line 39)
        self.assertEqual(str(id2type[233]), "Variable(float32, (3, 4, 3))")	# Attribute self.enc_h (line 39)
        self.assertEqual(str(id2type[234]), "class AttDot")	# Name self (line 39)
        self.assertEqual(str(id2type[239]), "int")	# Constant 1 (line 39)
        self.assertEqual(str(id2type[240]), "(Variable(float32, (3, 3)), Variable(float32, (3, 4)))")	# Return
        self.assertEqual(str(id2type[241]), "(Variable(float32, (3, 3)), Variable(float32, (3, 4)))")	# Tuple (c, w) (line 41)
        self.assertEqual(str(id2type[242]), "Variable(float32, (3, 3))")	# Name c (line 41)
        self.assertEqual(str(id2type[244]), "Variable(float32, (3, 4))")	# Name w (line 41)
        self.assertEqual(str(id2type[247]), "class Linear -> Variable(float32, (3, 4, 3)) -> Variable(float32, (3, 4, 5))")	# FunctionDef linear_tensor (line 1)
        self.assertEqual(str(id2type[253]), "NoneType")	# Expr
        self.assertEqual(str(id2type[254]), "string")	# Constant "..." (line 8)
        self.assertEqual(str(id2type[255]), "NoneType")	# Assign
        self.assertEqual(str(id2type[256]), "Variable(float32, (12, 5))")	# Name y (line 9)
        self.assertEqual(str(id2type[258]), "Variable(float32, (12, 5))")	# Call linear(F.reshape(x, (-1, x.shape[-1]))) (line 9)
        self.assertEqual(str(id2type[259]), "Variable(float32, (12, 3)) -> Variable(float32, (12, 5))")	# Name linear (line 9)
        self.assertEqual(str(id2type[261]), "Variable(float32, (12, 3))")	# Call F.reshape(x, (-1, x.shape[-1])) (line 9)
        self.assertEqual(str(id2type[262]), "Variable(float32, (3, 4, 3)) -> (int, int) -> Variable(float32, (12, 3))")	# Attribute F.reshape (line 9)
        self.assertEqual(str(id2type[266]), "Variable(float32, (3, 4, 3))")	# Name x (line 9)
        self.assertEqual(str(id2type[268]), "(int, int)")	# Tuple (-1, x.shape[-1]) (line 9)
        self.assertEqual(str(id2type[269]), "int")	# UnaryOp -1 (line 9)
        self.assertEqual(str(id2type[271]), "int")	# Constant 1 (line 9)
        self.assertEqual(str(id2type[272]), "int")	# Subscript x.shape[-1] (line 9)
        self.assertEqual(str(id2type[273]), "(int, int, int)")	# Attribute x.shape (line 9)
        self.assertEqual(str(id2type[274]), "Variable(float32, (3, 4, 3))")	# Name x (line 9)
        self.assertEqual(str(id2type[278]), "int")	# UnaryOp -1 (line 9)
        self.assertEqual(str(id2type[280]), "int")	# Constant 1 (line 9)
        self.assertEqual(str(id2type[283]), "Variable(float32, (3, 4, 5))")	# Return
        self.assertEqual(str(id2type[284]), "Variable(float32, (3, 4, 5))")	# Call F.reshape(y, x.shape[:-1:] + (-1)) (line 10)
        self.assertEqual(str(id2type[285]), "Variable(float32, (12, 5)) -> (int, int, int) -> Variable(float32, (3, 4, 5))")	# Attribute F.reshape (line 10)
        self.assertEqual(str(id2type[289]), "Variable(float32, (12, 5))")	# Name y (line 10)
        self.assertEqual(str(id2type[291]), "(int, int, int)")	# BinOp x.shape[:-1:] + (-1) (line 10)
        self.assertEqual(str(id2type[292]), "(int, int)")	# Subscript x.shape[:-1:] (line 10)
        self.assertEqual(str(id2type[293]), "(int, int, int)")	# Attribute x.shape (line 10)
        self.assertEqual(str(id2type[294]), "Variable(float32, (3, 4, 3))")	# Name x (line 10)
        self.assertEqual(str(id2type[298]), "int")	# UnaryOp -1 (line 10)
        self.assertEqual(str(id2type[300]), "int")	# Constant 1 (line 10)
        self.assertEqual(str(id2type[302]), "(int, int) -> (int,) -> (int, int, int)")	# Add
        self.assertEqual(str(id2type[303]), "(int,)")	# Tuple (-1) (line 10)
        self.assertEqual(str(id2type[304]), "int")	# UnaryOp -1 (line 10)
        self.assertEqual(str(id2type[306]), "int")	# Constant 1 (line 10)
        # === END ASSERTIONS for AttDot ===
# Snapshot regression test for the type-inference pass over AttLoc.forward.
# Each id2type[node_id] is the inferred type of one AST node; the test pins
# the exact str() form of every inference result.  The region between the
# BEGIN/END ASSERTIONS markers is auto-generated — regenerate it with the
# snapshot tool rather than editing individual assertions by hand.
def test_AttLoc(self):
model, forward_args = gen_AttLoc_model()
id2type = generate_id2type_from_forward(model, forward_args)
# === BEGIN ASSERTIONS for AttLoc ===
# Signature of forward: (self, enc_hs, dec_z, att_prev) -> (context, weights).
self.assertEqual(str(id2type[1]), "class AttLoc -> [ndarray(float32, (4, 3)), ndarray(float32, (2, 3)), ndarray(float32, (2, 3))] -> NoneType -> NoneType -> (Variable(float32, (3, 3)), Variable(float32, (3, 4)))") # FunctionDef forward (line 1)
self.assertEqual(str(id2type[11]), "NoneType") # Expr
self.assertEqual(str(id2type[12]), "string") # Constant "..." (line 9)
self.assertEqual(str(id2type[13]), "NoneType") # Assign
self.assertEqual(str(id2type[14]), "float") # Name scaling (line 11)
self.assertEqual(str(id2type[16]), "float") # Constant 2.0 (line 11)
self.assertEqual(str(id2type[17]), "NoneType") # Assign
self.assertEqual(str(id2type[18]), "int") # Name batch (line 12)
self.assertEqual(str(id2type[20]), "int") # Call len(enc_hs) (line 12)
self.assertEqual(str(id2type[21]), "[ndarray(float32, (4, 3)), ndarray(float32, (2, 3)), ndarray(float32, (2, 3))] -> int") # Name len (line 12)
self.assertEqual(str(id2type[23]), "[ndarray(float32, (4, 3)), ndarray(float32, (2, 3)), ndarray(float32, (2, 3))]") # Name enc_hs (line 12)
# Lazy caching of the padded encoder states: self.enc_h is set once from
# F.pad_sequence(enc_hs) (ragged lengths 4/2/2 padded to (3, 4, 3)).
self.assertEqual(str(id2type[25]), "NoneType") # If
self.assertEqual(str(id2type[26]), "bool") # Compare (line 15)
self.assertEqual(str(id2type[27]), "NoneType") # Attribute self.enc_h (line 15)
self.assertEqual(str(id2type[28]), "class AttLoc") # Name self (line 15)
self.assertEqual(str(id2type[32]), "NoneType") # Constant None (line 15)
self.assertEqual(str(id2type[33]), "NoneType") # Assign
self.assertEqual(str(id2type[34]), "Variable(float32, (3, 4, 3))") # Attribute self.enc_h (line 16)
self.assertEqual(str(id2type[35]), "class AttLoc") # Name self (line 16)
self.assertEqual(str(id2type[38]), "Variable(float32, (3, 4, 3))") # Call F.pad_sequence(enc_hs) (line 16)
self.assertEqual(str(id2type[39]), "[ndarray(float32, (4, 3)), ndarray(float32, (2, 3)), ndarray(float32, (2, 3))] -> Variable(float32, (3, 4, 3))") # Attribute F.pad_sequence (line 16)
self.assertEqual(str(id2type[43]), "[ndarray(float32, (4, 3)), ndarray(float32, (2, 3)), ndarray(float32, (2, 3))]") # Name enc_hs (line 16)
self.assertEqual(str(id2type[45]), "NoneType") # If
self.assertEqual(str(id2type[46]), "bool") # Compare (line 17)
self.assertEqual(str(id2type[47]), "NoneType") # Attribute self.h_length (line 17)
self.assertEqual(str(id2type[48]), "class AttLoc") # Name self (line 17)
self.assertEqual(str(id2type[52]), "NoneType") # Constant None (line 17)
self.assertEqual(str(id2type[53]), "NoneType") # Assign
self.assertEqual(str(id2type[54]), "int") # Attribute self.h_length (line 18)
self.assertEqual(str(id2type[55]), "class AttLoc") # Name self (line 18)
self.assertEqual(str(id2type[58]), "int") # Subscript self.enc_h.shape[1] (line 18)
self.assertEqual(str(id2type[59]), "(int, int, int)") # Attribute self.enc_h.shape (line 18)
self.assertEqual(str(id2type[60]), "Variable(float32, (3, 4, 3))") # Attribute self.enc_h (line 18)
self.assertEqual(str(id2type[61]), "class AttLoc") # Name self (line 18)
self.assertEqual(str(id2type[66]), "int") # Constant 1 (line 18)
# Pre-computation of the encoder projection via mlp_enc: (3, 4, 3) -> (3, 4, 5).
self.assertEqual(str(id2type[68]), "NoneType") # If
self.assertEqual(str(id2type[69]), "bool") # Compare (line 21)
self.assertEqual(str(id2type[70]), "NoneType") # Attribute self.pre_compute_enc_h (line 21)
self.assertEqual(str(id2type[71]), "class AttLoc") # Name self (line 21)
self.assertEqual(str(id2type[75]), "NoneType") # Constant None (line 21)
self.assertEqual(str(id2type[76]), "NoneType") # Assign
self.assertEqual(str(id2type[77]), "Variable(float32, (3, 4, 5))") # Attribute self.pre_compute_enc_h (line 23)
self.assertEqual(str(id2type[78]), "class AttLoc") # Name self (line 23)
self.assertEqual(str(id2type[81]), "Variable(float32, (3, 4, 5))") # Call linear_tensor_3d(self.mlp_enc, self.enc_h) (line 23)
self.assertEqual(str(id2type[82]), "class Linear -> Variable(float32, (3, 4, 3)) -> Variable(float32, (3, 4, 5))") # Name linear_tensor_3d (line 23)
self.assertEqual(str(id2type[84]), "class Linear") # Attribute self.mlp_enc (line 23)
self.assertEqual(str(id2type[85]), "class AttLoc") # Name self (line 23)
self.assertEqual(str(id2type[88]), "Variable(float32, (3, 4, 3))") # Attribute self.enc_h (line 23)
self.assertEqual(str(id2type[89]), "class AttLoc") # Name self (line 23)
# dec_z is None in this trace, so the True branch allocates a fresh zero
# decoder state; the False branch (F.reshape of dec_z) therefore stays
# polymorphic and is inferred with type variables a9/a13 below.
self.assertEqual(str(id2type[92]), "NoneType") # If
self.assertEqual(str(id2type[93]), "bool") # Compare (line 25)
self.assertEqual(str(id2type[94]), "NoneType") # Name dec_z (line 25)
self.assertEqual(str(id2type[97]), "NoneType") # Constant None (line 25)
self.assertEqual(str(id2type[98]), "NoneType") # Assign
self.assertEqual(str(id2type[99]), "Variable(float32, (3, 4))") # Name dec_z_new (line 26)
self.assertEqual(str(id2type[101]), "Variable(float32, (3, 4))") # Call chainer.Variable(self.xp.zeros((batch, self.dunits), dtype=np.float32)) (line 26)
self.assertEqual(str(id2type[102]), "ndarray(float32, (3, 4)) -> Variable(float32, (3, 4))") # Attribute chainer.Variable (line 26)
self.assertEqual(str(id2type[106]), "ndarray(float32, (3, 4))") # Call self.xp.zeros((batch, self.dunits), dtype=np.float32) (line 26)
self.assertEqual(str(id2type[107]), "(int, int) -> ndarray(float32, (3, 4))") # Attribute self.xp.zeros (line 26)
self.assertEqual(str(id2type[108]), "class module") # Attribute self.xp (line 26)
self.assertEqual(str(id2type[109]), "class AttLoc") # Name self (line 26)
self.assertEqual(str(id2type[113]), "(int, int)") # Tuple (batch, self.dunits) (line 27)
self.assertEqual(str(id2type[114]), "int") # Name batch (line 27)
self.assertEqual(str(id2type[116]), "int") # Attribute self.dunits (line 27)
self.assertEqual(str(id2type[117]), "class AttLoc") # Name self (line 27)
self.assertEqual(str(id2type[122]), "dtype(float32)") # Attribute np.float32 (line 27)
self.assertEqual(str(id2type[126]), "NoneType") # Assign
self.assertEqual(str(id2type[127]), "a13 (from line 29)") # Name dec_z_new (line 29)
self.assertEqual(str(id2type[129]), "a13 (from line 29)") # Call F.reshape(dec_z, (batch, self.dunits)) (line 29)
self.assertEqual(str(id2type[130]), "a9 -> (int, int) -> a13 (from line 29)") # Attribute F.reshape (line 29)
self.assertEqual(str(id2type[134]), "a9") # Name dec_z (line 29)
self.assertEqual(str(id2type[136]), "(int, int)") # Tuple (batch, self.dunits) (line 29)
self.assertEqual(str(id2type[137]), "int") # Name batch (line 29)
self.assertEqual(str(id2type[139]), "int") # Attribute self.dunits (line 29)
self.assertEqual(str(id2type[140]), "class AttLoc") # Name self (line 29)
# Initial attention weights: uniform 1/len per utterance, padded; variable
# sequence lengths show up as None dimensions in the inferred shapes.
self.assertEqual(str(id2type[144]), "NoneType") # If
self.assertEqual(str(id2type[145]), "bool") # Compare (line 32)
self.assertEqual(str(id2type[146]), "NoneType") # Name att_prev (line 32)
self.assertEqual(str(id2type[149]), "NoneType") # Constant None (line 32)
self.assertEqual(str(id2type[150]), "NoneType") # Assign
self.assertEqual(str(id2type[151]), "ndarray(float32, (None,)) list") # Name att_prev (line 33)
self.assertEqual(str(id2type[153]), "ndarray(float32, (None,)) list") # ListComp (line 33)
self.assertEqual(str(id2type[154]), "ndarray(float32, (None,))") # Call self.xp.full(hh.shape[0], 1.0 / hh.shape[0], dtype=np.float32) (line 33)
self.assertEqual(str(id2type[155]), "int -> float -> ndarray(float32, (None,))") # Attribute self.xp.full (line 33)
self.assertEqual(str(id2type[156]), "class module") # Attribute self.xp (line 33)
self.assertEqual(str(id2type[157]), "class AttLoc") # Name self (line 33)
self.assertEqual(str(id2type[161]), "int") # Subscript hh.shape[0] (line 34)
self.assertEqual(str(id2type[162]), "(int, int)") # Attribute hh.shape (line 34)
self.assertEqual(str(id2type[163]), "ndarray(float32, (None, 3))") # Name hh (line 34)
self.assertEqual(str(id2type[167]), "int") # Constant 0 (line 34)
self.assertEqual(str(id2type[169]), "float") # BinOp 1.0 / hh.shape[0] (line 34)
self.assertEqual(str(id2type[170]), "float") # Constant 1.0 (line 34)
self.assertEqual(str(id2type[171]), "float -> int -> float") # Div
self.assertEqual(str(id2type[172]), "int") # Subscript hh.shape[0] (line 34)
self.assertEqual(str(id2type[173]), "(int, int)") # Attribute hh.shape (line 34)
self.assertEqual(str(id2type[174]), "ndarray(float32, (None, 3))") # Name hh (line 34)
self.assertEqual(str(id2type[178]), "int") # Constant 0 (line 34)
self.assertEqual(str(id2type[181]), "dtype(float32)") # Attribute np.float32 (line 34)
self.assertEqual(str(id2type[186]), "ndarray(float32, (None, 3))") # Name hh (line 34)
self.assertEqual(str(id2type[188]), "ndarray(float32, (None, 3)) list") # Name enc_hs (line 34)
self.assertEqual(str(id2type[190]), "NoneType") # Assign
self.assertEqual(str(id2type[191]), "Variable(float32, (None,)) list") # Name att_prev (line 35)
self.assertEqual(str(id2type[193]), "Variable(float32, (None,)) list") # ListComp (line 35)
self.assertEqual(str(id2type[194]), "Variable(float32, (None,))") # Call chainer.Variable(att) (line 35)
self.assertEqual(str(id2type[195]), "ndarray(float32, (None,)) -> Variable(float32, (None,))") # Attribute chainer.Variable (line 35)
self.assertEqual(str(id2type[199]), "ndarray(float32, (None,))") # Name att (line 35)
self.assertEqual(str(id2type[202]), "ndarray(float32, (None,))") # Name att (line 35)
self.assertEqual(str(id2type[204]), "ndarray(float32, (None,)) list") # Name att_prev (line 35)
self.assertEqual(str(id2type[206]), "NoneType") # Assign
self.assertEqual(str(id2type[207]), "Variable(float32, (None, None))") # Name att_prev (line 36)
self.assertEqual(str(id2type[209]), "Variable(float32, (None, None))") # Call F.pad_sequence(att_prev) (line 36)
self.assertEqual(str(id2type[210]), "Variable(float32, (None,)) list -> Variable(float32, (None, None))") # Attribute F.pad_sequence (line 36)
self.assertEqual(str(id2type[214]), "Variable(float32, (None,)) list") # Name att_prev (line 36)
# Location-aware convolution over the previous attention, then projection
# through mlp_att: (3, 1, 1, 4) -conv-> (3, 7, 1, 4) -> squeeze/swap -> (3, 4, 7).
self.assertEqual(str(id2type[216]), "NoneType") # Assign
self.assertEqual(str(id2type[217]), "Variable(float32, (3, 7, 1, 4))") # Name att_conv (line 40)
self.assertEqual(str(id2type[219]), "Variable(float32, (3, 7, 1, 4))") # Call self.loc_conv(F.reshape(att_prev, (batch, 1, 1, self.h_length))) (line 40)
self.assertEqual(str(id2type[220]), "Variable(float32, (3, 1, 1, 4)) -> Variable(float32, (3, 7, 1, 4))") # Attribute self.loc_conv (line 40)
self.assertEqual(str(id2type[221]), "class AttLoc") # Name self (line 40)
self.assertEqual(str(id2type[224]), "Variable(float32, (3, 1, 1, 4))") # Call F.reshape(att_prev, (batch, 1, 1, self.h_length)) (line 41)
self.assertEqual(str(id2type[225]), "Variable(float32, (None, None)) -> (int, int, int, int) -> Variable(float32, (3, 1, 1, 4))") # Attribute F.reshape (line 41)
self.assertEqual(str(id2type[229]), "Variable(float32, (None, None))") # Name att_prev (line 41)
self.assertEqual(str(id2type[231]), "(int, int, int, int)") # Tuple (batch, 1, 1, self.h_length) (line 41)
self.assertEqual(str(id2type[232]), "int") # Name batch (line 41)
self.assertEqual(str(id2type[234]), "int") # Constant 1 (line 41)
self.assertEqual(str(id2type[235]), "int") # Constant 1 (line 41)
self.assertEqual(str(id2type[236]), "int") # Attribute self.h_length (line 41)
self.assertEqual(str(id2type[237]), "class AttLoc") # Name self (line 41)
self.assertEqual(str(id2type[241]), "NoneType") # Assign
self.assertEqual(str(id2type[242]), "Variable(float32, (3, 4, 7))") # Name att_conv (line 43)
self.assertEqual(str(id2type[244]), "Variable(float32, (3, 4, 7))") # Call F.swapaxes(F.squeeze(att_conv, axis=2), 1, 2) (line 43)
self.assertEqual(str(id2type[245]), "Variable(float32, (3, 7, 4)) -> int -> int -> Variable(float32, (3, 4, 7))") # Attribute F.swapaxes (line 43)
self.assertEqual(str(id2type[249]), "Variable(float32, (3, 7, 4))") # Call F.squeeze(att_conv, axis=2) (line 43)
self.assertEqual(str(id2type[250]), "Variable(float32, (3, 7, 1, 4)) -> Variable(float32, (3, 7, 4))") # Attribute F.squeeze (line 43)
self.assertEqual(str(id2type[254]), "Variable(float32, (3, 7, 1, 4))") # Name att_conv (line 43)
self.assertEqual(str(id2type[257]), "int") # Constant 2 (line 43)
self.assertEqual(str(id2type[258]), "int") # Constant 1 (line 43)
self.assertEqual(str(id2type[259]), "int") # Constant 2 (line 43)
self.assertEqual(str(id2type[260]), "NoneType") # Assign
self.assertEqual(str(id2type[261]), "Variable(float32, (3, 4, 5))") # Name att_conv (line 45)
self.assertEqual(str(id2type[263]), "Variable(float32, (3, 4, 5))") # Call linear_tensor_3d(self.mlp_att, att_conv) (line 45)
self.assertEqual(str(id2type[264]), "class Linear -> Variable(float32, (3, 4, 7)) -> Variable(float32, (3, 4, 5))") # Name linear_tensor_3d (line 45)
self.assertEqual(str(id2type[266]), "class Linear") # Attribute self.mlp_att (line 45)
self.assertEqual(str(id2type[267]), "class AttLoc") # Name self (line 45)
self.assertEqual(str(id2type[270]), "Variable(float32, (3, 4, 7))") # Name att_conv (line 45)
# Decoder state projected and broadcast to the encoder grid, then the
# energies e = gvec(tanh(att_conv + pre_compute_enc_h + dec_z_tiled)).
self.assertEqual(str(id2type[272]), "NoneType") # Assign
self.assertEqual(str(id2type[273]), "Variable(float32, (3, 4, 5))") # Name dec_z_tiled (line 48)
self.assertEqual(str(id2type[275]), "Variable(float32, (3, 4, 5))") # Call F.broadcast_to(F.expand_dims(self.mlp_dec(dec_z_new), 1), self.pre_compute_enc_h.shape) (line 48)
self.assertEqual(str(id2type[276]), "Variable(float32, (3, 1, 5)) -> (int, int, int) -> Variable(float32, (3, 4, 5))") # Attribute F.broadcast_to (line 48)
self.assertEqual(str(id2type[280]), "Variable(float32, (3, 1, 5))") # Call F.expand_dims(self.mlp_dec(dec_z_new), 1) (line 49)
self.assertEqual(str(id2type[281]), "Variable(float32, (3, 5)) -> int -> Variable(float32, (3, 1, 5))") # Attribute F.expand_dims (line 49)
self.assertEqual(str(id2type[285]), "Variable(float32, (3, 5))") # Call self.mlp_dec(dec_z_new) (line 49)
self.assertEqual(str(id2type[286]), "Variable(float32, (3, 4)) -> Variable(float32, (3, 5))") # Attribute self.mlp_dec (line 49)
self.assertEqual(str(id2type[287]), "class AttLoc") # Name self (line 49)
self.assertEqual(str(id2type[290]), "Variable(float32, (3, 4))") # Name dec_z_new (line 49)
self.assertEqual(str(id2type[292]), "int") # Constant 1 (line 49)
self.assertEqual(str(id2type[293]), "(int, int, int)") # Attribute self.pre_compute_enc_h.shape (line 49)
self.assertEqual(str(id2type[294]), "Variable(float32, (3, 4, 5))") # Attribute self.pre_compute_enc_h (line 49)
self.assertEqual(str(id2type[295]), "class AttLoc") # Name self (line 49)
self.assertEqual(str(id2type[299]), "NoneType") # Assign
self.assertEqual(str(id2type[300]), "Variable(float32, (3, 4))") # Name e (line 54)
self.assertEqual(str(id2type[302]), "Variable(float32, (3, 4))") # Call F.squeeze(linear_tensor_3d(self.gvec, F.tanh(att_conv + self.pre_compute_enc_h + dec_z_tiled)), axis=2) (line 54)
self.assertEqual(str(id2type[303]), "Variable(float32, (3, 4, 1)) -> Variable(float32, (3, 4))") # Attribute F.squeeze (line 54)
self.assertEqual(str(id2type[307]), "Variable(float32, (3, 4, 1))") # Call linear_tensor_3d(self.gvec, F.tanh(att_conv + self.pre_compute_enc_h + dec_z_tiled)) (line 54)
self.assertEqual(str(id2type[308]), "class Linear -> Variable(float32, (3, 4, 5)) -> Variable(float32, (3, 4, 1))") # Name linear_tensor_3d (line 54)
self.assertEqual(str(id2type[310]), "class Linear") # Attribute self.gvec (line 54)
self.assertEqual(str(id2type[311]), "class AttLoc") # Name self (line 54)
self.assertEqual(str(id2type[314]), "Variable(float32, (3, 4, 5))") # Call F.tanh(att_conv + self.pre_compute_enc_h + dec_z_tiled) (line 54)
self.assertEqual(str(id2type[315]), "Variable(float32, (3, 4, 5)) -> Variable(float32, (3, 4, 5))") # Attribute F.tanh (line 54)
self.assertEqual(str(id2type[319]), "Variable(float32, (3, 4, 5))") # BinOp att_conv + self.pre_compute_enc_h + dec_z_tiled (line 55)
self.assertEqual(str(id2type[320]), "Variable(float32, (3, 4, 5))") # BinOp att_conv + self.pre_compute_enc_h (line 55)
self.assertEqual(str(id2type[321]), "Variable(float32, (3, 4, 5))") # Name att_conv (line 55)
self.assertEqual(str(id2type[323]), "Variable(float32, (3, 4, 5)) -> Variable(float32, (3, 4, 5)) -> Variable(float32, (3, 4, 5))") # Add
self.assertEqual(str(id2type[324]), "Variable(float32, (3, 4, 5))") # Attribute self.pre_compute_enc_h (line 55)
self.assertEqual(str(id2type[325]), "class AttLoc") # Name self (line 55)
self.assertEqual(str(id2type[328]), "Variable(float32, (3, 4, 5)) -> Variable(float32, (3, 4, 5)) -> Variable(float32, (3, 4, 5))") # Add
self.assertEqual(str(id2type[329]), "Variable(float32, (3, 4, 5))") # Name dec_z_tiled (line 55)
self.assertEqual(str(id2type[332]), "int") # Constant 2 (line 55)
# Softmax over the scaled energies and the weighted sum producing the
# context vector c; forward returns (c, w).
self.assertEqual(str(id2type[333]), "NoneType") # Assign
self.assertEqual(str(id2type[334]), "Variable(float32, (3, 4))") # Name w (line 59)
self.assertEqual(str(id2type[336]), "Variable(float32, (3, 4))") # Call F.softmax(scaling * e) (line 59)
self.assertEqual(str(id2type[337]), "Variable(float32, (3, 4)) -> Variable(float32, (3, 4))") # Attribute F.softmax (line 59)
self.assertEqual(str(id2type[341]), "Variable(float32, (3, 4))") # BinOp scaling * e (line 59)
self.assertEqual(str(id2type[342]), "float") # Name scaling (line 59)
self.assertEqual(str(id2type[344]), "float -> Variable(float32, (3, 4)) -> Variable(float32, (3, 4))") # Mult
self.assertEqual(str(id2type[345]), "Variable(float32, (3, 4))") # Name e (line 59)
self.assertEqual(str(id2type[347]), "NoneType") # Assign
self.assertEqual(str(id2type[348]), "Variable(float32, (3, 3))") # Name c (line 63)
self.assertEqual(str(id2type[350]), "Variable(float32, (3, 3))") # Call F.sum(self.enc_h * F.broadcast_to(F.expand_dims(w, 2), self.enc_h.shape), axis=1) (line 63)
self.assertEqual(str(id2type[351]), "Variable(float32, (3, 4, 3)) -> Variable(float32, (3, 3))") # Attribute F.sum (line 63)
self.assertEqual(str(id2type[355]), "Variable(float32, (3, 4, 3))") # BinOp self.enc_h * F.broadcast_to(F.expand_dims(w, 2), self.enc_h.shape) (line 63)
self.assertEqual(str(id2type[356]), "Variable(float32, (3, 4, 3))") # Attribute self.enc_h (line 63)
self.assertEqual(str(id2type[357]), "class AttLoc") # Name self (line 63)
self.assertEqual(str(id2type[360]), "Variable(float32, (3, 4, 3)) -> Variable(float32, (3, 4, 3)) -> Variable(float32, (3, 4, 3))") # Mult
self.assertEqual(str(id2type[361]), "Variable(float32, (3, 4, 3))") # Call F.broadcast_to(F.expand_dims(w, 2), self.enc_h.shape) (line 63)
self.assertEqual(str(id2type[362]), "Variable(float32, (3, 4, 1)) -> (int, int, int) -> Variable(float32, (3, 4, 3))") # Attribute F.broadcast_to (line 63)
self.assertEqual(str(id2type[366]), "Variable(float32, (3, 4, 1))") # Call F.expand_dims(w, 2) (line 63)
self.assertEqual(str(id2type[367]), "Variable(float32, (3, 4)) -> int -> Variable(float32, (3, 4, 1))") # Attribute F.expand_dims (line 63)
self.assertEqual(str(id2type[371]), "Variable(float32, (3, 4))") # Name w (line 63)
self.assertEqual(str(id2type[373]), "int") # Constant 2 (line 63)
self.assertEqual(str(id2type[374]), "(int, int, int)") # Attribute self.enc_h.shape (line 63)
self.assertEqual(str(id2type[375]), "Variable(float32, (3, 4, 3))") # Attribute self.enc_h (line 63)
self.assertEqual(str(id2type[376]), "class AttLoc") # Name self (line 63)
self.assertEqual(str(id2type[381]), "int") # Constant 1 (line 63)
self.assertEqual(str(id2type[382]), "(Variable(float32, (3, 3)), Variable(float32, (3, 4)))") # Return
self.assertEqual(str(id2type[383]), "(Variable(float32, (3, 3)), Variable(float32, (3, 4)))") # Tuple (c, w) (line 65)
self.assertEqual(str(id2type[384]), "Variable(float32, (3, 3))") # Name c (line 65)
self.assertEqual(str(id2type[386]), "Variable(float32, (3, 4))") # Name w (line 65)
# linear_tensor_3d is monomorphized per call site, so three separate
# FunctionDef instantiations are recorded: (3,4,3)->(3,4,5) for mlp_enc,
# (3,4,7)->(3,4,5) for mlp_att, and (3,4,5)->(3,4,1) for gvec.
self.assertEqual(str(id2type[389]), "class Linear -> Variable(float32, (3, 4, 3)) -> Variable(float32, (3, 4, 5))") # FunctionDef linear_tensor_3d (line 1)
self.assertEqual(str(id2type[395]), "NoneType") # Expr
self.assertEqual(str(id2type[396]), "string") # Constant "..." (line 8)
self.assertEqual(str(id2type[397]), "Variable(float32, (3, 4, 5))") # Return
self.assertEqual(str(id2type[398]), "Variable(float32, (3, 4, 5))") # Call linear(x, n_batch_axes=2) (line 9)
self.assertEqual(str(id2type[399]), "Variable(float32, (3, 4, 3)) -> Variable(float32, (3, 4, 5))") # Name linear (line 9)
self.assertEqual(str(id2type[401]), "Variable(float32, (3, 4, 3))") # Name x (line 9)
self.assertEqual(str(id2type[404]), "int") # Constant 2 (line 9)
self.assertEqual(str(id2type[405]), "class Linear -> Variable(float32, (3, 4, 7)) -> Variable(float32, (3, 4, 5))") # FunctionDef linear_tensor_3d (line 1)
self.assertEqual(str(id2type[411]), "NoneType") # Expr
self.assertEqual(str(id2type[412]), "string") # Constant "..." (line 8)
self.assertEqual(str(id2type[413]), "Variable(float32, (3, 4, 5))") # Return
self.assertEqual(str(id2type[414]), "Variable(float32, (3, 4, 5))") # Call linear(x, n_batch_axes=2) (line 9)
self.assertEqual(str(id2type[415]), "Variable(float32, (3, 4, 7)) -> Variable(float32, (3, 4, 5))") # Name linear (line 9)
self.assertEqual(str(id2type[417]), "Variable(float32, (3, 4, 7))") # Name x (line 9)
self.assertEqual(str(id2type[420]), "int") # Constant 2 (line 9)
self.assertEqual(str(id2type[421]), "class Linear -> Variable(float32, (3, 4, 5)) -> Variable(float32, (3, 4, 1))") # FunctionDef linear_tensor_3d (line 1)
self.assertEqual(str(id2type[427]), "NoneType") # Expr
self.assertEqual(str(id2type[428]), "string") # Constant "..." (line 8)
self.assertEqual(str(id2type[429]), "Variable(float32, (3, 4, 1))") # Return
self.assertEqual(str(id2type[430]), "Variable(float32, (3, 4, 1))") # Call linear(x, n_batch_axes=2) (line 9)
self.assertEqual(str(id2type[431]), "Variable(float32, (3, 4, 5)) -> Variable(float32, (3, 4, 1))") # Name linear (line 9)
self.assertEqual(str(id2type[433]), "Variable(float32, (3, 4, 5))") # Name x (line 9)
self.assertEqual(str(id2type[436]), "int") # Constant 2 (line 9)
# === END ASSERTIONS for AttLoc ===
def test_StatelessLSTM(self):
model, forward_args = gen_StatelessLSTM_model()
id2type = generate_id2type_from_forward(model, forward_args)
# === BEGIN ASSERTIONS for StatelessLSTM ===
self.assertEqual(str(id2type[1]), "class StatelessLSTM -> Variable(float32, (3, 4)) -> Variable(float32, (3, 4)) -> Variable(float32, (3, 7)) -> (Variable(float32, (3, 4)), Variable(float32, (3, 4)))") # FunctionDef forward (line 1)
self.assertEqual(str(id2type[11]), "NoneType") # Expr
self.assertEqual(str(id2type[12]), "string") # Constant "..." (line 14)
self.assertEqual(str(id2type[13]), "NoneType") # Assign
self.assertEqual(str(id2type[14]), "Variable(float32, (3, 16))") # Name lstm_in (line 22)
self.assertEqual(str(id2type[16]), "Variable(float32, (3, 16))") # Call self.upward(x) (line 22)
self.assertEqual(str(id2type[17]), "Variable(float32, (3, 7)) -> Variable(float32, (3, 16))") # Attribute self.upward (line 22)
self.assertEqual(str(id2type[18]), "class StatelessLSTM") # Name self (line 22)
self.assertEqual(str(id2type[21]), "Variable(float32, (3, 7))") # Name x (line 22)
self.assertEqual(str(id2type[23]), "NoneType") # If
self.assertEqual(str(id2type[24]), "bool") # Compare (line 23)
self.assertEqual(str(id2type[25]), "Variable(float32, (3, 4))") # Name h (line 23)
self.assertEqual(str(id2type[28]), "NoneType") # Constant None (line 23)
self.assertEqual(str(id2type[29]), "NoneType") # AugAssign
self.assertEqual(str(id2type[30]), "Variable(float32, (3, 16))") # Name lstm_in (line 24)
self.assertEqual(str(id2type[32]), "Variable(float32, (3, 16)) -> Variable(float32, (3, 16)) -> Variable(float32, (3, 16))") # Add
self.assertEqual(str(id2type[33]), "Variable(float32, (3, 16))") # Call self.lateral(h) (line 24)
self.assertEqual(str(id2type[34]), "Variable(float32, (3, 4)) -> Variable(float32, (3, 16))") # Attribute self.lateral (line 24)
self.assertEqual(str(id2type[35]), "class StatelessLSTM") # Name self (line 24)
self.assertEqual(str(id2type[38]), "Variable(float32, (3, 4))") # Name h (line 24)
self.assertEqual(str(id2type[40]), "NoneType") # If
self.assertEqual(str(id2type[41]), "bool") # Compare (line 25)
self.assertEqual(str(id2type[42]), "Variable(float32, (3, 4))") # Name c (line 25)
self.assertEqual(str(id2type[45]), "NoneType") # Constant None (line 25)
self.assertEqual(str(id2type[46]), "NoneType") # Assign
self.assertEqual(str(id2type[47]), "Variable(float32, (3, 4))") # Name c (line 31)
self.assertEqual(str(id2type[49]), "Variable(float32, (3, 4))") # Call variable.Variable(self.xp.zeros((x.shape[0], self.state_size), dtype=self.xp.float32)) (line 31)
self.assertEqual(str(id2type[50]), "ndarray(float32, (3, 4)) -> Variable(float32, (3, 4))") # Attribute variable.Variable (line 31)
self.assertEqual(str(id2type[54]), "ndarray(float32, (3, 4))") # Call self.xp.zeros((x.shape[0], self.state_size), dtype=self.xp.float32) (line 32)
self.assertEqual(str(id2type[55]), "(int, int) -> ndarray(float32, (3, 4))") # Attribute self.xp.zeros (line 32)
self.assertEqual(str(id2type[56]), "class module") # Attribute self.xp (line 32)
self.assertEqual(str(id2type[57]), "class StatelessLSTM") # Name self (line 32)
self.assertEqual(str(id2type[61]), "(int, int)") # Tuple (x.shape[0], self.state_size) (line 32)
self.assertEqual(str(id2type[62]), "int") # Subscript x.shape[0] (line 32)
self.assertEqual(str(id2type[63]), "(int, int)") # Attribute x.shape (line 32)
self.assertEqual(str(id2type[64]), "Variable(float32, (3, 7))") # Name x (line 32)
self.assertEqual(str(id2type[68]), "int") # Constant 0 (line 32)
self.assertEqual(str(id2type[70]), "int") # Attribute self.state_size (line 32)
self.assertEqual(str(id2type[71]), "class StatelessLSTM") # Name self (line 32)
self.assertEqual(str(id2type[76]), "dtype(float32)") # Attribute self.xp.float32 (line 32)
self.assertEqual(str(id2type[77]), "class module") # Attribute self.xp (line 32)
self.assertEqual(str(id2type[78]), "class StatelessLSTM") # Name self (line 32)
self.assertEqual(str(id2type[82]), "(Variable(float32, (3, 4)), Variable(float32, (3, 4)))") # Return
self.assertEqual(str(id2type[83]), "(Variable(float32, (3, 4)), Variable(float32, (3, 4)))") # Call lstm_forward(c, lstm_in) (line 34)
self.assertEqual(str(id2type[84]), "Variable(float32, (3, 4)) -> Variable(float32, (3, 16)) -> (Variable(float32, (3, 4)), Variable(float32, (3, 4)))") # Name lstm_forward (line 34)
self.assertEqual(str(id2type[86]), "Variable(float32, (3, 4))") # Name c (line 34)
self.assertEqual(str(id2type[88]), "Variable(float32, (3, 16))") # Name lstm_in (line 34)
self.assertEqual(str(id2type[90]), "Variable(float32, (3, 4)) -> Variable(float32, (3, 16)) -> (Variable(float32, (3, 4)), Variable(float32, (3, 4)))") # FunctionDef lstm_forward (line 1)
self.assertEqual(str(id2type[96]), "NoneType") # Assign
self.assertEqual(str(id2type[97]), "(Variable(float32, (3, 4)), Variable(float32, (3, 4)), Variable(float32, (3, 4)), Variable(float32, (3, 4)))") # Tuple (a, i, f, o) (line 2)
self.assertEqual(str(id2type[98]), "Variable(float32, (3, 4))") # Name a (line 2)
self.assertEqual(str(id2type[100]), "Variable(float32, (3, 4))") # Name i (line 2)
self.assertEqual(str(id2type[102]), "Variable(float32, (3, 4))") # Name f (line 2)
self.assertEqual(str(id2type[104]), "Variable(float32, (3, 4))") # Name o (line 2)
self.assertEqual(str(id2type[107]), "(Variable(float32, (3, 4)), Variable(float32, (3, 4)), Variable(float32, (3, 4)), Variable(float32, (3, 4)))") # Call _extract_gates(x) (line 2)
self.assertEqual(str(id2type[108]), "Variable(float32, (3, 16)) -> (Variable(float32, (3, 4)), Variable(float32, (3, 4)), Variable(float32, (3, 4)), Variable(float32, (3, 4)))") # Name _extract_gates (line 2)
self.assertEqual(str(id2type[110]), "Variable(float32, (3, 16))") # Name x (line 2)
self.assertEqual(str(id2type[112]), "NoneType") # Assign
self.assertEqual(str(id2type[113]), "int") # Name batch (line 3)
self.assertEqual(str(id2type[115]), "int") # Call len(x) (line 3)
self.assertEqual(str(id2type[116]), "Variable(float32, (3, 16)) -> int") # Name len (line 3)
self.assertEqual(str(id2type[118]), "Variable(float32, (3, 16))") # Name x (line 3)
self.assertEqual(str(id2type[120]), "NoneType") # Assign
self.assertEqual(str(id2type[121]), "Variable(float32, (3, 4))") # Name a (line 5)
self.assertEqual(str(id2type[123]), "Variable(float32, (3, 4))") # Call F.tanh(a) (line 5)
self.assertEqual(str(id2type[124]), "Variable(float32, (3, 4)) -> Variable(float32, (3, 4))") # Attribute F.tanh (line 5)
self.assertEqual(str(id2type[128]), "Variable(float32, (3, 4))") # Name a (line 5)
self.assertEqual(str(id2type[130]), "NoneType") # Assign
self.assertEqual(str(id2type[131]), "Variable(float32, (3, 4))") # Name i (line 6)
self.assertEqual(str(id2type[133]), "Variable(float32, (3, 4))") # Call F.sigmoid(i) (line 6)
self.assertEqual(str(id2type[134]), "Variable(float32, (3, 4)) -> Variable(float32, (3, 4))") # Attribute F.sigmoid (line 6)
self.assertEqual(str(id2type[138]), "Variable(float32, (3, 4))") # Name i (line 6)
self.assertEqual(str(id2type[140]), "NoneType") # Assign
self.assertEqual(str(id2type[141]), "Variable(float32, (3, 4))") # Name f (line 7)
self.assertEqual(str(id2type[143]), "Variable(float32, (3, 4))") # Call F.sigmoid(f) (line 7)
self.assertEqual(str(id2type[144]), "Variable(float32, (3, 4)) -> Variable(float32, (3, 4))") # Attribute F.sigmoid (line 7)
self.assertEqual(str(id2type[148]), "Variable(float32, (3, 4))") # Name f (line 7)
self.assertEqual(str(id2type[150]), "NoneType") # Assign
self.assertEqual(str(id2type[151]), "Variable(float32, (3, 4))") # Name o (line 8)
self.assertEqual(str(id2type[153]), "Variable(float32, (3, 4))") # Call F.sigmoid(o) (line 8)
self.assertEqual(str(id2type[154]), "Variable(float32, (3, 4)) -> Variable(float32, (3, 4))") # Attribute F.sigmoid (line 8)
self.assertEqual(str(id2type[158]), "Variable(float32, (3, 4))") # Name o (line 8)
self.assertEqual(str(id2type[160]), "NoneType") # Assign
self.assertEqual(str(id2type[161]), "Variable(float32, (3, 4))") # Name c_next (line 10)
self.assertEqual(str(id2type[163]), "Variable(float32, (3, 4))") # BinOp a * i + f * c_prev (line 10)
self.assertEqual(str(id2type[164]), "Variable(float32, (3, 4))") # BinOp a * i (line 10)
self.assertEqual(str(id2type[165]), "Variable(float32, (3, 4))") # Name a (line 10)
self.assertEqual(str(id2type[167]), "Variable(float32, (3, 4)) -> Variable(float32, (3, 4)) -> Variable(float32, (3, 4))") # Mult
self.assertEqual(str(id2type[168]), "Variable(float32, (3, 4))") # Name i (line 10)
self.assertEqual(str(id2type[170]), "Variable(float32, (3, 4)) -> Variable(float32, (3, 4)) -> Variable(float32, (3, 4))") # Add
self.assertEqual(str(id2type[171]), "Variable(float32, (3, 4))") # BinOp f * c_prev (line 10)
self.assertEqual(str(id2type[172]), "Variable(float32, (3, 4))") # Name f (line 10)
self.assertEqual(str(id2type[174]), "Variable(float32, (3, 4)) -> Variable(float32, (3, 4)) -> Variable(float32, (3, 4))") # Mult
self.assertEqual(str(id2type[175]), "Variable(float32, (3, 4))") # Name c_prev (line 10)
self.assertEqual(str(id2type[177]), "NoneType") # Assign
self.assertEqual(str(id2type[178]), "Variable(float32, (3, 4))") # Name h (line 11)
self.assertEqual(str(id2type[180]), "Variable(float32, (3, 4))") # BinOp o * F.tanh(c_next) (line 11)
self.assertEqual(str(id2type[181]), "Variable(float32, (3, 4))") # Name o (line 11)
self.assertEqual(str(id2type[183]), "Variable(float32, (3, 4)) -> Variable(float32, (3, 4)) -> Variable(float32, (3, 4))") # Mult
self.assertEqual(str(id2type[184]), "Variable(float32, (3, 4))") # Call F.tanh(c_next) (line 11)
self.assertEqual(str(id2type[185]), "Variable(float32, (3, 4)) -> Variable(float32, (3, 4))") # Attribute F.tanh (line 11)
self.assertEqual(str(id2type[189]), "Variable(float32, (3, 4))") # Name c_next (line 11)
self.assertEqual(str(id2type[191]), "(Variable(float32, (3, 4)), Variable(float32, (3, 4)))") # Return
self.assertEqual(str(id2type[192]), "(Variable(float32, (3, 4)), Variable(float32, (3, 4)))") # Tuple (c_next, h) (line 12)
self.assertEqual(str(id2type[193]), "Variable(float32, (3, 4))") # Name c_next (line 12)
self.assertEqual(str(id2type[195]), "Variable(float32, (3, 4))") # Name h (line 12)
self.assertEqual(str(id2type[198]), "Variable(float32, (3, 16)) -> (Variable(float32, (3, 4)), Variable(float32, (3, 4)), Variable(float32, (3, 4)), Variable(float32, (3, 4)))") # FunctionDef _extract_gates (line 1)
self.assertEqual(str(id2type[202]), "NoneType") # Assign
self.assertEqual(str(id2type[203]), "Variable(float32, (3, 4, 4))") # Name r (line 2)
self.assertEqual(str(id2type[205]), "Variable(float32, (3, 4, 4))") # Call F.reshape(x, (len(x), x.shape[1] // 4, 4) + x.shape[2::]) (line 2)
self.assertEqual(str(id2type[206]), "Variable(float32, (3, 16)) -> (int, int, int) -> Variable(float32, (3, 4, 4))") # Attribute F.reshape (line 2)
self.assertEqual(str(id2type[210]), "Variable(float32, (3, 16))") # Name x (line 2)
self.assertEqual(str(id2type[212]), "(int, int, int)") # BinOp (len(x), x.shape[1] // 4, 4) + x.shape[2::] (line 2)
self.assertEqual(str(id2type[213]), "(int, int, int)") # Tuple (len(x), x.shape[1] // 4, 4) (line 2)
self.assertEqual(str(id2type[214]), "int") # Call len(x) (line 2)
self.assertEqual(str(id2type[215]), "Variable(float32, (3, 16)) -> int") # Name len (line 2)
self.assertEqual(str(id2type[217]), "Variable(float32, (3, 16))") # Name x (line 2)
self.assertEqual(str(id2type[219]), "int") # BinOp x.shape[1] // 4 (line 2)
self.assertEqual(str(id2type[220]), "int") # Subscript x.shape[1] (line 2)
self.assertEqual(str(id2type[221]), "(int, int)") # Attribute x.shape (line 2)
self.assertEqual(str(id2type[222]), "Variable(float32, (3, 16))") # Name x (line 2)
self.assertEqual(str(id2type[226]), "int") # Constant 1 (line 2)
self.assertEqual(str(id2type[228]), "int -> int -> int") # FloorDiv
self.assertEqual(str(id2type[229]), "int") # Constant 4 (line 2)
self.assertEqual(str(id2type[230]), "int") # Constant 4 (line 2)
self.assertEqual(str(id2type[232]), "(int, int, int) -> () -> (int, int, int)") # Add
self.assertEqual(str(id2type[233]), "()") # Subscript x.shape[2::] (line 2)
self.assertEqual(str(id2type[234]), "(int, int)") # Attribute x.shape (line 2)
self.assertEqual(str(id2type[235]), "Variable(float32, (3, 16))") # Name x (line 2)
self.assertEqual(str(id2type[239]), "int") # Constant 2 (line 2)
self.assertEqual(str(id2type[241]), "NoneType") # Assign
self.assertEqual(str(id2type[242]), "(Variable(float32, (3, 4)), Variable(float32, (3, 4)), Variable(float32, (3, 4)), Variable(float32, (3, 4)))") # Name r (line 3)
self.assertEqual(str(id2type[244]), "(Variable(float32, (3, 4)), Variable(float32, (3, 4)), Variable(float32, (3, 4)), Variable(float32, (3, 4)))") # Call F.separate(r, axis=2) (line 3)
self.assertEqual(str(id2type[245]), "Variable(float32, (3, 4, 4)) -> (Variable(float32, (3, 4)), Variable(float32, (3, 4)), Variable(float32, (3, 4)), Variable(float32, (3, 4)))") # Attribute F.separate (line 3)
self.assertEqual(str(id2type[249]), "Variable(float32, (3, 4, 4))") # Name r (line 3)
self.assertEqual(str(id2type[252]), "int") # Constant 2 (line 3)
self.assertEqual(str(id2type[253]), "(Variable(float32, (3, 4)), Variable(float32, (3, 4)), Variable(float32, (3, 4)), Variable(float32, (3, 4)))") # Return
self.assertEqual(str(id2type[254]), "(Variable(float32, (3, 4)), Variable(float32, (3, 4)), Variable(float32, (3, 4)), Variable(float32, (3, 4)))") # Tuple (r[0], r[1], r[2], r[3]) (line 4)
self.assertEqual(str(id2type[255]), "Variable(float32, (3, 4))") # Subscript r[0] (line 4)
self.assertEqual(str(id2type[256]), "(Variable(float32, (3, 4)), Variable(float32, (3, 4)), Variable(float32, (3, 4)), Variable(float32, (3, 4)))") # Name r (line 4)
self.assertEqual(str(id2type[259]), "int") # Constant 0 (line 4)
self.assertEqual(str(id2type[261]), "Variable(float32, (3, 4))") # Subscript r[1] (line 4)
self.assertEqual(str(id2type[262]), "(Variable(float32, (3, 4)), Variable(float32, (3, 4)), Variable(float32, (3, 4)), Variable(float32, (3, 4)))") # Name r (line 4)
self.assertEqual(str(id2type[265]), "int") # Constant 1 (line 4)
self.assertEqual(str(id2type[267]), "Variable(float32, (3, 4))") # Subscript r[2] (line 4)
self.assertEqual(str(id2type[268]), "(Variable(float32, (3, 4)), Variable(float32, (3, 4)), Variable(float32, (3, 4)), Variable(float32, (3, 4)))") # Name r (line 4)
self.assertEqual(str(id2type[271]), "int") # Constant 2 (line 4)
self.assertEqual(str(id2type[273]), "Variable(float32, (3, 4))") # Subscript r[3] (line 4)
self.assertEqual(str(id2type[274]), "(Variable(float32, (3, 4)), Variable(float32, (3, 4)), Variable(float32, (3, 4)), Variable(float32, (3, 4)))") # Name r (line 4)
self.assertEqual(str(id2type[277]), "int") # Constant 3 (line 4)
# === END ASSERTIONS for StatelessLSTM ===
def test_VGG2L(self):
    """Check inferred types for every AST node of the VGG2L forward pass.

    NOTE(review): the assertion block below is marked BEGIN/END and keyed on
    opaque AST node ids — it appears machine-generated; regenerate it rather
    than hand-editing individual lines.
    """
    model, forward_args = gen_VGG2L_model()
    id2type = generate_id2type_from_forward(model, forward_args)
    # === BEGIN ASSERTIONS for VGG2L ===
    self.assertEqual(str(id2type[1]), "class VGG2L -> [ndarray(float32, (4, 5)), ndarray(float32, (2, 5)), ndarray(float32, (2, 5))] -> ndarray(int64, (3,)) -> (Variable(float32, (None, 256)) list, ndarray(int64, (3,)))")	# FunctionDef forward (line 1)
    self.assertEqual(str(id2type[9]), "NoneType")	# Expr
    self.assertEqual(str(id2type[10]), "string")	# Constant "..." (line 7)
    self.assertEqual(str(id2type[11]), "NoneType")	# Expr
    self.assertEqual(str(id2type[12]), "NoneType")	# Call logging.info(self.__class__.__name__ + ' input lengths: ' + str(ilens)) (line 8)
    self.assertEqual(str(id2type[33]), "NoneType")	# Assign
    self.assertEqual(str(id2type[34]), "Variable(float32, (3, 4, 5))")	# Name xs (line 11)
    self.assertEqual(str(id2type[36]), "Variable(float32, (3, 4, 5))")	# Call F.pad_sequence(xs) (line 11)
    self.assertEqual(str(id2type[37]), "[ndarray(float32, (4, 5)), ndarray(float32, (2, 5)), ndarray(float32, (2, 5))] -> Variable(float32, (3, 4, 5))")	# Attribute F.pad_sequence (line 11)
    self.assertEqual(str(id2type[41]), "[ndarray(float32, (4, 5)), ndarray(float32, (2, 5)), ndarray(float32, (2, 5))]")	# Name xs (line 11)
    self.assertEqual(str(id2type[43]), "NoneType")	# Assign
    self.assertEqual(str(id2type[44]), "Variable(float32, (3, 1, 4, 5))")	# Name xs (line 14)
    self.assertEqual(str(id2type[46]), "Variable(float32, (3, 1, 4, 5))")	# Call F.swapaxes(F.reshape(xs, (xs.shape[0], xs.shape[1], self.in_channel, xs.shape[2] // self.in_channel)), 1, 2) (line 14)
    self.assertEqual(str(id2type[47]), "Variable(float32, (3, 4, 1, 5)) -> int -> int -> Variable(float32, (3, 1, 4, 5))")	# Attribute F.swapaxes (line 14)
    self.assertEqual(str(id2type[51]), "Variable(float32, (3, 4, 1, 5))")	# Call F.reshape(xs, (xs.shape[0], xs.shape[1], self.in_channel, xs.shape[2] // self.in_channel)) (line 14)
    self.assertEqual(str(id2type[52]), "Variable(float32, (3, 4, 5)) -> (int, int, int, int) -> Variable(float32, (3, 4, 1, 5))")	# Attribute F.reshape (line 14)
    self.assertEqual(str(id2type[56]), "Variable(float32, (3, 4, 5))")	# Name xs (line 15)
    self.assertEqual(str(id2type[58]), "(int, int, int, int)")	# Tuple (xs.shape[0], xs.shape[1], self.in_channel, xs.shape[2] // self.in_channel) (line 15)
    self.assertEqual(str(id2type[59]), "int")	# Subscript xs.shape[0] (line 15)
    self.assertEqual(str(id2type[60]), "(int, int, int)")	# Attribute xs.shape (line 15)
    self.assertEqual(str(id2type[61]), "Variable(float32, (3, 4, 5))")	# Name xs (line 15)
    self.assertEqual(str(id2type[65]), "int")	# Constant 0 (line 15)
    self.assertEqual(str(id2type[67]), "int")	# Subscript xs.shape[1] (line 15)
    self.assertEqual(str(id2type[68]), "(int, int, int)")	# Attribute xs.shape (line 15)
    self.assertEqual(str(id2type[69]), "Variable(float32, (3, 4, 5))")	# Name xs (line 15)
    self.assertEqual(str(id2type[73]), "int")	# Constant 1 (line 15)
    self.assertEqual(str(id2type[75]), "int")	# Attribute self.in_channel (line 15)
    self.assertEqual(str(id2type[76]), "class VGG2L")	# Name self (line 15)
    self.assertEqual(str(id2type[79]), "int")	# BinOp xs.shape[2] // self.in_channel (line 15)
    self.assertEqual(str(id2type[80]), "int")	# Subscript xs.shape[2] (line 15)
    self.assertEqual(str(id2type[81]), "(int, int, int)")	# Attribute xs.shape (line 15)
    self.assertEqual(str(id2type[82]), "Variable(float32, (3, 4, 5))")	# Name xs (line 15)
    self.assertEqual(str(id2type[86]), "int")	# Constant 2 (line 15)
    self.assertEqual(str(id2type[88]), "int -> int -> int")	# FloorDiv
    self.assertEqual(str(id2type[89]), "int")	# Attribute self.in_channel (line 15)
    self.assertEqual(str(id2type[90]), "class VGG2L")	# Name self (line 15)
    self.assertEqual(str(id2type[94]), "int")	# Constant 1 (line 15)
    self.assertEqual(str(id2type[95]), "int")	# Constant 2 (line 15)
    self.assertEqual(str(id2type[96]), "NoneType")	# Assign
    self.assertEqual(str(id2type[97]), "Variable(float32, (3, 64, 4, 5))")	# Name xs (line 17)
    self.assertEqual(str(id2type[99]), "Variable(float32, (3, 64, 4, 5))")	# Call F.relu(self.conv1_1(xs)) (line 17)
    self.assertEqual(str(id2type[100]), "Variable(float32, (3, 64, 4, 5)) -> Variable(float32, (3, 64, 4, 5))")	# Attribute F.relu (line 17)
    self.assertEqual(str(id2type[104]), "Variable(float32, (3, 64, 4, 5))")	# Call self.conv1_1(xs) (line 17)
    self.assertEqual(str(id2type[105]), "Variable(float32, (3, 1, 4, 5)) -> Variable(float32, (3, 64, 4, 5))")	# Attribute self.conv1_1 (line 17)
    self.assertEqual(str(id2type[106]), "class VGG2L")	# Name self (line 17)
    self.assertEqual(str(id2type[109]), "Variable(float32, (3, 1, 4, 5))")	# Name xs (line 17)
    self.assertEqual(str(id2type[111]), "NoneType")	# Assign
    self.assertEqual(str(id2type[112]), "Variable(float32, (3, 64, 4, 5))")	# Name xs (line 18)
    self.assertEqual(str(id2type[114]), "Variable(float32, (3, 64, 4, 5))")	# Call F.relu(self.conv1_2(xs)) (line 18)
    self.assertEqual(str(id2type[115]), "Variable(float32, (3, 64, 4, 5)) -> Variable(float32, (3, 64, 4, 5))")	# Attribute F.relu (line 18)
    self.assertEqual(str(id2type[119]), "Variable(float32, (3, 64, 4, 5))")	# Call self.conv1_2(xs) (line 18)
    self.assertEqual(str(id2type[120]), "Variable(float32, (3, 64, 4, 5)) -> Variable(float32, (3, 64, 4, 5))")	# Attribute self.conv1_2 (line 18)
    self.assertEqual(str(id2type[121]), "class VGG2L")	# Name self (line 18)
    self.assertEqual(str(id2type[124]), "Variable(float32, (3, 64, 4, 5))")	# Name xs (line 18)
    self.assertEqual(str(id2type[126]), "NoneType")	# Assign
    self.assertEqual(str(id2type[127]), "Variable(float32, (3, 64, 2, 3))")	# Name xs (line 19)
    self.assertEqual(str(id2type[129]), "Variable(float32, (3, 64, 2, 3))")	# Call F.max_pooling_2d(xs, 2, stride=2) (line 19)
    self.assertEqual(str(id2type[130]), "Variable(float32, (3, 64, 4, 5)) -> int -> Variable(float32, (3, 64, 2, 3))")	# Attribute F.max_pooling_2d (line 19)
    self.assertEqual(str(id2type[134]), "Variable(float32, (3, 64, 4, 5))")	# Name xs (line 19)
    self.assertEqual(str(id2type[136]), "int")	# Constant 2 (line 19)
    self.assertEqual(str(id2type[138]), "int")	# Constant 2 (line 19)
    self.assertEqual(str(id2type[139]), "NoneType")	# Assign
    self.assertEqual(str(id2type[140]), "Variable(float32, (3, 128, 2, 3))")	# Name xs (line 21)
    self.assertEqual(str(id2type[142]), "Variable(float32, (3, 128, 2, 3))")	# Call F.relu(self.conv2_1(xs)) (line 21)
    self.assertEqual(str(id2type[143]), "Variable(float32, (3, 128, 2, 3)) -> Variable(float32, (3, 128, 2, 3))")	# Attribute F.relu (line 21)
    self.assertEqual(str(id2type[147]), "Variable(float32, (3, 128, 2, 3))")	# Call self.conv2_1(xs) (line 21)
    self.assertEqual(str(id2type[148]), "Variable(float32, (3, 64, 2, 3)) -> Variable(float32, (3, 128, 2, 3))")	# Attribute self.conv2_1 (line 21)
    self.assertEqual(str(id2type[149]), "class VGG2L")	# Name self (line 21)
    self.assertEqual(str(id2type[152]), "Variable(float32, (3, 64, 2, 3))")	# Name xs (line 21)
    self.assertEqual(str(id2type[154]), "NoneType")	# Assign
    self.assertEqual(str(id2type[155]), "Variable(float32, (3, 128, 2, 3))")	# Name xs (line 22)
    self.assertEqual(str(id2type[157]), "Variable(float32, (3, 128, 2, 3))")	# Call F.relu(self.conv2_2(xs)) (line 22)
    self.assertEqual(str(id2type[158]), "Variable(float32, (3, 128, 2, 3)) -> Variable(float32, (3, 128, 2, 3))")	# Attribute F.relu (line 22)
    self.assertEqual(str(id2type[162]), "Variable(float32, (3, 128, 2, 3))")	# Call self.conv2_2(xs) (line 22)
    self.assertEqual(str(id2type[163]), "Variable(float32, (3, 128, 2, 3)) -> Variable(float32, (3, 128, 2, 3))")	# Attribute self.conv2_2 (line 22)
    self.assertEqual(str(id2type[164]), "class VGG2L")	# Name self (line 22)
    self.assertEqual(str(id2type[167]), "Variable(float32, (3, 128, 2, 3))")	# Name xs (line 22)
    self.assertEqual(str(id2type[169]), "NoneType")	# Assign
    self.assertEqual(str(id2type[170]), "Variable(float32, (3, 128, 1, 2))")	# Name xs (line 23)
    self.assertEqual(str(id2type[172]), "Variable(float32, (3, 128, 1, 2))")	# Call F.max_pooling_2d(xs, 2, stride=2) (line 23)
    self.assertEqual(str(id2type[173]), "Variable(float32, (3, 128, 2, 3)) -> int -> Variable(float32, (3, 128, 1, 2))")	# Attribute F.max_pooling_2d (line 23)
    self.assertEqual(str(id2type[177]), "Variable(float32, (3, 128, 2, 3))")	# Name xs (line 23)
    self.assertEqual(str(id2type[179]), "int")	# Constant 2 (line 23)
    self.assertEqual(str(id2type[181]), "int")	# Constant 2 (line 23)
    self.assertEqual(str(id2type[182]), "NoneType")	# Assign
    self.assertEqual(str(id2type[183]), "ndarray(int64, (3,))")	# Name ilens (line 28)
    self.assertEqual(str(id2type[185]), "ndarray(int64, (3,))")	# BinOp ilens + 1 // 2 (line 28)
    self.assertEqual(str(id2type[186]), "ndarray(int64, (3,))")	# BinOp ilens + 1 (line 28)
    self.assertEqual(str(id2type[187]), "ndarray(int64, (3,))")	# Name ilens (line 28)
    self.assertEqual(str(id2type[189]), "ndarray(int64, (3,)) -> int -> ndarray(int64, (3,))")	# Add
    self.assertEqual(str(id2type[190]), "int")	# Constant 1 (line 28)
    self.assertEqual(str(id2type[191]), "ndarray(int64, (3,)) -> int -> ndarray(int64, (3,))")	# FloorDiv
    self.assertEqual(str(id2type[192]), "int")	# Constant 2 (line 28)
    self.assertEqual(str(id2type[193]), "NoneType")	# Assign
    self.assertEqual(str(id2type[194]), "ndarray(int64, (3,))")	# Name ilens (line 29)
    self.assertEqual(str(id2type[196]), "ndarray(int64, (3,))")	# BinOp ilens + 1 // 2 (line 29)
    self.assertEqual(str(id2type[197]), "ndarray(int64, (3,))")	# BinOp ilens + 1 (line 29)
    self.assertEqual(str(id2type[198]), "ndarray(int64, (3,))")	# Name ilens (line 29)
    self.assertEqual(str(id2type[200]), "ndarray(int64, (3,)) -> int -> ndarray(int64, (3,))")	# Add
    self.assertEqual(str(id2type[201]), "int")	# Constant 1 (line 29)
    self.assertEqual(str(id2type[202]), "ndarray(int64, (3,)) -> int -> ndarray(int64, (3,))")	# FloorDiv
    self.assertEqual(str(id2type[203]), "int")	# Constant 2 (line 29)
    self.assertEqual(str(id2type[204]), "NoneType")	# Assign
    self.assertEqual(str(id2type[205]), "Variable(float32, (3, 1, 128, 2))")	# Name xs (line 36)
    self.assertEqual(str(id2type[207]), "Variable(float32, (3, 1, 128, 2))")	# Call F.swapaxes(xs, 1, 2) (line 36)
    self.assertEqual(str(id2type[208]), "Variable(float32, (3, 128, 1, 2)) -> int -> int -> Variable(float32, (3, 1, 128, 2))")	# Attribute F.swapaxes (line 36)
    self.assertEqual(str(id2type[212]), "Variable(float32, (3, 128, 1, 2))")	# Name xs (line 36)
    self.assertEqual(str(id2type[214]), "int")	# Constant 1 (line 36)
    self.assertEqual(str(id2type[215]), "int")	# Constant 2 (line 36)
    self.assertEqual(str(id2type[216]), "NoneType")	# Assign
    self.assertEqual(str(id2type[217]), "Variable(float32, (3, 1, 256))")	# Name xs (line 37)
    self.assertEqual(str(id2type[219]), "Variable(float32, (3, 1, 256))")	# Call F.reshape(xs, (xs.shape[0], xs.shape[1], xs.shape[2] * xs.shape[3])) (line 37)
    self.assertEqual(str(id2type[220]), "Variable(float32, (3, 1, 128, 2)) -> (int, int, int) -> Variable(float32, (3, 1, 256))")	# Attribute F.reshape (line 37)
    self.assertEqual(str(id2type[224]), "Variable(float32, (3, 1, 128, 2))")	# Name xs (line 38)
    self.assertEqual(str(id2type[226]), "(int, int, int)")	# Tuple (xs.shape[0], xs.shape[1], xs.shape[2] * xs.shape[3]) (line 38)
    self.assertEqual(str(id2type[227]), "int")	# Subscript xs.shape[0] (line 38)
    self.assertEqual(str(id2type[228]), "(int, int, int, int)")	# Attribute xs.shape (line 38)
    self.assertEqual(str(id2type[229]), "Variable(float32, (3, 1, 128, 2))")	# Name xs (line 38)
    self.assertEqual(str(id2type[233]), "int")	# Constant 0 (line 38)
    self.assertEqual(str(id2type[235]), "int")	# Subscript xs.shape[1] (line 38)
    self.assertEqual(str(id2type[236]), "(int, int, int, int)")	# Attribute xs.shape (line 38)
    self.assertEqual(str(id2type[237]), "Variable(float32, (3, 1, 128, 2))")	# Name xs (line 38)
    self.assertEqual(str(id2type[241]), "int")	# Constant 1 (line 38)
    self.assertEqual(str(id2type[243]), "int")	# BinOp xs.shape[2] * xs.shape[3] (line 38)
    self.assertEqual(str(id2type[244]), "int")	# Subscript xs.shape[2] (line 38)
    self.assertEqual(str(id2type[245]), "(int, int, int, int)")	# Attribute xs.shape (line 38)
    self.assertEqual(str(id2type[246]), "Variable(float32, (3, 1, 128, 2))")	# Name xs (line 38)
    self.assertEqual(str(id2type[250]), "int")	# Constant 2 (line 38)
    self.assertEqual(str(id2type[252]), "int -> int -> int")	# Mult
    self.assertEqual(str(id2type[253]), "int")	# Subscript xs.shape[3] (line 38)
    self.assertEqual(str(id2type[254]), "(int, int, int, int)")	# Attribute xs.shape (line 38)
    self.assertEqual(str(id2type[255]), "Variable(float32, (3, 1, 128, 2))")	# Name xs (line 38)
    self.assertEqual(str(id2type[259]), "int")	# Constant 3 (line 38)
    self.assertEqual(str(id2type[262]), "NoneType")	# Assign
    self.assertEqual(str(id2type[263]), "Variable(float32, (None, 256)) list")	# Name xs (line 39)
    self.assertEqual(str(id2type[265]), "Variable(float32, (None, 256)) list")	# ListComp  (line 39)
    self.assertEqual(str(id2type[266]), "Variable(float32, (None, 256))")	# Subscript xs[i, :ilens[i]:, ::] (line 39)
    self.assertEqual(str(id2type[267]), "Variable(float32, (3, 1, 256))")	# Name xs (line 39)
    self.assertEqual(str(id2type[271]), "int")	# Name i (line 39)
    self.assertEqual(str(id2type[274]), "ndarray(int64, ())")	# Subscript ilens[i] (line 39)
    self.assertEqual(str(id2type[275]), "ndarray(int64, (3,))")	# Name ilens (line 39)
    self.assertEqual(str(id2type[278]), "int")	# Name i (line 39)
    self.assertEqual(str(id2type[284]), "int")	# Name i (line 39)
    self.assertEqual(str(id2type[286]), "int list")	# Call range(len(ilens)) (line 39)
    self.assertEqual(str(id2type[287]), "int -> int list")	# Name range (line 39)
    self.assertEqual(str(id2type[289]), "int")	# Call len(ilens) (line 39)
    self.assertEqual(str(id2type[290]), "ndarray(int64, (3,)) -> int")	# Name len (line 39)
    self.assertEqual(str(id2type[292]), "ndarray(int64, (3,))")	# Name ilens (line 39)
    self.assertEqual(str(id2type[294]), "(Variable(float32, (None, 256)) list, ndarray(int64, (3,)))")	# Return
    self.assertEqual(str(id2type[295]), "(Variable(float32, (None, 256)) list, ndarray(int64, (3,)))")	# Tuple (xs, ilens) (line 41)
    self.assertEqual(str(id2type[296]), "Variable(float32, (None, 256)) list")	# Name xs (line 41)
    self.assertEqual(str(id2type[298]), "ndarray(int64, (3,))")	# Name ilens (line 41)
    # === END ASSERTIONS for VGG2L ===
def test_BLSTM(self):
    """Check inferred types for every AST node of the BLSTM forward pass.

    NOTE(review): the BEGIN/END assertion block below appears machine-generated
    (keyed on opaque AST node ids); regenerate it rather than hand-editing.
    """
    model, forward_args = gen_BLSTM_model()
    id2type = generate_id2type_from_forward(model, forward_args)
    # === BEGIN ASSERTIONS for BLSTM ===
    self.assertEqual(str(id2type[1]), "class BLSTM -> [ndarray(float32, (4, 5)), ndarray(float32, (2, 5)), ndarray(float32, (2, 5))] -> ndarray(int64, (3,)) -> ((Variable(float32, (None, 7)), Variable(float32, (None, 7)), Variable(float32, (None, 7))), ndarray(int64, (3,)))")	# FunctionDef forward (line 1)
    self.assertEqual(str(id2type[9]), "NoneType")	# Expr
    self.assertEqual(str(id2type[10]), "string")	# Constant "..." (line 7)
    self.assertEqual(str(id2type[11]), "NoneType")	# Expr
    self.assertEqual(str(id2type[12]), "NoneType")	# Call logging.info(self.__class__.__name__ + ' input lengths: ' + str(ilens)) (line 8)
    self.assertEqual(str(id2type[33]), "NoneType")	# Assign
    self.assertEqual(str(id2type[34]), "ndarray(int64, (3,))")	# Name ilens (line 10)
    self.assertEqual(str(id2type[36]), "ndarray(int64, (3,))")	# Call cuda.to_cpu(ilens) (line 10)
    self.assertEqual(str(id2type[37]), "ndarray(int64, (3,)) -> ndarray(int64, (3,))")	# Attribute cuda.to_cpu (line 10)
    self.assertEqual(str(id2type[41]), "ndarray(int64, (3,))")	# Name ilens (line 10)
    self.assertEqual(str(id2type[43]), "NoneType")	# Assign
    self.assertEqual(str(id2type[44]), "(Variable(float32, (4, 3, 3)), Variable(float32, (4, 3, 3)), [Variable(float32, (4, 6)), Variable(float32, (2, 6)), Variable(float32, (2, 6))])")	# Tuple (hy, cy, ys) (line 11)
    self.assertEqual(str(id2type[45]), "Variable(float32, (4, 3, 3))")	# Name hy (line 11)
    self.assertEqual(str(id2type[47]), "Variable(float32, (4, 3, 3))")	# Name cy (line 11)
    self.assertEqual(str(id2type[49]), "[Variable(float32, (4, 6)), Variable(float32, (2, 6)), Variable(float32, (2, 6))]")	# Name ys (line 11)
    self.assertEqual(str(id2type[52]), "(Variable(float32, (4, 3, 3)), Variable(float32, (4, 3, 3)), [Variable(float32, (4, 6)), Variable(float32, (2, 6)), Variable(float32, (2, 6))])")	# Call self.nblstm(None, None, xs) (line 11)
    self.assertEqual(str(id2type[53]), "NoneType -> NoneType -> [ndarray(float32, (4, 5)), ndarray(float32, (2, 5)), ndarray(float32, (2, 5))] -> (Variable(float32, (4, 3, 3)), Variable(float32, (4, 3, 3)), [Variable(float32, (4, 6)), Variable(float32, (2, 6)), Variable(float32, (2, 6))])")	# Attribute self.nblstm (line 11)
    self.assertEqual(str(id2type[54]), "class BLSTM")	# Name self (line 11)
    self.assertEqual(str(id2type[57]), "NoneType")	# Constant None (line 11)
    self.assertEqual(str(id2type[58]), "NoneType")	# Constant None (line 11)
    self.assertEqual(str(id2type[59]), "[ndarray(float32, (4, 5)), ndarray(float32, (2, 5)), ndarray(float32, (2, 5))]")	# Name xs (line 11)
    self.assertEqual(str(id2type[61]), "NoneType")	# Assign
    self.assertEqual(str(id2type[62]), "Variable(float32, (8, 7))")	# Name ys (line 12)
    self.assertEqual(str(id2type[64]), "Variable(float32, (8, 7))")	# Call self.l_last(F.vstack(ys)) (line 12)
    self.assertEqual(str(id2type[65]), "Variable(float32, (8, 6)) -> Variable(float32, (8, 7))")	# Attribute self.l_last (line 12)
    self.assertEqual(str(id2type[66]), "class BLSTM")	# Name self (line 12)
    self.assertEqual(str(id2type[69]), "Variable(float32, (8, 6))")	# Call F.vstack(ys) (line 12)
    self.assertEqual(str(id2type[70]), "[Variable(float32, (4, 6)), Variable(float32, (2, 6)), Variable(float32, (2, 6))] -> Variable(float32, (8, 6))")	# Attribute F.vstack (line 12)
    self.assertEqual(str(id2type[74]), "[Variable(float32, (4, 6)), Variable(float32, (2, 6)), Variable(float32, (2, 6))]")	# Name ys (line 12)
    self.assertEqual(str(id2type[76]), "NoneType")	# Assign
    self.assertEqual(str(id2type[77]), "(Variable(float32, (None, 7)), Variable(float32, (None, 7)), Variable(float32, (None, 7)))")	# Name xs (line 13)
    self.assertEqual(str(id2type[79]), "(Variable(float32, (None, 7)), Variable(float32, (None, 7)), Variable(float32, (None, 7)))")	# Call F.split_axis(ys, np.cumsum(ilens[:-1:]), 0) (line 13)
    self.assertEqual(str(id2type[80]), "Variable(float32, (8, 7)) -> ndarray(int64, (2,)) -> int -> (Variable(float32, (None, 7)), Variable(float32, (None, 7)), Variable(float32, (None, 7)))")	# Attribute F.split_axis (line 13)
    self.assertEqual(str(id2type[84]), "Variable(float32, (8, 7))")	# Name ys (line 13)
    self.assertEqual(str(id2type[86]), "ndarray(int64, (2,))")	# Call np.cumsum(ilens[:-1:]) (line 13)
    self.assertEqual(str(id2type[87]), "ndarray(int64, (2,)) -> ndarray(int64, (2,))")	# Attribute np.cumsum (line 13)
    self.assertEqual(str(id2type[91]), "ndarray(int64, (2,))")	# Subscript ilens[:-1:] (line 13)
    self.assertEqual(str(id2type[92]), "ndarray(int64, (3,))")	# Name ilens (line 13)
    self.assertEqual(str(id2type[95]), "int")	# UnaryOp -1 (line 13)
    self.assertEqual(str(id2type[97]), "int")	# Constant 1 (line 13)
    self.assertEqual(str(id2type[99]), "int")	# Constant 0 (line 13)
    self.assertEqual(str(id2type[100]), "NoneType")	# Delete
    self.assertEqual(str(id2type[105]), "NoneType")	# Assign
    self.assertEqual(str(id2type[106]), "(Variable(float32, (None, 7)), Variable(float32, (None, 7)), Variable(float32, (None, 7)))")	# Name xs (line 17)
    self.assertEqual(str(id2type[108]), "(Variable(float32, (None, 7)), Variable(float32, (None, 7)), Variable(float32, (None, 7)))")	# Call F.split_axis(F.tanh(F.vstack(xs)), np.cumsum(ilens[:-1:]), 0) (line 17)
    self.assertEqual(str(id2type[109]), "Variable(float32, (None, 7)) -> ndarray(int64, (2,)) -> int -> (Variable(float32, (None, 7)), Variable(float32, (None, 7)), Variable(float32, (None, 7)))")	# Attribute F.split_axis (line 17)
    self.assertEqual(str(id2type[113]), "Variable(float32, (None, 7))")	# Call F.tanh(F.vstack(xs)) (line 17)
    self.assertEqual(str(id2type[114]), "Variable(float32, (None, 7)) -> Variable(float32, (None, 7))")	# Attribute F.tanh (line 17)
    self.assertEqual(str(id2type[118]), "Variable(float32, (None, 7))")	# Call F.vstack(xs) (line 17)
    self.assertEqual(str(id2type[119]), "(Variable(float32, (None, 7)), Variable(float32, (None, 7)), Variable(float32, (None, 7))) -> Variable(float32, (None, 7))")	# Attribute F.vstack (line 17)
    self.assertEqual(str(id2type[123]), "(Variable(float32, (None, 7)), Variable(float32, (None, 7)), Variable(float32, (None, 7)))")	# Name xs (line 17)
    self.assertEqual(str(id2type[125]), "ndarray(int64, (2,))")	# Call np.cumsum(ilens[:-1:]) (line 17)
    self.assertEqual(str(id2type[126]), "ndarray(int64, (2,)) -> ndarray(int64, (2,))")	# Attribute np.cumsum (line 17)
    self.assertEqual(str(id2type[130]), "ndarray(int64, (2,))")	# Subscript ilens[:-1:] (line 17)
    self.assertEqual(str(id2type[131]), "ndarray(int64, (3,))")	# Name ilens (line 17)
    self.assertEqual(str(id2type[134]), "int")	# UnaryOp -1 (line 17)
    self.assertEqual(str(id2type[136]), "int")	# Constant 1 (line 17)
    self.assertEqual(str(id2type[138]), "int")	# Constant 0 (line 17)
    self.assertEqual(str(id2type[139]), "((Variable(float32, (None, 7)), Variable(float32, (None, 7)), Variable(float32, (None, 7))), ndarray(int64, (3,)))")	# Return
    self.assertEqual(str(id2type[140]), "((Variable(float32, (None, 7)), Variable(float32, (None, 7)), Variable(float32, (None, 7))), ndarray(int64, (3,)))")	# Tuple (xs, ilens) (line 24)
    self.assertEqual(str(id2type[141]), "(Variable(float32, (None, 7)), Variable(float32, (None, 7)), Variable(float32, (None, 7)))")	# Name xs (line 24)
    self.assertEqual(str(id2type[143]), "ndarray(int64, (3,))")	# Name ilens (line 24)
    # === END ASSERTIONS for BLSTM ===
def main():
    """Entry point: hand control to the unittest test runner for this module."""
    unittest.main()


# Run the test suite when invoked as a script.
if __name__ == '__main__':
    main()
| 87.886221
| 329
| 0.63037
| 11,751
| 84,195
| 4.47213
| 0.047656
| 0.215501
| 0.258601
| 0.359168
| 0.936158
| 0.894543
| 0.755005
| 0.657501
| 0.565916
| 0.462532
| 0
| 0.104117
| 0.174666
| 84,195
| 957
| 330
| 87.978056
| 0.652146
| 0.234289
| 0
| 0.220736
| 1
| 0.118172
| 0.343029
| 0
| 0
| 0
| 0
| 0
| 0.841695
| 1
| 0.014493
| false
| 0
| 0.014493
| 0
| 0.037904
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
30840d762f731c35d14104dea509d2c6b960ead1
| 192
|
py
|
Python
|
kalkulacka_nas_model.py
|
aajinka/other_files
|
623ad62b2dacf07ba14ce89d13748cb1d574684f
|
[
"MIT"
] | 1
|
2021-06-11T11:29:48.000Z
|
2021-06-11T11:29:48.000Z
|
kalkulacka_nas_model.py
|
aajinka/other_files
|
623ad62b2dacf07ba14ce89d13748cb1d574684f
|
[
"MIT"
] | null | null | null |
kalkulacka_nas_model.py
|
aajinka/other_files
|
623ad62b2dacf07ba14ce89d13748cb1d574684f
|
[
"MIT"
] | null | null | null |
def secti(a, b):
    """Return the sum of *a* and *b* after coercing both to float.

    Accepts anything float() accepts (int, float, numeric string);
    raises ValueError/TypeError on non-numeric input.
    """
    left, right = float(a), float(b)
    return left + right
def odecti(a, b):
    """Return *a* minus *b* after coercing both to float.

    Accepts anything float() accepts; raises ValueError/TypeError otherwise.
    """
    minuend = float(a)
    subtrahend = float(b)
    return minuend - subtrahend
def podil(a, b):
    """Return *a* divided by *b* after coercing both to float.

    Raises ZeroDivisionError when *b* coerces to 0.0, and
    ValueError/TypeError on non-numeric input.
    """
    numerator = float(a)
    denominator = float(b)
    return numerator / denominator
def soucin(a, b):
    """Return the product of *a* and *b* after coercing both to float.

    Accepts anything float() accepts; raises ValueError/TypeError otherwise.
    """
    factor_a, factor_b = float(a), float(b)
    return factor_a * factor_b
| 17.454545
| 30
| 0.59375
| 36
| 192
| 3.166667
| 0.25
| 0.070175
| 0.280702
| 0.45614
| 0.780702
| 0.780702
| 0.780702
| 0.605263
| 0
| 0
| 0
| 0
| 0.21875
| 192
| 11
| 31
| 17.454545
| 0.76
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
eb5aee2648569816ec82d8f3d9f4b01c1bc48721
| 10,539
|
py
|
Python
|
fastx_barber/tests/test_flag.py
|
sotiriszogos/fastx-barber
|
864720c1e3e5ec1d7742c44775ba3250614c2aa5
|
[
"MIT"
] | 2
|
2020-08-21T07:22:43.000Z
|
2020-11-05T14:06:47.000Z
|
fastx_barber/tests/test_flag.py
|
sotiriszogos/fastx-barber
|
864720c1e3e5ec1d7742c44775ba3250614c2aa5
|
[
"MIT"
] | 133
|
2020-08-01T14:08:27.000Z
|
2022-03-28T08:21:05.000Z
|
fastx_barber/tests/test_flag.py
|
sotiriszogos/fastx-barber
|
864720c1e3e5ec1d7742c44775ba3250614c2aa5
|
[
"MIT"
] | null | null | null |
"""
@author: Gabriele Girelli
@contact: gigi.ga90@gmail.com
"""
from fastx_barber import const, flag, match, random, seqio
import regex # type: ignore
from typing import Dict
def test_FlagStats():
    """An update only lands in the dataframe when the flag is tracked."""
    # Untracked flag name: the update must not add any rows.
    stats = flag.FlagStats()
    stats.update({const.UT_FLAG_NAME: ("value", 0, 0)})
    assert stats.get_dataframe(const.UT_FLAG_NAME).shape[0] == 0
    # Tracked flag name: the same update must yield exactly one row.
    stats = flag.FlagStats(const.UT_FLAG_NAME)
    stats.update({const.UT_FLAG_NAME: ("value", 0, 0)})
    assert stats.get_dataframe(const.UT_FLAG_NAME).shape[0] == 1
def assert_FastaFlagExtractor_update(
    fe: flag.FastaFlagExtractor,
    record: seqio.SimpleFastxRecord,
    flag_data: Dict[str, const.FlagData],
) -> None:
    """Assert fe.update() appends the flag to the record name.

    The sequence must be untouched and (FASTA) the quality must be None.
    """
    delim = fe.flag_delim
    flag_value = flag_data[const.UT_FLAG_NAME][0]
    expected_name = (
        f"{record[0]}{delim}{delim}{const.UT_FLAG_NAME}{delim}{flag_value}"
    )
    updated_name, updated_seq, updated_qual = fe.update(record, flag_data)
    assert updated_name == expected_name
    assert updated_seq == record[1]
    assert updated_qual is None
def test_FastaFlagExtractor_noSelectedFlags_noStatFlags():
    """With no selected/stat flags, nothing is selected and no stats are kept."""
    matcher = match.FastxMatcher(regex.compile(const.UT_FLAG_PATTERN))
    records = random.make_fasta_file(
        const.UT_N_RECORDS, const.UT_RECORD_SEQ_LEN
    )
    fe = flag.FastaFlagExtractor()
    for rec in records:
        result, matched = matcher.do(rec)
        data = fe.extract_all(rec, result)
        fe.update_stats(data)
        assert_FastaFlagExtractor_update(fe, rec, data)
        # No flags were selected, so selection must not contain the flag.
        assert const.UT_FLAG_NAME not in fe.extract_selected(rec, result)
    assert len(fe.flagstats.keys()) == 0
def test_FastaFlagExtractor_noStatFlags():
    """Selected flag is extracted from the sequence; no stats are tracked."""
    matcher = match.FastxMatcher(regex.compile(const.UT_FLAG_PATTERN))
    records = random.make_fasta_file(
        const.UT_N_RECORDS, const.UT_RECORD_SEQ_LEN
    )
    fe = flag.FastaFlagExtractor([const.UT_FLAG_NAME])
    for rec in records:
        result, matched = matcher.do(rec)
        data = fe.extract_all(rec, result)
        fe.update_stats(data)
        assert_FastaFlagExtractor_update(fe, rec, data)
        # The flag value is the first 8 bases of the sequence.
        assert data[const.UT_FLAG_NAME][0] == rec[1][:8]
        data = fe.extract_selected(rec, result)
        assert_FastaFlagExtractor_update(fe, rec, data)
        assert data[const.UT_FLAG_NAME][0] == rec[1][:8]
    assert len(fe.flagstats.keys()) == 0
def test_FastaFlagExtractor():
    """Flag both selected and stat-tracked: extraction works and stats fill."""
    matcher = match.FastxMatcher(regex.compile(const.UT_FLAG_PATTERN))
    records = random.make_fasta_file(
        const.UT_N_RECORDS, const.UT_RECORD_SEQ_LEN
    )
    fe = flag.FastaFlagExtractor([const.UT_FLAG_NAME], [const.UT_FLAG_NAME])
    for rec in records:
        result, matched = matcher.do(rec)
        data = fe.extract_all(rec, result)
        fe.update_stats(data)
        assert_FastaFlagExtractor_update(fe, rec, data)
        # The flag value is the first 8 bases of the sequence.
        assert data[const.UT_FLAG_NAME][0] == rec[1][:8]
        data = fe.extract_selected(rec, result)
        assert_FastaFlagExtractor_update(fe, rec, data)
        assert data[const.UT_FLAG_NAME][0] == rec[1][:8]
    # Exactly one flag was registered for stats tracking.
    assert len(fe.flagstats.keys()) == 1
def test_FastaFlagExtractor_noSelectedFlags():
    """Stats accrue for the tracked flag even when no flags are selected."""
    fasta_matcher = match.FastxMatcher(regex.compile(const.UT_FLAG_PATTERN))
    records = random.make_fasta_file(const.UT_N_RECORDS, const.UT_RECORD_SEQ_LEN)
    extractor = flag.FastaFlagExtractor(None, [const.UT_FLAG_NAME])
    for rec in records:
        result, _ = fasta_matcher.do(rec)
        all_flags = extractor.extract_all(rec, result)
        extractor.update_stats(all_flags)
        assert_FastaFlagExtractor_update(extractor, rec, all_flags)
        assert all_flags[const.UT_FLAG_NAME][0] == rec[1][:8]
        assert const.UT_FLAG_NAME not in extractor.extract_selected(rec, result)
    assert len(extractor.flagstats) == 1
def assert_FastqFlagExtractor_update(
    fe: flag.FastqFlagExtractor,
    record: seqio.SimpleFastxRecord,
    flag_data: Dict[str, const.FlagData],
) -> None:
    """Check fe.update() appends sequence- and quality-flag labels to the read
    name while leaving sequence and quality strings untouched."""
    delim = fe.flag_delim
    pieces = [record[0]]
    for name in (const.UT_FLAG_NAME, "q" + const.UT_FLAG_NAME):
        pieces.append(f"{delim}{delim}{name}{delim}{flag_data[name][0]}")
    updated_name, updated_seq, updated_qual = fe.update(record, flag_data)
    assert updated_name == "".join(pieces)
    assert (updated_seq, updated_qual) == (record[1], record[2])
def test_FastqFlagExtractor_noSelectedFlags_noStatFlags():
    """Fastq extractor with defaults: nothing is selected and no stats accrue."""
    fastq_matcher = match.FastxMatcher(regex.compile(const.UT_FLAG_PATTERN))
    records = random.make_fastq_file(const.UT_N_RECORDS, const.UT_RECORD_SEQ_LEN)
    extractor = flag.FastqFlagExtractor()
    for rec in records:
        result, _ = fastq_matcher.do(rec)
        all_flags = extractor.extract_all(rec, result)
        extractor.update_stats(all_flags)
        assert_FastqFlagExtractor_update(extractor, rec, all_flags)
        assert const.UT_FLAG_NAME not in extractor.extract_selected(rec, result)
    assert len(extractor.flagstats) == 0
def test_FastqFlagExtractor_noStatFlags():
    """Fastq extractor yields sequence and quality flags, but no statistics."""
    fastq_matcher = match.FastxMatcher(regex.compile(const.UT_FLAG_PATTERN))
    records = random.make_fastq_file(const.UT_N_RECORDS, const.UT_RECORD_SEQ_LEN)
    extractor = flag.FastqFlagExtractor([const.UT_FLAG_NAME])
    for rec in records:
        result, _ = fastq_matcher.do(rec)
        all_flags = extractor.extract_all(rec, result)
        extractor.update_stats(all_flags)
        assert_FastqFlagExtractor_update(extractor, rec, all_flags)
        assert all_flags[const.UT_FLAG_NAME][0] == rec[1][:8]
        assert all_flags["q" + const.UT_FLAG_NAME][0] == rec[2][:8]
        sel_flags = extractor.extract_selected(rec, result)
        assert_FastqFlagExtractor_update(extractor, rec, sel_flags)
        assert sel_flags[const.UT_FLAG_NAME][0] == rec[1][:8]
        assert sel_flags["q" + const.UT_FLAG_NAME][0] == rec[2][:8]
    assert len(extractor.flagstats) == 0
def test_FastqFlagExtractor():
    """Fastq extractor with selection and stats: one tracked flag accumulates."""
    fastq_matcher = match.FastxMatcher(regex.compile(const.UT_FLAG_PATTERN))
    records = random.make_fastq_file(const.UT_N_RECORDS, const.UT_RECORD_SEQ_LEN)
    extractor = flag.FastqFlagExtractor([const.UT_FLAG_NAME], [const.UT_FLAG_NAME])
    for rec in records:
        result, _ = fastq_matcher.do(rec)
        all_flags = extractor.extract_all(rec, result)
        extractor.update_stats(all_flags)
        assert_FastqFlagExtractor_update(extractor, rec, all_flags)
        assert all_flags[const.UT_FLAG_NAME][0] == rec[1][:8]
        assert all_flags["q" + const.UT_FLAG_NAME][0] == rec[2][:8]
        sel_flags = extractor.extract_selected(rec, result)
        assert_FastqFlagExtractor_update(extractor, rec, sel_flags)
        assert sel_flags[const.UT_FLAG_NAME][0] == rec[1][:8]
        assert sel_flags["q" + const.UT_FLAG_NAME][0] == rec[2][:8]
    assert len(extractor.flagstats) == 1
def test_FastqFlagExtractor_noSelectedFlags():
    """Fastq stats accrue for the tracked flag even with no selected flags."""
    fastq_matcher = match.FastxMatcher(regex.compile(const.UT_FLAG_PATTERN))
    records = random.make_fastq_file(const.UT_N_RECORDS, const.UT_RECORD_SEQ_LEN)
    extractor = flag.FastqFlagExtractor(None, [const.UT_FLAG_NAME])
    for rec in records:
        result, _ = fastq_matcher.do(rec)
        all_flags = extractor.extract_all(rec, result)
        extractor.update_stats(all_flags)
        assert_FastqFlagExtractor_update(extractor, rec, all_flags)
        assert all_flags[const.UT_FLAG_NAME][0] == rec[1][:8]
        assert all_flags["q" + const.UT_FLAG_NAME][0] == rec[2][:8]
        assert const.UT_FLAG_NAME not in extractor.extract_selected(rec, result)
    assert len(extractor.flagstats) == 1
def test_get_fastx_flag_extractor():
    """Each fastx format maps to its dedicated flag-extractor class."""
    expected = {
        const.FastxFormats.FASTA: flag.FastaFlagExtractor,
        const.FastxFormats.FASTQ: flag.FastqFlagExtractor,
        const.FastxFormats.NONE: flag.ABCFlagExtractor,
    }
    for fmt, extractor_class in expected.items():
        assert flag.get_fastx_flag_extractor(fmt) is extractor_class
def test_FastxFlagReader_fasta():
    """Flags written into fasta records by the extractor round-trip via the reader."""
    fasta_matcher = match.FastxMatcher(regex.compile(const.UT_FLAG_PATTERN))
    records = random.make_fasta_file(const.UT_N_RECORDS, const.UT_RECORD_SEQ_LEN)
    extractor = flag.FastaFlagExtractor(None, [const.UT_FLAG_NAME])
    reader = flag.FastxFlagReader([const.UT_FLAG_NAME])
    for rec in records:
        result, _ = fasta_matcher.do(rec)
        flags = extractor.extract_all(rec, result)
        extractor.update_stats(flags)
        read_back = reader.read(extractor.update(rec, flags))
        for name, data in flags.items():
            assert name in read_back
            assert read_back[name][0] == data[0]
    assert list(extractor.flagstats.items()) == list(reader.flagstats.items())
def test_FastxFlagReader_fastq():
    """Flags written into fastq records by the extractor round-trip via the reader."""
    fastq_matcher = match.FastxMatcher(regex.compile(const.UT_FLAG_PATTERN))
    records = random.make_fastq_file(const.UT_N_RECORDS, const.UT_RECORD_SEQ_LEN)
    extractor = flag.FastqFlagExtractor(None, [const.UT_FLAG_NAME])
    reader = flag.FastxFlagReader([const.UT_FLAG_NAME])
    for rec in records:
        result, _ = fastq_matcher.do(rec)
        flags = extractor.extract_all(rec, result)
        extractor.update_stats(flags)
        read_back = reader.read(extractor.update(rec, flags))
        for name, data in flags.items():
            assert name in read_back
            assert read_back[name][0] == data[0]
    assert list(extractor.flagstats.items()) == list(reader.flagstats.items())
def test_FlagRegexes_fasta():
    """FlagRegexes matches extracted flag values against per-flag patterns."""
    fasta_matcher = match.FastxMatcher(regex.compile(const.UT_FLAG_PATTERN))
    extractor = flag.FastaFlagExtractor([const.UT_FLAG_NAME])
    rec = ("fake", "ATCGATCGATCGATCGAT", None)
    result, _ = fasta_matcher.do(rec)
    flags = extractor.extract_all(rec, result)
    matching = flag.FlagRegexes([f"{const.UT_FLAG_NAME},^AT.{{6}}$"])
    assert matching.match(flags)
    non_matching = flag.FlagRegexes([f"{const.UT_FLAG_NAME},^GT.{{6}}$"])
    assert not non_matching.match(flags)
| 42.325301
| 87
| 0.711548
| 1,441
| 10,539
| 4.900069
| 0.06662
| 0.075343
| 0.08724
| 0.095596
| 0.910636
| 0.910494
| 0.905537
| 0.894916
| 0.864467
| 0.844356
| 0
| 0.008886
| 0.177816
| 10,539
| 248
| 88
| 42.495968
| 0.806001
| 0.006547
| 0
| 0.737089
| 0
| 0
| 0.040432
| 0.036895
| 0
| 0
| 0
| 0
| 0.28169
| 1
| 0.070423
| false
| 0
| 0.014085
| 0
| 0.084507
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ebc0746fe3e17efd4a27eeb5a4658664d0c1d1e2
| 2,839
|
py
|
Python
|
tests/unit/test_lb.py
|
magnuswatn/bigacme
|
2ed2502f3f5002c65cb8f5fa9432b8025900b0d1
|
[
"MIT"
] | 3
|
2018-12-05T07:03:00.000Z
|
2021-01-28T08:42:20.000Z
|
tests/unit/test_lb.py
|
magnuswatn/bigacme
|
2ed2502f3f5002c65cb8f5fa9432b8025900b0d1
|
[
"MIT"
] | 6
|
2017-02-04T10:24:27.000Z
|
2022-03-23T15:28:22.000Z
|
tests/unit/test_lb.py
|
magnuswatn/bigacme
|
2ed2502f3f5002c65cb8f5fa9432b8025900b0d1
|
[
"MIT"
] | 2
|
2021-01-28T08:43:19.000Z
|
2022-01-20T12:50:05.000Z
|
import mock
from collections import namedtuple
import pytest
import bigacme.lb
def mocked_bigsuds(hostname, username, password, verify):
    """Stand-in for bigsuds.BIGIP, used as a mock.patch side_effect.

    Returns a Mock whose failover state reflects the hostname:
    'active' -> FAILOVER_STATE_ACTIVE, 'standby' -> FAILOVER_STATE_STANDBY.
    username/password/verify are accepted to match the real signature but ignored.

    Raises ValueError for any other hostname (the original code raised an
    opaque UnboundLocalError because `lb` was never assigned in that case).
    """
    states = {
        'active': 'FAILOVER_STATE_ACTIVE',
        'standby': 'FAILOVER_STATE_STANDBY',
    }
    if hostname not in states:
        raise ValueError("Unexpected hostname in test: {!r}".format(hostname))
    lb = mock.Mock()
    lb.System.Failover.get_failover_state.return_value = states[hostname]
    return lb
@mock.patch('bigacme.lb.bigsuds.BIGIP', side_effect=mocked_bigsuds)
def test__init__with_first_active(mock_bigsuds):
    """When lb1 is the active device it is chosen without probing uptime."""
    Config = namedtuple(
        "Config", ["lb_user", "lb_pwd", "lb1", "lb2", "lb_dg", "lb_dg_partition"]
    )
    config = Config('user', 'pass', 'active', 'standby', 'datagroup', 'Partition')
    lb = bigacme.lb.LoadBalancer(config)
    failover = lb.bigip.System.Failover.get_failover_state
    assert failover.return_value == 'FAILOVER_STATE_ACTIVE'
    assert failover.called
    assert not lb.bigip.System.SystemInfo.get_uptime.called
@mock.patch('bigacme.lb.bigsuds.BIGIP', side_effect=mocked_bigsuds)
def test__init__with_second_active(mock_bigsuds):
    """When lb2 is the active device it is chosen without probing uptime."""
    Config = namedtuple(
        "Config", ["lb_user", "lb_pwd", "lb1", "lb2", "lb_dg", "lb_dg_partition"]
    )
    config = Config('user', 'pass', 'standby', 'active', 'datagroup', 'Partition')
    lb = bigacme.lb.LoadBalancer(config)
    failover = lb.bigip.System.Failover.get_failover_state
    assert failover.return_value == 'FAILOVER_STATE_ACTIVE'
    assert failover.called
    assert not lb.bigip.System.SystemInfo.get_uptime.called
@mock.patch('bigacme.lb.bigsuds.BIGIP', side_effect=mocked_bigsuds)
def test__init__with_none_active(mock_bigsuds):
    """Construction must fail when neither device reports the active state."""
    Config = namedtuple(
        "Config", ["lb_user", "lb_pwd", "lb1", "lb2", "lb_dg", "lb_dg_partition"]
    )
    config = Config('user', 'pass', 'standby', 'standby', 'datagroup', 'Partition')
    with pytest.raises(bigacme.lb.NoActiveLoadBalancersError):
        bigacme.lb.LoadBalancer(config)
@mock.patch('bigacme.lb.bigsuds.BIGIP', side_effect=mocked_bigsuds)
def test__init__standalone(mock_bigsuds):
    """With no lb2 configured, failover state is skipped and uptime is probed."""
    Config = namedtuple(
        "Config", ["lb_user", "lb_pwd", "lb1", "lb2", "lb_dg", "lb_dg_partition"]
    )
    config = Config('user', 'pass', 'standby', None, 'datagroup', 'Partition')
    lb = bigacme.lb.LoadBalancer(config)
    assert not lb.bigip.System.Failover.get_failover_state.called
    assert lb.bigip.System.SystemInfo.get_uptime.called
| 50.696429
| 94
| 0.682987
| 361
| 2,839
| 5.083102
| 0.149584
| 0.034877
| 0.039237
| 0.095368
| 0.845777
| 0.844142
| 0.844142
| 0.823433
| 0.823433
| 0.768392
| 0
| 0.006929
| 0.186685
| 2,839
| 55
| 95
| 51.618182
| 0.787787
| 0
| 0
| 0.55102
| 0
| 0
| 0.184924
| 0.063755
| 0
| 0
| 0
| 0
| 0.163265
| 1
| 0.102041
| false
| 0.102041
| 0.081633
| 0
| 0.204082
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
cce389b002fdc617fd85a5e994444d5f02bbda5a
| 23,544
|
py
|
Python
|
Jai-SDC-Term1-Project1.py
|
jailad/SDC-Term1-Project1
|
a016e0bb13e11ff7751cb9cd9edaff4938a93bec
|
[
"MIT"
] | null | null | null |
Jai-SDC-Term1-Project1.py
|
jailad/SDC-Term1-Project1
|
a016e0bb13e11ff7751cb9cd9edaff4938a93bec
|
[
"MIT"
] | null | null | null |
Jai-SDC-Term1-Project1.py
|
jailad/SDC-Term1-Project1
|
a016e0bb13e11ff7751cb9cd9edaff4938a93bec
|
[
"MIT"
] | null | null | null |
# coding: utf-8

# In[ ]:

# Importing the necessary packages
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import numpy as np
import cv2
import math
import os
import datetime
from math import sqrt
get_ipython().magic('matplotlib inline')  # Jupyter-exported script: enable inline plots

# Packages below needed to edit/save/watch video clips
from moviepy.editor import VideoFileClip
from IPython.display import HTML

# In[ ]:

# Constants: relative input/output directories for the test images and videos
kTestImagesRelativeInputPathDir = "test_images/"
kTestImagesRelativeOutputPathDir = "test_images_output/"
kTestVideosRelativeInputPathDir = "test_videos/"
kTestVideosRelativeOutputPathDir = "test_videos_output/"

# Global variable(s)
# Last seen bottom-of-ROI intersection points for the left/right lane lines,
# carried across video frames by draw_lines(); (0, 0) means "not yet set".
global_previous_left_lane_bottom_roi_intersection_point = (0,0)
global_previous_right_lane_bottom_roi_intersection_point = (0,0)

# This boolean is used to determine if we need to produce intermediate image artifacts, after each processing operation like Gray Scaling etc.
# These artifacts are useful for debugging
# The artifacts once generated are placed within the 'test_images_output' folder
generateIntermediateArtifacts = False

# In[ ]:

# Helper method(s)
def get_region_of_interest_vertices(image):
    """Return the trapezoidal region-of-interest vertices for lane detection.

    Parameters:
        image: array with a (height, width, ...) ``shape``; only the height
            is used to anchor the trapezoid's bottom edge.

    Returns:
        np.int32 array of shape (1, 4, 2) ordered
        [left_top, right_top, right_bottom, left_bottom], the layout
        expected by cv2.fillPoly.

    Note: x coordinates are tuned for ~960px-wide dashcam frames.
    (Fix: removed the unused `xsize` local from the original.)
    """
    ysize = image.shape[0]
    y_offset = 42  # ROI top edge sits this many pixels below the vertical midpoint
    left_bottom = [120, ysize]
    right_bottom = [850, ysize]
    left_top = [480, ysize / 2 + y_offset]
    right_top = [490, ysize / 2 + y_offset]
    return np.array(
        [[left_top, right_top, right_bottom, left_bottom]], dtype=np.int32
    )
def grayscale(img):
    """Convert an RGB image (as read by matplotlib.image.imread) to grayscale."""
    return cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)
    # Or use BGR2GRAY if you read an image with cv2.imread()
    # return cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
def gaussian_blur(img, kernel_size):
    """Apply a Gaussian blur with a square kernel_size x kernel_size kernel."""
    return cv2.GaussianBlur(img, (kernel_size, kernel_size), 0)
def region_of_interest(img, vertices):
    """Black out everything in `img` outside the polygon defined by `vertices`.

    `vertices` is an int32 array in the shape accepted by cv2.fillPoly
    (e.g. the output of get_region_of_interest_vertices).
    """
    # defining a blank mask to start with
    mask = np.zeros_like(img)
    # defining a 3 channel or 1 channel color to fill the mask with depending on the input image
    if len(img.shape) > 2:
        channel_count = img.shape[2]  # i.e. 3 or 4 depending on your image
        ignore_mask_color = (255,) * channel_count
    else:
        ignore_mask_color = 255
    # filling pixels inside the polygon defined by "vertices" with the fill color
    cv2.fillPoly(mask, vertices, ignore_mask_color)
    # returning the image only where mask pixels are nonzero
    masked_image = cv2.bitwise_and(img, mask)
    return masked_image
def canny(img, low_threshold, high_threshold):
    """Run Canny edge detection with the given hysteresis thresholds."""
    return cv2.Canny(img, low_threshold, high_threshold)
def draw_lines(img, lines, color=[255, 0, 0], thickness=10):
    """Draw one extrapolated left and right lane line onto `img` (in place).

    Picks the longest negative-slope segment as the left lane and the longest
    non-negative-slope segment as the right lane, extends each to the top and
    bottom edges of the region of interest, and draws both with cv2.line.
    The bottom anchor points are cached in module-level globals so the first
    frame's values are reused across subsequent video frames (smoothing).

    NOTE(review): if no segment falls on one side, the (0,0,0,0) placeholder
    is indexed as line[0][...] below, raising TypeError — which the blanket
    `except TypeError` deliberately treats as "skip drawing for this frame".
    NOTE(review): dx == 0 (vertical segment) would divide by zero; presumably
    Hough output coordinates make this rare — confirm input dtype/behavior.
    """
    try:
        global global_previous_left_lane_bottom_roi_intersection_point
        global global_previous_right_lane_bottom_roi_intersection_point
        leftLaneLineMaxLength = 0
        rightLaneLineMaxLength = 0
        longestLeftLaneLine = (0,0,0,0)
        longestRightLaneLine = (0,0,0,0)
        # Find the longest segment on each side; slope sign separates left/right.
        for line in lines:
            for x1,y1,x2,y2 in line:
                dy = y2 - y1
                dx = x2 - x1
                slope = dy / dx
                lineLength = sqrt(dy**2 + dx**2)
                if slope < 0:
                    if(lineLength > leftLaneLineMaxLength):
                        leftLaneLineMaxLength = lineLength
                        longestLeftLaneLine = line
                else:
                    if(lineLength > rightLaneLineMaxLength):
                        rightLaneLineMaxLength = lineLength
                        longestRightLaneLine = line
        # Intersect each lane line with the ROI's top and bottom edges.
        region_of_interest_vertices = get_region_of_interest_vertices(img)
        region_of_interest_left_top = region_of_interest_vertices[0][0]
        region_of_interest_right_top = region_of_interest_vertices[0][1]
        region_of_interest_right_bottom = region_of_interest_vertices[0][2]
        region_of_interest_left_bottom = region_of_interest_vertices[0][3]
        region_of_interest_top_line = get_line(region_of_interest_left_top, region_of_interest_right_top)
        region_of_interest_bottom_line = get_line(region_of_interest_left_bottom, region_of_interest_right_bottom)
        longest_left_lane_line = get_line([longestLeftLaneLine[0][0],longestLeftLaneLine[0][1]],[longestLeftLaneLine[0][2],longestLeftLaneLine[0][3]])
        longest_right_lane_line = get_line([longestRightLaneLine[0][0],longestRightLaneLine[0][1]],[longestRightLaneLine[0][2],longestRightLaneLine[0][3]])
        top_left_intersection_point = intersection(region_of_interest_top_line, longest_left_lane_line)
        bottom_left_intersection_point = intersection(region_of_interest_bottom_line, longest_left_lane_line)
        # Remember the first bottom anchors; later frames keep reusing them.
        if global_previous_left_lane_bottom_roi_intersection_point == (0,0):
            global_previous_left_lane_bottom_roi_intersection_point = bottom_left_intersection_point
        top_right_intersection_point = intersection(region_of_interest_top_line, longest_right_lane_line)
        bottom_right_intersection_point = intersection(region_of_interest_bottom_line, longest_right_lane_line)
        if global_previous_right_lane_bottom_roi_intersection_point == (0,0):
            global_previous_right_lane_bottom_roi_intersection_point = bottom_right_intersection_point
        # intersection() returns False for parallel lines; only draw when all four exist.
        if (top_left_intersection_point and bottom_left_intersection_point and top_right_intersection_point and bottom_right_intersection_point):
            cv2.line(img, tuple(top_left_intersection_point), tuple(global_previous_left_lane_bottom_roi_intersection_point), color, thickness)
            cv2.line(img, tuple(top_right_intersection_point), tuple(global_previous_right_lane_bottom_roi_intersection_point), color, thickness)
    except TypeError:
        print("Ignoring sporadic type error as noise.")
# Method below is from : http://stackoverflow.com/a/20679579
def get_line(p1, p2):
    """Return coefficients (A, B, C) of the line A*x + B*y = C through p1 and p2."""
    a = p1[1] - p2[1]
    b = p2[0] - p1[0]
    c = p2[0] * p1[1] - p1[0] * p2[1]
    return a, b, c
# Method below is from : http://stackoverflow.com/a/20679579
def intersection(L1, L2):
    """Intersect two lines produced by get_line().

    Returns the intersection as an (int, int) point via Cramer's rule, or
    False when the lines are parallel (zero determinant).
    """
    det = L1[0] * L2[1] - L1[1] * L2[0]
    if det == 0:
        # Parallel (or coincident) lines: no unique intersection point.
        return False
    det_x = L1[2] * L2[1] - L1[1] * L2[2]
    det_y = L1[0] * L2[2] - L1[2] * L2[0]
    return (int(det_x / det), int(det_y / det))
def hough_lines(img, rho, theta, threshold, min_line_len, max_line_gap):
    """Run probabilistic Hough on an edge image and return a black RGB image
    of the same height/width with the extrapolated lane lines drawn on it."""
    lines = cv2.HoughLinesP(img, rho, theta, threshold, np.array([]), minLineLength=min_line_len, maxLineGap=max_line_gap)
    line_img = np.zeros((img.shape[0], img.shape[1], 3), dtype=np.uint8)
    draw_lines(line_img, lines)
    return line_img
def weighted_img(img, initial_img, α=0.8, β=1, λ=0):
    """Blend `img` over `initial_img`: initial_img*α + img*β + λ (cv2.addWeighted)."""
    return cv2.addWeighted(initial_img, α, img, β, λ)
def process_image(image):
    """Full lane-detection pipeline for one frame.

    Stages: grayscale -> Gaussian blur -> Canny edges -> region-of-interest
    mask -> Hough lane extrapolation -> overlay on the original frame.
    Returns a copy of `image` with the detected lane lines drawn on top.
    When generateIntermediateArtifacts is True, every intermediate stage is
    also saved to kTestImagesRelativeOutputPathDir, prefixed by a timestamp.
    """
    currentTime = datetime.datetime.now()
    currentTimeString = str(currentTime)
    # Important to not modify the original image, but instead work on it's copy
    original_image_copy = np.copy(image)
    greyscale_image = grayscale(original_image_copy)
    gaussian_blurred_image = gaussian_blur(greyscale_image,5)
    canny_image = canny(gaussian_blurred_image,50,150)
    # NOTE(review): this ROI computation duplicates get_region_of_interest_vertices().
    xsize = canny_image.shape[1]
    ysize = canny_image.shape[0]
    y_offset = 42
    left_bottom = [120, ysize]
    right_bottom = [850, ysize]
    left_top = [480, ysize/2 + y_offset]
    right_top = [490, ysize/2 + y_offset]
    region_of_interest_vertices = np.array([[left_top,right_top,right_bottom,left_bottom]], dtype=np.int32)
    region_of_interest_image = region_of_interest(canny_image,region_of_interest_vertices)
    hough_lines_image = hough_lines(region_of_interest_image, 2, np.pi/180, 15, 4, 10)
    original_image_overlaid_with_lanes = weighted_img(hough_lines_image,original_image_copy)
    if generateIntermediateArtifacts == True:
        # Save each intermediate stage for debugging, numbered in pipeline order.
        original_image_copy_filename = kTestImagesRelativeOutputPathDir + currentTimeString + "_1_original_image_copy.jpg"
        plt.imshow(original_image_copy,cmap='gray')
        plt.savefig(original_image_copy_filename)
        greyscale_image_filename = kTestImagesRelativeOutputPathDir + currentTimeString + "_2_grayscale_image.jpg"
        plt.imshow(greyscale_image,cmap='gray')
        plt.savefig(greyscale_image_filename)
        gaussian_blurred_image_filename = kTestImagesRelativeOutputPathDir + currentTimeString + "_3_gaussian_blurred_image.jpg"
        plt.imshow(gaussian_blurred_image,cmap='gray')
        plt.savefig(gaussian_blurred_image_filename)
        canny_image_filename = kTestImagesRelativeOutputPathDir + currentTimeString + "_4_canny_image.jpg"
        plt.imshow(canny_image,cmap='gray')
        plt.savefig(canny_image_filename)
        region_of_interest_image_filename = kTestImagesRelativeOutputPathDir + currentTimeString + "_5_region_of_interest_image.jpg"
        plt.imshow(region_of_interest_image,cmap='gray')
        plt.savefig(region_of_interest_image_filename)
        hough_lines_image_filename = kTestImagesRelativeOutputPathDir + currentTimeString + "_6_hough_lines_image.jpg"
        plt.imshow(hough_lines_image,cmap='gray')
        plt.savefig(hough_lines_image_filename)
    return original_image_overlaid_with_lanes
# In[16]:

# Top-level script cells: run the pipeline over each sample image/video.
# Each stanza resets the cached lane anchor globals so frames from one clip
# do not influence the next, then reads the input, processes it, and saves
# the annotated result to the matching output directory.

# # Using the Pipeline above to process image(s) - solidWhiteCurve.jpg
generateIntermediateArtifacts = False
imageFile = "solidWhiteCurve.jpg"
global_previous_left_lane_bottom_roi_intersection_point = (0,0)
global_previous_right_lane_bottom_roi_intersection_point = (0,0)
input_file_relative_path = kTestImagesRelativeInputPathDir + imageFile
output_file_relative_path = kTestImagesRelativeOutputPathDir + imageFile
image_with_detected_lanes = process_image(mpimg.imread(input_file_relative_path))
plt.imshow(image_with_detected_lanes)
plt.savefig(output_file_relative_path)

# In[17]:

# # Using the Pipeline above to process image(s) - solidWhiteRight.jpg
generateIntermediateArtifacts = False
imageFile = "solidWhiteRight.jpg"
global_previous_left_lane_bottom_roi_intersection_point = (0,0)
global_previous_right_lane_bottom_roi_intersection_point = (0,0)
input_file_relative_path = kTestImagesRelativeInputPathDir + imageFile
output_file_relative_path = kTestImagesRelativeOutputPathDir + imageFile
image_with_detected_lanes = process_image(mpimg.imread(input_file_relative_path))
plt.imshow(image_with_detected_lanes)
plt.savefig(output_file_relative_path)

# In[18]:

# # Using the Pipeline above to process image(s) - solidYellowCurve.jpg
generateIntermediateArtifacts = False
imageFile = "solidYellowCurve.jpg"
global_previous_left_lane_bottom_roi_intersection_point = (0,0)
global_previous_right_lane_bottom_roi_intersection_point = (0,0)
input_file_relative_path = kTestImagesRelativeInputPathDir + imageFile
output_file_relative_path = kTestImagesRelativeOutputPathDir + imageFile
image_with_detected_lanes = process_image(mpimg.imread(input_file_relative_path))
plt.imshow(image_with_detected_lanes)
plt.savefig(output_file_relative_path)

# In[19]:

# # Using the Pipeline above to process image(s) - solidYellowCurve2.jpg
generateIntermediateArtifacts = False
imageFile = "solidYellowCurve2.jpg"
global_previous_left_lane_bottom_roi_intersection_point = (0,0)
global_previous_right_lane_bottom_roi_intersection_point = (0,0)
input_file_relative_path = kTestImagesRelativeInputPathDir + imageFile
output_file_relative_path = kTestImagesRelativeOutputPathDir + imageFile
image_with_detected_lanes = process_image(mpimg.imread(input_file_relative_path))
plt.imshow(image_with_detected_lanes)
plt.savefig(output_file_relative_path)

# In[20]:

# # Using the Pipeline above to process image(s) - solidYellowLeft.jpg
generateIntermediateArtifacts = False
imageFile = "solidYellowLeft.jpg"
global_previous_left_lane_bottom_roi_intersection_point = (0,0)
global_previous_right_lane_bottom_roi_intersection_point = (0,0)
input_file_relative_path = kTestImagesRelativeInputPathDir + imageFile
output_file_relative_path = kTestImagesRelativeOutputPathDir + imageFile
image_with_detected_lanes = process_image(mpimg.imread(input_file_relative_path))
plt.imshow(image_with_detected_lanes)
plt.savefig(output_file_relative_path)

# In[21]:

# # Using the Pipeline above to process image(s) - whiteCarLaneSwitch.jpg
generateIntermediateArtifacts = False
imageFile = "whiteCarLaneSwitch.jpg"
global_previous_left_lane_bottom_roi_intersection_point = (0,0)
global_previous_right_lane_bottom_roi_intersection_point = (0,0)
input_file_relative_path = kTestImagesRelativeInputPathDir + imageFile
output_file_relative_path = kTestImagesRelativeOutputPathDir + imageFile
image_with_detected_lanes = process_image(mpimg.imread(input_file_relative_path))
plt.imshow(image_with_detected_lanes)
plt.savefig(output_file_relative_path)

# In[22]:

# # Using the Pipeline above to process video(s) - solidWhiteRight.mp4
generateIntermediateArtifacts = False
videoFile = "solidWhiteRight.mp4"
global_previous_left_lane_bottom_roi_intersection_point = (0,0)
global_previous_right_lane_bottom_roi_intersection_point = (0,0)
input_videofile_relative_path = kTestVideosRelativeInputPathDir + videoFile
output_file_relative_path = kTestVideosRelativeOutputPathDir + videoFile
input_clip = VideoFileClip(input_videofile_relative_path)
output_clip = input_clip.fl_image(process_image) #NOTE: this function expects color images!!
get_ipython().magic('time output_clip.write_videofile(output_file_relative_path, audio=False)')

# In[24]:

# Display the processed video inline in the notebook.
HTML("""
<video width="960" height="540" controls>
<source src="{0}">
</video>
""".format(output_file_relative_path))

# In[29]:

# # Using the Pipeline above to process video(s) - solidYellowLeft.mp4
generateIntermediateArtifacts = False
videoFile = "solidYellowLeft.mp4"
global_previous_left_lane_bottom_roi_intersection_point = (0,0)
global_previous_right_lane_bottom_roi_intersection_point = (0,0)
input_videofile_relative_path = kTestVideosRelativeInputPathDir + videoFile
output_file_relative_path = kTestVideosRelativeOutputPathDir + videoFile
input_clip = VideoFileClip(input_videofile_relative_path)
output_clip = input_clip.fl_image(process_image) #NOTE: this function expects color images!!
get_ipython().magic('time output_clip.write_videofile(output_file_relative_path, audio=False)')

# In[30]:

# Display the processed video inline in the notebook.
HTML("""
<video width="960" height="540" controls>
<source src="{0}">
</video>
""".format(output_file_relative_path))

# In[32]:
def hough_lines_challenge(img, rho, theta, threshold, min_line_len, max_line_gap):
    """Challenge-video variant of hough_lines(): identical except that it draws
    via draw_lines_challenge (which uses the challenge region of interest)."""
    lines = cv2.HoughLinesP(img, rho, theta, threshold, np.array([]), minLineLength=min_line_len, maxLineGap=max_line_gap)
    line_img = np.zeros((img.shape[0], img.shape[1], 3), dtype=np.uint8)
    draw_lines_challenge(line_img, lines)
    return line_img
def process_image_challenge(image):
    """Challenge-video pipeline: like process_image() but with the challenge
    region of interest and challenge line drawing.

    NOTE(review): this function is redefined, byte-for-byte identically,
    later in this file; this first definition is shadowed and effectively dead.
    """
    currentTime = datetime.datetime.now()
    currentTimeString = str(currentTime)
    # Important to not modify the original image, but instead work on it's copy
    original_image_copy = np.copy(image)
    greyscale_image = grayscale(original_image_copy)
    gaussian_blurred_image = gaussian_blur(greyscale_image,5)
    canny_image = canny(gaussian_blurred_image,50,150)
    region_of_interest_image = region_of_interest(canny_image,get_region_of_interest_vertices_challenge(canny_image))
    hough_lines_image = hough_lines_challenge(region_of_interest_image, 2, np.pi/180, 15, 4, 10)
    original_image_overlaid_with_lanes = weighted_img(hough_lines_image,original_image_copy)
    if generateIntermediateArtifacts == True:
        # Save each intermediate stage for debugging, numbered in pipeline order.
        original_image_copy_filename = kTestImagesRelativeOutputPathDir + currentTimeString + "_1_original_image_copy.jpg"
        plt.imshow(original_image_copy,cmap='gray')
        plt.savefig(original_image_copy_filename)
        greyscale_image_filename = kTestImagesRelativeOutputPathDir + currentTimeString + "_2_grayscale_image.jpg"
        plt.imshow(greyscale_image,cmap='gray')
        plt.savefig(greyscale_image_filename)
        gaussian_blurred_image_filename = kTestImagesRelativeOutputPathDir + currentTimeString + "_3_gaussian_blurred_image.jpg"
        plt.imshow(gaussian_blurred_image,cmap='gray')
        plt.savefig(gaussian_blurred_image_filename)
        canny_image_filename = kTestImagesRelativeOutputPathDir + currentTimeString + "_4_canny_image.jpg"
        plt.imshow(canny_image,cmap='gray')
        plt.savefig(canny_image_filename)
        region_of_interest_image_filename = kTestImagesRelativeOutputPathDir + currentTimeString + "_5_region_of_interest_image.jpg"
        plt.imshow(region_of_interest_image,cmap='gray')
        plt.savefig(region_of_interest_image_filename)
        hough_lines_image_filename = kTestImagesRelativeOutputPathDir + currentTimeString + "_6_hough_lines_image.jpg"
        plt.imshow(hough_lines_image,cmap='gray')
        plt.savefig(hough_lines_image_filename)
    return original_image_overlaid_with_lanes
def get_region_of_interest_vertices_challenge(image):
    """Return the trapezoidal region of interest tuned for the challenge video.

    Like get_region_of_interest_vertices() but with wider x coordinates for a
    ~1280px frame, a deeper top offset, and the bottom edge lifted by
    bottom_offset pixels to exclude the car's hood.

    Returns:
        np.int32 array of shape (1, 4, 2) ordered
        [left_top, right_top, right_bottom, left_bottom] for cv2.fillPoly.

    (Fix: removed the unused `xsize` local from the original.)
    """
    ysize = image.shape[0]
    y_offset = 52      # ROI top edge sits this far below the vertical midpoint
    bottom_offset = 30  # lift the ROI bottom above the hood of the car
    left_bottom = [240, ysize - bottom_offset]
    right_bottom = [1100, ysize - bottom_offset]
    left_top = [630, ysize / 2 + y_offset]
    right_top = [800, ysize / 2 + y_offset]
    return np.array(
        [[left_top, right_top, right_bottom, left_bottom]], dtype=np.int32
    )
def draw_lines_challenge(img, lines, color=[255, 0, 0], thickness=10):
    """Challenge-video variant of draw_lines(): same algorithm, but uses the
    challenge region of interest and an explicit `elif slope >= 0` branch.

    Draws one extrapolated left and one right lane line onto `img` in place,
    reusing the cached bottom-of-ROI anchor globals across frames.
    NOTE(review): a missing lane makes the (0,0,0,0) placeholder raise
    TypeError when indexed below; the blanket handler skips the frame.
    """
    try:
        global global_previous_left_lane_bottom_roi_intersection_point
        global global_previous_right_lane_bottom_roi_intersection_point
        leftLaneLineMaxLength = 0
        rightLaneLineMaxLength = 0
        longestLeftLaneLine = (0,0,0,0)
        longestRightLaneLine = (0,0,0,0)
        # Find the longest segment on each side; slope sign separates left/right.
        for line in lines:
            for x1,y1,x2,y2 in line:
                dy = y2 - y1
                dx = x2 - x1
                slope = dy / dx
                lineLength = sqrt(dy**2 + dx**2)
                if slope < 0:
                    if(lineLength > leftLaneLineMaxLength):
                        leftLaneLineMaxLength = lineLength
                        longestLeftLaneLine = line
                elif slope >= 0:
                    if(lineLength > rightLaneLineMaxLength):
                        rightLaneLineMaxLength = lineLength
                        longestRightLaneLine = line
        # Intersect each lane line with the challenge ROI's top and bottom edges.
        region_of_interest_vertices = get_region_of_interest_vertices_challenge(img)
        region_of_interest_left_top = region_of_interest_vertices[0][0]
        region_of_interest_right_top = region_of_interest_vertices[0][1]
        region_of_interest_right_bottom = region_of_interest_vertices[0][2]
        region_of_interest_left_bottom = region_of_interest_vertices[0][3]
        region_of_interest_top_line = get_line(region_of_interest_left_top, region_of_interest_right_top)
        region_of_interest_bottom_line = get_line(region_of_interest_left_bottom, region_of_interest_right_bottom)
        longest_left_lane_line = get_line([longestLeftLaneLine[0][0],longestLeftLaneLine[0][1]],[longestLeftLaneLine[0][2],longestLeftLaneLine[0][3]])
        longest_right_lane_line = get_line([longestRightLaneLine[0][0],longestRightLaneLine[0][1]],[longestRightLaneLine[0][2],longestRightLaneLine[0][3]])
        top_left_intersection_point = intersection(region_of_interest_top_line, longest_left_lane_line)
        bottom_left_intersection_point = intersection(region_of_interest_bottom_line, longest_left_lane_line)
        # Remember the first bottom anchors; later frames keep reusing them.
        if global_previous_left_lane_bottom_roi_intersection_point == (0,0):
            global_previous_left_lane_bottom_roi_intersection_point = bottom_left_intersection_point
        top_right_intersection_point = intersection(region_of_interest_top_line, longest_right_lane_line)
        bottom_right_intersection_point = intersection(region_of_interest_bottom_line, longest_right_lane_line)
        if global_previous_right_lane_bottom_roi_intersection_point == (0,0):
            global_previous_right_lane_bottom_roi_intersection_point = bottom_right_intersection_point
        # intersection() returns False for parallel lines; only draw when all four exist.
        if (top_left_intersection_point and bottom_left_intersection_point and top_right_intersection_point and bottom_right_intersection_point):
            cv2.line(img, tuple(top_left_intersection_point), tuple(global_previous_left_lane_bottom_roi_intersection_point), color, thickness)
            cv2.line(img, tuple(top_right_intersection_point), tuple(global_previous_right_lane_bottom_roi_intersection_point), color, thickness)
    except TypeError:
        print("Ignoring sporadic type error as noise.")
def process_image_challenge(image):
    """Lane-finding pipeline for one video frame (challenge video).

    Pipeline stages: grayscale -> Gaussian blur -> Canny edges ->
    region-of-interest mask -> Hough line detection -> weighted overlay
    of the detected lane lines on the original frame.

    Parameters:
        image: the original colour frame; it is never modified.

    Returns:
        A copy of the input frame with lane lines drawn on it.

    Side effects:
        When the module-level flag ``generateIntermediateArtifacts`` is true,
        each intermediate stage is saved as a timestamped JPEG under
        ``kTestImagesRelativeOutputPathDir``.
    """
    currentTimeString = str(datetime.datetime.now())
    # Important to not modify the original image, but instead work on its copy.
    original_image_copy = np.copy(image)
    greyscale_image = grayscale(original_image_copy)
    gaussian_blurred_image = gaussian_blur(greyscale_image, 5)
    canny_image = canny(gaussian_blurred_image, 50, 150)
    region_of_interest_image = region_of_interest(canny_image, get_region_of_interest_vertices_challenge(canny_image))
    hough_lines_image = hough_lines_challenge(region_of_interest_image, 2, np.pi / 180, 15, 4, 10)
    original_image_overlaid_with_lanes = weighted_img(hough_lines_image, original_image_copy)
    if generateIntermediateArtifacts:
        # Dump every intermediate stage in pipeline order for debugging.
        stages = [
            ("_1_original_image_copy.jpg", original_image_copy),
            ("_2_grayscale_image.jpg", greyscale_image),
            ("_3_gaussian_blurred_image.jpg", gaussian_blurred_image),
            ("_4_canny_image.jpg", canny_image),
            ("_5_region_of_interest_image.jpg", region_of_interest_image),
            ("_6_hough_lines_image.jpg", hough_lines_image),
        ]
        for suffix, stage_image in stages:
            plt.imshow(stage_image, cmap='gray')
            plt.savefig(kTestImagesRelativeOutputPathDir + currentTimeString + suffix)
    return original_image_overlaid_with_lanes
# # Using the Pipeline above to process video(s) - challenge.mp4

# Skip saving per-stage JPEG artifacts while rendering the full video.
generateIntermediateArtifacts = False
videoFile = "challenge.mp4"
# Reset the lane-smoothing state before a new video; (0,0) is treated as
# "not yet initialised" by the line-drawing code above.
global_previous_left_lane_bottom_roi_intersection_point = (0,0)
global_previous_right_lane_bottom_roi_intersection_point = (0,0)
input_videofile_relative_path = kTestVideosRelativeInputPathDir + videoFile
output_file_relative_path = kTestVideosRelativeOutputPathDir + videoFile
input_clip = VideoFileClip(input_videofile_relative_path)
# Run every frame of the clip through the lane-finding pipeline.
output_clip = input_clip.fl_image(process_image_challenge) #NOTE: this function expects color images!!
# Notebook-export style IPython magic: times the video render.
get_ipython().magic('time output_clip.write_videofile(output_file_relative_path, audio=False)')
# In[33]:
# Embed the rendered video inline in the notebook output cell.
HTML("""
<video width="960" height="540" controls>
<source src="{0}">
</video>
""".format(output_file_relative_path))
# In[ ]:
| 44.089888
| 151
| 0.765503
| 2,967
| 23,544
| 5.680148
| 0.105157
| 0.034178
| 0.068356
| 0.053403
| 0.848395
| 0.845962
| 0.832849
| 0.829704
| 0.810479
| 0.80769
| 0
| 0.02209
| 0.155921
| 23,544
| 533
| 152
| 44.172608
| 0.825945
| 0.085032
| 0
| 0.785714
| 0
| 0
| 0.059733
| 0.027981
| 0
| 0
| 0
| 0
| 0
| 1
| 0.043956
| false
| 0
| 0.027473
| 0.010989
| 0.112637
| 0.005495
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cce91d2d76f81e7bb9d1033010cfd5d3ca04bb25
| 275,862
|
py
|
Python
|
unittest/test_neg.py
|
m1griffin/arrayfunc
|
df57097699c25d3e949e1ade307ed61eaa5728c2
|
[
"Apache-2.0"
] | 2
|
2017-08-28T08:41:16.000Z
|
2018-05-29T03:49:36.000Z
|
unittest/test_neg.py
|
m1griffin/arrayfunc
|
df57097699c25d3e949e1ade307ed61eaa5728c2
|
[
"Apache-2.0"
] | null | null | null |
unittest/test_neg.py
|
m1griffin/arrayfunc
|
df57097699c25d3e949e1ade307ed61eaa5728c2
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
##############################################################################
# Project: arrayfunc
# Module: test_neg.py
# Purpose: arrayfunc unit test.
# Language: Python 3.4
# Date: 09-Dec-2017.
# Ver: 31-Oct-2021.
#
###############################################################################
#
# Copyright 2014 - 2021 Michael Griffin <m12.griffin@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##############################################################################
"""This conducts unit tests for neg.
"""
##############################################################################
import sys
import array
import itertools
import math
import operator
import platform
import copy
import unittest
import arrayfunc
##############################################################################
##############################################################################
# The following code is all auto-generated.
##############################################################################
class neg_general_even_arraysize_nosimd_simd_b(unittest.TestCase):
"""Test for basic general tests.
test_template_uniop
"""
##############################################################################
def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
"""This function is patched into assertEqual to allow testing for
the floating point special values NaN, Inf, and -Inf.
"""
# NaN cannot be compared using normal means.
if math.isnan(dataoutitem) and math.isnan(expecteditem):
pass
# Anything else can be compared normally.
else:
if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))
########################################################
def setUp(self):
"""Initialise.
"""
self.addTypeEqualityFunc(float, self.FloatassertEqual)
if 'even' == 'even':
testdatasize = 320
if 'even' == 'odd':
testdatasize = 319
decentre = testdatasize // 2
if 'b' not in ('f', 'd'):
# We don't test the minimum integer value as we are not testing
# the behaviour of integer overflows in this series of tests.
minval = arrayfunc.arraylimits.b_min + 1
maxval = arrayfunc.arraylimits.b_max
else:
# For floating point tests we limit the range to large integer
# size ranges to ensure better coverage of more typical use cases.
minval = arrayfunc.arraylimits.q_min
maxval = arrayfunc.arraylimits.q_max
# Calculate our interval, while making sure that it is not zero.
dstep = max((maxval - minval) // testdatasize, 1)
# Generate test data over the full data type range.
xdata = list(itertools.islice(itertools.cycle(range(minval, maxval, dstep)), testdatasize))
# Make sure the last value is the largest number in the range and
# that we have 0, 1, and -1 in the data samples as well.
xdata[-1] = maxval
xdata[decentre - 1] = -1
xdata[decentre] = 0
xdata[decentre + 1] = 1
self.data = array.array('b', xdata)
self.dataout = array.array('b', [0]*len(self.data))
self.limited = len(self.data) // 2
########################################################
def test_neg_inplace_a1(self):
"""Test neg in place - Array code b.
"""
expected = [-(x) for x in self.data]
arrayfunc.neg(self.data , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.data), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_inplace_ov_a2(self):
"""Test neg in place with matherrors=True - Array code b.
"""
expected = [-(x) for x in self.data]
arrayfunc.neg(self.data, matherrors=True , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.data), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_inplace_maxlen_a3(self):
"""Test neg in place with array maxlen - Array code b.
"""
pydataout = [-(x) for x in self.data]
expected = pydataout[0:self.limited] + list(self.data)[self.limited:]
arrayfunc.neg(self.data, maxlen=self.limited , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.data), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_inplace_ov_maxlen_a4(self):
"""Test neg in place with matherrors=True and array maxlen - Array code b.
"""
pydataout = [-(x) for x in self.data]
expected = pydataout[0:self.limited] + list(self.data)[self.limited:]
arrayfunc.neg(self.data, matherrors=True, maxlen=self.limited , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.data), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_outputarray_a5(self):
"""Test neg to output array - Array code b.
"""
expected = [-(x) for x in self.data]
arrayfunc.neg(self.data, self.dataout , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_outputarray_ov_a6(self):
"""Test neg to output array with matherrors=True - Array code b.
"""
expected = [-(x) for x in self.data]
arrayfunc.neg(self.data, self.dataout, matherrors=True , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_outputarray_maxlen_a7(self):
"""Test neg to output array with array maxlen - Array code b.
"""
pydataout = [-(x) for x in self.data]
expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]
arrayfunc.neg(self.data, self.dataout, maxlen=self.limited , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_outputarray_ov_maxlen_a8(self):
"""Test neg to output array with matherrors=True and array maxlen - Array code b.
"""
pydataout = [-(x) for x in self.data]
expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]
arrayfunc.neg(self.data, self.dataout, matherrors=True, maxlen=self.limited , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
##############################################################################
##############################################################################
class neg_general_even_arraysize_withsimd_simd_b(unittest.TestCase):
"""Test for basic general tests.
test_template_uniop
"""
##############################################################################
def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
"""This function is patched into assertEqual to allow testing for
the floating point special values NaN, Inf, and -Inf.
"""
# NaN cannot be compared using normal means.
if math.isnan(dataoutitem) and math.isnan(expecteditem):
pass
# Anything else can be compared normally.
else:
if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))
########################################################
def setUp(self):
"""Initialise.
"""
self.addTypeEqualityFunc(float, self.FloatassertEqual)
if 'even' == 'even':
testdatasize = 320
if 'even' == 'odd':
testdatasize = 319
decentre = testdatasize // 2
if 'b' not in ('f', 'd'):
# We don't test the minimum integer value as we are not testing
# the behaviour of integer overflows in this series of tests.
minval = arrayfunc.arraylimits.b_min + 1
maxval = arrayfunc.arraylimits.b_max
else:
# For floating point tests we limit the range to large integer
# size ranges to ensure better coverage of more typical use cases.
minval = arrayfunc.arraylimits.q_min
maxval = arrayfunc.arraylimits.q_max
# Calculate our interval, while making sure that it is not zero.
dstep = max((maxval - minval) // testdatasize, 1)
# Generate test data over the full data type range.
xdata = list(itertools.islice(itertools.cycle(range(minval, maxval, dstep)), testdatasize))
# Make sure the last value is the largest number in the range and
# that we have 0, 1, and -1 in the data samples as well.
xdata[-1] = maxval
xdata[decentre - 1] = -1
xdata[decentre] = 0
xdata[decentre + 1] = 1
self.data = array.array('b', xdata)
self.dataout = array.array('b', [0]*len(self.data))
self.limited = len(self.data) // 2
########################################################
def test_neg_inplace_a1(self):
"""Test neg in place - Array code b.
"""
expected = [-(x) for x in self.data]
arrayfunc.neg(self.data )
for dataoutitem, expecteditem in zip(list(self.data), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_inplace_ov_a2(self):
"""Test neg in place with matherrors=True - Array code b.
"""
expected = [-(x) for x in self.data]
arrayfunc.neg(self.data, matherrors=True )
for dataoutitem, expecteditem in zip(list(self.data), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_inplace_maxlen_a3(self):
"""Test neg in place with array maxlen - Array code b.
"""
pydataout = [-(x) for x in self.data]
expected = pydataout[0:self.limited] + list(self.data)[self.limited:]
arrayfunc.neg(self.data, maxlen=self.limited )
for dataoutitem, expecteditem in zip(list(self.data), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_inplace_ov_maxlen_a4(self):
"""Test neg in place with matherrors=True and array maxlen - Array code b.
"""
pydataout = [-(x) for x in self.data]
expected = pydataout[0:self.limited] + list(self.data)[self.limited:]
arrayfunc.neg(self.data, matherrors=True, maxlen=self.limited )
for dataoutitem, expecteditem in zip(list(self.data), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_outputarray_a5(self):
"""Test neg to output array - Array code b.
"""
expected = [-(x) for x in self.data]
arrayfunc.neg(self.data, self.dataout )
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_outputarray_ov_a6(self):
"""Test neg to output array with matherrors=True - Array code b.
"""
expected = [-(x) for x in self.data]
arrayfunc.neg(self.data, self.dataout, matherrors=True )
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_outputarray_maxlen_a7(self):
"""Test neg to output array with array maxlen - Array code b.
"""
pydataout = [-(x) for x in self.data]
expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]
arrayfunc.neg(self.data, self.dataout, maxlen=self.limited )
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_outputarray_ov_maxlen_a8(self):
"""Test neg to output array with matherrors=True and array maxlen - Array code b.
"""
pydataout = [-(x) for x in self.data]
expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]
arrayfunc.neg(self.data, self.dataout, matherrors=True, maxlen=self.limited )
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
##############################################################################
##############################################################################
class neg_general_odd_arraysize_nosimd_simd_b(unittest.TestCase):
"""Test for basic general tests.
test_template_uniop
"""
##############################################################################
def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
"""This function is patched into assertEqual to allow testing for
the floating point special values NaN, Inf, and -Inf.
"""
# NaN cannot be compared using normal means.
if math.isnan(dataoutitem) and math.isnan(expecteditem):
pass
# Anything else can be compared normally.
else:
if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))
########################################################
def setUp(self):
"""Initialise.
"""
self.addTypeEqualityFunc(float, self.FloatassertEqual)
if 'odd' == 'even':
testdatasize = 320
if 'odd' == 'odd':
testdatasize = 319
decentre = testdatasize // 2
if 'b' not in ('f', 'd'):
# We don't test the minimum integer value as we are not testing
# the behaviour of integer overflows in this series of tests.
minval = arrayfunc.arraylimits.b_min + 1
maxval = arrayfunc.arraylimits.b_max
else:
# For floating point tests we limit the range to large integer
# size ranges to ensure better coverage of more typical use cases.
minval = arrayfunc.arraylimits.q_min
maxval = arrayfunc.arraylimits.q_max
# Calculate our interval, while making sure that it is not zero.
dstep = max((maxval - minval) // testdatasize, 1)
# Generate test data over the full data type range.
xdata = list(itertools.islice(itertools.cycle(range(minval, maxval, dstep)), testdatasize))
# Make sure the last value is the largest number in the range and
# that we have 0, 1, and -1 in the data samples as well.
xdata[-1] = maxval
xdata[decentre - 1] = -1
xdata[decentre] = 0
xdata[decentre + 1] = 1
self.data = array.array('b', xdata)
self.dataout = array.array('b', [0]*len(self.data))
self.limited = len(self.data) // 2
########################################################
def test_neg_inplace_a1(self):
"""Test neg in place - Array code b.
"""
expected = [-(x) for x in self.data]
arrayfunc.neg(self.data , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.data), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_inplace_ov_a2(self):
"""Test neg in place with matherrors=True - Array code b.
"""
expected = [-(x) for x in self.data]
arrayfunc.neg(self.data, matherrors=True , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.data), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_inplace_maxlen_a3(self):
"""Test neg in place with array maxlen - Array code b.
"""
pydataout = [-(x) for x in self.data]
expected = pydataout[0:self.limited] + list(self.data)[self.limited:]
arrayfunc.neg(self.data, maxlen=self.limited , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.data), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_inplace_ov_maxlen_a4(self):
"""Test neg in place with matherrors=True and array maxlen - Array code b.
"""
pydataout = [-(x) for x in self.data]
expected = pydataout[0:self.limited] + list(self.data)[self.limited:]
arrayfunc.neg(self.data, matherrors=True, maxlen=self.limited , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.data), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_outputarray_a5(self):
"""Test neg to output array - Array code b.
"""
expected = [-(x) for x in self.data]
arrayfunc.neg(self.data, self.dataout , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_outputarray_ov_a6(self):
"""Test neg to output array with matherrors=True - Array code b.
"""
expected = [-(x) for x in self.data]
arrayfunc.neg(self.data, self.dataout, matherrors=True , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_outputarray_maxlen_a7(self):
"""Test neg to output array with array maxlen - Array code b.
"""
pydataout = [-(x) for x in self.data]
expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]
arrayfunc.neg(self.data, self.dataout, maxlen=self.limited , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_outputarray_ov_maxlen_a8(self):
"""Test neg to output array with matherrors=True and array maxlen - Array code b.
"""
pydataout = [-(x) for x in self.data]
expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]
arrayfunc.neg(self.data, self.dataout, matherrors=True, maxlen=self.limited , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
##############################################################################
##############################################################################
class neg_general_odd_arraysize_withsimd_simd_b(unittest.TestCase):
"""Test for basic general tests.
test_template_uniop
"""
##############################################################################
def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
"""This function is patched into assertEqual to allow testing for
the floating point special values NaN, Inf, and -Inf.
"""
# NaN cannot be compared using normal means.
if math.isnan(dataoutitem) and math.isnan(expecteditem):
pass
# Anything else can be compared normally.
else:
if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))
########################################################
def setUp(self):
"""Initialise.
"""
self.addTypeEqualityFunc(float, self.FloatassertEqual)
if 'odd' == 'even':
testdatasize = 320
if 'odd' == 'odd':
testdatasize = 319
decentre = testdatasize // 2
if 'b' not in ('f', 'd'):
# We don't test the minimum integer value as we are not testing
# the behaviour of integer overflows in this series of tests.
minval = arrayfunc.arraylimits.b_min + 1
maxval = arrayfunc.arraylimits.b_max
else:
# For floating point tests we limit the range to large integer
# size ranges to ensure better coverage of more typical use cases.
minval = arrayfunc.arraylimits.q_min
maxval = arrayfunc.arraylimits.q_max
# Calculate our interval, while making sure that it is not zero.
dstep = max((maxval - minval) // testdatasize, 1)
# Generate test data over the full data type range.
xdata = list(itertools.islice(itertools.cycle(range(minval, maxval, dstep)), testdatasize))
# Make sure the last value is the largest number in the range and
# that we have 0, 1, and -1 in the data samples as well.
xdata[-1] = maxval
xdata[decentre - 1] = -1
xdata[decentre] = 0
xdata[decentre + 1] = 1
self.data = array.array('b', xdata)
self.dataout = array.array('b', [0]*len(self.data))
self.limited = len(self.data) // 2
########################################################
def test_neg_inplace_a1(self):
"""Test neg in place - Array code b.
"""
expected = [-(x) for x in self.data]
arrayfunc.neg(self.data )
for dataoutitem, expecteditem in zip(list(self.data), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_inplace_ov_a2(self):
"""Test neg in place with matherrors=True - Array code b.
"""
expected = [-(x) for x in self.data]
arrayfunc.neg(self.data, matherrors=True )
for dataoutitem, expecteditem in zip(list(self.data), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_inplace_maxlen_a3(self):
"""Test neg in place with array maxlen - Array code b.
"""
pydataout = [-(x) for x in self.data]
expected = pydataout[0:self.limited] + list(self.data)[self.limited:]
arrayfunc.neg(self.data, maxlen=self.limited )
for dataoutitem, expecteditem in zip(list(self.data), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_inplace_ov_maxlen_a4(self):
"""Test neg in place with matherrors=True and array maxlen - Array code b.
"""
pydataout = [-(x) for x in self.data]
expected = pydataout[0:self.limited] + list(self.data)[self.limited:]
arrayfunc.neg(self.data, matherrors=True, maxlen=self.limited )
for dataoutitem, expecteditem in zip(list(self.data), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_outputarray_a5(self):
"""Test neg to output array - Array code b.
"""
expected = [-(x) for x in self.data]
arrayfunc.neg(self.data, self.dataout )
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_outputarray_ov_a6(self):
"""Test neg to output array with matherrors=True - Array code b.
"""
expected = [-(x) for x in self.data]
arrayfunc.neg(self.data, self.dataout, matherrors=True )
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_outputarray_maxlen_a7(self):
"""Test neg to output array with array maxlen - Array code b.
"""
pydataout = [-(x) for x in self.data]
expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]
arrayfunc.neg(self.data, self.dataout, maxlen=self.limited )
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_outputarray_ov_maxlen_a8(self):
"""Test neg to output array with matherrors=True and array maxlen - Array code b.
"""
pydataout = [-(x) for x in self.data]
expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]
arrayfunc.neg(self.data, self.dataout, matherrors=True, maxlen=self.limited )
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
##############################################################################
##############################################################################
class neg_general_even_arraysize_nosimd_simd_h(unittest.TestCase):
"""Test for basic general tests.
test_template_uniop
"""
##############################################################################
def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
"""This function is patched into assertEqual to allow testing for
the floating point special values NaN, Inf, and -Inf.
"""
# NaN cannot be compared using normal means.
if math.isnan(dataoutitem) and math.isnan(expecteditem):
pass
# Anything else can be compared normally.
else:
if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))
########################################################
def setUp(self):
"""Initialise.
"""
self.addTypeEqualityFunc(float, self.FloatassertEqual)
if 'even' == 'even':
testdatasize = 320
if 'even' == 'odd':
testdatasize = 319
decentre = testdatasize // 2
if 'h' not in ('f', 'd'):
# We don't test the minimum integer value as we are not testing
# the behaviour of integer overflows in this series of tests.
minval = arrayfunc.arraylimits.h_min + 1
maxval = arrayfunc.arraylimits.h_max
else:
# For floating point tests we limit the range to large integer
# size ranges to ensure better coverage of more typical use cases.
minval = arrayfunc.arraylimits.q_min
maxval = arrayfunc.arraylimits.q_max
# Calculate our interval, while making sure that it is not zero.
dstep = max((maxval - minval) // testdatasize, 1)
# Generate test data over the full data type range.
xdata = list(itertools.islice(itertools.cycle(range(minval, maxval, dstep)), testdatasize))
# Make sure the last value is the largest number in the range and
# that we have 0, 1, and -1 in the data samples as well.
xdata[-1] = maxval
xdata[decentre - 1] = -1
xdata[decentre] = 0
xdata[decentre + 1] = 1
self.data = array.array('h', xdata)
self.dataout = array.array('h', [0]*len(self.data))
self.limited = len(self.data) // 2
########################################################
def test_neg_inplace_a1(self):
"""Test neg in place - Array code h.
"""
expected = [-(x) for x in self.data]
arrayfunc.neg(self.data , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.data), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_inplace_ov_a2(self):
"""Test neg in place with matherrors=True - Array code h.
"""
expected = [-(x) for x in self.data]
arrayfunc.neg(self.data, matherrors=True , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.data), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_inplace_maxlen_a3(self):
"""Test neg in place with array maxlen - Array code h.
"""
pydataout = [-(x) for x in self.data]
expected = pydataout[0:self.limited] + list(self.data)[self.limited:]
arrayfunc.neg(self.data, maxlen=self.limited , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.data), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_inplace_ov_maxlen_a4(self):
"""Test neg in place with matherrors=True and array maxlen - Array code h.
"""
pydataout = [-(x) for x in self.data]
expected = pydataout[0:self.limited] + list(self.data)[self.limited:]
arrayfunc.neg(self.data, matherrors=True, maxlen=self.limited , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.data), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_outputarray_a5(self):
"""Test neg to output array - Array code h.
"""
expected = [-(x) for x in self.data]
arrayfunc.neg(self.data, self.dataout , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_outputarray_ov_a6(self):
"""Test neg to output array with matherrors=True - Array code h.
"""
expected = [-(x) for x in self.data]
arrayfunc.neg(self.data, self.dataout, matherrors=True , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_outputarray_maxlen_a7(self):
"""Test neg to output array with array maxlen - Array code h.
"""
pydataout = [-(x) for x in self.data]
expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]
arrayfunc.neg(self.data, self.dataout, maxlen=self.limited , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_outputarray_ov_maxlen_a8(self):
"""Test neg to output array with matherrors=True and array maxlen - Array code h.
"""
pydataout = [-(x) for x in self.data]
expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]
arrayfunc.neg(self.data, self.dataout, matherrors=True, maxlen=self.limited , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
##############################################################################
##############################################################################
class neg_general_even_arraysize_withsimd_simd_h(unittest.TestCase):
    """Test for basic general tests - even array size, SIMD enabled,
    array code 'h' (signed short).
    test_template_uniop
    """

    ##############################################################################
    def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
        """This function is patched into assertEqual to allow testing for
        the floating point special values NaN, Inf, and -Inf.
        """
        # NaN cannot be compared using normal means.
        if math.isnan(dataoutitem) and math.isnan(expecteditem):
            pass
        # Anything else can be compared normally.
        else:
            if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
                raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))

    ########################################################
    def setUp(self):
        """Initialise the test data.

        The generated template compared literal strings ('even' == 'even',
        'h' not in ('f', 'd')), leaving dead branches; only the live branch
        is kept here: even array size, integer array code 'h'.
        """
        self.addTypeEqualityFunc(float, self.FloatassertEqual)

        # 'even' array size variant.
        testdatasize = 320
        decentre = testdatasize // 2

        # We don't test the minimum integer value as we are not testing
        # the behaviour of integer overflows in this series of tests.
        minval = arrayfunc.arraylimits.h_min + 1
        maxval = arrayfunc.arraylimits.h_max

        # Calculate our interval, while making sure that it is not zero.
        dstep = max((maxval - minval) // testdatasize, 1)
        # Generate test data over the full data type range.
        xdata = list(itertools.islice(itertools.cycle(range(minval, maxval, dstep)), testdatasize))
        # Make sure the last value is the largest number in the range and
        # that we have 0, 1, and -1 in the data samples as well.
        xdata[-1] = maxval
        xdata[decentre - 1] = -1
        xdata[decentre] = 0
        xdata[decentre + 1] = 1

        self.data = array.array('h', xdata)
        self.dataout = array.array('h', [0] * len(self.data))
        # The maxlen tests operate on only the first half of the array.
        self.limited = len(self.data) // 2

    ########################################################
    def test_neg_inplace_a1(self):
        """Test neg in place - Array code h.
        """
        expected = [-(x) for x in self.data]
        arrayfunc.neg(self.data)
        for dataoutitem, expecteditem in zip(list(self.data), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_inplace_ov_a2(self):
        """Test neg in place with matherrors=True - Array code h.
        """
        expected = [-(x) for x in self.data]
        arrayfunc.neg(self.data, matherrors=True)
        for dataoutitem, expecteditem in zip(list(self.data), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_inplace_maxlen_a3(self):
        """Test neg in place with array maxlen - Array code h.
        """
        # Only the first self.limited elements are negated.
        pydataout = [-(x) for x in self.data]
        expected = pydataout[0:self.limited] + list(self.data)[self.limited:]
        arrayfunc.neg(self.data, maxlen=self.limited)
        for dataoutitem, expecteditem in zip(list(self.data), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_inplace_ov_maxlen_a4(self):
        """Test neg in place with matherrors=True and array maxlen - Array code h.
        """
        # Only the first self.limited elements are negated.
        pydataout = [-(x) for x in self.data]
        expected = pydataout[0:self.limited] + list(self.data)[self.limited:]
        arrayfunc.neg(self.data, matherrors=True, maxlen=self.limited)
        for dataoutitem, expecteditem in zip(list(self.data), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_outputarray_a5(self):
        """Test neg to output array - Array code h.
        """
        expected = [-(x) for x in self.data]
        arrayfunc.neg(self.data, self.dataout)
        for dataoutitem, expecteditem in zip(list(self.dataout), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_outputarray_ov_a6(self):
        """Test neg to output array with matherrors=True - Array code h.
        """
        expected = [-(x) for x in self.data]
        arrayfunc.neg(self.data, self.dataout, matherrors=True)
        for dataoutitem, expecteditem in zip(list(self.dataout), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_outputarray_maxlen_a7(self):
        """Test neg to output array with array maxlen - Array code h.
        """
        # Only the first self.limited output elements are written.
        pydataout = [-(x) for x in self.data]
        expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]
        arrayfunc.neg(self.data, self.dataout, maxlen=self.limited)
        for dataoutitem, expecteditem in zip(list(self.dataout), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_outputarray_ov_maxlen_a8(self):
        """Test neg to output array with matherrors=True and array maxlen - Array code h.
        """
        # Only the first self.limited output elements are written.
        pydataout = [-(x) for x in self.data]
        expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]
        arrayfunc.neg(self.data, self.dataout, matherrors=True, maxlen=self.limited)
        for dataoutitem, expecteditem in zip(list(self.dataout), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)
##############################################################################
##############################################################################
class neg_general_odd_arraysize_nosimd_simd_h(unittest.TestCase):
    """Test for basic general tests - odd array size, SIMD disabled,
    array code 'h' (signed short).
    test_template_uniop
    """

    ##############################################################################
    def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
        """This function is patched into assertEqual to allow testing for
        the floating point special values NaN, Inf, and -Inf.
        """
        # NaN cannot be compared using normal means.
        if math.isnan(dataoutitem) and math.isnan(expecteditem):
            pass
        # Anything else can be compared normally.
        else:
            if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
                raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))

    ########################################################
    def setUp(self):
        """Initialise the test data.

        The generated template compared literal strings ('odd' == 'odd',
        'h' not in ('f', 'd')), leaving dead branches; only the live branch
        is kept here: odd array size, integer array code 'h'.
        """
        self.addTypeEqualityFunc(float, self.FloatassertEqual)

        # 'odd' array size variant.
        testdatasize = 319
        decentre = testdatasize // 2

        # We don't test the minimum integer value as we are not testing
        # the behaviour of integer overflows in this series of tests.
        minval = arrayfunc.arraylimits.h_min + 1
        maxval = arrayfunc.arraylimits.h_max

        # Calculate our interval, while making sure that it is not zero.
        dstep = max((maxval - minval) // testdatasize, 1)
        # Generate test data over the full data type range.
        xdata = list(itertools.islice(itertools.cycle(range(minval, maxval, dstep)), testdatasize))
        # Make sure the last value is the largest number in the range and
        # that we have 0, 1, and -1 in the data samples as well.
        xdata[-1] = maxval
        xdata[decentre - 1] = -1
        xdata[decentre] = 0
        xdata[decentre + 1] = 1

        self.data = array.array('h', xdata)
        self.dataout = array.array('h', [0] * len(self.data))
        # The maxlen tests operate on only the first half of the array.
        self.limited = len(self.data) // 2

    ########################################################
    def test_neg_inplace_a1(self):
        """Test neg in place - Array code h.
        """
        expected = [-(x) for x in self.data]
        arrayfunc.neg(self.data, nosimd=True)
        for dataoutitem, expecteditem in zip(list(self.data), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_inplace_ov_a2(self):
        """Test neg in place with matherrors=True - Array code h.
        """
        expected = [-(x) for x in self.data]
        arrayfunc.neg(self.data, matherrors=True, nosimd=True)
        for dataoutitem, expecteditem in zip(list(self.data), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_inplace_maxlen_a3(self):
        """Test neg in place with array maxlen - Array code h.
        """
        # Only the first self.limited elements are negated.
        pydataout = [-(x) for x in self.data]
        expected = pydataout[0:self.limited] + list(self.data)[self.limited:]
        arrayfunc.neg(self.data, maxlen=self.limited, nosimd=True)
        for dataoutitem, expecteditem in zip(list(self.data), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_inplace_ov_maxlen_a4(self):
        """Test neg in place with matherrors=True and array maxlen - Array code h.
        """
        # Only the first self.limited elements are negated.
        pydataout = [-(x) for x in self.data]
        expected = pydataout[0:self.limited] + list(self.data)[self.limited:]
        arrayfunc.neg(self.data, matherrors=True, maxlen=self.limited, nosimd=True)
        for dataoutitem, expecteditem in zip(list(self.data), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_outputarray_a5(self):
        """Test neg to output array - Array code h.
        """
        expected = [-(x) for x in self.data]
        arrayfunc.neg(self.data, self.dataout, nosimd=True)
        for dataoutitem, expecteditem in zip(list(self.dataout), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_outputarray_ov_a6(self):
        """Test neg to output array with matherrors=True - Array code h.
        """
        expected = [-(x) for x in self.data]
        arrayfunc.neg(self.data, self.dataout, matherrors=True, nosimd=True)
        for dataoutitem, expecteditem in zip(list(self.dataout), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_outputarray_maxlen_a7(self):
        """Test neg to output array with array maxlen - Array code h.
        """
        # Only the first self.limited output elements are written.
        pydataout = [-(x) for x in self.data]
        expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]
        arrayfunc.neg(self.data, self.dataout, maxlen=self.limited, nosimd=True)
        for dataoutitem, expecteditem in zip(list(self.dataout), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_outputarray_ov_maxlen_a8(self):
        """Test neg to output array with matherrors=True and array maxlen - Array code h.
        """
        # Only the first self.limited output elements are written.
        pydataout = [-(x) for x in self.data]
        expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]
        arrayfunc.neg(self.data, self.dataout, matherrors=True, maxlen=self.limited, nosimd=True)
        for dataoutitem, expecteditem in zip(list(self.dataout), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)
##############################################################################
##############################################################################
class neg_general_odd_arraysize_withsimd_simd_h(unittest.TestCase):
    """Test for basic general tests - odd array size, SIMD enabled,
    array code 'h' (signed short).
    test_template_uniop
    """

    ##############################################################################
    def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
        """This function is patched into assertEqual to allow testing for
        the floating point special values NaN, Inf, and -Inf.
        """
        # NaN cannot be compared using normal means.
        if math.isnan(dataoutitem) and math.isnan(expecteditem):
            pass
        # Anything else can be compared normally.
        else:
            if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
                raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))

    ########################################################
    def setUp(self):
        """Initialise the test data.

        The generated template compared literal strings ('odd' == 'odd',
        'h' not in ('f', 'd')), leaving dead branches; only the live branch
        is kept here: odd array size, integer array code 'h'.
        """
        self.addTypeEqualityFunc(float, self.FloatassertEqual)

        # 'odd' array size variant.
        testdatasize = 319
        decentre = testdatasize // 2

        # We don't test the minimum integer value as we are not testing
        # the behaviour of integer overflows in this series of tests.
        minval = arrayfunc.arraylimits.h_min + 1
        maxval = arrayfunc.arraylimits.h_max

        # Calculate our interval, while making sure that it is not zero.
        dstep = max((maxval - minval) // testdatasize, 1)
        # Generate test data over the full data type range.
        xdata = list(itertools.islice(itertools.cycle(range(minval, maxval, dstep)), testdatasize))
        # Make sure the last value is the largest number in the range and
        # that we have 0, 1, and -1 in the data samples as well.
        xdata[-1] = maxval
        xdata[decentre - 1] = -1
        xdata[decentre] = 0
        xdata[decentre + 1] = 1

        self.data = array.array('h', xdata)
        self.dataout = array.array('h', [0] * len(self.data))
        # The maxlen tests operate on only the first half of the array.
        self.limited = len(self.data) // 2

    ########################################################
    def test_neg_inplace_a1(self):
        """Test neg in place - Array code h.
        """
        expected = [-(x) for x in self.data]
        arrayfunc.neg(self.data)
        for dataoutitem, expecteditem in zip(list(self.data), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_inplace_ov_a2(self):
        """Test neg in place with matherrors=True - Array code h.
        """
        expected = [-(x) for x in self.data]
        arrayfunc.neg(self.data, matherrors=True)
        for dataoutitem, expecteditem in zip(list(self.data), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_inplace_maxlen_a3(self):
        """Test neg in place with array maxlen - Array code h.
        """
        # Only the first self.limited elements are negated.
        pydataout = [-(x) for x in self.data]
        expected = pydataout[0:self.limited] + list(self.data)[self.limited:]
        arrayfunc.neg(self.data, maxlen=self.limited)
        for dataoutitem, expecteditem in zip(list(self.data), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_inplace_ov_maxlen_a4(self):
        """Test neg in place with matherrors=True and array maxlen - Array code h.
        """
        # Only the first self.limited elements are negated.
        pydataout = [-(x) for x in self.data]
        expected = pydataout[0:self.limited] + list(self.data)[self.limited:]
        arrayfunc.neg(self.data, matherrors=True, maxlen=self.limited)
        for dataoutitem, expecteditem in zip(list(self.data), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_outputarray_a5(self):
        """Test neg to output array - Array code h.
        """
        expected = [-(x) for x in self.data]
        arrayfunc.neg(self.data, self.dataout)
        for dataoutitem, expecteditem in zip(list(self.dataout), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_outputarray_ov_a6(self):
        """Test neg to output array with matherrors=True - Array code h.
        """
        expected = [-(x) for x in self.data]
        arrayfunc.neg(self.data, self.dataout, matherrors=True)
        for dataoutitem, expecteditem in zip(list(self.dataout), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_outputarray_maxlen_a7(self):
        """Test neg to output array with array maxlen - Array code h.
        """
        # Only the first self.limited output elements are written.
        pydataout = [-(x) for x in self.data]
        expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]
        arrayfunc.neg(self.data, self.dataout, maxlen=self.limited)
        for dataoutitem, expecteditem in zip(list(self.dataout), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_outputarray_ov_maxlen_a8(self):
        """Test neg to output array with matherrors=True and array maxlen - Array code h.
        """
        # Only the first self.limited output elements are written.
        pydataout = [-(x) for x in self.data]
        expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]
        arrayfunc.neg(self.data, self.dataout, matherrors=True, maxlen=self.limited)
        for dataoutitem, expecteditem in zip(list(self.dataout), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)
##############################################################################
##############################################################################
class neg_general_even_arraysize_nosimd_simd_i(unittest.TestCase):
    """Test for basic general tests - even array size, SIMD disabled,
    array code 'i' (signed int).
    test_template_uniop
    """

    ##############################################################################
    def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
        """This function is patched into assertEqual to allow testing for
        the floating point special values NaN, Inf, and -Inf.
        """
        # NaN cannot be compared using normal means.
        if math.isnan(dataoutitem) and math.isnan(expecteditem):
            pass
        # Anything else can be compared normally.
        else:
            if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
                raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))

    ########################################################
    def setUp(self):
        """Initialise the test data.

        The generated template compared literal strings ('even' == 'even',
        'i' not in ('f', 'd')), leaving dead branches; only the live branch
        is kept here: even array size, integer array code 'i'.
        """
        self.addTypeEqualityFunc(float, self.FloatassertEqual)

        # 'even' array size variant.
        testdatasize = 320
        decentre = testdatasize // 2

        # We don't test the minimum integer value as we are not testing
        # the behaviour of integer overflows in this series of tests.
        minval = arrayfunc.arraylimits.i_min + 1
        maxval = arrayfunc.arraylimits.i_max

        # Calculate our interval, while making sure that it is not zero.
        dstep = max((maxval - minval) // testdatasize, 1)
        # Generate test data over the full data type range.
        xdata = list(itertools.islice(itertools.cycle(range(minval, maxval, dstep)), testdatasize))
        # Make sure the last value is the largest number in the range and
        # that we have 0, 1, and -1 in the data samples as well.
        xdata[-1] = maxval
        xdata[decentre - 1] = -1
        xdata[decentre] = 0
        xdata[decentre + 1] = 1

        self.data = array.array('i', xdata)
        self.dataout = array.array('i', [0] * len(self.data))
        # The maxlen tests operate on only the first half of the array.
        self.limited = len(self.data) // 2

    ########################################################
    def test_neg_inplace_a1(self):
        """Test neg in place - Array code i.
        """
        expected = [-(x) for x in self.data]
        arrayfunc.neg(self.data, nosimd=True)
        for dataoutitem, expecteditem in zip(list(self.data), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_inplace_ov_a2(self):
        """Test neg in place with matherrors=True - Array code i.
        """
        expected = [-(x) for x in self.data]
        arrayfunc.neg(self.data, matherrors=True, nosimd=True)
        for dataoutitem, expecteditem in zip(list(self.data), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_inplace_maxlen_a3(self):
        """Test neg in place with array maxlen - Array code i.
        """
        # Only the first self.limited elements are negated.
        pydataout = [-(x) for x in self.data]
        expected = pydataout[0:self.limited] + list(self.data)[self.limited:]
        arrayfunc.neg(self.data, maxlen=self.limited, nosimd=True)
        for dataoutitem, expecteditem in zip(list(self.data), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_inplace_ov_maxlen_a4(self):
        """Test neg in place with matherrors=True and array maxlen - Array code i.
        """
        # Only the first self.limited elements are negated.
        pydataout = [-(x) for x in self.data]
        expected = pydataout[0:self.limited] + list(self.data)[self.limited:]
        arrayfunc.neg(self.data, matherrors=True, maxlen=self.limited, nosimd=True)
        for dataoutitem, expecteditem in zip(list(self.data), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_outputarray_a5(self):
        """Test neg to output array - Array code i.
        """
        expected = [-(x) for x in self.data]
        arrayfunc.neg(self.data, self.dataout, nosimd=True)
        for dataoutitem, expecteditem in zip(list(self.dataout), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_outputarray_ov_a6(self):
        """Test neg to output array with matherrors=True - Array code i.
        """
        expected = [-(x) for x in self.data]
        arrayfunc.neg(self.data, self.dataout, matherrors=True, nosimd=True)
        for dataoutitem, expecteditem in zip(list(self.dataout), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_outputarray_maxlen_a7(self):
        """Test neg to output array with array maxlen - Array code i.
        """
        # Only the first self.limited output elements are written.
        pydataout = [-(x) for x in self.data]
        expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]
        arrayfunc.neg(self.data, self.dataout, maxlen=self.limited, nosimd=True)
        for dataoutitem, expecteditem in zip(list(self.dataout), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_outputarray_ov_maxlen_a8(self):
        """Test neg to output array with matherrors=True and array maxlen - Array code i.
        """
        # Only the first self.limited output elements are written.
        pydataout = [-(x) for x in self.data]
        expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]
        arrayfunc.neg(self.data, self.dataout, matherrors=True, maxlen=self.limited, nosimd=True)
        for dataoutitem, expecteditem in zip(list(self.dataout), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)
##############################################################################
##############################################################################
class neg_general_even_arraysize_withsimd_simd_i(unittest.TestCase):
    """Test for basic general tests - even array size, SIMD enabled,
    array code 'i' (signed int).
    test_template_uniop
    """

    ##############################################################################
    def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
        """This function is patched into assertEqual to allow testing for
        the floating point special values NaN, Inf, and -Inf.
        """
        # NaN cannot be compared using normal means.
        if math.isnan(dataoutitem) and math.isnan(expecteditem):
            pass
        # Anything else can be compared normally.
        else:
            if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
                raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))

    ########################################################
    def setUp(self):
        """Initialise the test data.

        The generated template compared literal strings ('even' == 'even',
        'i' not in ('f', 'd')), leaving dead branches; only the live branch
        is kept here: even array size, integer array code 'i'.
        """
        self.addTypeEqualityFunc(float, self.FloatassertEqual)

        # 'even' array size variant.
        testdatasize = 320
        decentre = testdatasize // 2

        # We don't test the minimum integer value as we are not testing
        # the behaviour of integer overflows in this series of tests.
        minval = arrayfunc.arraylimits.i_min + 1
        maxval = arrayfunc.arraylimits.i_max

        # Calculate our interval, while making sure that it is not zero.
        dstep = max((maxval - minval) // testdatasize, 1)
        # Generate test data over the full data type range.
        xdata = list(itertools.islice(itertools.cycle(range(minval, maxval, dstep)), testdatasize))
        # Make sure the last value is the largest number in the range and
        # that we have 0, 1, and -1 in the data samples as well.
        xdata[-1] = maxval
        xdata[decentre - 1] = -1
        xdata[decentre] = 0
        xdata[decentre + 1] = 1

        self.data = array.array('i', xdata)
        self.dataout = array.array('i', [0] * len(self.data))
        # The maxlen tests operate on only the first half of the array.
        self.limited = len(self.data) // 2

    ########################################################
    def test_neg_inplace_a1(self):
        """Test neg in place - Array code i.
        """
        expected = [-(x) for x in self.data]
        arrayfunc.neg(self.data)
        for dataoutitem, expecteditem in zip(list(self.data), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_inplace_ov_a2(self):
        """Test neg in place with matherrors=True - Array code i.
        """
        expected = [-(x) for x in self.data]
        arrayfunc.neg(self.data, matherrors=True)
        for dataoutitem, expecteditem in zip(list(self.data), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_inplace_maxlen_a3(self):
        """Test neg in place with array maxlen - Array code i.
        """
        # Only the first self.limited elements are negated.
        pydataout = [-(x) for x in self.data]
        expected = pydataout[0:self.limited] + list(self.data)[self.limited:]
        arrayfunc.neg(self.data, maxlen=self.limited)
        for dataoutitem, expecteditem in zip(list(self.data), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_inplace_ov_maxlen_a4(self):
        """Test neg in place with matherrors=True and array maxlen - Array code i.
        """
        # Only the first self.limited elements are negated.
        pydataout = [-(x) for x in self.data]
        expected = pydataout[0:self.limited] + list(self.data)[self.limited:]
        arrayfunc.neg(self.data, matherrors=True, maxlen=self.limited)
        for dataoutitem, expecteditem in zip(list(self.data), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_outputarray_a5(self):
        """Test neg to output array - Array code i.
        """
        expected = [-(x) for x in self.data]
        arrayfunc.neg(self.data, self.dataout)
        for dataoutitem, expecteditem in zip(list(self.dataout), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_outputarray_ov_a6(self):
        """Test neg to output array with matherrors=True - Array code i.
        """
        expected = [-(x) for x in self.data]
        arrayfunc.neg(self.data, self.dataout, matherrors=True)
        for dataoutitem, expecteditem in zip(list(self.dataout), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_outputarray_maxlen_a7(self):
        """Test neg to output array with array maxlen - Array code i.
        """
        # Only the first self.limited output elements are written.
        pydataout = [-(x) for x in self.data]
        expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]
        arrayfunc.neg(self.data, self.dataout, maxlen=self.limited)
        for dataoutitem, expecteditem in zip(list(self.dataout), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_outputarray_ov_maxlen_a8(self):
        """Test neg to output array with matherrors=True and array maxlen - Array code i.
        """
        # Only the first self.limited output elements are written.
        pydataout = [-(x) for x in self.data]
        expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]
        arrayfunc.neg(self.data, self.dataout, matherrors=True, maxlen=self.limited)
        for dataoutitem, expecteditem in zip(list(self.dataout), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)
##############################################################################
##############################################################################
class neg_general_odd_arraysize_nosimd_simd_i(unittest.TestCase):
	"""General tests for neg on array code 'i' using an odd array size,
	with SIMD acceleration disabled (nosimd=True).
	test_template_uniop
	"""

	##############################################################################
	def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
		"""This function is patched into assertEqual to allow testing for
		the floating point special values NaN, Inf, and -Inf.
		"""
		# NaN cannot be compared using normal means.
		if math.isnan(dataoutitem) and math.isnan(expecteditem):
			pass
		# Anything else can be compared normally.
		else:
			if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
				raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))


	########################################################
	def setUp(self):
		"""Initialise the test data for array code 'i'.
		"""
		self.addTypeEqualityFunc(float, self.FloatassertEqual)

		# This class tests the odd array size variant. The template's
		# constant 'odd' == 'even' style comparisons have been folded
		# to the value they always produced.
		testdatasize = 319
		decentre = testdatasize // 2

		# 'i' is an integer array code. We don't test the minimum
		# integer value as we are not testing the behaviour of integer
		# overflows in this series of tests.
		minval = arrayfunc.arraylimits.i_min + 1
		maxval = arrayfunc.arraylimits.i_max

		# Calculate our interval, while making sure that it is not zero.
		dstep = max((maxval - minval) // testdatasize, 1)

		# Generate test data over the full data type range.
		xdata = list(itertools.islice(itertools.cycle(range(minval, maxval, dstep)), testdatasize))

		# Make sure the last value is the largest number in the range and
		# that we have 0, 1, and -1 in the data samples as well.
		xdata[-1] = maxval
		xdata[decentre - 1] = -1
		xdata[decentre] = 0
		xdata[decentre + 1] = 1

		self.data = array.array('i', xdata)
		self.dataout = array.array('i', [0]*len(self.data))

		# The maxlen tests only operate on the first half of the array.
		self.limited = len(self.data) // 2


	########################################################
	def test_neg_inplace_a1(self):
		"""Test neg in place - Array code i.
		"""
		expected = [-(x) for x in self.data]

		arrayfunc.neg(self.data , nosimd=True)

		for dataoutitem, expecteditem in zip(list(self.data), expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_inplace_ov_a2(self):
		"""Test neg in place with matherrors=True - Array code i.
		"""
		expected = [-(x) for x in self.data]

		arrayfunc.neg(self.data, matherrors=True , nosimd=True)

		for dataoutitem, expecteditem in zip(list(self.data), expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_inplace_maxlen_a3(self):
		"""Test neg in place with array maxlen - Array code i.
		"""
		pydataout = [-(x) for x in self.data]
		# Only the first 'limited' elements are modified in place.
		expected = pydataout[0:self.limited] + list(self.data)[self.limited:]

		arrayfunc.neg(self.data, maxlen=self.limited , nosimd=True)

		for dataoutitem, expecteditem in zip(list(self.data), expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_inplace_ov_maxlen_a4(self):
		"""Test neg in place with matherrors=True and array maxlen - Array code i.
		"""
		pydataout = [-(x) for x in self.data]
		# Only the first 'limited' elements are modified in place.
		expected = pydataout[0:self.limited] + list(self.data)[self.limited:]

		arrayfunc.neg(self.data, matherrors=True, maxlen=self.limited , nosimd=True)

		for dataoutitem, expecteditem in zip(list(self.data), expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_outputarray_a5(self):
		"""Test neg to output array - Array code i.
		"""
		expected = [-(x) for x in self.data]

		arrayfunc.neg(self.data, self.dataout , nosimd=True)

		for dataoutitem, expecteditem in zip(list(self.dataout), expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_outputarray_ov_a6(self):
		"""Test neg to output array with matherrors=True - Array code i.
		"""
		expected = [-(x) for x in self.data]

		arrayfunc.neg(self.data, self.dataout, matherrors=True , nosimd=True)

		for dataoutitem, expecteditem in zip(list(self.dataout), expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_outputarray_maxlen_a7(self):
		"""Test neg to output array with array maxlen - Array code i.
		"""
		pydataout = [-(x) for x in self.data]
		# Untouched output elements keep their original values.
		expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]

		arrayfunc.neg(self.data, self.dataout, maxlen=self.limited , nosimd=True)

		for dataoutitem, expecteditem in zip(list(self.dataout), expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_outputarray_ov_maxlen_a8(self):
		"""Test neg to output array with matherrors=True and array maxlen - Array code i.
		"""
		pydataout = [-(x) for x in self.data]
		# Untouched output elements keep their original values.
		expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]

		arrayfunc.neg(self.data, self.dataout, matherrors=True, maxlen=self.limited , nosimd=True)

		for dataoutitem, expecteditem in zip(list(self.dataout), expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)
##############################################################################
##############################################################################
class neg_general_odd_arraysize_withsimd_simd_i(unittest.TestCase):
	"""General tests for neg on array code 'i' using an odd array size,
	with SIMD acceleration left enabled (no nosimd argument).
	test_template_uniop
	"""

	##############################################################################
	def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
		"""This function is patched into assertEqual to allow testing for
		the floating point special values NaN, Inf, and -Inf.
		"""
		# NaN cannot be compared using normal means.
		if math.isnan(dataoutitem) and math.isnan(expecteditem):
			pass
		# Anything else can be compared normally.
		else:
			if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
				raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))


	########################################################
	def setUp(self):
		"""Initialise the test data for array code 'i'.
		"""
		self.addTypeEqualityFunc(float, self.FloatassertEqual)

		# This class tests the odd array size variant. The template's
		# constant 'odd' == 'even' style comparisons have been folded
		# to the value they always produced.
		testdatasize = 319
		decentre = testdatasize // 2

		# 'i' is an integer array code. We don't test the minimum
		# integer value as we are not testing the behaviour of integer
		# overflows in this series of tests.
		minval = arrayfunc.arraylimits.i_min + 1
		maxval = arrayfunc.arraylimits.i_max

		# Calculate our interval, while making sure that it is not zero.
		dstep = max((maxval - minval) // testdatasize, 1)

		# Generate test data over the full data type range.
		xdata = list(itertools.islice(itertools.cycle(range(minval, maxval, dstep)), testdatasize))

		# Make sure the last value is the largest number in the range and
		# that we have 0, 1, and -1 in the data samples as well.
		xdata[-1] = maxval
		xdata[decentre - 1] = -1
		xdata[decentre] = 0
		xdata[decentre + 1] = 1

		self.data = array.array('i', xdata)
		self.dataout = array.array('i', [0]*len(self.data))

		# The maxlen tests only operate on the first half of the array.
		self.limited = len(self.data) // 2


	########################################################
	def test_neg_inplace_a1(self):
		"""Test neg in place - Array code i.
		"""
		expected = [-(x) for x in self.data]

		arrayfunc.neg(self.data )

		for dataoutitem, expecteditem in zip(list(self.data), expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_inplace_ov_a2(self):
		"""Test neg in place with matherrors=True - Array code i.
		"""
		expected = [-(x) for x in self.data]

		arrayfunc.neg(self.data, matherrors=True )

		for dataoutitem, expecteditem in zip(list(self.data), expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_inplace_maxlen_a3(self):
		"""Test neg in place with array maxlen - Array code i.
		"""
		pydataout = [-(x) for x in self.data]
		# Only the first 'limited' elements are modified in place.
		expected = pydataout[0:self.limited] + list(self.data)[self.limited:]

		arrayfunc.neg(self.data, maxlen=self.limited )

		for dataoutitem, expecteditem in zip(list(self.data), expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_inplace_ov_maxlen_a4(self):
		"""Test neg in place with matherrors=True and array maxlen - Array code i.
		"""
		pydataout = [-(x) for x in self.data]
		# Only the first 'limited' elements are modified in place.
		expected = pydataout[0:self.limited] + list(self.data)[self.limited:]

		arrayfunc.neg(self.data, matherrors=True, maxlen=self.limited )

		for dataoutitem, expecteditem in zip(list(self.data), expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_outputarray_a5(self):
		"""Test neg to output array - Array code i.
		"""
		expected = [-(x) for x in self.data]

		arrayfunc.neg(self.data, self.dataout )

		for dataoutitem, expecteditem in zip(list(self.dataout), expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_outputarray_ov_a6(self):
		"""Test neg to output array with matherrors=True - Array code i.
		"""
		expected = [-(x) for x in self.data]

		arrayfunc.neg(self.data, self.dataout, matherrors=True )

		for dataoutitem, expecteditem in zip(list(self.dataout), expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_outputarray_maxlen_a7(self):
		"""Test neg to output array with array maxlen - Array code i.
		"""
		pydataout = [-(x) for x in self.data]
		# Untouched output elements keep their original values.
		expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]

		arrayfunc.neg(self.data, self.dataout, maxlen=self.limited )

		for dataoutitem, expecteditem in zip(list(self.dataout), expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_outputarray_ov_maxlen_a8(self):
		"""Test neg to output array with matherrors=True and array maxlen - Array code i.
		"""
		pydataout = [-(x) for x in self.data]
		# Untouched output elements keep their original values.
		expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]

		arrayfunc.neg(self.data, self.dataout, matherrors=True, maxlen=self.limited )

		for dataoutitem, expecteditem in zip(list(self.dataout), expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)
##############################################################################
##############################################################################
class neg_general_even_arraysize_nosimd_simd_l(unittest.TestCase):
	"""General tests for neg on array code 'l' using an even array size,
	with SIMD acceleration disabled (nosimd=True).
	test_template_uniop
	"""

	##############################################################################
	def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
		"""This function is patched into assertEqual to allow testing for
		the floating point special values NaN, Inf, and -Inf.
		"""
		# NaN cannot be compared using normal means.
		if math.isnan(dataoutitem) and math.isnan(expecteditem):
			pass
		# Anything else can be compared normally.
		else:
			if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
				raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))


	########################################################
	def setUp(self):
		"""Initialise the test data for array code 'l'.
		"""
		self.addTypeEqualityFunc(float, self.FloatassertEqual)

		# This class tests the even array size variant. The template's
		# constant 'even' == 'odd' style comparisons have been folded
		# to the value they always produced.
		testdatasize = 320
		decentre = testdatasize // 2

		# 'l' is an integer array code. We don't test the minimum
		# integer value as we are not testing the behaviour of integer
		# overflows in this series of tests.
		minval = arrayfunc.arraylimits.l_min + 1
		maxval = arrayfunc.arraylimits.l_max

		# Calculate our interval, while making sure that it is not zero.
		dstep = max((maxval - minval) // testdatasize, 1)

		# Generate test data over the full data type range.
		xdata = list(itertools.islice(itertools.cycle(range(minval, maxval, dstep)), testdatasize))

		# Make sure the last value is the largest number in the range and
		# that we have 0, 1, and -1 in the data samples as well.
		xdata[-1] = maxval
		xdata[decentre - 1] = -1
		xdata[decentre] = 0
		xdata[decentre + 1] = 1

		self.data = array.array('l', xdata)
		self.dataout = array.array('l', [0]*len(self.data))

		# The maxlen tests only operate on the first half of the array.
		self.limited = len(self.data) // 2


	########################################################
	def test_neg_inplace_a1(self):
		"""Test neg in place - Array code l.
		"""
		expected = [-(x) for x in self.data]

		arrayfunc.neg(self.data , nosimd=True)

		for dataoutitem, expecteditem in zip(list(self.data), expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_inplace_ov_a2(self):
		"""Test neg in place with matherrors=True - Array code l.
		"""
		expected = [-(x) for x in self.data]

		arrayfunc.neg(self.data, matherrors=True , nosimd=True)

		for dataoutitem, expecteditem in zip(list(self.data), expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_inplace_maxlen_a3(self):
		"""Test neg in place with array maxlen - Array code l.
		"""
		pydataout = [-(x) for x in self.data]
		# Only the first 'limited' elements are modified in place.
		expected = pydataout[0:self.limited] + list(self.data)[self.limited:]

		arrayfunc.neg(self.data, maxlen=self.limited , nosimd=True)

		for dataoutitem, expecteditem in zip(list(self.data), expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_inplace_ov_maxlen_a4(self):
		"""Test neg in place with matherrors=True and array maxlen - Array code l.
		"""
		pydataout = [-(x) for x in self.data]
		# Only the first 'limited' elements are modified in place.
		expected = pydataout[0:self.limited] + list(self.data)[self.limited:]

		arrayfunc.neg(self.data, matherrors=True, maxlen=self.limited , nosimd=True)

		for dataoutitem, expecteditem in zip(list(self.data), expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_outputarray_a5(self):
		"""Test neg to output array - Array code l.
		"""
		expected = [-(x) for x in self.data]

		arrayfunc.neg(self.data, self.dataout , nosimd=True)

		for dataoutitem, expecteditem in zip(list(self.dataout), expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_outputarray_ov_a6(self):
		"""Test neg to output array with matherrors=True - Array code l.
		"""
		expected = [-(x) for x in self.data]

		arrayfunc.neg(self.data, self.dataout, matherrors=True , nosimd=True)

		for dataoutitem, expecteditem in zip(list(self.dataout), expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_outputarray_maxlen_a7(self):
		"""Test neg to output array with array maxlen - Array code l.
		"""
		pydataout = [-(x) for x in self.data]
		# Untouched output elements keep their original values.
		expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]

		arrayfunc.neg(self.data, self.dataout, maxlen=self.limited , nosimd=True)

		for dataoutitem, expecteditem in zip(list(self.dataout), expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_outputarray_ov_maxlen_a8(self):
		"""Test neg to output array with matherrors=True and array maxlen - Array code l.
		"""
		pydataout = [-(x) for x in self.data]
		# Untouched output elements keep their original values.
		expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]

		arrayfunc.neg(self.data, self.dataout, matherrors=True, maxlen=self.limited , nosimd=True)

		for dataoutitem, expecteditem in zip(list(self.dataout), expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)
##############################################################################
##############################################################################
class neg_general_even_arraysize_withsimd_simd_l(unittest.TestCase):
	"""General tests for neg on array code 'l' using an even array size,
	with SIMD acceleration left enabled (no nosimd argument).
	test_template_uniop
	"""

	##############################################################################
	def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
		"""This function is patched into assertEqual to allow testing for
		the floating point special values NaN, Inf, and -Inf.
		"""
		# NaN cannot be compared using normal means.
		if math.isnan(dataoutitem) and math.isnan(expecteditem):
			pass
		# Anything else can be compared normally.
		else:
			if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
				raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))


	########################################################
	def setUp(self):
		"""Initialise the test data for array code 'l'.
		"""
		self.addTypeEqualityFunc(float, self.FloatassertEqual)

		# This class tests the even array size variant. The template's
		# constant 'even' == 'odd' style comparisons have been folded
		# to the value they always produced.
		testdatasize = 320
		decentre = testdatasize // 2

		# 'l' is an integer array code. We don't test the minimum
		# integer value as we are not testing the behaviour of integer
		# overflows in this series of tests.
		minval = arrayfunc.arraylimits.l_min + 1
		maxval = arrayfunc.arraylimits.l_max

		# Calculate our interval, while making sure that it is not zero.
		dstep = max((maxval - minval) // testdatasize, 1)

		# Generate test data over the full data type range.
		xdata = list(itertools.islice(itertools.cycle(range(minval, maxval, dstep)), testdatasize))

		# Make sure the last value is the largest number in the range and
		# that we have 0, 1, and -1 in the data samples as well.
		xdata[-1] = maxval
		xdata[decentre - 1] = -1
		xdata[decentre] = 0
		xdata[decentre + 1] = 1

		self.data = array.array('l', xdata)
		self.dataout = array.array('l', [0]*len(self.data))

		# The maxlen tests only operate on the first half of the array.
		self.limited = len(self.data) // 2


	########################################################
	def test_neg_inplace_a1(self):
		"""Test neg in place - Array code l.
		"""
		expected = [-(x) for x in self.data]

		arrayfunc.neg(self.data )

		for dataoutitem, expecteditem in zip(list(self.data), expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_inplace_ov_a2(self):
		"""Test neg in place with matherrors=True - Array code l.
		"""
		expected = [-(x) for x in self.data]

		arrayfunc.neg(self.data, matherrors=True )

		for dataoutitem, expecteditem in zip(list(self.data), expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_inplace_maxlen_a3(self):
		"""Test neg in place with array maxlen - Array code l.
		"""
		pydataout = [-(x) for x in self.data]
		# Only the first 'limited' elements are modified in place.
		expected = pydataout[0:self.limited] + list(self.data)[self.limited:]

		arrayfunc.neg(self.data, maxlen=self.limited )

		for dataoutitem, expecteditem in zip(list(self.data), expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_inplace_ov_maxlen_a4(self):
		"""Test neg in place with matherrors=True and array maxlen - Array code l.
		"""
		pydataout = [-(x) for x in self.data]
		# Only the first 'limited' elements are modified in place.
		expected = pydataout[0:self.limited] + list(self.data)[self.limited:]

		arrayfunc.neg(self.data, matherrors=True, maxlen=self.limited )

		for dataoutitem, expecteditem in zip(list(self.data), expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_outputarray_a5(self):
		"""Test neg to output array - Array code l.
		"""
		expected = [-(x) for x in self.data]

		arrayfunc.neg(self.data, self.dataout )

		for dataoutitem, expecteditem in zip(list(self.dataout), expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_outputarray_ov_a6(self):
		"""Test neg to output array with matherrors=True - Array code l.
		"""
		expected = [-(x) for x in self.data]

		arrayfunc.neg(self.data, self.dataout, matherrors=True )

		for dataoutitem, expecteditem in zip(list(self.dataout), expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_outputarray_maxlen_a7(self):
		"""Test neg to output array with array maxlen - Array code l.
		"""
		pydataout = [-(x) for x in self.data]
		# Untouched output elements keep their original values.
		expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]

		arrayfunc.neg(self.data, self.dataout, maxlen=self.limited )

		for dataoutitem, expecteditem in zip(list(self.dataout), expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_outputarray_ov_maxlen_a8(self):
		"""Test neg to output array with matherrors=True and array maxlen - Array code l.
		"""
		pydataout = [-(x) for x in self.data]
		# Untouched output elements keep their original values.
		expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]

		arrayfunc.neg(self.data, self.dataout, matherrors=True, maxlen=self.limited )

		for dataoutitem, expecteditem in zip(list(self.dataout), expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)
##############################################################################
##############################################################################
class neg_general_odd_arraysize_nosimd_simd_l(unittest.TestCase):
	"""General tests for neg on array code 'l' using an odd array size,
	with SIMD acceleration disabled (nosimd=True).
	test_template_uniop
	"""

	##############################################################################
	def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
		"""This function is patched into assertEqual to allow testing for
		the floating point special values NaN, Inf, and -Inf.
		"""
		# NaN cannot be compared using normal means.
		if math.isnan(dataoutitem) and math.isnan(expecteditem):
			pass
		# Anything else can be compared normally.
		else:
			if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
				raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))


	########################################################
	def setUp(self):
		"""Initialise the test data for array code 'l'.
		"""
		self.addTypeEqualityFunc(float, self.FloatassertEqual)

		# This class tests the odd array size variant. The template's
		# constant 'odd' == 'even' style comparisons have been folded
		# to the value they always produced.
		testdatasize = 319
		decentre = testdatasize // 2

		# 'l' is an integer array code. We don't test the minimum
		# integer value as we are not testing the behaviour of integer
		# overflows in this series of tests.
		minval = arrayfunc.arraylimits.l_min + 1
		maxval = arrayfunc.arraylimits.l_max

		# Calculate our interval, while making sure that it is not zero.
		dstep = max((maxval - minval) // testdatasize, 1)

		# Generate test data over the full data type range.
		xdata = list(itertools.islice(itertools.cycle(range(minval, maxval, dstep)), testdatasize))

		# Make sure the last value is the largest number in the range and
		# that we have 0, 1, and -1 in the data samples as well.
		xdata[-1] = maxval
		xdata[decentre - 1] = -1
		xdata[decentre] = 0
		xdata[decentre + 1] = 1

		self.data = array.array('l', xdata)
		self.dataout = array.array('l', [0]*len(self.data))

		# The maxlen tests only operate on the first half of the array.
		self.limited = len(self.data) // 2


	########################################################
	def test_neg_inplace_a1(self):
		"""Test neg in place - Array code l.
		"""
		expected = [-(x) for x in self.data]

		arrayfunc.neg(self.data , nosimd=True)

		for dataoutitem, expecteditem in zip(list(self.data), expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_inplace_ov_a2(self):
		"""Test neg in place with matherrors=True - Array code l.
		"""
		expected = [-(x) for x in self.data]

		arrayfunc.neg(self.data, matherrors=True , nosimd=True)

		for dataoutitem, expecteditem in zip(list(self.data), expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_inplace_maxlen_a3(self):
		"""Test neg in place with array maxlen - Array code l.
		"""
		pydataout = [-(x) for x in self.data]
		# Only the first 'limited' elements are modified in place.
		expected = pydataout[0:self.limited] + list(self.data)[self.limited:]

		arrayfunc.neg(self.data, maxlen=self.limited , nosimd=True)

		for dataoutitem, expecteditem in zip(list(self.data), expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_inplace_ov_maxlen_a4(self):
		"""Test neg in place with matherrors=True and array maxlen - Array code l.
		"""
		pydataout = [-(x) for x in self.data]
		# Only the first 'limited' elements are modified in place.
		expected = pydataout[0:self.limited] + list(self.data)[self.limited:]

		arrayfunc.neg(self.data, matherrors=True, maxlen=self.limited , nosimd=True)

		for dataoutitem, expecteditem in zip(list(self.data), expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_outputarray_a5(self):
		"""Test neg to output array - Array code l.
		"""
		expected = [-(x) for x in self.data]

		arrayfunc.neg(self.data, self.dataout , nosimd=True)

		for dataoutitem, expecteditem in zip(list(self.dataout), expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_outputarray_ov_a6(self):
		"""Test neg to output array with matherrors=True - Array code l.
		"""
		expected = [-(x) for x in self.data]

		arrayfunc.neg(self.data, self.dataout, matherrors=True , nosimd=True)

		for dataoutitem, expecteditem in zip(list(self.dataout), expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_outputarray_maxlen_a7(self):
		"""Test neg to output array with array maxlen - Array code l.
		"""
		pydataout = [-(x) for x in self.data]
		# Untouched output elements keep their original values.
		expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]

		arrayfunc.neg(self.data, self.dataout, maxlen=self.limited , nosimd=True)

		for dataoutitem, expecteditem in zip(list(self.dataout), expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_outputarray_ov_maxlen_a8(self):
		"""Test neg to output array with matherrors=True and array maxlen - Array code l.
		"""
		pydataout = [-(x) for x in self.data]
		# Untouched output elements keep their original values.
		expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]

		arrayfunc.neg(self.data, self.dataout, matherrors=True, maxlen=self.limited , nosimd=True)

		for dataoutitem, expecteditem in zip(list(self.dataout), expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)
##############################################################################
##############################################################################
class neg_general_odd_arraysize_withsimd_simd_l(unittest.TestCase):
"""Test for basic general tests.
test_template_uniop
"""
##############################################################################
def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
"""This function is patched into assertEqual to allow testing for
the floating point special values NaN, Inf, and -Inf.
"""
# NaN cannot be compared using normal means.
if math.isnan(dataoutitem) and math.isnan(expecteditem):
pass
# Anything else can be compared normally.
else:
if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))
########################################################
def setUp(self):
"""Initialise.
"""
self.addTypeEqualityFunc(float, self.FloatassertEqual)
if 'odd' == 'even':
testdatasize = 320
if 'odd' == 'odd':
testdatasize = 319
decentre = testdatasize // 2
if 'l' not in ('f', 'd'):
# We don't test the minimum integer value as we are not testing
# the behaviour of integer overflows in this series of tests.
minval = arrayfunc.arraylimits.l_min + 1
maxval = arrayfunc.arraylimits.l_max
else:
# For floating point tests we limit the range to large integer
# size ranges to ensure better coverage of more typical use cases.
minval = arrayfunc.arraylimits.q_min
maxval = arrayfunc.arraylimits.q_max
# Calculate our interval, while making sure that it is not zero.
dstep = max((maxval - minval) // testdatasize, 1)
# Generate test data over the full data type range.
xdata = list(itertools.islice(itertools.cycle(range(minval, maxval, dstep)), testdatasize))
# Make sure the last value is the largest number in the range and
# that we have 0, 1, and -1 in the data samples as well.
xdata[-1] = maxval
xdata[decentre - 1] = -1
xdata[decentre] = 0
xdata[decentre + 1] = 1
self.data = array.array('l', xdata)
self.dataout = array.array('l', [0]*len(self.data))
self.limited = len(self.data) // 2
########################################################
def test_neg_inplace_a1(self):
"""Test neg in place - Array code l.
"""
expected = [-(x) for x in self.data]
arrayfunc.neg(self.data )
for dataoutitem, expecteditem in zip(list(self.data), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_inplace_ov_a2(self):
"""Test neg in place with matherrors=True - Array code l.
"""
expected = [-(x) for x in self.data]
arrayfunc.neg(self.data, matherrors=True )
for dataoutitem, expecteditem in zip(list(self.data), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_inplace_maxlen_a3(self):
    """Test neg in place with array maxlen - Array code l.
    """
    negated = [-value for value in self.data]
    # Only the first self.limited elements should be changed; the rest
    # of the array must retain its original values.
    expected = negated[:self.limited] + list(self.data)[self.limited:]
    arrayfunc.neg(self.data, maxlen=self.limited)
    # assertEqual has been patched via addTypeEqualityFunc, so float
    # comparisons tolerate rounding and NaN.
    for actual, wanted in zip(self.data, expected):
        self.assertEqual(actual, wanted)
########################################################
def test_neg_inplace_ov_maxlen_a4(self):
    """Test neg in place with matherrors=True and array maxlen - Array code l.
    """
    negated = [-value for value in self.data]
    # Only the first self.limited elements should be changed; the rest
    # of the array must retain its original values.
    expected = negated[:self.limited] + list(self.data)[self.limited:]
    arrayfunc.neg(self.data, matherrors=True, maxlen=self.limited)
    # assertEqual has been patched via addTypeEqualityFunc, so float
    # comparisons tolerate rounding and NaN.
    for actual, wanted in zip(self.data, expected):
        self.assertEqual(actual, wanted)
########################################################
def test_neg_outputarray_a5(self):
    """Test neg to output array - Array code l.
    """
    expected = [-value for value in self.data]
    # Results are written to the separate output array, not self.data.
    arrayfunc.neg(self.data, self.dataout)
    # assertEqual has been patched via addTypeEqualityFunc, so float
    # comparisons tolerate rounding and NaN.
    for actual, wanted in zip(self.dataout, expected):
        self.assertEqual(actual, wanted)
########################################################
def test_neg_outputarray_ov_a6(self):
    """Test neg to output array with matherrors=True - Array code l.
    """
    expected = [-value for value in self.data]
    # Results are written to the separate output array, not self.data.
    arrayfunc.neg(self.data, self.dataout, matherrors=True)
    # assertEqual has been patched via addTypeEqualityFunc, so float
    # comparisons tolerate rounding and NaN.
    for actual, wanted in zip(self.dataout, expected):
        self.assertEqual(actual, wanted)
########################################################
def test_neg_outputarray_maxlen_a7(self):
    """Test neg to output array with array maxlen - Array code l.
    """
    negated = [-value for value in self.data]
    # Only the first self.limited elements of the output array should be
    # written; the rest must keep their initial (zero) values.
    expected = negated[:self.limited] + list(self.dataout)[self.limited:]
    arrayfunc.neg(self.data, self.dataout, maxlen=self.limited)
    # assertEqual has been patched via addTypeEqualityFunc, so float
    # comparisons tolerate rounding and NaN.
    for actual, wanted in zip(self.dataout, expected):
        self.assertEqual(actual, wanted)
########################################################
def test_neg_outputarray_ov_maxlen_a8(self):
    """Test neg to output array with matherrors=True and array maxlen - Array code l.
    """
    negated = [-value for value in self.data]
    # Only the first self.limited elements of the output array should be
    # written; the rest must keep their initial (zero) values.
    expected = negated[:self.limited] + list(self.dataout)[self.limited:]
    arrayfunc.neg(self.data, self.dataout, matherrors=True, maxlen=self.limited)
    # assertEqual has been patched via addTypeEqualityFunc, so float
    # comparisons tolerate rounding and NaN.
    for actual, wanted in zip(self.dataout, expected):
        self.assertEqual(actual, wanted)
##############################################################################
##############################################################################
class neg_general_even_arraysize_nosimd_simd_q(unittest.TestCase):
    """Test for basic general tests.
    test_template_uniop
    """

    ##############################################################################
    def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
        """This function is patched into assertEqual to allow testing for
        the floating point special values NaN, Inf, and -Inf.
        """
        # NaN cannot be compared using normal means.
        if math.isnan(dataoutitem) and math.isnan(expecteditem):
            pass
        # Anything else can be compared normally.
        else:
            if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
                raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))

    ########################################################
    def setUp(self):
        """Initialise even-length 'q' (signed long long) test data.

        The template's dead branches ('even'/'odd' string comparisons and
        the floating point limits branch, unreachable for code 'q') have
        been resolved to the single live path.
        """
        self.addTypeEqualityFunc(float, self.FloatassertEqual)

        # Even-sized test array.
        testdatasize = 320
        decentre = testdatasize // 2

        # We don't test the minimum integer value as we are not testing
        # the behaviour of integer overflows in this series of tests.
        minval = arrayfunc.arraylimits.q_min + 1
        maxval = arrayfunc.arraylimits.q_max

        # Calculate our interval, while making sure that it is not zero.
        dstep = max((maxval - minval) // testdatasize, 1)

        # Generate test data over the full data type range.
        xdata = list(itertools.islice(itertools.cycle(range(minval, maxval, dstep)), testdatasize))

        # Make sure the last value is the largest number in the range and
        # that we have 0, 1, and -1 in the data samples as well.
        xdata[-1] = maxval
        xdata[decentre - 1] = -1
        xdata[decentre] = 0
        xdata[decentre + 1] = 1

        self.data = array.array('q', xdata)
        self.dataout = array.array('q', [0] * len(self.data))
        # Length limit used by the maxlen tests (half the array).
        self.limited = len(self.data) // 2

    ########################################################
    def test_neg_inplace_a1(self):
        """Test neg in place - Array code q.
        """
        expected = [-(x) for x in self.data]
        arrayfunc.neg(self.data, nosimd=True)
        for dataoutitem, expecteditem in zip(list(self.data), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_inplace_ov_a2(self):
        """Test neg in place with matherrors=True - Array code q.
        """
        expected = [-(x) for x in self.data]
        arrayfunc.neg(self.data, matherrors=True, nosimd=True)
        for dataoutitem, expecteditem in zip(list(self.data), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_inplace_maxlen_a3(self):
        """Test neg in place with array maxlen - Array code q.
        """
        pydataout = [-(x) for x in self.data]
        # Elements past self.limited must be left untouched.
        expected = pydataout[0:self.limited] + list(self.data)[self.limited:]
        arrayfunc.neg(self.data, maxlen=self.limited, nosimd=True)
        for dataoutitem, expecteditem in zip(list(self.data), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_inplace_ov_maxlen_a4(self):
        """Test neg in place with matherrors=True and array maxlen - Array code q.
        """
        pydataout = [-(x) for x in self.data]
        # Elements past self.limited must be left untouched.
        expected = pydataout[0:self.limited] + list(self.data)[self.limited:]
        arrayfunc.neg(self.data, matherrors=True, maxlen=self.limited, nosimd=True)
        for dataoutitem, expecteditem in zip(list(self.data), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_outputarray_a5(self):
        """Test neg to output array - Array code q.
        """
        expected = [-(x) for x in self.data]
        arrayfunc.neg(self.data, self.dataout, nosimd=True)
        for dataoutitem, expecteditem in zip(list(self.dataout), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_outputarray_ov_a6(self):
        """Test neg to output array with matherrors=True - Array code q.
        """
        expected = [-(x) for x in self.data]
        arrayfunc.neg(self.data, self.dataout, matherrors=True, nosimd=True)
        for dataoutitem, expecteditem in zip(list(self.dataout), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_outputarray_maxlen_a7(self):
        """Test neg to output array with array maxlen - Array code q.
        """
        pydataout = [-(x) for x in self.data]
        # Output elements past self.limited must keep their initial zeros.
        expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]
        arrayfunc.neg(self.data, self.dataout, maxlen=self.limited, nosimd=True)
        for dataoutitem, expecteditem in zip(list(self.dataout), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_outputarray_ov_maxlen_a8(self):
        """Test neg to output array with matherrors=True and array maxlen - Array code q.
        """
        pydataout = [-(x) for x in self.data]
        # Output elements past self.limited must keep their initial zeros.
        expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]
        arrayfunc.neg(self.data, self.dataout, matherrors=True, maxlen=self.limited, nosimd=True)
        for dataoutitem, expecteditem in zip(list(self.dataout), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)
##############################################################################
##############################################################################
class neg_general_even_arraysize_withsimd_simd_q(unittest.TestCase):
    """Test for basic general tests.
    test_template_uniop
    """

    ##############################################################################
    def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
        """This function is patched into assertEqual to allow testing for
        the floating point special values NaN, Inf, and -Inf.
        """
        # NaN cannot be compared using normal means.
        if math.isnan(dataoutitem) and math.isnan(expecteditem):
            pass
        # Anything else can be compared normally.
        else:
            if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
                raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))

    ########################################################
    def setUp(self):
        """Initialise even-length 'q' (signed long long) test data.

        The template's dead branches ('even'/'odd' string comparisons and
        the floating point limits branch, unreachable for code 'q') have
        been resolved to the single live path.
        """
        self.addTypeEqualityFunc(float, self.FloatassertEqual)

        # Even-sized test array.
        testdatasize = 320
        decentre = testdatasize // 2

        # We don't test the minimum integer value as we are not testing
        # the behaviour of integer overflows in this series of tests.
        minval = arrayfunc.arraylimits.q_min + 1
        maxval = arrayfunc.arraylimits.q_max

        # Calculate our interval, while making sure that it is not zero.
        dstep = max((maxval - minval) // testdatasize, 1)

        # Generate test data over the full data type range.
        xdata = list(itertools.islice(itertools.cycle(range(minval, maxval, dstep)), testdatasize))

        # Make sure the last value is the largest number in the range and
        # that we have 0, 1, and -1 in the data samples as well.
        xdata[-1] = maxval
        xdata[decentre - 1] = -1
        xdata[decentre] = 0
        xdata[decentre + 1] = 1

        self.data = array.array('q', xdata)
        self.dataout = array.array('q', [0] * len(self.data))
        # Length limit used by the maxlen tests (half the array).
        self.limited = len(self.data) // 2

    ########################################################
    def test_neg_inplace_a1(self):
        """Test neg in place - Array code q.
        """
        expected = [-(x) for x in self.data]
        arrayfunc.neg(self.data)
        for dataoutitem, expecteditem in zip(list(self.data), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_inplace_ov_a2(self):
        """Test neg in place with matherrors=True - Array code q.
        """
        expected = [-(x) for x in self.data]
        arrayfunc.neg(self.data, matherrors=True)
        for dataoutitem, expecteditem in zip(list(self.data), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_inplace_maxlen_a3(self):
        """Test neg in place with array maxlen - Array code q.
        """
        pydataout = [-(x) for x in self.data]
        # Elements past self.limited must be left untouched.
        expected = pydataout[0:self.limited] + list(self.data)[self.limited:]
        arrayfunc.neg(self.data, maxlen=self.limited)
        for dataoutitem, expecteditem in zip(list(self.data), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_inplace_ov_maxlen_a4(self):
        """Test neg in place with matherrors=True and array maxlen - Array code q.
        """
        pydataout = [-(x) for x in self.data]
        # Elements past self.limited must be left untouched.
        expected = pydataout[0:self.limited] + list(self.data)[self.limited:]
        arrayfunc.neg(self.data, matherrors=True, maxlen=self.limited)
        for dataoutitem, expecteditem in zip(list(self.data), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_outputarray_a5(self):
        """Test neg to output array - Array code q.
        """
        expected = [-(x) for x in self.data]
        arrayfunc.neg(self.data, self.dataout)
        for dataoutitem, expecteditem in zip(list(self.dataout), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_outputarray_ov_a6(self):
        """Test neg to output array with matherrors=True - Array code q.
        """
        expected = [-(x) for x in self.data]
        arrayfunc.neg(self.data, self.dataout, matherrors=True)
        for dataoutitem, expecteditem in zip(list(self.dataout), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_outputarray_maxlen_a7(self):
        """Test neg to output array with array maxlen - Array code q.
        """
        pydataout = [-(x) for x in self.data]
        # Output elements past self.limited must keep their initial zeros.
        expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]
        arrayfunc.neg(self.data, self.dataout, maxlen=self.limited)
        for dataoutitem, expecteditem in zip(list(self.dataout), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_outputarray_ov_maxlen_a8(self):
        """Test neg to output array with matherrors=True and array maxlen - Array code q.
        """
        pydataout = [-(x) for x in self.data]
        # Output elements past self.limited must keep their initial zeros.
        expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]
        arrayfunc.neg(self.data, self.dataout, matherrors=True, maxlen=self.limited)
        for dataoutitem, expecteditem in zip(list(self.dataout), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)
##############################################################################
##############################################################################
class neg_general_odd_arraysize_nosimd_simd_q(unittest.TestCase):
    """Test for basic general tests.
    test_template_uniop
    """

    ##############################################################################
    def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
        """This function is patched into assertEqual to allow testing for
        the floating point special values NaN, Inf, and -Inf.
        """
        # NaN cannot be compared using normal means.
        if math.isnan(dataoutitem) and math.isnan(expecteditem):
            pass
        # Anything else can be compared normally.
        else:
            if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
                raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))

    ########################################################
    def setUp(self):
        """Initialise odd-length 'q' (signed long long) test data.

        The template's dead branches ('even'/'odd' string comparisons and
        the floating point limits branch, unreachable for code 'q') have
        been resolved to the single live path.
        """
        self.addTypeEqualityFunc(float, self.FloatassertEqual)

        # Odd-sized test array.
        testdatasize = 319
        decentre = testdatasize // 2

        # We don't test the minimum integer value as we are not testing
        # the behaviour of integer overflows in this series of tests.
        minval = arrayfunc.arraylimits.q_min + 1
        maxval = arrayfunc.arraylimits.q_max

        # Calculate our interval, while making sure that it is not zero.
        dstep = max((maxval - minval) // testdatasize, 1)

        # Generate test data over the full data type range.
        xdata = list(itertools.islice(itertools.cycle(range(minval, maxval, dstep)), testdatasize))

        # Make sure the last value is the largest number in the range and
        # that we have 0, 1, and -1 in the data samples as well.
        xdata[-1] = maxval
        xdata[decentre - 1] = -1
        xdata[decentre] = 0
        xdata[decentre + 1] = 1

        self.data = array.array('q', xdata)
        self.dataout = array.array('q', [0] * len(self.data))
        # Length limit used by the maxlen tests (half the array).
        self.limited = len(self.data) // 2

    ########################################################
    def test_neg_inplace_a1(self):
        """Test neg in place - Array code q.
        """
        expected = [-(x) for x in self.data]
        arrayfunc.neg(self.data, nosimd=True)
        for dataoutitem, expecteditem in zip(list(self.data), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_inplace_ov_a2(self):
        """Test neg in place with matherrors=True - Array code q.
        """
        expected = [-(x) for x in self.data]
        arrayfunc.neg(self.data, matherrors=True, nosimd=True)
        for dataoutitem, expecteditem in zip(list(self.data), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_inplace_maxlen_a3(self):
        """Test neg in place with array maxlen - Array code q.
        """
        pydataout = [-(x) for x in self.data]
        # Elements past self.limited must be left untouched.
        expected = pydataout[0:self.limited] + list(self.data)[self.limited:]
        arrayfunc.neg(self.data, maxlen=self.limited, nosimd=True)
        for dataoutitem, expecteditem in zip(list(self.data), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_inplace_ov_maxlen_a4(self):
        """Test neg in place with matherrors=True and array maxlen - Array code q.
        """
        pydataout = [-(x) for x in self.data]
        # Elements past self.limited must be left untouched.
        expected = pydataout[0:self.limited] + list(self.data)[self.limited:]
        arrayfunc.neg(self.data, matherrors=True, maxlen=self.limited, nosimd=True)
        for dataoutitem, expecteditem in zip(list(self.data), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_outputarray_a5(self):
        """Test neg to output array - Array code q.
        """
        expected = [-(x) for x in self.data]
        arrayfunc.neg(self.data, self.dataout, nosimd=True)
        for dataoutitem, expecteditem in zip(list(self.dataout), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_outputarray_ov_a6(self):
        """Test neg to output array with matherrors=True - Array code q.
        """
        expected = [-(x) for x in self.data]
        arrayfunc.neg(self.data, self.dataout, matherrors=True, nosimd=True)
        for dataoutitem, expecteditem in zip(list(self.dataout), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_outputarray_maxlen_a7(self):
        """Test neg to output array with array maxlen - Array code q.
        """
        pydataout = [-(x) for x in self.data]
        # Output elements past self.limited must keep their initial zeros.
        expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]
        arrayfunc.neg(self.data, self.dataout, maxlen=self.limited, nosimd=True)
        for dataoutitem, expecteditem in zip(list(self.dataout), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_outputarray_ov_maxlen_a8(self):
        """Test neg to output array with matherrors=True and array maxlen - Array code q.
        """
        pydataout = [-(x) for x in self.data]
        # Output elements past self.limited must keep their initial zeros.
        expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]
        arrayfunc.neg(self.data, self.dataout, matherrors=True, maxlen=self.limited, nosimd=True)
        for dataoutitem, expecteditem in zip(list(self.dataout), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)
##############################################################################
##############################################################################
class neg_general_odd_arraysize_withsimd_simd_q(unittest.TestCase):
    """Test for basic general tests.
    test_template_uniop
    """

    ##############################################################################
    def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
        """This function is patched into assertEqual to allow testing for
        the floating point special values NaN, Inf, and -Inf.
        """
        # NaN cannot be compared using normal means.
        if math.isnan(dataoutitem) and math.isnan(expecteditem):
            pass
        # Anything else can be compared normally.
        else:
            if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
                raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))

    ########################################################
    def setUp(self):
        """Initialise odd-length 'q' (signed long long) test data.

        The template's dead branches ('even'/'odd' string comparisons and
        the floating point limits branch, unreachable for code 'q') have
        been resolved to the single live path.
        """
        self.addTypeEqualityFunc(float, self.FloatassertEqual)

        # Odd-sized test array.
        testdatasize = 319
        decentre = testdatasize // 2

        # We don't test the minimum integer value as we are not testing
        # the behaviour of integer overflows in this series of tests.
        minval = arrayfunc.arraylimits.q_min + 1
        maxval = arrayfunc.arraylimits.q_max

        # Calculate our interval, while making sure that it is not zero.
        dstep = max((maxval - minval) // testdatasize, 1)

        # Generate test data over the full data type range.
        xdata = list(itertools.islice(itertools.cycle(range(minval, maxval, dstep)), testdatasize))

        # Make sure the last value is the largest number in the range and
        # that we have 0, 1, and -1 in the data samples as well.
        xdata[-1] = maxval
        xdata[decentre - 1] = -1
        xdata[decentre] = 0
        xdata[decentre + 1] = 1

        self.data = array.array('q', xdata)
        self.dataout = array.array('q', [0] * len(self.data))
        # Length limit used by the maxlen tests (half the array).
        self.limited = len(self.data) // 2

    ########################################################
    def test_neg_inplace_a1(self):
        """Test neg in place - Array code q.
        """
        expected = [-(x) for x in self.data]
        arrayfunc.neg(self.data)
        for dataoutitem, expecteditem in zip(list(self.data), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_inplace_ov_a2(self):
        """Test neg in place with matherrors=True - Array code q.
        """
        expected = [-(x) for x in self.data]
        arrayfunc.neg(self.data, matherrors=True)
        for dataoutitem, expecteditem in zip(list(self.data), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_inplace_maxlen_a3(self):
        """Test neg in place with array maxlen - Array code q.
        """
        pydataout = [-(x) for x in self.data]
        # Elements past self.limited must be left untouched.
        expected = pydataout[0:self.limited] + list(self.data)[self.limited:]
        arrayfunc.neg(self.data, maxlen=self.limited)
        for dataoutitem, expecteditem in zip(list(self.data), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_inplace_ov_maxlen_a4(self):
        """Test neg in place with matherrors=True and array maxlen - Array code q.
        """
        pydataout = [-(x) for x in self.data]
        # Elements past self.limited must be left untouched.
        expected = pydataout[0:self.limited] + list(self.data)[self.limited:]
        arrayfunc.neg(self.data, matherrors=True, maxlen=self.limited)
        for dataoutitem, expecteditem in zip(list(self.data), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_outputarray_a5(self):
        """Test neg to output array - Array code q.
        """
        expected = [-(x) for x in self.data]
        arrayfunc.neg(self.data, self.dataout)
        for dataoutitem, expecteditem in zip(list(self.dataout), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_outputarray_ov_a6(self):
        """Test neg to output array with matherrors=True - Array code q.
        """
        expected = [-(x) for x in self.data]
        arrayfunc.neg(self.data, self.dataout, matherrors=True)
        for dataoutitem, expecteditem in zip(list(self.dataout), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_outputarray_maxlen_a7(self):
        """Test neg to output array with array maxlen - Array code q.
        """
        pydataout = [-(x) for x in self.data]
        # Output elements past self.limited must keep their initial zeros.
        expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]
        arrayfunc.neg(self.data, self.dataout, maxlen=self.limited)
        for dataoutitem, expecteditem in zip(list(self.dataout), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)

    ########################################################
    def test_neg_outputarray_ov_maxlen_a8(self):
        """Test neg to output array with matherrors=True and array maxlen - Array code q.
        """
        pydataout = [-(x) for x in self.data]
        # Output elements past self.limited must keep their initial zeros.
        expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]
        arrayfunc.neg(self.data, self.dataout, matherrors=True, maxlen=self.limited)
        for dataoutitem, expecteditem in zip(list(self.dataout), expected):
            # The behaviour of assertEqual is modified by addTypeEqualityFunc.
            self.assertEqual(dataoutitem, expecteditem)
##############################################################################
##############################################################################
class neg_general_even_arraysize_nosimd_simd_f(unittest.TestCase):
"""Test for basic general tests.
test_template_uniop
"""
##############################################################################
def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
"""This function is patched into assertEqual to allow testing for
the floating point special values NaN, Inf, and -Inf.
"""
# NaN cannot be compared using normal means.
if math.isnan(dataoutitem) and math.isnan(expecteditem):
pass
# Anything else can be compared normally.
else:
if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))
########################################################
def setUp(self):
"""Initialise.
"""
self.addTypeEqualityFunc(float, self.FloatassertEqual)
if 'even' == 'even':
testdatasize = 320
if 'even' == 'odd':
testdatasize = 319
decentre = testdatasize // 2
if 'f' not in ('f', 'd'):
# We don't test the minimum integer value as we are not testing
# the behaviour of integer overflows in this series of tests.
minval = arrayfunc.arraylimits.f_min + 1
maxval = arrayfunc.arraylimits.f_max
else:
# For floating point tests we limit the range to large integer
# size ranges to ensure better coverage of more typical use cases.
minval = arrayfunc.arraylimits.q_min
maxval = arrayfunc.arraylimits.q_max
# Calculate our interval, while making sure that it is not zero.
dstep = max((maxval - minval) // testdatasize, 1)
# Generate test data over the full data type range.
xdata = list(itertools.islice(itertools.cycle(range(minval, maxval, dstep)), testdatasize))
# Make sure the last value is the largest number in the range and
# that we have 0, 1, and -1 in the data samples as well.
xdata[-1] = maxval
xdata[decentre - 1] = -1
xdata[decentre] = 0
xdata[decentre + 1] = 1
self.data = array.array('f', xdata)
self.dataout = array.array('f', [0]*len(self.data))
self.limited = len(self.data) // 2
########################################################
def test_neg_inplace_a1(self):
"""Test neg in place - Array code f.
"""
expected = [-(x) for x in self.data]
arrayfunc.neg(self.data , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.data), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_inplace_ov_a2(self):
"""Test neg in place with matherrors=True - Array code f.
"""
expected = [-(x) for x in self.data]
arrayfunc.neg(self.data, matherrors=True , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.data), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_inplace_maxlen_a3(self):
"""Test neg in place with array maxlen - Array code f.
"""
pydataout = [-(x) for x in self.data]
expected = pydataout[0:self.limited] + list(self.data)[self.limited:]
arrayfunc.neg(self.data, maxlen=self.limited , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.data), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_inplace_ov_maxlen_a4(self):
"""Test neg in place with matherrors=True and array maxlen - Array code f.
"""
pydataout = [-(x) for x in self.data]
expected = pydataout[0:self.limited] + list(self.data)[self.limited:]
arrayfunc.neg(self.data, matherrors=True, maxlen=self.limited , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.data), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_outputarray_a5(self):
"""Test neg to output array - Array code f.
"""
expected = [-(x) for x in self.data]
arrayfunc.neg(self.data, self.dataout , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_outputarray_ov_a6(self):
"""Test neg to output array with matherrors=True - Array code f.
"""
expected = [-(x) for x in self.data]
arrayfunc.neg(self.data, self.dataout, matherrors=True , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_outputarray_maxlen_a7(self):
"""Test neg to output array with array maxlen - Array code f.
"""
pydataout = [-(x) for x in self.data]
expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]
arrayfunc.neg(self.data, self.dataout, maxlen=self.limited , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_outputarray_ov_maxlen_a8(self):
"""Test neg to output array with matherrors=True and array maxlen - Array code f.
"""
pydataout = [-(x) for x in self.data]
expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]
arrayfunc.neg(self.data, self.dataout, matherrors=True, maxlen=self.limited , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
##############################################################################
##############################################################################
class neg_general_even_arraysize_withsimd_simd_f(unittest.TestCase):
"""Test for basic general tests.
test_template_uniop
"""
##############################################################################
def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
"""This function is patched into assertEqual to allow testing for
the floating point special values NaN, Inf, and -Inf.
"""
# NaN cannot be compared using normal means.
if math.isnan(dataoutitem) and math.isnan(expecteditem):
pass
# Anything else can be compared normally.
else:
if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))
########################################################
def setUp(self):
"""Initialise.
"""
self.addTypeEqualityFunc(float, self.FloatassertEqual)
if 'even' == 'even':
testdatasize = 320
if 'even' == 'odd':
testdatasize = 319
decentre = testdatasize // 2
if 'f' not in ('f', 'd'):
# We don't test the minimum integer value as we are not testing
# the behaviour of integer overflows in this series of tests.
minval = arrayfunc.arraylimits.f_min + 1
maxval = arrayfunc.arraylimits.f_max
else:
# For floating point tests we limit the range to large integer
# size ranges to ensure better coverage of more typical use cases.
minval = arrayfunc.arraylimits.q_min
maxval = arrayfunc.arraylimits.q_max
# Calculate our interval, while making sure that it is not zero.
dstep = max((maxval - minval) // testdatasize, 1)
# Generate test data over the full data type range.
xdata = list(itertools.islice(itertools.cycle(range(minval, maxval, dstep)), testdatasize))
# Make sure the last value is the largest number in the range and
# that we have 0, 1, and -1 in the data samples as well.
xdata[-1] = maxval
xdata[decentre - 1] = -1
xdata[decentre] = 0
xdata[decentre + 1] = 1
self.data = array.array('f', xdata)
self.dataout = array.array('f', [0]*len(self.data))
self.limited = len(self.data) // 2
########################################################
def test_neg_inplace_a1(self):
"""Test neg in place - Array code f.
"""
expected = [-(x) for x in self.data]
arrayfunc.neg(self.data )
for dataoutitem, expecteditem in zip(list(self.data), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_inplace_ov_a2(self):
"""Test neg in place with matherrors=True - Array code f.
"""
expected = [-(x) for x in self.data]
arrayfunc.neg(self.data, matherrors=True )
for dataoutitem, expecteditem in zip(list(self.data), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_inplace_maxlen_a3(self):
"""Test neg in place with array maxlen - Array code f.
"""
pydataout = [-(x) for x in self.data]
expected = pydataout[0:self.limited] + list(self.data)[self.limited:]
arrayfunc.neg(self.data, maxlen=self.limited )
for dataoutitem, expecteditem in zip(list(self.data), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_inplace_ov_maxlen_a4(self):
"""Test neg in place with matherrors=True and array maxlen - Array code f.
"""
pydataout = [-(x) for x in self.data]
expected = pydataout[0:self.limited] + list(self.data)[self.limited:]
arrayfunc.neg(self.data, matherrors=True, maxlen=self.limited )
for dataoutitem, expecteditem in zip(list(self.data), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_outputarray_a5(self):
"""Test neg to output array - Array code f.
"""
expected = [-(x) for x in self.data]
arrayfunc.neg(self.data, self.dataout )
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_outputarray_ov_a6(self):
"""Test neg to output array with matherrors=True - Array code f.
"""
expected = [-(x) for x in self.data]
arrayfunc.neg(self.data, self.dataout, matherrors=True )
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_outputarray_maxlen_a7(self):
"""Test neg to output array with array maxlen - Array code f.
"""
pydataout = [-(x) for x in self.data]
expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]
arrayfunc.neg(self.data, self.dataout, maxlen=self.limited )
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_outputarray_ov_maxlen_a8(self):
"""Test neg to output array with matherrors=True and array maxlen - Array code f.
"""
pydataout = [-(x) for x in self.data]
expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]
arrayfunc.neg(self.data, self.dataout, matherrors=True, maxlen=self.limited )
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
##############################################################################
##############################################################################
class neg_general_odd_arraysize_nosimd_simd_f(unittest.TestCase):
"""Test for basic general tests.
test_template_uniop
"""
##############################################################################
def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
"""This function is patched into assertEqual to allow testing for
the floating point special values NaN, Inf, and -Inf.
"""
# NaN cannot be compared using normal means.
if math.isnan(dataoutitem) and math.isnan(expecteditem):
pass
# Anything else can be compared normally.
else:
if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))
########################################################
def setUp(self):
"""Initialise.
"""
self.addTypeEqualityFunc(float, self.FloatassertEqual)
if 'odd' == 'even':
testdatasize = 320
if 'odd' == 'odd':
testdatasize = 319
decentre = testdatasize // 2
if 'f' not in ('f', 'd'):
# We don't test the minimum integer value as we are not testing
# the behaviour of integer overflows in this series of tests.
minval = arrayfunc.arraylimits.f_min + 1
maxval = arrayfunc.arraylimits.f_max
else:
# For floating point tests we limit the range to large integer
# size ranges to ensure better coverage of more typical use cases.
minval = arrayfunc.arraylimits.q_min
maxval = arrayfunc.arraylimits.q_max
# Calculate our interval, while making sure that it is not zero.
dstep = max((maxval - minval) // testdatasize, 1)
# Generate test data over the full data type range.
xdata = list(itertools.islice(itertools.cycle(range(minval, maxval, dstep)), testdatasize))
# Make sure the last value is the largest number in the range and
# that we have 0, 1, and -1 in the data samples as well.
xdata[-1] = maxval
xdata[decentre - 1] = -1
xdata[decentre] = 0
xdata[decentre + 1] = 1
self.data = array.array('f', xdata)
self.dataout = array.array('f', [0]*len(self.data))
self.limited = len(self.data) // 2
########################################################
def test_neg_inplace_a1(self):
"""Test neg in place - Array code f.
"""
expected = [-(x) for x in self.data]
arrayfunc.neg(self.data , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.data), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_inplace_ov_a2(self):
"""Test neg in place with matherrors=True - Array code f.
"""
expected = [-(x) for x in self.data]
arrayfunc.neg(self.data, matherrors=True , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.data), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_inplace_maxlen_a3(self):
"""Test neg in place with array maxlen - Array code f.
"""
pydataout = [-(x) for x in self.data]
expected = pydataout[0:self.limited] + list(self.data)[self.limited:]
arrayfunc.neg(self.data, maxlen=self.limited , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.data), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_inplace_ov_maxlen_a4(self):
"""Test neg in place with matherrors=True and array maxlen - Array code f.
"""
pydataout = [-(x) for x in self.data]
expected = pydataout[0:self.limited] + list(self.data)[self.limited:]
arrayfunc.neg(self.data, matherrors=True, maxlen=self.limited , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.data), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_outputarray_a5(self):
"""Test neg to output array - Array code f.
"""
expected = [-(x) for x in self.data]
arrayfunc.neg(self.data, self.dataout , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_outputarray_ov_a6(self):
"""Test neg to output array with matherrors=True - Array code f.
"""
expected = [-(x) for x in self.data]
arrayfunc.neg(self.data, self.dataout, matherrors=True , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_outputarray_maxlen_a7(self):
"""Test neg to output array with array maxlen - Array code f.
"""
pydataout = [-(x) for x in self.data]
expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]
arrayfunc.neg(self.data, self.dataout, maxlen=self.limited , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_outputarray_ov_maxlen_a8(self):
"""Test neg to output array with matherrors=True and array maxlen - Array code f.
"""
pydataout = [-(x) for x in self.data]
expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]
arrayfunc.neg(self.data, self.dataout, matherrors=True, maxlen=self.limited , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
##############################################################################
##############################################################################
class neg_general_odd_arraysize_withsimd_simd_f(unittest.TestCase):
"""Test for basic general tests.
test_template_uniop
"""
##############################################################################
def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
"""This function is patched into assertEqual to allow testing for
the floating point special values NaN, Inf, and -Inf.
"""
# NaN cannot be compared using normal means.
if math.isnan(dataoutitem) and math.isnan(expecteditem):
pass
# Anything else can be compared normally.
else:
if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))
########################################################
def setUp(self):
"""Initialise.
"""
self.addTypeEqualityFunc(float, self.FloatassertEqual)
if 'odd' == 'even':
testdatasize = 320
if 'odd' == 'odd':
testdatasize = 319
decentre = testdatasize // 2
if 'f' not in ('f', 'd'):
# We don't test the minimum integer value as we are not testing
# the behaviour of integer overflows in this series of tests.
minval = arrayfunc.arraylimits.f_min + 1
maxval = arrayfunc.arraylimits.f_max
else:
# For floating point tests we limit the range to large integer
# size ranges to ensure better coverage of more typical use cases.
minval = arrayfunc.arraylimits.q_min
maxval = arrayfunc.arraylimits.q_max
# Calculate our interval, while making sure that it is not zero.
dstep = max((maxval - minval) // testdatasize, 1)
# Generate test data over the full data type range.
xdata = list(itertools.islice(itertools.cycle(range(minval, maxval, dstep)), testdatasize))
# Make sure the last value is the largest number in the range and
# that we have 0, 1, and -1 in the data samples as well.
xdata[-1] = maxval
xdata[decentre - 1] = -1
xdata[decentre] = 0
xdata[decentre + 1] = 1
self.data = array.array('f', xdata)
self.dataout = array.array('f', [0]*len(self.data))
self.limited = len(self.data) // 2
########################################################
def test_neg_inplace_a1(self):
"""Test neg in place - Array code f.
"""
expected = [-(x) for x in self.data]
arrayfunc.neg(self.data )
for dataoutitem, expecteditem in zip(list(self.data), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_inplace_ov_a2(self):
"""Test neg in place with matherrors=True - Array code f.
"""
expected = [-(x) for x in self.data]
arrayfunc.neg(self.data, matherrors=True )
for dataoutitem, expecteditem in zip(list(self.data), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_inplace_maxlen_a3(self):
"""Test neg in place with array maxlen - Array code f.
"""
pydataout = [-(x) for x in self.data]
expected = pydataout[0:self.limited] + list(self.data)[self.limited:]
arrayfunc.neg(self.data, maxlen=self.limited )
for dataoutitem, expecteditem in zip(list(self.data), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_inplace_ov_maxlen_a4(self):
"""Test neg in place with matherrors=True and array maxlen - Array code f.
"""
pydataout = [-(x) for x in self.data]
expected = pydataout[0:self.limited] + list(self.data)[self.limited:]
arrayfunc.neg(self.data, matherrors=True, maxlen=self.limited )
for dataoutitem, expecteditem in zip(list(self.data), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_outputarray_a5(self):
"""Test neg to output array - Array code f.
"""
expected = [-(x) for x in self.data]
arrayfunc.neg(self.data, self.dataout )
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_outputarray_ov_a6(self):
"""Test neg to output array with matherrors=True - Array code f.
"""
expected = [-(x) for x in self.data]
arrayfunc.neg(self.data, self.dataout, matherrors=True )
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_outputarray_maxlen_a7(self):
"""Test neg to output array with array maxlen - Array code f.
"""
pydataout = [-(x) for x in self.data]
expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]
arrayfunc.neg(self.data, self.dataout, maxlen=self.limited )
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_outputarray_ov_maxlen_a8(self):
"""Test neg to output array with matherrors=True and array maxlen - Array code f.
"""
pydataout = [-(x) for x in self.data]
expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]
arrayfunc.neg(self.data, self.dataout, matherrors=True, maxlen=self.limited )
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
##############################################################################
##############################################################################
class neg_general_even_arraysize_nosimd_simd_d(unittest.TestCase):
"""Test for basic general tests.
test_template_uniop
"""
##############################################################################
def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
"""This function is patched into assertEqual to allow testing for
the floating point special values NaN, Inf, and -Inf.
"""
# NaN cannot be compared using normal means.
if math.isnan(dataoutitem) and math.isnan(expecteditem):
pass
# Anything else can be compared normally.
else:
if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))
########################################################
def setUp(self):
"""Initialise.
"""
self.addTypeEqualityFunc(float, self.FloatassertEqual)
if 'even' == 'even':
testdatasize = 320
if 'even' == 'odd':
testdatasize = 319
decentre = testdatasize // 2
if 'd' not in ('f', 'd'):
# We don't test the minimum integer value as we are not testing
# the behaviour of integer overflows in this series of tests.
minval = arrayfunc.arraylimits.d_min + 1
maxval = arrayfunc.arraylimits.d_max
else:
# For floating point tests we limit the range to large integer
# size ranges to ensure better coverage of more typical use cases.
minval = arrayfunc.arraylimits.q_min
maxval = arrayfunc.arraylimits.q_max
# Calculate our interval, while making sure that it is not zero.
dstep = max((maxval - minval) // testdatasize, 1)
# Generate test data over the full data type range.
xdata = list(itertools.islice(itertools.cycle(range(minval, maxval, dstep)), testdatasize))
# Make sure the last value is the largest number in the range and
# that we have 0, 1, and -1 in the data samples as well.
xdata[-1] = maxval
xdata[decentre - 1] = -1
xdata[decentre] = 0
xdata[decentre + 1] = 1
self.data = array.array('d', xdata)
self.dataout = array.array('d', [0]*len(self.data))
self.limited = len(self.data) // 2
########################################################
def test_neg_inplace_a1(self):
"""Test neg in place - Array code d.
"""
expected = [-(x) for x in self.data]
arrayfunc.neg(self.data , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.data), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_inplace_ov_a2(self):
"""Test neg in place with matherrors=True - Array code d.
"""
expected = [-(x) for x in self.data]
arrayfunc.neg(self.data, matherrors=True , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.data), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_inplace_maxlen_a3(self):
"""Test neg in place with array maxlen - Array code d.
"""
pydataout = [-(x) for x in self.data]
expected = pydataout[0:self.limited] + list(self.data)[self.limited:]
arrayfunc.neg(self.data, maxlen=self.limited , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.data), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_inplace_ov_maxlen_a4(self):
"""Test neg in place with matherrors=True and array maxlen - Array code d.
"""
pydataout = [-(x) for x in self.data]
expected = pydataout[0:self.limited] + list(self.data)[self.limited:]
arrayfunc.neg(self.data, matherrors=True, maxlen=self.limited , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.data), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_outputarray_a5(self):
"""Test neg to output array - Array code d.
"""
expected = [-(x) for x in self.data]
arrayfunc.neg(self.data, self.dataout , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_outputarray_ov_a6(self):
"""Test neg to output array with matherrors=True - Array code d.
"""
expected = [-(x) for x in self.data]
arrayfunc.neg(self.data, self.dataout, matherrors=True , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_outputarray_maxlen_a7(self):
"""Test neg to output array with array maxlen - Array code d.
"""
pydataout = [-(x) for x in self.data]
expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]
arrayfunc.neg(self.data, self.dataout, maxlen=self.limited , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_outputarray_ov_maxlen_a8(self):
"""Test neg to output array with matherrors=True and array maxlen - Array code d.
"""
pydataout = [-(x) for x in self.data]
expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]
arrayfunc.neg(self.data, self.dataout, matherrors=True, maxlen=self.limited , nosimd=True)
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
##############################################################################
##############################################################################
class neg_general_even_arraysize_withsimd_simd_d(unittest.TestCase):
"""Test for basic general tests.
test_template_uniop
"""
##############################################################################
def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
"""This function is patched into assertEqual to allow testing for
the floating point special values NaN, Inf, and -Inf.
"""
# NaN cannot be compared using normal means.
if math.isnan(dataoutitem) and math.isnan(expecteditem):
pass
# Anything else can be compared normally.
else:
if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))
########################################################
def setUp(self):
"""Initialise.
"""
self.addTypeEqualityFunc(float, self.FloatassertEqual)
if 'even' == 'even':
testdatasize = 320
if 'even' == 'odd':
testdatasize = 319
decentre = testdatasize // 2
if 'd' not in ('f', 'd'):
# We don't test the minimum integer value as we are not testing
# the behaviour of integer overflows in this series of tests.
minval = arrayfunc.arraylimits.d_min + 1
maxval = arrayfunc.arraylimits.d_max
else:
# For floating point tests we limit the range to large integer
# size ranges to ensure better coverage of more typical use cases.
minval = arrayfunc.arraylimits.q_min
maxval = arrayfunc.arraylimits.q_max
# Calculate our interval, while making sure that it is not zero.
dstep = max((maxval - minval) // testdatasize, 1)
# Generate test data over the full data type range.
xdata = list(itertools.islice(itertools.cycle(range(minval, maxval, dstep)), testdatasize))
# Make sure the last value is the largest number in the range and
# that we have 0, 1, and -1 in the data samples as well.
xdata[-1] = maxval
xdata[decentre - 1] = -1
xdata[decentre] = 0
xdata[decentre + 1] = 1
self.data = array.array('d', xdata)
self.dataout = array.array('d', [0]*len(self.data))
self.limited = len(self.data) // 2
########################################################
def test_neg_inplace_a1(self):
"""Test neg in place - Array code d.
"""
expected = [-(x) for x in self.data]
arrayfunc.neg(self.data )
for dataoutitem, expecteditem in zip(list(self.data), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_inplace_ov_a2(self):
"""Test neg in place with matherrors=True - Array code d.
"""
expected = [-(x) for x in self.data]
arrayfunc.neg(self.data, matherrors=True )
for dataoutitem, expecteditem in zip(list(self.data), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_inplace_maxlen_a3(self):
"""Test neg in place with array maxlen - Array code d.
"""
pydataout = [-(x) for x in self.data]
expected = pydataout[0:self.limited] + list(self.data)[self.limited:]
arrayfunc.neg(self.data, maxlen=self.limited )
for dataoutitem, expecteditem in zip(list(self.data), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_inplace_ov_maxlen_a4(self):
"""Test neg in place with matherrors=True and array maxlen - Array code d.
"""
pydataout = [-(x) for x in self.data]
expected = pydataout[0:self.limited] + list(self.data)[self.limited:]
arrayfunc.neg(self.data, matherrors=True, maxlen=self.limited )
for dataoutitem, expecteditem in zip(list(self.data), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_outputarray_a5(self):
"""Test neg to output array - Array code d.
"""
expected = [-(x) for x in self.data]
arrayfunc.neg(self.data, self.dataout )
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_outputarray_ov_a6(self):
"""Test neg to output array with matherrors=True - Array code d.
"""
expected = [-(x) for x in self.data]
arrayfunc.neg(self.data, self.dataout, matherrors=True )
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_outputarray_maxlen_a7(self):
"""Test neg to output array with array maxlen - Array code d.
"""
pydataout = [-(x) for x in self.data]
expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]
arrayfunc.neg(self.data, self.dataout, maxlen=self.limited )
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_outputarray_ov_maxlen_a8(self):
"""Test neg to output array with matherrors=True and array maxlen - Array code d.
"""
pydataout = [-(x) for x in self.data]
expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]
arrayfunc.neg(self.data, self.dataout, matherrors=True, maxlen=self.limited )
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
##############################################################################
##############################################################################
class neg_general_odd_arraysize_nosimd_simd_d(unittest.TestCase):
	"""Test for basic general tests of neg with SIMD disabled (nosimd=True),
	using an odd array size and array code 'd'.
	test_template_uniop
	"""

	##############################################################################
	def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
		"""This function is patched into assertEqual to allow testing for
		the floating point special values NaN, Inf, and -Inf.
		"""
		# NaN cannot be compared using normal means, since NaN != NaN.
		if math.isnan(dataoutitem) and math.isnan(expecteditem):
			pass
		# Anything else can be compared normally, using a relative tolerance.
		else:
			if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
				raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))


	########################################################
	def setUp(self):
		"""Initialise.

		This template variant uses an odd array size (319) so the SIMD
		tail-handling code paths are exercised, with array code 'd' (double).
		The generated original compared string template constants here
		('odd' == 'even', etc.); the unreachable branches have been removed.
		"""
		self.addTypeEqualityFunc(float, self.FloatassertEqual)

		testdatasize = 319
		decentre = testdatasize // 2

		# For floating point tests we limit the range to large integer
		# size ranges to ensure better coverage of more typical use cases.
		minval = arrayfunc.arraylimits.q_min
		maxval = arrayfunc.arraylimits.q_max

		# Calculate our interval, while making sure that it is not zero.
		dstep = max((maxval - minval) // testdatasize, 1)

		# Generate test data over the full data type range.
		xdata = list(itertools.islice(itertools.cycle(range(minval, maxval, dstep)), testdatasize))

		# Make sure the last value is the largest number in the range and
		# that we have 0, 1, and -1 in the data samples as well.
		xdata[-1] = maxval
		xdata[decentre - 1] = -1
		xdata[decentre] = 0
		xdata[decentre + 1] = 1

		self.data = array.array('d', xdata)
		self.dataout = array.array('d', [0] * len(self.data))

		# Used by the maxlen tests: only the first half is processed.
		self.limited = len(self.data) // 2


	########################################################
	def test_neg_inplace_a1(self):
		"""Test neg in place - Array code d.
		"""
		expected = [-(x) for x in self.data]

		arrayfunc.neg(self.data, nosimd=True)

		for dataoutitem, expecteditem in zip(list(self.data), expected):
			# The behaviour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_inplace_ov_a2(self):
		"""Test neg in place with matherrors=True - Array code d.
		"""
		expected = [-(x) for x in self.data]

		arrayfunc.neg(self.data, matherrors=True, nosimd=True)

		for dataoutitem, expecteditem in zip(list(self.data), expected):
			# The behaviour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_inplace_maxlen_a3(self):
		"""Test neg in place with array maxlen - Array code d.
		"""
		# Only the first 'limited' elements should be negated; the rest
		# of the array must be left untouched.
		pydataout = [-(x) for x in self.data]
		expected = pydataout[0:self.limited] + list(self.data)[self.limited:]

		arrayfunc.neg(self.data, maxlen=self.limited, nosimd=True)

		for dataoutitem, expecteditem in zip(list(self.data), expected):
			# The behaviour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_inplace_ov_maxlen_a4(self):
		"""Test neg in place with matherrors=True and array maxlen - Array code d.
		"""
		pydataout = [-(x) for x in self.data]
		expected = pydataout[0:self.limited] + list(self.data)[self.limited:]

		arrayfunc.neg(self.data, matherrors=True, maxlen=self.limited, nosimd=True)

		for dataoutitem, expecteditem in zip(list(self.data), expected):
			# The behaviour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_outputarray_a5(self):
		"""Test neg to output array - Array code d.
		"""
		expected = [-(x) for x in self.data]

		arrayfunc.neg(self.data, self.dataout, nosimd=True)

		for dataoutitem, expecteditem in zip(list(self.dataout), expected):
			# The behaviour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_outputarray_ov_a6(self):
		"""Test neg to output array with matherrors=True - Array code d.
		"""
		expected = [-(x) for x in self.data]

		arrayfunc.neg(self.data, self.dataout, matherrors=True, nosimd=True)

		for dataoutitem, expecteditem in zip(list(self.dataout), expected):
			# The behaviour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_outputarray_maxlen_a7(self):
		"""Test neg to output array with array maxlen - Array code d.
		"""
		# Only the first 'limited' output elements should be written; the
		# remainder of the output array must keep its original values.
		pydataout = [-(x) for x in self.data]
		expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]

		arrayfunc.neg(self.data, self.dataout, maxlen=self.limited, nosimd=True)

		for dataoutitem, expecteditem in zip(list(self.dataout), expected):
			# The behaviour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_outputarray_ov_maxlen_a8(self):
		"""Test neg to output array with matherrors=True and array maxlen - Array code d.
		"""
		pydataout = [-(x) for x in self.data]
		expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]

		arrayfunc.neg(self.data, self.dataout, matherrors=True, maxlen=self.limited, nosimd=True)

		for dataoutitem, expecteditem in zip(list(self.dataout), expected):
			# The behaviour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


##############################################################################

##############################################################################
class neg_general_odd_arraysize_withsimd_simd_d(unittest.TestCase):
	"""Test for basic general tests of neg with SIMD enabled (the default),
	using an odd array size and array code 'd'.
	test_template_uniop
	"""

	##############################################################################
	def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
		"""This function is patched into assertEqual to allow testing for
		the floating point special values NaN, Inf, and -Inf.
		"""
		# NaN cannot be compared using normal means, since NaN != NaN.
		if math.isnan(dataoutitem) and math.isnan(expecteditem):
			pass
		# Anything else can be compared normally, using a relative tolerance.
		else:
			if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
				raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))


	########################################################
	def setUp(self):
		"""Initialise.

		This template variant uses an odd array size (319) so the SIMD
		tail-handling code paths are exercised, with array code 'd' (double).
		The generated original compared string template constants here
		('odd' == 'even', etc.); the unreachable branches have been removed.
		"""
		self.addTypeEqualityFunc(float, self.FloatassertEqual)

		testdatasize = 319
		decentre = testdatasize // 2

		# For floating point tests we limit the range to large integer
		# size ranges to ensure better coverage of more typical use cases.
		minval = arrayfunc.arraylimits.q_min
		maxval = arrayfunc.arraylimits.q_max

		# Calculate our interval, while making sure that it is not zero.
		dstep = max((maxval - minval) // testdatasize, 1)

		# Generate test data over the full data type range.
		xdata = list(itertools.islice(itertools.cycle(range(minval, maxval, dstep)), testdatasize))

		# Make sure the last value is the largest number in the range and
		# that we have 0, 1, and -1 in the data samples as well.
		xdata[-1] = maxval
		xdata[decentre - 1] = -1
		xdata[decentre] = 0
		xdata[decentre + 1] = 1

		self.data = array.array('d', xdata)
		self.dataout = array.array('d', [0] * len(self.data))

		# Used by the maxlen tests: only the first half is processed.
		self.limited = len(self.data) // 2


	########################################################
	def test_neg_inplace_a1(self):
		"""Test neg in place - Array code d.
		"""
		expected = [-(x) for x in self.data]

		arrayfunc.neg(self.data)

		for dataoutitem, expecteditem in zip(list(self.data), expected):
			# The behaviour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_inplace_ov_a2(self):
		"""Test neg in place with matherrors=True - Array code d.
		"""
		expected = [-(x) for x in self.data]

		arrayfunc.neg(self.data, matherrors=True)

		for dataoutitem, expecteditem in zip(list(self.data), expected):
			# The behaviour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_inplace_maxlen_a3(self):
		"""Test neg in place with array maxlen - Array code d.
		"""
		# Only the first 'limited' elements should be negated; the rest
		# of the array must be left untouched.
		pydataout = [-(x) for x in self.data]
		expected = pydataout[0:self.limited] + list(self.data)[self.limited:]

		arrayfunc.neg(self.data, maxlen=self.limited)

		for dataoutitem, expecteditem in zip(list(self.data), expected):
			# The behaviour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_inplace_ov_maxlen_a4(self):
		"""Test neg in place with matherrors=True and array maxlen - Array code d.
		"""
		pydataout = [-(x) for x in self.data]
		expected = pydataout[0:self.limited] + list(self.data)[self.limited:]

		arrayfunc.neg(self.data, matherrors=True, maxlen=self.limited)

		for dataoutitem, expecteditem in zip(list(self.data), expected):
			# The behaviour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_outputarray_a5(self):
		"""Test neg to output array - Array code d.
		"""
		expected = [-(x) for x in self.data]

		arrayfunc.neg(self.data, self.dataout)

		for dataoutitem, expecteditem in zip(list(self.dataout), expected):
			# The behaviour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_outputarray_ov_a6(self):
		"""Test neg to output array with matherrors=True - Array code d.
		"""
		expected = [-(x) for x in self.data]

		arrayfunc.neg(self.data, self.dataout, matherrors=True)

		for dataoutitem, expecteditem in zip(list(self.dataout), expected):
			# The behaviour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_outputarray_maxlen_a7(self):
		"""Test neg to output array with array maxlen - Array code d.
		"""
		# Only the first 'limited' output elements should be written; the
		# remainder of the output array must keep its original values.
		pydataout = [-(x) for x in self.data]
		expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]

		arrayfunc.neg(self.data, self.dataout, maxlen=self.limited)

		for dataoutitem, expecteditem in zip(list(self.dataout), expected):
			# The behaviour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_neg_outputarray_ov_maxlen_a8(self):
		"""Test neg to output array with matherrors=True and array maxlen - Array code d.
		"""
		pydataout = [-(x) for x in self.data]
		expected = pydataout[0:self.limited] + list(self.dataout)[self.limited:]

		arrayfunc.neg(self.data, self.dataout, matherrors=True, maxlen=self.limited)

		for dataoutitem, expecteditem in zip(list(self.dataout), expected):
			# The behaviour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


##############################################################################

##############################################################################
class neg_param_errors_b(unittest.TestCase):
	"""Test neg for invalid array and numeric parameters.
	param_invalid_template
	"""

	########################################################
	def setUp(self):
		"""Build valid test arrays plus equivalents with a wrong type code."""
		self.testarray1 = array.array('b', range(-5, 6))
		self.testarray2 = copy.copy(self.testarray1)

		self.dataout = array.array('b', [0] * len(self.testarray1))

		# The same data, but stored with an incompatible array type code.
		self.badarray1 = array.array('d', (float(x) for x in self.testarray1))
		self.baddataout = array.array('d', (float(x) for x in self.dataout))


	########################################################
	def test_neg_array_array_a1(self):
		"""Test neg as *array-array* for invalid type of input array - Array code b.
		"""
		# Sanity check: the valid-parameter form must succeed.
		arrayfunc.neg(self.testarray1, self.dataout)

		# A mismatched input array type must be rejected.
		with self.assertRaises(TypeError):
			arrayfunc.neg(self.badarray1, self.dataout)


	########################################################
	def test_neg_array_array_a2(self):
		"""Test neg as *array-array* for invalid type of output array - Array code b.
		"""
		# Sanity check: the valid-parameter form must succeed.
		arrayfunc.neg(self.testarray1, self.dataout)

		# A mismatched output array type must be rejected.
		with self.assertRaises(TypeError):
			arrayfunc.neg(self.testarray2, self.baddataout)


	########################################################
	def test_neg_no_params_b1(self):
		"""Test neg with no parameters - Array code b.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.neg()


##############################################################################

##############################################################################
class neg_opt_param_errors_b(unittest.TestCase):
	"""Test neg for invalid errors flag and maxlen parameters.
	param_invalid_opt_template
	"""

	########################################################
	def setUp(self):
		"""Build two identical input arrays, an output array, and a maxlen."""
		self.inparray1a = array.array('b', range(-5, 6))
		self.inparray1b = copy.copy(self.inparray1a)

		self.dataout = array.array('b', [0] * len(self.inparray1a))

		self.testmaxlen = len(self.inparray1a) // 2


	########################################################
	def test_neg_array_none_a1(self):
		"""Test neg as *array-none* for matherrors='a' - Array code b.
		"""
		# The valid form is expected to pass.
		arrayfunc.neg(self.inparray1a, matherrors=True)

		# A non-boolean matherrors value must be rejected.
		with self.assertRaises(TypeError):
			arrayfunc.neg(self.inparray1b, matherrors='a')


	########################################################
	def test_neg_array_none_a2(self):
		"""Test neg as *array-none* for maxlen='a' - Array code b.
		"""
		# The valid form is expected to pass.
		arrayfunc.neg(self.inparray1a, maxlen=self.testmaxlen)

		# A non-integer maxlen value must be rejected.
		with self.assertRaises(TypeError):
			arrayfunc.neg(self.inparray1b, maxlen='a')


	########################################################
	def test_neg_array_none_a3(self):
		"""Test neg as *array-none* for nosimd='a' - Array code b.
		"""
		# The valid form is expected to pass.
		arrayfunc.neg(self.inparray1a, nosimd=False)

		# A non-boolean nosimd value must be rejected.
		with self.assertRaises(TypeError):
			arrayfunc.neg(self.inparray1b, nosimd='a')


	########################################################
	def test_neg_array_array_b1(self):
		"""Test neg as *array-array* for matherrors='a' - Array code b.
		"""
		# The valid form is expected to pass.
		arrayfunc.neg(self.inparray1a, self.dataout, matherrors=True)

		# A non-boolean matherrors value must be rejected.
		with self.assertRaises(TypeError):
			arrayfunc.neg(self.inparray1b, self.dataout, matherrors='a')


	########################################################
	def test_neg_array_array_b2(self):
		"""Test neg as *array-array* for maxlen='a' - Array code b.
		"""
		# The valid form is expected to pass.
		arrayfunc.neg(self.inparray1a, self.dataout, maxlen=self.testmaxlen)

		# A non-integer maxlen value must be rejected.
		with self.assertRaises(TypeError):
			arrayfunc.neg(self.inparray1b, self.dataout, maxlen='a')


	########################################################
	def test_neg_array_array_b3(self):
		"""Test neg as *array-array* for nosimd='a' - Array code b.
		"""
		# The valid form is expected to pass.
		arrayfunc.neg(self.inparray1a, self.dataout, nosimd=False)

		# A non-boolean nosimd value must be rejected.
		with self.assertRaises(TypeError):
			arrayfunc.neg(self.inparray1b, self.dataout, nosimd='a')


##############################################################################

##############################################################################
class neg_param_errors_h(unittest.TestCase):
	"""Test neg for invalid array and numeric parameters.
	param_invalid_template
	"""

	########################################################
	def setUp(self):
		"""Build valid test arrays plus equivalents with a wrong type code."""
		self.testarray1 = array.array('h', range(-5, 6))
		self.testarray2 = copy.copy(self.testarray1)

		self.dataout = array.array('h', [0] * len(self.testarray1))

		# The same data, but stored with an incompatible array type code.
		self.badarray1 = array.array('d', (float(x) for x in self.testarray1))
		self.baddataout = array.array('d', (float(x) for x in self.dataout))


	########################################################
	def test_neg_array_array_a1(self):
		"""Test neg as *array-array* for invalid type of input array - Array code h.
		"""
		# Sanity check: the valid-parameter form must succeed.
		arrayfunc.neg(self.testarray1, self.dataout)

		# A mismatched input array type must be rejected.
		with self.assertRaises(TypeError):
			arrayfunc.neg(self.badarray1, self.dataout)


	########################################################
	def test_neg_array_array_a2(self):
		"""Test neg as *array-array* for invalid type of output array - Array code h.
		"""
		# Sanity check: the valid-parameter form must succeed.
		arrayfunc.neg(self.testarray1, self.dataout)

		# A mismatched output array type must be rejected.
		with self.assertRaises(TypeError):
			arrayfunc.neg(self.testarray2, self.baddataout)


	########################################################
	def test_neg_no_params_b1(self):
		"""Test neg with no parameters - Array code h.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.neg()


##############################################################################

##############################################################################
class neg_opt_param_errors_h(unittest.TestCase):
	"""Test neg for invalid errors flag and maxlen parameters.
	param_invalid_opt_template
	"""

	########################################################
	def setUp(self):
		"""Build two identical input arrays, an output array, and a maxlen."""
		self.inparray1a = array.array('h', range(-5, 6))
		self.inparray1b = copy.copy(self.inparray1a)

		self.dataout = array.array('h', [0] * len(self.inparray1a))

		self.testmaxlen = len(self.inparray1a) // 2


	########################################################
	def test_neg_array_none_a1(self):
		"""Test neg as *array-none* for matherrors='a' - Array code h.
		"""
		# The valid form is expected to pass.
		arrayfunc.neg(self.inparray1a, matherrors=True)

		# A non-boolean matherrors value must be rejected.
		with self.assertRaises(TypeError):
			arrayfunc.neg(self.inparray1b, matherrors='a')


	########################################################
	def test_neg_array_none_a2(self):
		"""Test neg as *array-none* for maxlen='a' - Array code h.
		"""
		# The valid form is expected to pass.
		arrayfunc.neg(self.inparray1a, maxlen=self.testmaxlen)

		# A non-integer maxlen value must be rejected.
		with self.assertRaises(TypeError):
			arrayfunc.neg(self.inparray1b, maxlen='a')


	########################################################
	def test_neg_array_none_a3(self):
		"""Test neg as *array-none* for nosimd='a' - Array code h.
		"""
		# The valid form is expected to pass.
		arrayfunc.neg(self.inparray1a, nosimd=False)

		# A non-boolean nosimd value must be rejected.
		with self.assertRaises(TypeError):
			arrayfunc.neg(self.inparray1b, nosimd='a')


	########################################################
	def test_neg_array_array_b1(self):
		"""Test neg as *array-array* for matherrors='a' - Array code h.
		"""
		# The valid form is expected to pass.
		arrayfunc.neg(self.inparray1a, self.dataout, matherrors=True)

		# A non-boolean matherrors value must be rejected.
		with self.assertRaises(TypeError):
			arrayfunc.neg(self.inparray1b, self.dataout, matherrors='a')


	########################################################
	def test_neg_array_array_b2(self):
		"""Test neg as *array-array* for maxlen='a' - Array code h.
		"""
		# The valid form is expected to pass.
		arrayfunc.neg(self.inparray1a, self.dataout, maxlen=self.testmaxlen)

		# A non-integer maxlen value must be rejected.
		with self.assertRaises(TypeError):
			arrayfunc.neg(self.inparray1b, self.dataout, maxlen='a')


	########################################################
	def test_neg_array_array_b3(self):
		"""Test neg as *array-array* for nosimd='a' - Array code h.
		"""
		# The valid form is expected to pass.
		arrayfunc.neg(self.inparray1a, self.dataout, nosimd=False)

		# A non-boolean nosimd value must be rejected.
		with self.assertRaises(TypeError):
			arrayfunc.neg(self.inparray1b, self.dataout, nosimd='a')


##############################################################################

##############################################################################
class neg_param_errors_i(unittest.TestCase):
	"""Test neg for invalid array and numeric parameters.
	param_invalid_template
	"""

	########################################################
	def setUp(self):
		"""Build valid test arrays plus equivalents with a wrong type code."""
		self.testarray1 = array.array('i', range(-5, 6))
		self.testarray2 = copy.copy(self.testarray1)

		self.dataout = array.array('i', [0] * len(self.testarray1))

		# The same data, but stored with an incompatible array type code.
		self.badarray1 = array.array('d', (float(x) for x in self.testarray1))
		self.baddataout = array.array('d', (float(x) for x in self.dataout))


	########################################################
	def test_neg_array_array_a1(self):
		"""Test neg as *array-array* for invalid type of input array - Array code i.
		"""
		# Sanity check: the valid-parameter form must succeed.
		arrayfunc.neg(self.testarray1, self.dataout)

		# A mismatched input array type must be rejected.
		with self.assertRaises(TypeError):
			arrayfunc.neg(self.badarray1, self.dataout)


	########################################################
	def test_neg_array_array_a2(self):
		"""Test neg as *array-array* for invalid type of output array - Array code i.
		"""
		# Sanity check: the valid-parameter form must succeed.
		arrayfunc.neg(self.testarray1, self.dataout)

		# A mismatched output array type must be rejected.
		with self.assertRaises(TypeError):
			arrayfunc.neg(self.testarray2, self.baddataout)


	########################################################
	def test_neg_no_params_b1(self):
		"""Test neg with no parameters - Array code i.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.neg()


##############################################################################

##############################################################################
class neg_opt_param_errors_i(unittest.TestCase):
	"""Test neg for invalid errors flag and maxlen parameters.
	param_invalid_opt_template
	"""

	########################################################
	def setUp(self):
		"""Build two identical input arrays, an output array, and a maxlen."""
		self.inparray1a = array.array('i', range(-5, 6))
		self.inparray1b = copy.copy(self.inparray1a)

		self.dataout = array.array('i', [0] * len(self.inparray1a))

		self.testmaxlen = len(self.inparray1a) // 2


	########################################################
	def test_neg_array_none_a1(self):
		"""Test neg as *array-none* for matherrors='a' - Array code i.
		"""
		# The valid form is expected to pass.
		arrayfunc.neg(self.inparray1a, matherrors=True)

		# A non-boolean matherrors value must be rejected.
		with self.assertRaises(TypeError):
			arrayfunc.neg(self.inparray1b, matherrors='a')


	########################################################
	def test_neg_array_none_a2(self):
		"""Test neg as *array-none* for maxlen='a' - Array code i.
		"""
		# The valid form is expected to pass.
		arrayfunc.neg(self.inparray1a, maxlen=self.testmaxlen)

		# A non-integer maxlen value must be rejected.
		with self.assertRaises(TypeError):
			arrayfunc.neg(self.inparray1b, maxlen='a')


	########################################################
	def test_neg_array_none_a3(self):
		"""Test neg as *array-none* for nosimd='a' - Array code i.
		"""
		# The valid form is expected to pass.
		arrayfunc.neg(self.inparray1a, nosimd=False)

		# A non-boolean nosimd value must be rejected.
		with self.assertRaises(TypeError):
			arrayfunc.neg(self.inparray1b, nosimd='a')


	########################################################
	def test_neg_array_array_b1(self):
		"""Test neg as *array-array* for matherrors='a' - Array code i.
		"""
		# The valid form is expected to pass.
		arrayfunc.neg(self.inparray1a, self.dataout, matherrors=True)

		# A non-boolean matherrors value must be rejected.
		with self.assertRaises(TypeError):
			arrayfunc.neg(self.inparray1b, self.dataout, matherrors='a')


	########################################################
	def test_neg_array_array_b2(self):
		"""Test neg as *array-array* for maxlen='a' - Array code i.
		"""
		# The valid form is expected to pass.
		arrayfunc.neg(self.inparray1a, self.dataout, maxlen=self.testmaxlen)

		# A non-integer maxlen value must be rejected.
		with self.assertRaises(TypeError):
			arrayfunc.neg(self.inparray1b, self.dataout, maxlen='a')


	########################################################
	def test_neg_array_array_b3(self):
		"""Test neg as *array-array* for nosimd='a' - Array code i.
		"""
		# The valid form is expected to pass.
		arrayfunc.neg(self.inparray1a, self.dataout, nosimd=False)

		# A non-boolean nosimd value must be rejected.
		with self.assertRaises(TypeError):
			arrayfunc.neg(self.inparray1b, self.dataout, nosimd='a')


##############################################################################

##############################################################################
class neg_param_errors_l(unittest.TestCase):
	"""Test neg for invalid array and numeric parameters.
	param_invalid_template
	"""

	########################################################
	def setUp(self):
		"""Build valid test arrays plus equivalents with a wrong type code."""
		self.testarray1 = array.array('l', range(-5, 6))
		self.testarray2 = copy.copy(self.testarray1)

		self.dataout = array.array('l', [0] * len(self.testarray1))

		# The same data, but stored with an incompatible array type code.
		self.badarray1 = array.array('d', (float(x) for x in self.testarray1))
		self.baddataout = array.array('d', (float(x) for x in self.dataout))


	########################################################
	def test_neg_array_array_a1(self):
		"""Test neg as *array-array* for invalid type of input array - Array code l.
		"""
		# Sanity check: the valid-parameter form must succeed.
		arrayfunc.neg(self.testarray1, self.dataout)

		# A mismatched input array type must be rejected.
		with self.assertRaises(TypeError):
			arrayfunc.neg(self.badarray1, self.dataout)


	########################################################
	def test_neg_array_array_a2(self):
		"""Test neg as *array-array* for invalid type of output array - Array code l.
		"""
		# Sanity check: the valid-parameter form must succeed.
		arrayfunc.neg(self.testarray1, self.dataout)

		# A mismatched output array type must be rejected.
		with self.assertRaises(TypeError):
			arrayfunc.neg(self.testarray2, self.baddataout)


	########################################################
	def test_neg_no_params_b1(self):
		"""Test neg with no parameters - Array code l.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.neg()


##############################################################################

##############################################################################
class neg_opt_param_errors_l(unittest.TestCase):
	"""Test neg for invalid errors flag and maxlen parameters.
	param_invalid_opt_template
	"""

	########################################################
	def setUp(self):
		"""Build two identical input arrays, an output array, and a maxlen."""
		self.inparray1a = array.array('l', range(-5, 6))
		self.inparray1b = copy.copy(self.inparray1a)

		self.dataout = array.array('l', [0] * len(self.inparray1a))

		self.testmaxlen = len(self.inparray1a) // 2


	########################################################
	def test_neg_array_none_a1(self):
		"""Test neg as *array-none* for matherrors='a' - Array code l.
		"""
		# The valid form is expected to pass.
		arrayfunc.neg(self.inparray1a, matherrors=True)

		# A non-boolean matherrors value must be rejected.
		with self.assertRaises(TypeError):
			arrayfunc.neg(self.inparray1b, matherrors='a')


	########################################################
	def test_neg_array_none_a2(self):
		"""Test neg as *array-none* for maxlen='a' - Array code l.
		"""
		# The valid form is expected to pass.
		arrayfunc.neg(self.inparray1a, maxlen=self.testmaxlen)

		# A non-integer maxlen value must be rejected.
		with self.assertRaises(TypeError):
			arrayfunc.neg(self.inparray1b, maxlen='a')


	########################################################
	def test_neg_array_none_a3(self):
		"""Test neg as *array-none* for nosimd='a' - Array code l.
		"""
		# The valid form is expected to pass.
		arrayfunc.neg(self.inparray1a, nosimd=False)

		# A non-boolean nosimd value must be rejected.
		with self.assertRaises(TypeError):
			arrayfunc.neg(self.inparray1b, nosimd='a')


	########################################################
	def test_neg_array_array_b1(self):
		"""Test neg as *array-array* for matherrors='a' - Array code l.
		"""
		# The valid form is expected to pass.
		arrayfunc.neg(self.inparray1a, self.dataout, matherrors=True)

		# A non-boolean matherrors value must be rejected.
		with self.assertRaises(TypeError):
			arrayfunc.neg(self.inparray1b, self.dataout, matherrors='a')


	########################################################
	def test_neg_array_array_b2(self):
		"""Test neg as *array-array* for maxlen='a' - Array code l.
		"""
		# The valid form is expected to pass.
		arrayfunc.neg(self.inparray1a, self.dataout, maxlen=self.testmaxlen)

		# A non-integer maxlen value must be rejected.
		with self.assertRaises(TypeError):
			arrayfunc.neg(self.inparray1b, self.dataout, maxlen='a')


	########################################################
	def test_neg_array_array_b3(self):
		"""Test neg as *array-array* for nosimd='a' - Array code l.
		"""
		# The valid form is expected to pass.
		arrayfunc.neg(self.inparray1a, self.dataout, nosimd=False)

		# A non-boolean nosimd value must be rejected.
		with self.assertRaises(TypeError):
			arrayfunc.neg(self.inparray1b, self.dataout, nosimd='a')


##############################################################################

##############################################################################
class neg_param_errors_q(unittest.TestCase):
	"""Test neg for invalid array and numeric parameters.
	param_invalid_template
	"""

	########################################################
	def setUp(self):
		"""Build valid test arrays plus equivalents with a wrong type code."""
		self.testarray1 = array.array('q', range(-5, 6))
		self.testarray2 = copy.copy(self.testarray1)

		self.dataout = array.array('q', [0] * len(self.testarray1))

		# The same data, but stored with an incompatible array type code.
		self.badarray1 = array.array('d', (float(x) for x in self.testarray1))
		self.baddataout = array.array('d', (float(x) for x in self.dataout))


	########################################################
	def test_neg_array_array_a1(self):
		"""Test neg as *array-array* for invalid type of input array - Array code q.
		"""
		# Sanity check: the valid-parameter form must succeed.
		arrayfunc.neg(self.testarray1, self.dataout)

		# A mismatched input array type must be rejected.
		with self.assertRaises(TypeError):
			arrayfunc.neg(self.badarray1, self.dataout)


	########################################################
	def test_neg_array_array_a2(self):
		"""Test neg as *array-array* for invalid type of output array - Array code q.
		"""
		# Sanity check: the valid-parameter form must succeed.
		arrayfunc.neg(self.testarray1, self.dataout)

		# A mismatched output array type must be rejected.
		with self.assertRaises(TypeError):
			arrayfunc.neg(self.testarray2, self.baddataout)


	########################################################
	def test_neg_no_params_b1(self):
		"""Test neg with no parameters - Array code q.
		"""
		with self.assertRaises(TypeError):
			arrayfunc.neg()


##############################################################################

##############################################################################
class neg_opt_param_errors_q(unittest.TestCase):
	"""Test neg for invalid errors flag and maxlen parameters.
	param_invalid_opt_template
	"""

	########################################################
	def setUp(self):
		"""Build two identical input arrays, an output array, and a maxlen."""
		self.inparray1a = array.array('q', range(-5, 6))
		self.inparray1b = copy.copy(self.inparray1a)

		self.dataout = array.array('q', [0] * len(self.inparray1a))

		self.testmaxlen = len(self.inparray1a) // 2


	########################################################
	def test_neg_array_none_a1(self):
		"""Test neg as *array-none* for matherrors='a' - Array code q.
		"""
		# The valid form is expected to pass.
		arrayfunc.neg(self.inparray1a, matherrors=True)

		# A non-boolean matherrors value must be rejected.
		with self.assertRaises(TypeError):
			arrayfunc.neg(self.inparray1b, matherrors='a')


	########################################################
	def test_neg_array_none_a2(self):
		"""Test neg as *array-none* for maxlen='a' - Array code q.
		"""
		# The valid form is expected to pass.
		arrayfunc.neg(self.inparray1a, maxlen=self.testmaxlen)

		# A non-integer maxlen value must be rejected.
		with self.assertRaises(TypeError):
			arrayfunc.neg(self.inparray1b, maxlen='a')


	########################################################
	def test_neg_array_none_a3(self):
		"""Test neg as *array-none* for nosimd='a' - Array code q.
		"""
		# The valid form is expected to pass.
		arrayfunc.neg(self.inparray1a, nosimd=False)

		# A non-boolean nosimd value must be rejected.
		with self.assertRaises(TypeError):
			arrayfunc.neg(self.inparray1b, nosimd='a')


	########################################################
	def test_neg_array_array_b1(self):
		"""Test neg as *array-array* for matherrors='a' - Array code q.
		"""
		# The valid form is expected to pass.
		arrayfunc.neg(self.inparray1a, self.dataout, matherrors=True)

		# A non-boolean matherrors value must be rejected.
		with self.assertRaises(TypeError):
			arrayfunc.neg(self.inparray1b, self.dataout, matherrors='a')


	########################################################
	def test_neg_array_array_b2(self):
		"""Test neg as *array-array* for maxlen='a' - Array code q.
		"""
		# The valid form is expected to pass.
		arrayfunc.neg(self.inparray1a, self.dataout, maxlen=self.testmaxlen)

		# A non-integer maxlen value must be rejected.
		with self.assertRaises(TypeError):
			arrayfunc.neg(self.inparray1b, self.dataout, maxlen='a')


	########################################################
	def test_neg_array_array_b3(self):
		"""Test neg as *array-array* for nosimd='a' - Array code q.
		"""
		# The valid form is expected to pass.
		arrayfunc.neg(self.inparray1a, self.dataout, nosimd=False)

		# A non-boolean nosimd value must be rejected.
		with self.assertRaises(TypeError):
			arrayfunc.neg(self.inparray1b, self.dataout, nosimd='a')


##############################################################################

##############################################################################
class neg_param_errors_f(unittest.TestCase):
	"""Test neg for invalid array and numeric parameters.
	param_invalid_template
	"""

	########################################################
	def setUp(self):
		"""Initialise.
		"""
		self.testarray1 = array.array('f', [-5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5])
		self.testarray2 = self.testarray1[:]
		self.dataout = array.array('f', [0.0] * len(self.testarray1))
		# Data array equivalents built with an incompatible type code.
		self.badarray1 = array.array('i', map(int, self.testarray1))
		self.baddataout = array.array('i', map(int, self.dataout))

	########################################################
	def test_neg_array_array_a1(self):
		"""Test neg as *array-array* for invalid type of input array - Array code f.
		"""
		# A correctly typed input array is valid and must not raise.
		arrayfunc.neg(self.testarray1, self.dataout)
		# An input array of the wrong type code must be rejected.
		self.assertRaises(TypeError, arrayfunc.neg, self.badarray1, self.dataout)

	########################################################
	def test_neg_array_array_a2(self):
		"""Test neg as *array-array* for invalid type of output array - Array code f.
		"""
		# A correctly typed output array is valid and must not raise.
		arrayfunc.neg(self.testarray1, self.dataout)
		# An output array of the wrong type code must be rejected.
		self.assertRaises(TypeError, arrayfunc.neg, self.testarray2, self.baddataout)

	########################################################
	def test_neg_no_params_b1(self):
		"""Test neg with no parameters - Array code f.
		"""
		# Calling with no arguments at all must be rejected.
		self.assertRaises(TypeError, arrayfunc.neg)
##############################################################################
##############################################################################
class neg_opt_param_errors_f(unittest.TestCase):
	"""Test neg for invalid errors flag and maxlen parameters.
	param_invalid_opt_template
	"""

	########################################################
	def setUp(self):
		"""Initialise.
		"""
		self.inparray1a = array.array('f', [-5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5])
		self.inparray1b = self.inparray1a[:]
		self.dataout = array.array('f', [0.0] * len(self.inparray1a))
		self.testmaxlen = len(self.inparray1a) // 2

	########################################################
	def test_neg_array_none_a1(self):
		"""Test neg as *array-none* for matherrors='a' - Array code f.
		"""
		# A boolean matherrors flag is valid and must not raise.
		arrayfunc.neg(self.inparray1a, matherrors=True)
		# A string where a boolean is expected must be rejected.
		self.assertRaises(TypeError, arrayfunc.neg, self.inparray1b, matherrors='a')

	########################################################
	def test_neg_array_none_a2(self):
		"""Test neg as *array-none* for maxlen='a' - Array code f.
		"""
		# An integer maxlen is valid and must not raise.
		arrayfunc.neg(self.inparray1a, maxlen=self.testmaxlen)
		# A string where an integer is expected must be rejected.
		self.assertRaises(TypeError, arrayfunc.neg, self.inparray1b, maxlen='a')

	########################################################
	def test_neg_array_none_a3(self):
		"""Test neg as *array-none* for nosimd='a' - Array code f.
		"""
		# A boolean nosimd flag is valid and must not raise.
		arrayfunc.neg(self.inparray1a, nosimd=False)
		# A string where a boolean is expected must be rejected.
		self.assertRaises(TypeError, arrayfunc.neg, self.inparray1b, nosimd='a')

	########################################################
	def test_neg_array_array_b1(self):
		"""Test neg as *array-array* for matherrors='a' - Array code f.
		"""
		# A boolean matherrors flag is valid and must not raise.
		arrayfunc.neg(self.inparray1a, self.dataout, matherrors=True)
		# A string where a boolean is expected must be rejected.
		self.assertRaises(TypeError, arrayfunc.neg, self.inparray1b, self.dataout, matherrors='a')

	########################################################
	def test_neg_array_array_b2(self):
		"""Test neg as *array-array* for maxlen='a' - Array code f.
		"""
		# An integer maxlen is valid and must not raise.
		arrayfunc.neg(self.inparray1a, self.dataout, maxlen=self.testmaxlen)
		# A string where an integer is expected must be rejected.
		self.assertRaises(TypeError, arrayfunc.neg, self.inparray1b, self.dataout, maxlen='a')

	########################################################
	def test_neg_array_array_b3(self):
		"""Test neg as *array-array* for nosimd='a' - Array code f.
		"""
		# A boolean nosimd flag is valid and must not raise.
		arrayfunc.neg(self.inparray1a, self.dataout, nosimd=False)
		# A string where a boolean is expected must be rejected.
		self.assertRaises(TypeError, arrayfunc.neg, self.inparray1b, self.dataout, nosimd='a')
##############################################################################
##############################################################################
class neg_param_errors_d(unittest.TestCase):
	"""Test neg for invalid array and numeric parameters.
	param_invalid_template
	"""

	########################################################
	def setUp(self):
		"""Initialise.
		"""
		self.testarray1 = array.array('d', [-5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5])
		self.testarray2 = self.testarray1[:]
		self.dataout = array.array('d', [0.0] * len(self.testarray1))
		# Data array equivalents built with an incompatible type code.
		self.badarray1 = array.array('i', map(int, self.testarray1))
		self.baddataout = array.array('i', map(int, self.dataout))

	########################################################
	def test_neg_array_array_a1(self):
		"""Test neg as *array-array* for invalid type of input array - Array code d.
		"""
		# A correctly typed input array is valid and must not raise.
		arrayfunc.neg(self.testarray1, self.dataout)
		# An input array of the wrong type code must be rejected.
		self.assertRaises(TypeError, arrayfunc.neg, self.badarray1, self.dataout)

	########################################################
	def test_neg_array_array_a2(self):
		"""Test neg as *array-array* for invalid type of output array - Array code d.
		"""
		# A correctly typed output array is valid and must not raise.
		arrayfunc.neg(self.testarray1, self.dataout)
		# An output array of the wrong type code must be rejected.
		self.assertRaises(TypeError, arrayfunc.neg, self.testarray2, self.baddataout)

	########################################################
	def test_neg_no_params_b1(self):
		"""Test neg with no parameters - Array code d.
		"""
		# Calling with no arguments at all must be rejected.
		self.assertRaises(TypeError, arrayfunc.neg)
##############################################################################
##############################################################################
class neg_opt_param_errors_d(unittest.TestCase):
	"""Test neg for invalid errors flag and maxlen parameters.
	param_invalid_opt_template
	"""

	########################################################
	def setUp(self):
		"""Initialise.
		"""
		self.inparray1a = array.array('d', [-5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5])
		self.inparray1b = self.inparray1a[:]
		self.dataout = array.array('d', [0.0] * len(self.inparray1a))
		self.testmaxlen = len(self.inparray1a) // 2

	########################################################
	def test_neg_array_none_a1(self):
		"""Test neg as *array-none* for matherrors='a' - Array code d.
		"""
		# A boolean matherrors flag is valid and must not raise.
		arrayfunc.neg(self.inparray1a, matherrors=True)
		# A string where a boolean is expected must be rejected.
		self.assertRaises(TypeError, arrayfunc.neg, self.inparray1b, matherrors='a')

	########################################################
	def test_neg_array_none_a2(self):
		"""Test neg as *array-none* for maxlen='a' - Array code d.
		"""
		# An integer maxlen is valid and must not raise.
		arrayfunc.neg(self.inparray1a, maxlen=self.testmaxlen)
		# A string where an integer is expected must be rejected.
		self.assertRaises(TypeError, arrayfunc.neg, self.inparray1b, maxlen='a')

	########################################################
	def test_neg_array_none_a3(self):
		"""Test neg as *array-none* for nosimd='a' - Array code d.
		"""
		# A boolean nosimd flag is valid and must not raise.
		arrayfunc.neg(self.inparray1a, nosimd=False)
		# A string where a boolean is expected must be rejected.
		self.assertRaises(TypeError, arrayfunc.neg, self.inparray1b, nosimd='a')

	########################################################
	def test_neg_array_array_b1(self):
		"""Test neg as *array-array* for matherrors='a' - Array code d.
		"""
		# A boolean matherrors flag is valid and must not raise.
		arrayfunc.neg(self.inparray1a, self.dataout, matherrors=True)
		# A string where a boolean is expected must be rejected.
		self.assertRaises(TypeError, arrayfunc.neg, self.inparray1b, self.dataout, matherrors='a')

	########################################################
	def test_neg_array_array_b2(self):
		"""Test neg as *array-array* for maxlen='a' - Array code d.
		"""
		# An integer maxlen is valid and must not raise.
		arrayfunc.neg(self.inparray1a, self.dataout, maxlen=self.testmaxlen)
		# A string where an integer is expected must be rejected.
		self.assertRaises(TypeError, arrayfunc.neg, self.inparray1b, self.dataout, maxlen='a')

	########################################################
	def test_neg_array_array_b3(self):
		"""Test neg as *array-array* for nosimd='a' - Array code d.
		"""
		# A boolean nosimd flag is valid and must not raise.
		arrayfunc.neg(self.inparray1a, self.dataout, nosimd=False)
		# A string where a boolean is expected must be rejected.
		self.assertRaises(TypeError, arrayfunc.neg, self.inparray1b, self.dataout, nosimd='a')
##############################################################################
##############################################################################
class neg_invalidarray_B(unittest.TestCase):
	"""Test for invalid arrays.
	test_template_invalidarray
	"""

	########################################################
	def setUp(self):
		"""Initialise.
		"""
		self.data = array.array('B', [5, 4, 3, 2, 1, 0, 1, 2, 3, 4, 5])
		self.dataout = array.array('B', itertools.repeat(0, len(self.data)))
		self.limited = len(self.data) // 2

	########################################################
	def test_neg_inplace(self):
		"""Test neg in place - Array code B.
		"""
		# neg is not defined for unsigned array types and must be rejected.
		self.assertRaises(TypeError, arrayfunc.neg, self.data)

	########################################################
	def test_neg_inplace_ov_a1(self):
		"""Test neg in place with matherrors=True - Array code B.
		"""
		# The unsigned array type must be rejected even with matherrors set.
		self.assertRaises(TypeError, arrayfunc.neg, self.data, matherrors=True)

	########################################################
	def test_neg_inplace_maxlen_a2(self):
		"""Test neg in place with array maxlen - Array code B.
		"""
		# The unsigned array type must be rejected even with maxlen set.
		self.assertRaises(TypeError, arrayfunc.neg, self.data, maxlen=self.limited)

	########################################################
	def test_neg_inplace_ov_maxlen_a3(self):
		"""Test neg in place with matherrors=True and array maxlen - Array code B.
		"""
		# The unsigned array type must be rejected with both options set.
		self.assertRaises(TypeError, arrayfunc.neg, self.data, matherrors=True, maxlen=self.limited)

	########################################################
	def test_neg_outputarray_a4(self):
		"""Test neg to output array - Array code B.
		"""
		# An unsigned output form must be rejected as well.
		self.assertRaises(TypeError, arrayfunc.neg, self.data, self.dataout)

	########################################################
	def test_neg_outputarray_ov_a4(self):
		"""Test neg to output array with matherrors=True - Array code B.
		"""
		# The output form must be rejected even with matherrors set.
		self.assertRaises(TypeError, arrayfunc.neg, self.data, self.dataout, matherrors=True)

	########################################################
	def test_neg_outputarray_maxlen_a5(self):
		"""Test neg to output array with array maxlen - Array code B.
		"""
		# The output form must be rejected even with maxlen set.
		self.assertRaises(TypeError, arrayfunc.neg, self.data, self.dataout, maxlen=self.limited)

	########################################################
	def test_neg_outputarray_ov_maxlen_a6(self):
		"""Test neg to output array with matherrors=True and array maxlen - Array code B.
		"""
		# The output form must be rejected with both options set.
		self.assertRaises(TypeError, arrayfunc.neg, self.data, self.dataout, matherrors=True, maxlen=self.limited)
##############################################################################
##############################################################################
class neg_invalidarray_H(unittest.TestCase):
	"""Test for invalid arrays.
	test_template_invalidarray
	"""

	########################################################
	def setUp(self):
		"""Initialise.
		"""
		self.data = array.array('H', [5, 4, 3, 2, 1, 0, 1, 2, 3, 4, 5])
		self.dataout = array.array('H', itertools.repeat(0, len(self.data)))
		self.limited = len(self.data) // 2

	########################################################
	def test_neg_inplace(self):
		"""Test neg in place - Array code H.
		"""
		# neg is not defined for unsigned array types and must be rejected.
		self.assertRaises(TypeError, arrayfunc.neg, self.data)

	########################################################
	def test_neg_inplace_ov_a1(self):
		"""Test neg in place with matherrors=True - Array code H.
		"""
		# The unsigned array type must be rejected even with matherrors set.
		self.assertRaises(TypeError, arrayfunc.neg, self.data, matherrors=True)

	########################################################
	def test_neg_inplace_maxlen_a2(self):
		"""Test neg in place with array maxlen - Array code H.
		"""
		# The unsigned array type must be rejected even with maxlen set.
		self.assertRaises(TypeError, arrayfunc.neg, self.data, maxlen=self.limited)

	########################################################
	def test_neg_inplace_ov_maxlen_a3(self):
		"""Test neg in place with matherrors=True and array maxlen - Array code H.
		"""
		# The unsigned array type must be rejected with both options set.
		self.assertRaises(TypeError, arrayfunc.neg, self.data, matherrors=True, maxlen=self.limited)

	########################################################
	def test_neg_outputarray_a4(self):
		"""Test neg to output array - Array code H.
		"""
		# An unsigned output form must be rejected as well.
		self.assertRaises(TypeError, arrayfunc.neg, self.data, self.dataout)

	########################################################
	def test_neg_outputarray_ov_a4(self):
		"""Test neg to output array with matherrors=True - Array code H.
		"""
		# The output form must be rejected even with matherrors set.
		self.assertRaises(TypeError, arrayfunc.neg, self.data, self.dataout, matherrors=True)

	########################################################
	def test_neg_outputarray_maxlen_a5(self):
		"""Test neg to output array with array maxlen - Array code H.
		"""
		# The output form must be rejected even with maxlen set.
		self.assertRaises(TypeError, arrayfunc.neg, self.data, self.dataout, maxlen=self.limited)

	########################################################
	def test_neg_outputarray_ov_maxlen_a6(self):
		"""Test neg to output array with matherrors=True and array maxlen - Array code H.
		"""
		# The output form must be rejected with both options set.
		self.assertRaises(TypeError, arrayfunc.neg, self.data, self.dataout, matherrors=True, maxlen=self.limited)
##############################################################################
##############################################################################
class neg_invalidarray_I(unittest.TestCase):
	"""Test for invalid arrays.
	test_template_invalidarray
	"""

	########################################################
	def setUp(self):
		"""Initialise.
		"""
		self.data = array.array('I', [5, 4, 3, 2, 1, 0, 1, 2, 3, 4, 5])
		self.dataout = array.array('I', itertools.repeat(0, len(self.data)))
		self.limited = len(self.data) // 2

	########################################################
	def test_neg_inplace(self):
		"""Test neg in place - Array code I.
		"""
		# neg is not defined for unsigned array types and must be rejected.
		self.assertRaises(TypeError, arrayfunc.neg, self.data)

	########################################################
	def test_neg_inplace_ov_a1(self):
		"""Test neg in place with matherrors=True - Array code I.
		"""
		# The unsigned array type must be rejected even with matherrors set.
		self.assertRaises(TypeError, arrayfunc.neg, self.data, matherrors=True)

	########################################################
	def test_neg_inplace_maxlen_a2(self):
		"""Test neg in place with array maxlen - Array code I.
		"""
		# The unsigned array type must be rejected even with maxlen set.
		self.assertRaises(TypeError, arrayfunc.neg, self.data, maxlen=self.limited)

	########################################################
	def test_neg_inplace_ov_maxlen_a3(self):
		"""Test neg in place with matherrors=True and array maxlen - Array code I.
		"""
		# The unsigned array type must be rejected with both options set.
		self.assertRaises(TypeError, arrayfunc.neg, self.data, matherrors=True, maxlen=self.limited)

	########################################################
	def test_neg_outputarray_a4(self):
		"""Test neg to output array - Array code I.
		"""
		# An unsigned output form must be rejected as well.
		self.assertRaises(TypeError, arrayfunc.neg, self.data, self.dataout)

	########################################################
	def test_neg_outputarray_ov_a4(self):
		"""Test neg to output array with matherrors=True - Array code I.
		"""
		# The output form must be rejected even with matherrors set.
		self.assertRaises(TypeError, arrayfunc.neg, self.data, self.dataout, matherrors=True)

	########################################################
	def test_neg_outputarray_maxlen_a5(self):
		"""Test neg to output array with array maxlen - Array code I.
		"""
		# The output form must be rejected even with maxlen set.
		self.assertRaises(TypeError, arrayfunc.neg, self.data, self.dataout, maxlen=self.limited)

	########################################################
	def test_neg_outputarray_ov_maxlen_a6(self):
		"""Test neg to output array with matherrors=True and array maxlen - Array code I.
		"""
		# The output form must be rejected with both options set.
		self.assertRaises(TypeError, arrayfunc.neg, self.data, self.dataout, matherrors=True, maxlen=self.limited)
##############################################################################
##############################################################################
class neg_invalidarray_L(unittest.TestCase):
	"""Test for invalid arrays.
	test_template_invalidarray
	"""

	########################################################
	def setUp(self):
		"""Initialise.
		"""
		self.data = array.array('L', [5, 4, 3, 2, 1, 0, 1, 2, 3, 4, 5])
		self.dataout = array.array('L', itertools.repeat(0, len(self.data)))
		self.limited = len(self.data) // 2

	########################################################
	def test_neg_inplace(self):
		"""Test neg in place - Array code L.
		"""
		# neg is not defined for unsigned array types and must be rejected.
		self.assertRaises(TypeError, arrayfunc.neg, self.data)

	########################################################
	def test_neg_inplace_ov_a1(self):
		"""Test neg in place with matherrors=True - Array code L.
		"""
		# The unsigned array type must be rejected even with matherrors set.
		self.assertRaises(TypeError, arrayfunc.neg, self.data, matherrors=True)

	########################################################
	def test_neg_inplace_maxlen_a2(self):
		"""Test neg in place with array maxlen - Array code L.
		"""
		# The unsigned array type must be rejected even with maxlen set.
		self.assertRaises(TypeError, arrayfunc.neg, self.data, maxlen=self.limited)

	########################################################
	def test_neg_inplace_ov_maxlen_a3(self):
		"""Test neg in place with matherrors=True and array maxlen - Array code L.
		"""
		# The unsigned array type must be rejected with both options set.
		self.assertRaises(TypeError, arrayfunc.neg, self.data, matherrors=True, maxlen=self.limited)

	########################################################
	def test_neg_outputarray_a4(self):
		"""Test neg to output array - Array code L.
		"""
		# An unsigned output form must be rejected as well.
		self.assertRaises(TypeError, arrayfunc.neg, self.data, self.dataout)

	########################################################
	def test_neg_outputarray_ov_a4(self):
		"""Test neg to output array with matherrors=True - Array code L.
		"""
		# The output form must be rejected even with matherrors set.
		self.assertRaises(TypeError, arrayfunc.neg, self.data, self.dataout, matherrors=True)

	########################################################
	def test_neg_outputarray_maxlen_a5(self):
		"""Test neg to output array with array maxlen - Array code L.
		"""
		# The output form must be rejected even with maxlen set.
		self.assertRaises(TypeError, arrayfunc.neg, self.data, self.dataout, maxlen=self.limited)

	########################################################
	def test_neg_outputarray_ov_maxlen_a6(self):
		"""Test neg to output array with matherrors=True and array maxlen - Array code L.
		"""
		# The output form must be rejected with both options set.
		self.assertRaises(TypeError, arrayfunc.neg, self.data, self.dataout, matherrors=True, maxlen=self.limited)
##############################################################################
##############################################################################
class neg_invalidarray_Q(unittest.TestCase):
	"""Test for invalid arrays.
	test_template_invalidarray
	"""

	########################################################
	def setUp(self):
		"""Initialise.
		"""
		self.data = array.array('Q', [5, 4, 3, 2, 1, 0, 1, 2, 3, 4, 5])
		self.dataout = array.array('Q', itertools.repeat(0, len(self.data)))
		self.limited = len(self.data) // 2

	########################################################
	def test_neg_inplace(self):
		"""Test neg in place - Array code Q.
		"""
		# neg is not defined for unsigned array types and must be rejected.
		self.assertRaises(TypeError, arrayfunc.neg, self.data)

	########################################################
	def test_neg_inplace_ov_a1(self):
		"""Test neg in place with matherrors=True - Array code Q.
		"""
		# The unsigned array type must be rejected even with matherrors set.
		self.assertRaises(TypeError, arrayfunc.neg, self.data, matherrors=True)

	########################################################
	def test_neg_inplace_maxlen_a2(self):
		"""Test neg in place with array maxlen - Array code Q.
		"""
		# The unsigned array type must be rejected even with maxlen set.
		self.assertRaises(TypeError, arrayfunc.neg, self.data, maxlen=self.limited)

	########################################################
	def test_neg_inplace_ov_maxlen_a3(self):
		"""Test neg in place with matherrors=True and array maxlen - Array code Q.
		"""
		# The unsigned array type must be rejected with both options set.
		self.assertRaises(TypeError, arrayfunc.neg, self.data, matherrors=True, maxlen=self.limited)

	########################################################
	def test_neg_outputarray_a4(self):
		"""Test neg to output array - Array code Q.
		"""
		# An unsigned output form must be rejected as well.
		self.assertRaises(TypeError, arrayfunc.neg, self.data, self.dataout)

	########################################################
	def test_neg_outputarray_ov_a4(self):
		"""Test neg to output array with matherrors=True - Array code Q.
		"""
		# The output form must be rejected even with matherrors set.
		self.assertRaises(TypeError, arrayfunc.neg, self.data, self.dataout, matherrors=True)

	########################################################
	def test_neg_outputarray_maxlen_a5(self):
		"""Test neg to output array with array maxlen - Array code Q.
		"""
		# The output form must be rejected even with maxlen set.
		self.assertRaises(TypeError, arrayfunc.neg, self.data, self.dataout, maxlen=self.limited)

	########################################################
	def test_neg_outputarray_ov_maxlen_a6(self):
		"""Test neg to output array with matherrors=True and array maxlen - Array code Q.
		"""
		# The output form must be rejected with both options set.
		self.assertRaises(TypeError, arrayfunc.neg, self.data, self.dataout, matherrors=True, maxlen=self.limited)
##############################################################################
##############################################################################
class overflow_signed_ovflmin_b(unittest.TestCase):
	"""Test neg for value overflow for negating or taking absolute
	values of min values in signed arrays.
	param_overflow_minval_template
	"""

	########################################################
	def setUp(self):
		"""Initialise.
		"""
		arraysize = 200
		self.MaxLimit = arrayfunc.arraylimits.b_max
		self.MinLimit = arrayfunc.arraylimits.b_min
		self.maxarray = array.array('b', [self.MaxLimit] * arraysize)
		self.minarray = array.array('b', [self.MinLimit] * arraysize)
		self.dataout = array.array('b', [0] * arraysize)

	########################################################
	def test_neg_array_none_a1(self):
		"""Test neg as *array-none* for overflow of min value - Array code b.
		"""
		# Negating the maximum value is representable and must not raise.
		arrayfunc.neg(self.maxarray)
		# Negating the minimum value is not representable in two's complement.
		self.assertRaises(OverflowError, arrayfunc.neg, self.minarray)

	########################################################
	def test_neg_array_num_array_a2(self):
		"""Test neg as *array-array* for overflow of min value - Array code b.
		"""
		# Negating the maximum value is representable and must not raise.
		arrayfunc.neg(self.maxarray, self.dataout)
		# Negating the minimum value is not representable in two's complement.
		self.assertRaises(OverflowError, arrayfunc.neg, self.minarray, self.dataout)

	########################################################
	def test_neg_array_none_b1(self):
		"""Test neg as *array-none* for overflow of min value with matherrors=True - Array code b.
		"""
		# Negating the maximum value must not raise.
		arrayfunc.neg(self.maxarray, matherrors=True)
		# With error checking disabled, the min value must not raise either.
		arrayfunc.neg(self.minarray, matherrors=True)

	########################################################
	def test_neg_array_num_array_b2(self):
		"""Test neg as *array-array* for overflow of min value with matherrors=True - Array code b.
		"""
		# Negating the maximum value must not raise.
		arrayfunc.neg(self.maxarray, self.dataout, matherrors=True)
		# With error checking disabled, the min value must not raise either.
		arrayfunc.neg(self.minarray, self.dataout, matherrors=True)
##############################################################################
##############################################################################
class overflow_signed_ovflmin_h(unittest.TestCase):
	"""Test neg for value overflow for negating or taking absolute
	values of min values in signed arrays.
	param_overflow_minval_template
	"""

	########################################################
	def setUp(self):
		"""Initialise.
		"""
		arraysize = 200
		self.MaxLimit = arrayfunc.arraylimits.h_max
		self.MinLimit = arrayfunc.arraylimits.h_min
		self.maxarray = array.array('h', [self.MaxLimit] * arraysize)
		self.minarray = array.array('h', [self.MinLimit] * arraysize)
		self.dataout = array.array('h', [0] * arraysize)

	########################################################
	def test_neg_array_none_a1(self):
		"""Test neg as *array-none* for overflow of min value - Array code h.
		"""
		# Negating the maximum value is representable and must not raise.
		arrayfunc.neg(self.maxarray)
		# Negating the minimum value is not representable in two's complement.
		self.assertRaises(OverflowError, arrayfunc.neg, self.minarray)

	########################################################
	def test_neg_array_num_array_a2(self):
		"""Test neg as *array-array* for overflow of min value - Array code h.
		"""
		# Negating the maximum value is representable and must not raise.
		arrayfunc.neg(self.maxarray, self.dataout)
		# Negating the minimum value is not representable in two's complement.
		self.assertRaises(OverflowError, arrayfunc.neg, self.minarray, self.dataout)

	########################################################
	def test_neg_array_none_b1(self):
		"""Test neg as *array-none* for overflow of min value with matherrors=True - Array code h.
		"""
		# Negating the maximum value must not raise.
		arrayfunc.neg(self.maxarray, matherrors=True)
		# With error checking disabled, the min value must not raise either.
		arrayfunc.neg(self.minarray, matherrors=True)

	########################################################
	def test_neg_array_num_array_b2(self):
		"""Test neg as *array-array* for overflow of min value with matherrors=True - Array code h.
		"""
		# Negating the maximum value must not raise.
		arrayfunc.neg(self.maxarray, self.dataout, matherrors=True)
		# With error checking disabled, the min value must not raise either.
		arrayfunc.neg(self.minarray, self.dataout, matherrors=True)
##############################################################################
##############################################################################
class overflow_signed_ovflmin_i(unittest.TestCase):
	"""Test neg for value overflow for negating or taking absolute
	values of min values in signed arrays.
	param_overflow_minval_template
	"""

	########################################################
	def setUp(self):
		"""Initialise.
		"""
		arraysize = 200
		self.MaxLimit = arrayfunc.arraylimits.i_max
		self.MinLimit = arrayfunc.arraylimits.i_min
		self.maxarray = array.array('i', [self.MaxLimit] * arraysize)
		self.minarray = array.array('i', [self.MinLimit] * arraysize)
		self.dataout = array.array('i', [0] * arraysize)

	########################################################
	def test_neg_array_none_a1(self):
		"""Test neg as *array-none* for overflow of min value - Array code i.
		"""
		# Negating the maximum value is representable and must not raise.
		arrayfunc.neg(self.maxarray)
		# Negating the minimum value is not representable in two's complement.
		self.assertRaises(OverflowError, arrayfunc.neg, self.minarray)

	########################################################
	def test_neg_array_num_array_a2(self):
		"""Test neg as *array-array* for overflow of min value - Array code i.
		"""
		# Negating the maximum value is representable and must not raise.
		arrayfunc.neg(self.maxarray, self.dataout)
		# Negating the minimum value is not representable in two's complement.
		self.assertRaises(OverflowError, arrayfunc.neg, self.minarray, self.dataout)

	########################################################
	def test_neg_array_none_b1(self):
		"""Test neg as *array-none* for overflow of min value with matherrors=True - Array code i.
		"""
		# Negating the maximum value must not raise.
		arrayfunc.neg(self.maxarray, matherrors=True)
		# With error checking disabled, the min value must not raise either.
		arrayfunc.neg(self.minarray, matherrors=True)

	########################################################
	def test_neg_array_num_array_b2(self):
		"""Test neg as *array-array* for overflow of min value with matherrors=True - Array code i.
		"""
		# Negating the maximum value must not raise.
		arrayfunc.neg(self.maxarray, self.dataout, matherrors=True)
		# With error checking disabled, the min value must not raise either.
		arrayfunc.neg(self.minarray, self.dataout, matherrors=True)
##############################################################################
##############################################################################
class overflow_signed_ovflmin_l(unittest.TestCase):
	"""Test neg for value overflow for negating or taking absolute
	values of min values in signed arrays.
	param_overflow_minval_template
	"""

	########################################################
	def setUp(self):
		"""Initialise.
		"""
		arraysize = 200
		self.MaxLimit = arrayfunc.arraylimits.l_max
		self.MinLimit = arrayfunc.arraylimits.l_min
		self.maxarray = array.array('l', [self.MaxLimit] * arraysize)
		self.minarray = array.array('l', [self.MinLimit] * arraysize)
		self.dataout = array.array('l', [0] * arraysize)

	########################################################
	def test_neg_array_none_a1(self):
		"""Test neg as *array-none* for overflow of min value - Array code l.
		"""
		# Negating the maximum value is representable and must not raise.
		arrayfunc.neg(self.maxarray)
		# Negating the minimum value is not representable in two's complement.
		self.assertRaises(OverflowError, arrayfunc.neg, self.minarray)

	########################################################
	def test_neg_array_num_array_a2(self):
		"""Test neg as *array-array* for overflow of min value - Array code l.
		"""
		# Negating the maximum value is representable and must not raise.
		arrayfunc.neg(self.maxarray, self.dataout)
		# Negating the minimum value is not representable in two's complement.
		self.assertRaises(OverflowError, arrayfunc.neg, self.minarray, self.dataout)

	########################################################
	def test_neg_array_none_b1(self):
		"""Test neg as *array-none* for overflow of min value with matherrors=True - Array code l.
		"""
		# Negating the maximum value must not raise.
		arrayfunc.neg(self.maxarray, matherrors=True)
		# With error checking disabled, the min value must not raise either.
		arrayfunc.neg(self.minarray, matherrors=True)

	########################################################
	def test_neg_array_num_array_b2(self):
		"""Test neg as *array-array* for overflow of min value with matherrors=True - Array code l.
		"""
		# Negating the maximum value must not raise.
		arrayfunc.neg(self.maxarray, self.dataout, matherrors=True)
		# With error checking disabled, the min value must not raise either.
		arrayfunc.neg(self.minarray, self.dataout, matherrors=True)
##############################################################################
##############################################################################
class overflow_signed_ovflmin_q(unittest.TestCase):
	"""Test neg for value overflow for negating or taking absolute
	values of min values in signed arrays.
	param_overflow_minval_template
	"""

	########################################################
	def setUp(self):
		"""Initialise.
		"""
		arraysize = 200
		self.MaxLimit = arrayfunc.arraylimits.q_max
		self.MinLimit = arrayfunc.arraylimits.q_min
		self.maxarray = array.array('q', [self.MaxLimit] * arraysize)
		self.minarray = array.array('q', [self.MinLimit] * arraysize)
		self.dataout = array.array('q', [0] * arraysize)

	########################################################
	def test_neg_array_none_a1(self):
		"""Test neg as *array-none* for overflow of min value - Array code q.
		"""
		# Negating the maximum value is representable and must not raise.
		arrayfunc.neg(self.maxarray)
		# Negating the minimum value is not representable in two's complement.
		self.assertRaises(OverflowError, arrayfunc.neg, self.minarray)

	########################################################
	def test_neg_array_num_array_a2(self):
		"""Test neg as *array-array* for overflow of min value - Array code q.
		"""
		# Negating the maximum value is representable and must not raise.
		arrayfunc.neg(self.maxarray, self.dataout)
		# Negating the minimum value is not representable in two's complement.
		self.assertRaises(OverflowError, arrayfunc.neg, self.minarray, self.dataout)

	########################################################
	def test_neg_array_none_b1(self):
		"""Test neg as *array-none* for overflow of min value with matherrors=True - Array code q.
		"""
		# Negating the maximum value must not raise.
		arrayfunc.neg(self.maxarray, matherrors=True)
		# With error checking disabled, the min value must not raise either.
		arrayfunc.neg(self.minarray, matherrors=True)

	########################################################
	def test_neg_array_num_array_b2(self):
		"""Test neg as *array-array* for overflow of min value with matherrors=True - Array code q.
		"""
		# Negating the maximum value must not raise.
		arrayfunc.neg(self.maxarray, self.dataout, matherrors=True)
		# With error checking disabled, the min value must not raise either.
		arrayfunc.neg(self.minarray, self.dataout, matherrors=True)
##############################################################################
##############################################################################
class neg_nandata_exceptions_nan_even_arraysize_f(unittest.TestCase):
"""Test for basic general function operation.
nan_data_errorchecked_noparam_template
"""
##############################################################################
def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
"""This function is patched into assertEqual to allow testing for
the floating point special values NaN, Inf, and -Inf.
"""
# NaN cannot be compared using normal means.
if math.isnan(dataoutitem) and math.isnan(expecteditem):
pass
# Anything else can be compared normally.
else:
if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))
########################################################
def setUp(self):
"""Initialise.
"""
self.addTypeEqualityFunc(float, self.FloatassertEqual)
# This allows the template to select a number which fits evenly into
# SIMD register sizes or for which processing overflows into the
# non-SIMD cleanup code at the end.
arraylength = 64
if 'even' == 'even':
outdata = list(itertools.repeat(0.0, arraylength))
infdata = [math.inf] * arraylength
nandata = [math.nan] * arraylength
ninfdata = [-math.inf] * arraylength
else:
outdata = list(itertools.repeat(0.0, arraylength + 1))
infdata = ([1.0] * arraylength) + [math.inf]
nandata = ([1.0] * arraylength) + [math.nan]
ninfdata = ([1.0] * arraylength) + [-math.inf]
self.dataout = array.array('f', outdata)
self.datainf = array.array('f', infdata)
self.datanan = array.array('f', nandata)
self.dataninf = array.array('f', ninfdata)
########################################################
def test_neg_outputarray_a1(self):
"""Test neg for data of nan with matherrors checking on and single parameter functions - Array code f.
"""
with self.assertRaises(ArithmeticError):
arrayfunc.neg(self.datanan, self.dataout)
########################################################
def test_neg_inplace_a2(self):
"""Test neg in place for data of nan with matherrors checking on and single parameter functions - Array code f.
"""
with self.assertRaises(ArithmeticError):
arrayfunc.neg(self.datanan)
########################################################
def test_neg_ov_outputarray_a3(self):
"""Test neg for data of nan with matherrors=True and single parameter functions - Array code f.
"""
# Calculate the expected result.
expected = [-(x) for x in self.datanan]
# This is the actual test.
arrayfunc.neg(self.datanan, self.dataout, matherrors=True)
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_ov_inplace_a4(self):
"""Test neg in place for data of nan with matherrors=True and single parameter functions - Array code f.
"""
# Calculate the expected result.
expected = [-(x) for x in self.datanan]
# This is the actual test.
arrayfunc.neg(self.datanan, matherrors=True)
for dataoutitem, expecteditem in zip(list(self.datanan), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
##############################################################################
##############################################################################
class neg_nandata_exceptions_nan_odd_arraysize_f(unittest.TestCase):
"""Test for basic general function operation.
nan_data_errorchecked_noparam_template
"""
##############################################################################
def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
"""This function is patched into assertEqual to allow testing for
the floating point special values NaN, Inf, and -Inf.
"""
# NaN cannot be compared using normal means.
if math.isnan(dataoutitem) and math.isnan(expecteditem):
pass
# Anything else can be compared normally.
else:
if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))
########################################################
def setUp(self):
"""Initialise.
"""
self.addTypeEqualityFunc(float, self.FloatassertEqual)
# This allows the template to select a number which fits evenly into
# SIMD register sizes or for which processing overflows into the
# non-SIMD cleanup code at the end.
arraylength = 64
if 'odd' == 'even':
outdata = list(itertools.repeat(0.0, arraylength))
infdata = [math.inf] * arraylength
nandata = [math.nan] * arraylength
ninfdata = [-math.inf] * arraylength
else:
outdata = list(itertools.repeat(0.0, arraylength + 1))
infdata = ([1.0] * arraylength) + [math.inf]
nandata = ([1.0] * arraylength) + [math.nan]
ninfdata = ([1.0] * arraylength) + [-math.inf]
self.dataout = array.array('f', outdata)
self.datainf = array.array('f', infdata)
self.datanan = array.array('f', nandata)
self.dataninf = array.array('f', ninfdata)
########################################################
def test_neg_outputarray_a1(self):
"""Test neg for data of nan with matherrors checking on and single parameter functions - Array code f.
"""
with self.assertRaises(ArithmeticError):
arrayfunc.neg(self.datanan, self.dataout)
########################################################
def test_neg_inplace_a2(self):
"""Test neg in place for data of nan with matherrors checking on and single parameter functions - Array code f.
"""
with self.assertRaises(ArithmeticError):
arrayfunc.neg(self.datanan)
########################################################
def test_neg_ov_outputarray_a3(self):
"""Test neg for data of nan with matherrors=True and single parameter functions - Array code f.
"""
# Calculate the expected result.
expected = [-(x) for x in self.datanan]
# This is the actual test.
arrayfunc.neg(self.datanan, self.dataout, matherrors=True)
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_ov_inplace_a4(self):
"""Test neg in place for data of nan with matherrors=True and single parameter functions - Array code f.
"""
# Calculate the expected result.
expected = [-(x) for x in self.datanan]
# This is the actual test.
arrayfunc.neg(self.datanan, matherrors=True)
for dataoutitem, expecteditem in zip(list(self.datanan), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
##############################################################################
##############################################################################
class neg_nandata_exceptions_nan_even_arraysize_d(unittest.TestCase):
"""Test for basic general function operation.
nan_data_errorchecked_noparam_template
"""
##############################################################################
def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
"""This function is patched into assertEqual to allow testing for
the floating point special values NaN, Inf, and -Inf.
"""
# NaN cannot be compared using normal means.
if math.isnan(dataoutitem) and math.isnan(expecteditem):
pass
# Anything else can be compared normally.
else:
if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))
########################################################
def setUp(self):
"""Initialise.
"""
self.addTypeEqualityFunc(float, self.FloatassertEqual)
# This allows the template to select a number which fits evenly into
# SIMD register sizes or for which processing overflows into the
# non-SIMD cleanup code at the end.
arraylength = 64
if 'even' == 'even':
outdata = list(itertools.repeat(0.0, arraylength))
infdata = [math.inf] * arraylength
nandata = [math.nan] * arraylength
ninfdata = [-math.inf] * arraylength
else:
outdata = list(itertools.repeat(0.0, arraylength + 1))
infdata = ([1.0] * arraylength) + [math.inf]
nandata = ([1.0] * arraylength) + [math.nan]
ninfdata = ([1.0] * arraylength) + [-math.inf]
self.dataout = array.array('d', outdata)
self.datainf = array.array('d', infdata)
self.datanan = array.array('d', nandata)
self.dataninf = array.array('d', ninfdata)
########################################################
def test_neg_outputarray_a1(self):
"""Test neg for data of nan with matherrors checking on and single parameter functions - Array code d.
"""
with self.assertRaises(ArithmeticError):
arrayfunc.neg(self.datanan, self.dataout)
########################################################
def test_neg_inplace_a2(self):
"""Test neg in place for data of nan with matherrors checking on and single parameter functions - Array code d.
"""
with self.assertRaises(ArithmeticError):
arrayfunc.neg(self.datanan)
########################################################
def test_neg_ov_outputarray_a3(self):
"""Test neg for data of nan with matherrors=True and single parameter functions - Array code d.
"""
# Calculate the expected result.
expected = [-(x) for x in self.datanan]
# This is the actual test.
arrayfunc.neg(self.datanan, self.dataout, matherrors=True)
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_ov_inplace_a4(self):
"""Test neg in place for data of nan with matherrors=True and single parameter functions - Array code d.
"""
# Calculate the expected result.
expected = [-(x) for x in self.datanan]
# This is the actual test.
arrayfunc.neg(self.datanan, matherrors=True)
for dataoutitem, expecteditem in zip(list(self.datanan), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
##############################################################################
##############################################################################
class neg_nandata_exceptions_nan_odd_arraysize_d(unittest.TestCase):
"""Test for basic general function operation.
nan_data_errorchecked_noparam_template
"""
##############################################################################
def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
"""This function is patched into assertEqual to allow testing for
the floating point special values NaN, Inf, and -Inf.
"""
# NaN cannot be compared using normal means.
if math.isnan(dataoutitem) and math.isnan(expecteditem):
pass
# Anything else can be compared normally.
else:
if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))
########################################################
def setUp(self):
"""Initialise.
"""
self.addTypeEqualityFunc(float, self.FloatassertEqual)
# This allows the template to select a number which fits evenly into
# SIMD register sizes or for which processing overflows into the
# non-SIMD cleanup code at the end.
arraylength = 64
if 'odd' == 'even':
outdata = list(itertools.repeat(0.0, arraylength))
infdata = [math.inf] * arraylength
nandata = [math.nan] * arraylength
ninfdata = [-math.inf] * arraylength
else:
outdata = list(itertools.repeat(0.0, arraylength + 1))
infdata = ([1.0] * arraylength) + [math.inf]
nandata = ([1.0] * arraylength) + [math.nan]
ninfdata = ([1.0] * arraylength) + [-math.inf]
self.dataout = array.array('d', outdata)
self.datainf = array.array('d', infdata)
self.datanan = array.array('d', nandata)
self.dataninf = array.array('d', ninfdata)
########################################################
def test_neg_outputarray_a1(self):
"""Test neg for data of nan with matherrors checking on and single parameter functions - Array code d.
"""
with self.assertRaises(ArithmeticError):
arrayfunc.neg(self.datanan, self.dataout)
########################################################
def test_neg_inplace_a2(self):
"""Test neg in place for data of nan with matherrors checking on and single parameter functions - Array code d.
"""
with self.assertRaises(ArithmeticError):
arrayfunc.neg(self.datanan)
########################################################
def test_neg_ov_outputarray_a3(self):
"""Test neg for data of nan with matherrors=True and single parameter functions - Array code d.
"""
# Calculate the expected result.
expected = [-(x) for x in self.datanan]
# This is the actual test.
arrayfunc.neg(self.datanan, self.dataout, matherrors=True)
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_ov_inplace_a4(self):
"""Test neg in place for data of nan with matherrors=True and single parameter functions - Array code d.
"""
# Calculate the expected result.
expected = [-(x) for x in self.datanan]
# This is the actual test.
arrayfunc.neg(self.datanan, matherrors=True)
for dataoutitem, expecteditem in zip(list(self.datanan), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
##############################################################################
##############################################################################
class neg_nandata_exceptions_inf_even_arraysize_f(unittest.TestCase):
"""Test for basic general function operation.
nan_data_errorchecked_noparam_template
"""
##############################################################################
def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
"""This function is patched into assertEqual to allow testing for
the floating point special values NaN, Inf, and -Inf.
"""
# NaN cannot be compared using normal means.
if math.isnan(dataoutitem) and math.isnan(expecteditem):
pass
# Anything else can be compared normally.
else:
if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))
########################################################
def setUp(self):
"""Initialise.
"""
self.addTypeEqualityFunc(float, self.FloatassertEqual)
# This allows the template to select a number which fits evenly into
# SIMD register sizes or for which processing overflows into the
# non-SIMD cleanup code at the end.
arraylength = 64
if 'even' == 'even':
outdata = list(itertools.repeat(0.0, arraylength))
infdata = [math.inf] * arraylength
nandata = [math.nan] * arraylength
ninfdata = [-math.inf] * arraylength
else:
outdata = list(itertools.repeat(0.0, arraylength + 1))
infdata = ([1.0] * arraylength) + [math.inf]
nandata = ([1.0] * arraylength) + [math.nan]
ninfdata = ([1.0] * arraylength) + [-math.inf]
self.dataout = array.array('f', outdata)
self.datainf = array.array('f', infdata)
self.datanan = array.array('f', nandata)
self.dataninf = array.array('f', ninfdata)
########################################################
def test_neg_outputarray_a1(self):
"""Test neg for data of inf with matherrors checking on and single parameter functions - Array code f.
"""
with self.assertRaises(ArithmeticError):
arrayfunc.neg(self.datainf, self.dataout)
########################################################
def test_neg_inplace_a2(self):
"""Test neg in place for data of inf with matherrors checking on and single parameter functions - Array code f.
"""
with self.assertRaises(ArithmeticError):
arrayfunc.neg(self.datainf)
########################################################
def test_neg_ov_outputarray_a3(self):
"""Test neg for data of inf with matherrors=True and single parameter functions - Array code f.
"""
# Calculate the expected result.
expected = [-(x) for x in self.datainf]
# This is the actual test.
arrayfunc.neg(self.datainf, self.dataout, matherrors=True)
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_ov_inplace_a4(self):
"""Test neg in place for data of inf with matherrors=True and single parameter functions - Array code f.
"""
# Calculate the expected result.
expected = [-(x) for x in self.datainf]
# This is the actual test.
arrayfunc.neg(self.datainf, matherrors=True)
for dataoutitem, expecteditem in zip(list(self.datainf), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
##############################################################################
##############################################################################
class neg_nandata_exceptions_inf_odd_arraysize_f(unittest.TestCase):
"""Test for basic general function operation.
nan_data_errorchecked_noparam_template
"""
##############################################################################
def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
"""This function is patched into assertEqual to allow testing for
the floating point special values NaN, Inf, and -Inf.
"""
# NaN cannot be compared using normal means.
if math.isnan(dataoutitem) and math.isnan(expecteditem):
pass
# Anything else can be compared normally.
else:
if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))
########################################################
def setUp(self):
"""Initialise.
"""
self.addTypeEqualityFunc(float, self.FloatassertEqual)
# This allows the template to select a number which fits evenly into
# SIMD register sizes or for which processing overflows into the
# non-SIMD cleanup code at the end.
arraylength = 64
if 'odd' == 'even':
outdata = list(itertools.repeat(0.0, arraylength))
infdata = [math.inf] * arraylength
nandata = [math.nan] * arraylength
ninfdata = [-math.inf] * arraylength
else:
outdata = list(itertools.repeat(0.0, arraylength + 1))
infdata = ([1.0] * arraylength) + [math.inf]
nandata = ([1.0] * arraylength) + [math.nan]
ninfdata = ([1.0] * arraylength) + [-math.inf]
self.dataout = array.array('f', outdata)
self.datainf = array.array('f', infdata)
self.datanan = array.array('f', nandata)
self.dataninf = array.array('f', ninfdata)
########################################################
def test_neg_outputarray_a1(self):
"""Test neg for data of inf with matherrors checking on and single parameter functions - Array code f.
"""
with self.assertRaises(ArithmeticError):
arrayfunc.neg(self.datainf, self.dataout)
########################################################
def test_neg_inplace_a2(self):
"""Test neg in place for data of inf with matherrors checking on and single parameter functions - Array code f.
"""
with self.assertRaises(ArithmeticError):
arrayfunc.neg(self.datainf)
########################################################
def test_neg_ov_outputarray_a3(self):
"""Test neg for data of inf with matherrors=True and single parameter functions - Array code f.
"""
# Calculate the expected result.
expected = [-(x) for x in self.datainf]
# This is the actual test.
arrayfunc.neg(self.datainf, self.dataout, matherrors=True)
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_ov_inplace_a4(self):
"""Test neg in place for data of inf with matherrors=True and single parameter functions - Array code f.
"""
# Calculate the expected result.
expected = [-(x) for x in self.datainf]
# This is the actual test.
arrayfunc.neg(self.datainf, matherrors=True)
for dataoutitem, expecteditem in zip(list(self.datainf), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
##############################################################################
##############################################################################
class neg_nandata_exceptions_inf_even_arraysize_d(unittest.TestCase):
"""Test for basic general function operation.
nan_data_errorchecked_noparam_template
"""
##############################################################################
def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
"""This function is patched into assertEqual to allow testing for
the floating point special values NaN, Inf, and -Inf.
"""
# NaN cannot be compared using normal means.
if math.isnan(dataoutitem) and math.isnan(expecteditem):
pass
# Anything else can be compared normally.
else:
if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))
########################################################
def setUp(self):
"""Initialise.
"""
self.addTypeEqualityFunc(float, self.FloatassertEqual)
# This allows the template to select a number which fits evenly into
# SIMD register sizes or for which processing overflows into the
# non-SIMD cleanup code at the end.
arraylength = 64
if 'even' == 'even':
outdata = list(itertools.repeat(0.0, arraylength))
infdata = [math.inf] * arraylength
nandata = [math.nan] * arraylength
ninfdata = [-math.inf] * arraylength
else:
outdata = list(itertools.repeat(0.0, arraylength + 1))
infdata = ([1.0] * arraylength) + [math.inf]
nandata = ([1.0] * arraylength) + [math.nan]
ninfdata = ([1.0] * arraylength) + [-math.inf]
self.dataout = array.array('d', outdata)
self.datainf = array.array('d', infdata)
self.datanan = array.array('d', nandata)
self.dataninf = array.array('d', ninfdata)
########################################################
def test_neg_outputarray_a1(self):
"""Test neg for data of inf with matherrors checking on and single parameter functions - Array code d.
"""
with self.assertRaises(ArithmeticError):
arrayfunc.neg(self.datainf, self.dataout)
########################################################
def test_neg_inplace_a2(self):
"""Test neg in place for data of inf with matherrors checking on and single parameter functions - Array code d.
"""
with self.assertRaises(ArithmeticError):
arrayfunc.neg(self.datainf)
########################################################
def test_neg_ov_outputarray_a3(self):
"""Test neg for data of inf with matherrors=True and single parameter functions - Array code d.
"""
# Calculate the expected result.
expected = [-(x) for x in self.datainf]
# This is the actual test.
arrayfunc.neg(self.datainf, self.dataout, matherrors=True)
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_ov_inplace_a4(self):
"""Test neg in place for data of inf with matherrors=True and single parameter functions - Array code d.
"""
# Calculate the expected result.
expected = [-(x) for x in self.datainf]
# This is the actual test.
arrayfunc.neg(self.datainf, matherrors=True)
for dataoutitem, expecteditem in zip(list(self.datainf), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
##############################################################################
##############################################################################
class neg_nandata_exceptions_inf_odd_arraysize_d(unittest.TestCase):
"""Test for basic general function operation.
nan_data_errorchecked_noparam_template
"""
##############################################################################
def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
"""This function is patched into assertEqual to allow testing for
the floating point special values NaN, Inf, and -Inf.
"""
# NaN cannot be compared using normal means.
if math.isnan(dataoutitem) and math.isnan(expecteditem):
pass
# Anything else can be compared normally.
else:
if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))
########################################################
def setUp(self):
"""Initialise.
"""
self.addTypeEqualityFunc(float, self.FloatassertEqual)
# This allows the template to select a number which fits evenly into
# SIMD register sizes or for which processing overflows into the
# non-SIMD cleanup code at the end.
arraylength = 64
if 'odd' == 'even':
outdata = list(itertools.repeat(0.0, arraylength))
infdata = [math.inf] * arraylength
nandata = [math.nan] * arraylength
ninfdata = [-math.inf] * arraylength
else:
outdata = list(itertools.repeat(0.0, arraylength + 1))
infdata = ([1.0] * arraylength) + [math.inf]
nandata = ([1.0] * arraylength) + [math.nan]
ninfdata = ([1.0] * arraylength) + [-math.inf]
self.dataout = array.array('d', outdata)
self.datainf = array.array('d', infdata)
self.datanan = array.array('d', nandata)
self.dataninf = array.array('d', ninfdata)
########################################################
def test_neg_outputarray_a1(self):
"""Test neg for data of inf with matherrors checking on and single parameter functions - Array code d.
"""
with self.assertRaises(ArithmeticError):
arrayfunc.neg(self.datainf, self.dataout)
########################################################
def test_neg_inplace_a2(self):
"""Test neg in place for data of inf with matherrors checking on and single parameter functions - Array code d.
"""
with self.assertRaises(ArithmeticError):
arrayfunc.neg(self.datainf)
########################################################
def test_neg_ov_outputarray_a3(self):
"""Test neg for data of inf with matherrors=True and single parameter functions - Array code d.
"""
# Calculate the expected result.
expected = [-(x) for x in self.datainf]
# This is the actual test.
arrayfunc.neg(self.datainf, self.dataout, matherrors=True)
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_ov_inplace_a4(self):
"""Test neg in place for data of inf with matherrors=True and single parameter functions - Array code d.
"""
# Calculate the expected result.
expected = [-(x) for x in self.datainf]
# This is the actual test.
arrayfunc.neg(self.datainf, matherrors=True)
for dataoutitem, expecteditem in zip(list(self.datainf), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
##############################################################################
##############################################################################
class neg_nandata_exceptions_ninf_even_arraysize_f(unittest.TestCase):
"""Test for basic general function operation.
nan_data_errorchecked_noparam_template
"""
##############################################################################
def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
"""This function is patched into assertEqual to allow testing for
the floating point special values NaN, Inf, and -Inf.
"""
# NaN cannot be compared using normal means.
if math.isnan(dataoutitem) and math.isnan(expecteditem):
pass
# Anything else can be compared normally.
else:
if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))
########################################################
def setUp(self):
"""Initialise.
"""
self.addTypeEqualityFunc(float, self.FloatassertEqual)
# This allows the template to select a number which fits evenly into
# SIMD register sizes or for which processing overflows into the
# non-SIMD cleanup code at the end.
arraylength = 64
if 'even' == 'even':
outdata = list(itertools.repeat(0.0, arraylength))
infdata = [math.inf] * arraylength
nandata = [math.nan] * arraylength
ninfdata = [-math.inf] * arraylength
else:
outdata = list(itertools.repeat(0.0, arraylength + 1))
infdata = ([1.0] * arraylength) + [math.inf]
nandata = ([1.0] * arraylength) + [math.nan]
ninfdata = ([1.0] * arraylength) + [-math.inf]
self.dataout = array.array('f', outdata)
self.datainf = array.array('f', infdata)
self.datanan = array.array('f', nandata)
self.dataninf = array.array('f', ninfdata)
########################################################
def test_neg_outputarray_a1(self):
"""Test neg for data of -inf with matherrors checking on and single parameter functions - Array code f.
"""
with self.assertRaises(ArithmeticError):
arrayfunc.neg(self.dataninf, self.dataout)
########################################################
def test_neg_inplace_a2(self):
"""Test neg in place for data of -inf with matherrors checking on and single parameter functions - Array code f.
"""
with self.assertRaises(ArithmeticError):
arrayfunc.neg(self.dataninf)
########################################################
def test_neg_ov_outputarray_a3(self):
"""Test neg for data of -inf with matherrors=True and single parameter functions - Array code f.
"""
# Calculate the expected result.
expected = [-(x) for x in self.dataninf]
# This is the actual test.
arrayfunc.neg(self.dataninf, self.dataout, matherrors=True)
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_ov_inplace_a4(self):
"""Test neg in place for data of -inf with matherrors=True and single parameter functions - Array code f.
"""
# Calculate the expected result.
expected = [-(x) for x in self.dataninf]
# This is the actual test.
arrayfunc.neg(self.dataninf, matherrors=True)
for dataoutitem, expecteditem in zip(list(self.dataninf), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
##############################################################################
##############################################################################
class neg_nandata_exceptions_ninf_odd_arraysize_f(unittest.TestCase):
"""Test for basic general function operation.
nan_data_errorchecked_noparam_template
"""
##############################################################################
def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
"""This function is patched into assertEqual to allow testing for
the floating point special values NaN, Inf, and -Inf.
"""
# NaN cannot be compared using normal means.
if math.isnan(dataoutitem) and math.isnan(expecteditem):
pass
# Anything else can be compared normally.
else:
if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))
########################################################
def setUp(self):
"""Initialise.
"""
self.addTypeEqualityFunc(float, self.FloatassertEqual)
# This allows the template to select a number which fits evenly into
# SIMD register sizes or for which processing overflows into the
# non-SIMD cleanup code at the end.
arraylength = 64
if 'odd' == 'even':
outdata = list(itertools.repeat(0.0, arraylength))
infdata = [math.inf] * arraylength
nandata = [math.nan] * arraylength
ninfdata = [-math.inf] * arraylength
else:
outdata = list(itertools.repeat(0.0, arraylength + 1))
infdata = ([1.0] * arraylength) + [math.inf]
nandata = ([1.0] * arraylength) + [math.nan]
ninfdata = ([1.0] * arraylength) + [-math.inf]
self.dataout = array.array('f', outdata)
self.datainf = array.array('f', infdata)
self.datanan = array.array('f', nandata)
self.dataninf = array.array('f', ninfdata)
########################################################
def test_neg_outputarray_a1(self):
"""Test neg for data of -inf with matherrors checking on and single parameter functions - Array code f.
"""
with self.assertRaises(ArithmeticError):
arrayfunc.neg(self.dataninf, self.dataout)
########################################################
def test_neg_inplace_a2(self):
"""Test neg in place for data of -inf with matherrors checking on and single parameter functions - Array code f.
"""
with self.assertRaises(ArithmeticError):
arrayfunc.neg(self.dataninf)
########################################################
def test_neg_ov_outputarray_a3(self):
"""Test neg for data of -inf with matherrors=True and single parameter functions - Array code f.
"""
# Calculate the expected result.
expected = [-(x) for x in self.dataninf]
# This is the actual test.
arrayfunc.neg(self.dataninf, self.dataout, matherrors=True)
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_ov_inplace_a4(self):
"""Test neg in place for data of -inf with matherrors=True and single parameter functions - Array code f.
"""
# Calculate the expected result.
expected = [-(x) for x in self.dataninf]
# This is the actual test.
arrayfunc.neg(self.dataninf, matherrors=True)
for dataoutitem, expecteditem in zip(list(self.dataninf), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
##############################################################################
##############################################################################
class neg_nandata_exceptions_ninf_even_arraysize_d(unittest.TestCase):
"""Test for basic general function operation.
nan_data_errorchecked_noparam_template
"""
##############################################################################
def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
"""This function is patched into assertEqual to allow testing for
the floating point special values NaN, Inf, and -Inf.
"""
# NaN cannot be compared using normal means.
if math.isnan(dataoutitem) and math.isnan(expecteditem):
pass
# Anything else can be compared normally.
else:
if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))
########################################################
def setUp(self):
"""Initialise.
"""
self.addTypeEqualityFunc(float, self.FloatassertEqual)
# This allows the template to select a number which fits evenly into
# SIMD register sizes or for which processing overflows into the
# non-SIMD cleanup code at the end.
arraylength = 64
if 'even' == 'even':
outdata = list(itertools.repeat(0.0, arraylength))
infdata = [math.inf] * arraylength
nandata = [math.nan] * arraylength
ninfdata = [-math.inf] * arraylength
else:
outdata = list(itertools.repeat(0.0, arraylength + 1))
infdata = ([1.0] * arraylength) + [math.inf]
nandata = ([1.0] * arraylength) + [math.nan]
ninfdata = ([1.0] * arraylength) + [-math.inf]
self.dataout = array.array('d', outdata)
self.datainf = array.array('d', infdata)
self.datanan = array.array('d', nandata)
self.dataninf = array.array('d', ninfdata)
########################################################
def test_neg_outputarray_a1(self):
"""Test neg for data of -inf with matherrors checking on and single parameter functions - Array code d.
"""
with self.assertRaises(ArithmeticError):
arrayfunc.neg(self.dataninf, self.dataout)
########################################################
def test_neg_inplace_a2(self):
"""Test neg in place for data of -inf with matherrors checking on and single parameter functions - Array code d.
"""
with self.assertRaises(ArithmeticError):
arrayfunc.neg(self.dataninf)
########################################################
def test_neg_ov_outputarray_a3(self):
"""Test neg for data of -inf with matherrors=True and single parameter functions - Array code d.
"""
# Calculate the expected result.
expected = [-(x) for x in self.dataninf]
# This is the actual test.
arrayfunc.neg(self.dataninf, self.dataout, matherrors=True)
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_ov_inplace_a4(self):
"""Test neg in place for data of -inf with matherrors=True and single parameter functions - Array code d.
"""
# Calculate the expected result.
expected = [-(x) for x in self.dataninf]
# This is the actual test.
arrayfunc.neg(self.dataninf, matherrors=True)
for dataoutitem, expecteditem in zip(list(self.dataninf), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
##############################################################################
##############################################################################
class neg_nandata_exceptions_ninf_odd_arraysize_d(unittest.TestCase):
"""Test for basic general function operation.
nan_data_errorchecked_noparam_template
"""
##############################################################################
def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
"""This function is patched into assertEqual to allow testing for
the floating point special values NaN, Inf, and -Inf.
"""
# NaN cannot be compared using normal means.
if math.isnan(dataoutitem) and math.isnan(expecteditem):
pass
# Anything else can be compared normally.
else:
if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
raise self.failureException('%0.3f != %0.3f' % (expecteditem, dataoutitem))
########################################################
def setUp(self):
"""Initialise.
"""
self.addTypeEqualityFunc(float, self.FloatassertEqual)
# This allows the template to select a number which fits evenly into
# SIMD register sizes or for which processing overflows into the
# non-SIMD cleanup code at the end.
arraylength = 64
if 'odd' == 'even':
outdata = list(itertools.repeat(0.0, arraylength))
infdata = [math.inf] * arraylength
nandata = [math.nan] * arraylength
ninfdata = [-math.inf] * arraylength
else:
outdata = list(itertools.repeat(0.0, arraylength + 1))
infdata = ([1.0] * arraylength) + [math.inf]
nandata = ([1.0] * arraylength) + [math.nan]
ninfdata = ([1.0] * arraylength) + [-math.inf]
self.dataout = array.array('d', outdata)
self.datainf = array.array('d', infdata)
self.datanan = array.array('d', nandata)
self.dataninf = array.array('d', ninfdata)
########################################################
def test_neg_outputarray_a1(self):
"""Test neg for data of -inf with matherrors checking on and single parameter functions - Array code d.
"""
with self.assertRaises(ArithmeticError):
arrayfunc.neg(self.dataninf, self.dataout)
########################################################
def test_neg_inplace_a2(self):
"""Test neg in place for data of -inf with matherrors checking on and single parameter functions - Array code d.
"""
with self.assertRaises(ArithmeticError):
arrayfunc.neg(self.dataninf)
########################################################
def test_neg_ov_outputarray_a3(self):
"""Test neg for data of -inf with matherrors=True and single parameter functions - Array code d.
"""
# Calculate the expected result.
expected = [-(x) for x in self.dataninf]
# This is the actual test.
arrayfunc.neg(self.dataninf, self.dataout, matherrors=True)
for dataoutitem, expecteditem in zip(list(self.dataout), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
########################################################
def test_neg_ov_inplace_a4(self):
"""Test neg in place for data of -inf with matherrors=True and single parameter functions - Array code d.
"""
# Calculate the expected result.
expected = [-(x) for x in self.dataninf]
# This is the actual test.
arrayfunc.neg(self.dataninf, matherrors=True)
for dataoutitem, expecteditem in zip(list(self.dataninf), expected):
# The behavour of assertEqual is modified by addTypeEqualityFunc.
self.assertEqual(dataoutitem, expecteditem)
##############################################################################
##############################################################################
if __name__ == '__main__':

	# The custom '-l' option diverts the test results to a log file. It
	# must be stripped from sys.argv so that unittest does not reject it
	# as an unknown option.
	if '-l' not in sys.argv:
		unittest.main()
	else:
		sys.argv.remove('-l')
		with open('af_unittest.txt', 'a') as logfile:
			logfile.write('\n\n')
			logfile.write('neg\n\n')
			runner = unittest.TextTestRunner(logfile)
			unittest.main(testRunner=runner)
##############################################################################
| 33.927192
| 115
| 0.615387
| 32,885
| 275,862
| 5.093842
| 0.010856
| 0.033849
| 0.044319
| 0.03152
| 0.993224
| 0.991702
| 0.991302
| 0.991051
| 0.991051
| 0.98735
| 0
| 0.008747
| 0.146323
| 275,862
| 8,130
| 116
| 33.931365
| 0.702561
| 0.308459
| 0
| 0.959975
| 0
| 0
| 0.008964
| 0
| 0
| 0
| 0
| 0
| 0.147713
| 1
| 0.158513
| false
| 0.012706
| 0.002859
| 0
| 0.181703
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
69497d7230b307b65abe5d42dc07237bc0826133
| 44,167
|
py
|
Python
|
ooiservices/tests/test_user_event_notifications.py
|
Bobfrat/ooi-ui-services
|
32f67948510e6af0bc06d62fe49ff3c991b22b40
|
[
"Apache-2.0"
] | 2
|
2015-02-28T00:20:30.000Z
|
2015-04-30T12:40:31.000Z
|
ooiservices/tests/test_user_event_notifications.py
|
Bobfrat/ooi-ui-services
|
32f67948510e6af0bc06d62fe49ff3c991b22b40
|
[
"Apache-2.0"
] | 266
|
2015-01-02T21:29:25.000Z
|
2020-01-23T16:00:11.000Z
|
ooiservices/tests/test_user_event_notifications.py
|
Bobfrat/ooi-ui-services
|
32f67948510e6af0bc06d62fe49ff3c991b22b40
|
[
"Apache-2.0"
] | 13
|
2015-02-04T21:13:34.000Z
|
2016-10-18T14:39:36.000Z
|
#!/usr/bin/env python
'''
Specific testing for user_event_notifications (class UserEventNotifications) used in Alerts and Alarms.
'''
__author__ = 'Edna Donoughe'
import unittest
import json
from base64 import b64encode
from flask import url_for
from ooiservices.app import create_app, db
from ooiservices.app.models import (User, UserScope, Organization)
from ooiservices.app.models import (SystemEventDefinition, SystemEvent, UserEventNotification)
import datetime as dt
import requests
from unittest import skipIf
import os
'''
These tests are additional to the normal testing performed by coverage; each of
these tests are to validate model logic outside of db management.
'''
@skipIf(os.getenv('TRAVIS'), 'Skip if testing from Travis CI.')
class UserEventNotificationsTestCase(unittest.TestCase):
# enable verbose during development and documentation to get a list of sample
# urls used throughout test cases. Always set to False before check in.
verbose = False
debug = False
root = 'http://localhost:4000'
    def setUp(self):
        """Create the Flask test app, a fresh database and an 'admin' user.

        The admin user is given the 'user_admin' and 'redmine' scopes so
        the test client can exercise the notification endpoints.
        """
        self.app = create_app('TESTING_CONFIG')
        self.app_context = self.app.app_context()
        self.app_context.push()
        db.create_all()
        test_username = 'admin'
        test_password = 'test'
        Organization.insert_org()
        User.insert_user(username=test_username, password=test_password)
        self.client = self.app.test_client(use_cookies=False)
        UserScope.insert_scopes()
        # Grant the scopes the routes under test require.
        admin = User.query.filter_by(user_name='admin').first()
        scope = UserScope.query.filter_by(scope_name='user_admin').first()
        admin.scopes.append(scope)
        scope = UserScope.query.filter_by(scope_name='redmine').first()  # added
        admin.scopes.append(scope)
        db.session.add(admin)
        db.session.commit()
    def tearDown(self):
        """Drop all database tables and pop the application context."""
        db.session.remove()
        db.drop_all()
        self.app_context.pop()
def get_api_headers(self, username, password):
return {
'Authorization': 'Basic ' + b64encode(
(username + ':' + password).encode('utf-8')).decode('utf-8'),
'Accept': 'application/json',
'Content-Type': 'application/json'
}
    def test_create_user_event_notification(self):
        """Exercise POST create_user_event_notification.

        Covers: a create that fails with 400, negative cases (unknown
        user_id, non-integer user_id, bad boolean value), one positive
        create (201), then reads notifications back through the GET routes
        and checks the returned fields.
        """
        verbose = self.verbose
        debug = self.debug
        root = self.root
        if verbose: print '\n'
        content_type = 'application/json'
        headers = self.get_api_headers('admin', 'test')
        data = {'user_id': 1,
                'system_event_definition_id': 1,
                'use_email': True,
                'use_log': True,
                'use_phone': True,
                'use_redmine': True,
                'use_sms': True,
                }
        good_stuff = json.dumps(data)
        # Create event_event_notification (this first attempt is expected
        # to fail - a 400 with a 'message' field is asserted below).
        response = self.client.post(url_for('main.create_user_event_notification'), headers=headers, data=good_stuff)
        notify_data = json.loads(response.data)
        self.assertEquals(response.status_code, 400)
        response_data = json.loads(response.data)
        self.assertTrue(response_data is not None)
        self.assertTrue(len(response_data) > 0)
        self.assertTrue('message' in response_data)
        self.assertTrue(len(response_data['message']) > 0)
        #- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        # # Create an alarm
        #- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        reference_designator = "CE01ISSP-XX099-01-CTDPFJ999"
        test_alarm = self.create_alert_alarm_definition(reference_designator, event_type='alarm', uframe_id=2, severity=1)
        # (Negative) Create event_event_notification using bad user_id
        data = {'user_id': 10,
                'system_event_definition_id': 1,
                'use_email': True,
                'use_log': True,
                'use_phone': True,
                'use_redmine': True,
                'use_sms': True,
                }
        good_stuff = json.dumps(data)
        response = self.client.post(url_for('main.create_user_event_notification'), headers=headers, data=good_stuff)
        notify_data = json.loads(response.data)
        self.assertEquals(response.status_code, 400)
        notify_data = json.loads(response.data)
        self.assertTrue(notify_data is not None)
        self.assertTrue(len(notify_data) > 0)
        notify = UserEventNotification.query.get(1)
        # (Negative) Create event_event_notification using bad user_id (alpha)
        data = {'user_id': 'A',
                'system_event_definition_id': 1,
                'use_email': True,
                'use_log': True,
                'use_phone': True,
                'use_redmine': True,
                'use_sms': True,
                }
        good_stuff = json.dumps(data)
        response = self.client.post(url_for('main.create_user_event_notification'), headers=headers, data=good_stuff)
        notify_data = json.loads(response.data)
        self.assertEquals(response.status_code, 409)
        notify_data = json.loads(response.data)
        self.assertTrue(notify_data is not None)
        self.assertTrue(len(notify_data) > 0)
        # (Negative) Create event_event_notification using bad value for boolean
        data = {'user_id': 1,
                'system_event_definition_id': 1,
                'use_email': 'A',
                'use_log': True,
                'use_phone': True,
                'use_redmine': True,
                'use_sms': True,
                }
        good_stuff = json.dumps(data)
        response = self.client.post(url_for('main.create_user_event_notification'), headers=headers, data=good_stuff)
        self.assertEquals(response.status_code, 400)
        notify_data = json.loads(response.data)
        self.assertTrue(notify_data is not None)
        self.assertTrue(len(notify_data) > 0)
        #notify = UserEventNotification.query.get(1)
        # (Negative) Create event_event_notification (re-posting the same bad
        # boolean payload - still expected to fail with 400).
        response = self.client.post(url_for('main.create_user_event_notification'), headers=headers, data=good_stuff)
        notify_data = json.loads(response.data)
        self.assertEquals(response.status_code, 400)
        notify_data = json.loads(response.data)
        self.assertTrue(notify_data is not None)
        self.assertTrue(len(notify_data) > 0)
        # (Positive) Create event_event_notification
        content_type = 'application/json'
        headers = self.get_api_headers('admin', 'test')
        data = {'user_id': 1,
                'system_event_definition_id': 1,
                'use_email': True,
                'use_log': True,
                'use_phone': True,
                'use_redmine': True,
                'use_sms': True,
                }
        good_stuff = json.dumps(data)
        # Create event_event_notification
        response = self.client.post(url_for('main.create_user_event_notification'), headers=headers, data=good_stuff)
        notify_data = json.loads(response.data)
        self.assertEquals(response.status_code, 201)
        notify_data = json.loads(response.data)
        self.assertTrue(notify_data is not None)
        self.assertTrue(len(notify_data) > 0)
        #notify = UserEventNotification.query.get(1)
        #- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        # GET alarm definition by SystemEventDefinition id
        #- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        response = self.client.get(url_for('main.get_alert_alarm_def', id=test_alarm.id), headers=headers)
        self.assertEquals(response.status_code, 200)
        alarm_definition = json.loads(response.data)
        self.assertTrue(alarm_definition is not None)
        # Get user_event_notifications (1)
        # Two notifications are expected: presumably one from
        # create_alert_alarm_definition plus the positive create above -
        # TODO confirm against the helper's implementation.
        response = self.client.get(url_for('main.get_user_event_notifications'), headers=headers)
        self.assertEquals(response.status_code, 200)
        data = json.loads(response.data)
        self.assertTrue(data is not None)
        notifications = data['notifications']
        self.assertTrue(notifications is not None)
        self.assertEquals(len(notifications), 2)
        url = url_for('main.get_user_event_notifications')
        response = self.client.get(url, content_type=content_type, headers=headers)
        self.assertEquals(response.status_code, 200)
        data = json.loads(response.data)
        self.assertTrue('notifications' in data)
        self.assertTrue(len(data['notifications']) > 0)
        self.assertEquals(len(data['notifications']), 2)
        url = url_for('main.get_user_event_notification', id=1)
        response = self.client.get(url, content_type=content_type, headers=headers)
        self.assertEquals(response.status_code, 200)
        notification = json.loads(response.data)
        self.assertTrue(len(notification) > 0)
        self.assertEquals(len(notification), 8)
        # Every model column must appear in the serialised notification.
        for attribute in UserEventNotification.__table__.columns._data:
            self.assertTrue(attribute in notification)
        if verbose: print '\n'
def test_update_user_event_notification(self):
verbose = False #self.verbose
root = self.root
if verbose: print '\n'
content_type = 'application/json'
headers = self.get_api_headers('admin', 'test')
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Add a second user ('foo', password 'test')
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
test_username = 'foo'
test_password = 'test'
test_email = 'foo@test.com'
Organization.insert_org()
User.insert_user(username=test_username, password=test_password, email=test_email)
self.client = self.app.test_client(use_cookies=False)
UserScope.insert_scopes()
foo = User.query.filter_by(user_name='foo').first()
scope = UserScope.query.filter_by(scope_name='user_admin').first()
foo.scopes.append(scope)
scope = UserScope.query.filter_by(scope_name='redmine').first() # added
foo.scopes.append(scope)
db.session.add(foo)
db.session.commit()
response = self.client.get(url_for('main.get_user',id=1), headers=headers)
self.assertTrue(response.status_code == 200)
response = self.client.get(url_for('main.get_user',id=2), headers=headers)
self.assertTrue(response.status_code == 200)
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# # Create alert and an alarm
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
ref_def = "CE01ISSP-XX099-01-CTDPFJ999"
# Create an alarm with user_event_notification - uses definition 1 and user_id 1
test_alarm = self.create_alert_alarm_definition(ref_def, event_type='alarm', uframe_id=2, severity=1)
# Create an alarm without user_event_notification - uses definition 1 and user_id 1
bad_alarm = self.create_alert_alarm_definition_wo_notification(ref_def, event_type='alarm',
uframe_filter_id=2, severity=1)
notification = self.create_user_event_notification(bad_alarm.id, 2)
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# GET alarm definition by SystemEventDefinition id
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
response = self.client.get(url_for('main.get_alert_alarm_def', id=test_alarm.id), headers=headers)
self.assertEquals(response.status_code, 200)
alarm_definition = json.loads(response.data)
self.assertTrue(alarm_definition is not None)
response = self.client.get(url_for('main.get_alert_alarm_def', id=bad_alarm.id), headers=headers)
self.assertEquals(response.status_code, 200)
bad_alarm_definition = json.loads(response.data)
self.assertTrue(bad_alarm_definition is not None)
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Get user_event_notifications (1)
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
response = self.client.get(url_for('main.get_user_event_notifications'), headers=headers)
self.assertEquals(response.status_code, 200)
data = json.loads(response.data)
self.assertTrue(data is not None)
notifications = data['notifications']
self.assertTrue(notifications is not None)
self.assertEquals(len(notifications), 2)
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Get user_event_notification by id=1
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
url = url_for('main.get_user_event_notification', id=1)
response = self.client.get(url, content_type=content_type, headers=headers)
self.assertEquals(response.status_code, 200)
notification = json.loads(response.data)
self.assertTrue(len(notification) > 0)
self.assertEquals(len(notification), 8)
"""
Error messages for the following tests:
1. bad_notification: {}
2. 'Invalid ID, user_event_notification record not found.'
3. 'Inconsistent ID, user_event_notification id provided in data does not match id provided.'
4. 'Inconsistent User ID, user_id provided in data does not match id.'
5. 'IntegrityError creating user_event_notification.'
6. (no error)
7. 'Insufficient data, or bad data format.'
"""
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# (1) Get user_event_notification by id=5 (doesn't exist) response: {}
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
url = url_for('main.get_user_event_notification', id=5)
response = self.client.get(url, content_type=content_type, headers=headers)
self.assertEquals(response.status_code, 200)
bad_notification = json.loads(response.data)
self.assertTrue(bad_notification is not None)
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# (2) (Negative) Update event_event_notification;
# error: 'Invalid ID, user_event_notification record not found.'
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
test = {'user_id': 1,
'system_event_definition_id': test_alarm.id,
'use_email': False,
'use_log': False,
'use_phone': False,
'use_redmine': False,
'use_sms': False,
'id': 50}
bad_stuff = json.dumps(test)
response = self.client.put(url_for('main.update_user_event_notification', id=50), headers=headers, data=bad_stuff)
self.assertEquals(response.status_code, 400)
notify_data = json.loads(response.data)
self.assertTrue(notify_data is not None)
self.assertTrue('message' in notify_data)
self.assertTrue(notify_data['message'] is not None)
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# (3) (Negative) Update event_event_notification
# error: 'Inconsistent ID, user_event_notification id provided in data does not match id provided.'
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
test = {'user_id': 999,
'system_event_definition_id': 1,
'use_email': True,
'use_log': True,
'use_phone': True,
'use_redmine': True,
'use_sms': True,
'id': 1}
good_stuff = json.dumps(test)
response = self.client.put(url_for('main.update_user_event_notification', id=800), headers=headers, data=good_stuff)
self.assertEquals(response.status_code, 400)
notify_data = json.loads(response.data)
self.assertTrue(notify_data is not None)
self.assertTrue('message' in notify_data)
self.assertTrue(notify_data['message'] is not None)
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# (4) (Negative) Update user_event_notification, with invalid user_id
# error: 'Inconsistent User ID, user_id provided in data does not match id.'
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
test = {'user_id': 1,
'system_event_definition_id': 2,
'use_email': True,
'use_log': True,
'use_phone': True,
'use_redmine': True,
'use_sms': True,
'id': 2}
good_stuff = json.dumps(test)
response = self.client.put(url_for('main.update_user_event_notification', id=2), headers=headers, data=good_stuff)
self.assertEquals(response.status_code, 400)
notify_data = json.loads(response.data)
self.assertTrue(notify_data is not None)
self.assertTrue('message' in notify_data)
self.assertTrue(notify_data['message'] is not None)
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# (5) # (Negative) Update event_event_notification
# error: 'IntegrityError creating user_event_notification.'
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
test = {'user_id': 2,
'system_event_definition_id': bad_alarm.id,
'use_email': False,
'use_log': 'log',
'use_phone': False,
'use_redmine': False,
'use_sms': False,
'id': 2}
bad_stuff = json.dumps(test)
response = self.client.put(url_for('main.update_user_event_notification', id=2), headers=headers, data=bad_stuff)
self.assertEquals(response.status_code, 400)
notify_data = json.loads(response.data)
self.assertTrue(notify_data is not None)
self.assertTrue('message' in notify_data)
self.assertTrue(notify_data['message'] is not None)
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# (6) (Positive) Update event_event_notification
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
test = {'user_id': 1,
'system_event_definition_id': 1,
'use_email': True,
'use_log': True,
'use_phone': True,
'use_redmine': True,
'use_sms': True,
'id': 1}
good_stuff = json.dumps(test)
response = self.client.put(url_for('main.update_user_event_notification', id=1), headers=headers, data=good_stuff)
self.assertEquals(response.status_code, 201)
notify_data = json.loads(response.data)
self.assertTrue(notify_data is not None)
self.assertTrue(len(notify_data) > 0)
notify = UserEventNotification.query.get(1)
for attribute in UserEventNotification.__table__.columns._data:
self.assertTrue(attribute in notify_data)
if attribute != 'user_id' or attribute != 'id':
self.assertEquals(getattr(notify,attribute), True)
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# (Negative) Update event_event_notification - expect failure, invalid user_id
# error 'Insufficient data, or bad data format.'
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
data = {'user_id': 10}
good_stuff = json.dumps(data)
response = self.client.put(url_for('main.update_user_event_notification', id=1), headers=headers, data=good_stuff)
self.assertEquals(response.status_code, 409)
notify_data = json.loads(response.data)
self.assertTrue(notify_data is not None)
self.assertTrue('message' in notify_data)
self.assertTrue(notify_data['message'] is not None)
if verbose: print '\n'
def test_user_event_notification_list_routes(self):
verbose = self.verbose
debug = self.debug
root = self.root
if verbose: print '\n'
content_type = 'application/json'
headers = self.get_api_headers('admin', 'test')
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# # Create alert and an alarm
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
reference_designator = "CE01ISSP-XX099-01-CTDPFJ999"
# Create and alert and an alarm
test_alarm = self.create_alert_alarm_definition(reference_designator, event_type='alarm', uframe_id=2, severity=1)
test_alert = self.create_alert_alarm_definition(reference_designator, event_type='alert', uframe_id=-1, severity=1)
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# GET alarm definition by SystemEventDefinition id
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
response = self.client.get(url_for('main.get_alert_alarm_def', id=test_alarm.id), headers=headers)
self.assertEquals(response.status_code, 200)
alarm_definition = json.loads(response.data)
if debug: print '\n -- alarm_definition: ', alarm_definition
self.assertTrue(alarm_definition is not None)
# Get user_event_notifications (1)
response = self.client.get(url_for('main.get_user_event_notifications'), headers=headers)
self.assertEquals(response.status_code, 200)
data = json.loads(response.data)
self.assertTrue(data is not None)
notifications = data['notifications']
self.assertTrue(notifications is not None)
self.assertEquals(len(notifications), 2)
url = url_for('main.get_user_event_notifications')
response = self.client.get(url, content_type=content_type, headers=headers)
self.assertEquals(response.status_code, 200)
data = json.loads(response.data)
self.assertTrue('notifications' in data)
self.assertTrue(len(data['notifications']) > 0)
self.assertEquals(len(data['notifications']), 2)
url = url_for('main.get_user_event_notification', id=1)
response = self.client.get(url, content_type=content_type, headers=headers)
self.assertEquals(response.status_code, 200)
notification = json.loads(response.data)
self.assertTrue(len(notification) > 0)
self.assertEquals(len(notification), 8)
for attribute in UserEventNotification.__table__.columns._data:
self.assertTrue(attribute in notification)
url = url_for('main.get_user_event_notifications')
url += '?user_id=1'
response = self.client.get(url, content_type=content_type, headers=headers)
self.assertEquals(response.status_code, 200)
data = json.loads(response.data)
self.assertTrue('notifications' in data)
self.assertTrue(len(data['notifications']) > 0)
self.assertEquals(len(data['notifications']), 2)
for attribute in UserEventNotification.__table__.columns._data:
self.assertTrue(attribute in notification)
url = url_for('main.get_user_event_notifications')
url += '?user_id=5'
response = self.client.get(url, content_type=content_type, headers=headers)
data = json.loads(response.data)
self.assertEquals(response.status_code, 400)
data = json.loads(response.data)
self.assertTrue('message' in data)
self.assertTrue(len(data['message']) > 0)
url = url_for('main.get_user_event_notifications')
url += '?user_id=A'
response = self.client.get(url, content_type=content_type, headers=headers)
data = json.loads(response.data)
self.assertEquals(response.status_code, 409)
data = json.loads(response.data)
self.assertTrue('message' in data)
self.assertTrue(len(data['message']) > 0)
if verbose: print '\n'
def test_user_event_notification_has_required_fields(self):
verbose = self.verbose
if verbose: print '\n'
headers = self.get_api_headers('admin', 'test')
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Create alarm definition
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
ref_def = "CE01ISSP-XX099-01-CTDPFJ999"
alarm1 = self.create_alert_alarm_definition_wo_notification(ref_def=ref_def, event_type='alarm',
uframe_filter_id=2, severity=1)
alert_alarm_definition_id = alarm1.id
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# (Negative) Create user_event_notification - without required field user_id
# (error: 'Insufficient data, or bad data format.')
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
data = {
'system_event_definition_id': 1,
'use_email': True,
'use_log': True,
'use_phone': True,
'use_redmine': True,
'use_sms': True,
}
good_stuff = json.dumps(data)
# Create event_event_notification
response = self.client.post(url_for('main.create_user_event_notification'), headers=headers, data=good_stuff)
notify_data = json.loads(response.data)
self.assertEquals(response.status_code, 409)
notify_data = json.loads(response.data)
self.assertTrue(notify_data is not None)
self.assertTrue(len(notify_data) > 0)
self.assertTrue('message' in notify_data)
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# (Negative) Create user_event_notification - required field use_sms is None
# (error: 'Insufficient data, or bad data format.')
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
data = {'user_id': None,
'system_event_definition_id': 1,
'use_email': True,
'use_log': True,
'use_phone': True,
'use_redmine': True,
'use_sms': None,
}
good_stuff = json.dumps(data)
# Create event_event_notification
response = self.client.post(url_for('main.create_user_event_notification'), headers=headers, data=good_stuff)
self.assertEquals(response.status_code, 409)
notify_data = json.loads(response.data)
self.assertTrue(notify_data is not None)
self.assertTrue(len(notify_data) > 0)
self.assertTrue('message' in notify_data)
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# (Positive) Create user_event_notification
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
data = {'user_id': 1,
'system_event_definition_id': 1,
'use_email': True,
'use_log': True,
'use_phone': True,
'use_redmine': True,
'use_sms': True,
}
good_stuff = json.dumps(data)
# Create event_event_notification
response = self.client.post(url_for('main.create_user_event_notification'), headers=headers, data=good_stuff)
self.assertEquals(response.status_code, 201)
notify_data = json.loads(response.data)
self.assertTrue(notify_data is not None)
self.assertTrue(len(notify_data) > 0)
if verbose: print '\n'
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# private test helper methods and tests
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def create_alert_alarm_definition(self, ref_def, event_type, uframe_id, severity):
# Note, creates a definition in test database only, just used to exercise SystemEventDefinition class
# but does NOT create alertfilter id in uframe. An alertfilter is created when the /alert_alarm_definition
# route is called.
#headers = self.get_api_headers('admin', 'test')
#valid_event_type = ['alert','alarm']
alert_alarm_definition = None
array_name = ref_def[0:0+2]
platform_name = ref_def[0:0+14]
instrument_parameter = 'temperature'
instrument_parameter_pdid = 'PD100'
operator = 'GREATER'
high_value = '10.0'
low_value = '1.0'
stream = 'ctdpf_j_cspp_instrument'
escalate_on = 5
escalate_boundary = 10
user_id = 1
use_email = False
use_redmine = True
use_phone = False
use_log = False
use_sms = True
create_time = dt.datetime.now()
'''
if ref_def == 'CP02PMCO-WFP01-02-DOFSTK000':
alert_alarm_definition = SystemEventDefinition(reference_designator=ref_def)
alert_alarm_definition.active = True
alert_alarm_definition.event_type = event_type
alert_alarm_definition.array_name = array_name
alert_alarm_definition.platform_name = platform_name
alert_alarm_definition.instrument_name = ref_def
alert_alarm_definition.instrument_parameter = instrument_parameter
alert_alarm_definition.instrument_parameter_pdid = instrument_parameter_pdid
alert_alarm_definition.operator = operator
alert_alarm_definition.created_time = create_time
alert_alarm_definition.uframe_filter_id = uframe_id
alert_alarm_definition.high_value = high_value
alert_alarm_definition.low_value = low_value
alert_alarm_definition.severity = severity
alert_alarm_definition.stream = stream
alert_alarm_definition.escalate_on = escalate_on
alert_alarm_definition.escalate_boundary = escalate_boundary
if event_type == 'alarm':
alert_alarm_definition.uframe_filter_id = uframe_id
try:
db.session.add(alert_alarm_definition)
db.session.commit()
except Exception as err:
print '\n *** CP02PMCO-WFP01-02-DOFSTK000 **** message: ', err.message
try:
# Create corresponding UserEventNotification when alert or alarm definition is created
new_id = UserEventNotification.insert_user_event_notification(
system_event_definition_id=alert_alarm_definition.id,
user_id=user_id,
use_email=use_email,
use_redmine=use_redmine,
use_phone=use_phone,
use_log=use_log,
use_sms=use_sms)
except Exception as err:
print '\n ******* Create CP02PMCO-WFP01-02-DOFSTK000 UserEventNotification message: \n', err.message
elif alert_alarm_definition == 'CP02PMCO-WFP01-03-CTDPFK000':
alert_alarm_definition = SystemEventDefinition(reference_designator=ref_def)
alert_alarm_definition.active = True
alert_alarm_definition.event_type = event_type
alert_alarm_definition.array_name = array_name
alert_alarm_definition.platform_name = platform_name
alert_alarm_definition.instrument_name = ref_def
alert_alarm_definition.instrument_parameter = instrument_parameter
alert_alarm_definition.instrument_parameter_pdid = instrument_parameter_pdid
alert_alarm_definition.operator = operator
alert_alarm_definition.created_time = create_time
alert_alarm_definition.uframe_filter_id = uframe_id
alert_alarm_definition.high_value = high_value
alert_alarm_definition.low_value = low_value
alert_alarm_definition.severity = severity
alert_alarm_definition.stream = stream
alert_alarm_definition.escalate_on = escalate_on
alert_alarm_definition.escalate_boundary = escalate_boundary
if event_type == 'alarm':
alert_alarm_definition.uframe_filter_id = uframe_id
try:
db.session.add(alert_alarm_definition)
db.session.commit()
except Exception as err:
print '\n ******* Create CP02PMCO-WFP01-03-CTDPFK000 alert_alarm_definition message: \n', err.message
try:
# Create corresponding UserEventNotification when alert or alarm definition is created
new_id = UserEventNotification.insert_user_event_notification(
system_event_definition_id=alert_alarm_definition.id,
user_id=user_id,
use_email=use_email,
use_redmine=use_redmine,
use_phone=use_phone,
use_log=use_log,
use_sms=use_sms)
except Exception as err:
print '\n ******* Create CP02PMCO-WFP01-03-CTDPFK000 UserEventNotification message: \n', err.message
elif alert_alarm_definition == 'CP02PMCO-WFP01-05-PARADK000':
alert_alarm_definition = SystemEventDefinition(reference_designator=ref_def)
alert_alarm_definition.active = True
alert_alarm_definition.event_type = event_type
alert_alarm_definition.array_name = array_name
alert_alarm_definition.platform_name = platform_name
alert_alarm_definition.instrument_name = ref_def
alert_alarm_definition.instrument_parameter = instrument_parameter
alert_alarm_definition.instrument_parameter_pdid = instrument_parameter_pdid
alert_alarm_definition.operator = operator
alert_alarm_definition.created_time = create_time
alert_alarm_definition.uframe_filter_id = uframe_id
alert_alarm_definition.high_value = high_value
alert_alarm_definition.low_value = low_value
alert_alarm_definition.severity = severity
alert_alarm_definition.stream = stream
alert_alarm_definition.escalate_on = escalate_on
alert_alarm_definition.escalate_boundary = escalate_boundary
if event_type == 'alarm':
alert_alarm_definition.uframe_filter_id = uframe_id
try:
db.session.add(alert_alarm_definition)
db.session.commit()
except Exception as err:
print '\n *** CP02PMCO-WFP01-05-PARADK000 **** message: ', err.message
try:
# Create corresponding UserEventNotification when alert or alarm definition is created
new_id = UserEventNotification.insert_user_event_notification(
system_event_definition_id=alert_alarm_definition.id,
user_id=user_id,
use_email=use_email,
use_redmine=use_redmine,
use_phone=use_phone,
use_log=use_log,
use_sms=use_sms)
except Exception as err:
print '\n ******* Create CP02PMCO-WFP01-05-PARADK000 UserEventNotification message: \n', err.message
else:
'''
if ref_def:
alert_alarm_definition = SystemEventDefinition(reference_designator=ref_def)
alert_alarm_definition.active = True
alert_alarm_definition.event_type = event_type
alert_alarm_definition.array_name = array_name
alert_alarm_definition.platform_name = platform_name
alert_alarm_definition.instrument_name = ref_def
alert_alarm_definition.instrument_parameter = instrument_parameter
alert_alarm_definition.instrument_parameter_pdid = instrument_parameter_pdid
alert_alarm_definition.operator = operator
alert_alarm_definition.created_time = create_time
alert_alarm_definition.uframe_filter_id = uframe_id
alert_alarm_definition.high_value = high_value
alert_alarm_definition.low_value = low_value
alert_alarm_definition.severity = severity
alert_alarm_definition.stream = stream
alert_alarm_definition.escalate_on = escalate_on
alert_alarm_definition.escalate_boundary = escalate_boundary
if event_type == 'alarm':
alert_alarm_definition.uframe_filter_id = uframe_id
try:
db.session.add(alert_alarm_definition)
db.session.commit()
except Exception as err:
print '\n *** %s **** message: %s' % (ref_def,err.message)
try:
# Create corresponding UserEventNotification when alert or alarm definition is created
new_id = UserEventNotification.insert_user_event_notification(
system_event_definition_id=alert_alarm_definition.id,
user_id=user_id,
use_email=use_email,
use_redmine=use_redmine,
use_phone=use_phone,
use_log=use_log,
use_sms=use_sms)
except Exception as err:
print '\n ******* Create UserEventNotification message: \n', err.message
return alert_alarm_definition
def create_alert_alarm_definition_wo_notification(self, ref_def, event_type, uframe_filter_id, severity):
# Note, creates a definition in test database only, just used to exercise SystemEventDefinition class
# but does NOT create alertfilter id in uframe. An alertfilter is created when the /alert_alarm_definition
# route is called.
array_name = ref_def[0:0+2]
platform_name = ref_def[0:0+14]
instrument_parameter = 'temperature'
instrument_parameter_pdid = 'PD100'
operator = 'GREATER'
high_value = '10.0'
low_value = '1.0'
stream = 'ctdpf_j_cspp_instrument'
escalate_on = 5
escalate_boundary = 10
create_time = dt.datetime.now()
alert_alarm_definition = SystemEventDefinition(reference_designator=ref_def)
alert_alarm_definition.active = True
alert_alarm_definition.event_type = event_type
alert_alarm_definition.array_name = array_name
alert_alarm_definition.platform_name = platform_name
alert_alarm_definition.instrument_name = ref_def
alert_alarm_definition.instrument_parameter = instrument_parameter
alert_alarm_definition.instrument_parameter_pdid = instrument_parameter_pdid
alert_alarm_definition.operator = operator
alert_alarm_definition.created_time = create_time
alert_alarm_definition.uframe_filter_id = uframe_filter_id
alert_alarm_definition.high_value = high_value
alert_alarm_definition.low_value = low_value
alert_alarm_definition.severity = severity
alert_alarm_definition.stream = stream
alert_alarm_definition.escalate_on = escalate_on
alert_alarm_definition.escalate_boundary = escalate_boundary
try:
db.session.add(alert_alarm_definition)
db.session.commit()
except Exception as err:
print '\n *** %s **** message: %s' % (ref_def,err.message)
return alert_alarm_definition
def create_user_event_notification(self, definition_id, user_id):
notification = None
user_id = user_id
use_email = False
use_redmine = True
use_phone = False
use_log = False
use_sms = True
try:
# Create corresponding UserEventNotification when alert or alarm definition is created
new_id = UserEventNotification.insert_user_event_notification(
system_event_definition_id=definition_id,
user_id=user_id,
use_email=use_email,
use_redmine=use_redmine,
use_phone=use_phone,
use_log=use_log,
use_sms=use_sms)
notification = UserEventNotification.query.get(new_id)
except Exception as err:
print '\n ******* Create CP02PMCO-WFP01-02-DOFSTK000 UserEventNotification message: \n', err.message
return notification
def get_uframe_info(self):
    """ Get uframe alertalarm configuration information. """
    # Read all three settings from the app config in one place.
    config = self.app.config
    return (config['UFRAME_ALERTS_URL'],
            config['UFRAME_TIMEOUT_CONNECT'],
            config['UFRAME_TIMEOUT_READ'])
'''
reference_designator = 'CE01ISSP-XX099-01-CTDPFJ999'
def make_fake_uframe_alertfilter_data(self):
""" Make uframe input data for evaluation/test processing; works in conjunction with manual ingestion process. """
result = {
"@class" : "com.raytheon.uf.common.ooi.dataplugin.alert.alertfilter.AlertFilterRecord",
"enabled" : True,
"stream" : "ctdpf_j_cspp_instrument",
"referenceDesignator" : {
"node" : "XX099",
"full" : True,
"subsite" : "CE01ISSP",
"sensor" : "01-CTDPFJ999"
},
"alertRule" : {
"filter" : "GREATER",
"valid" : True,
"highVal" : 31.0,
"errMessage" : None,
"lowVal" : 10.0
},
"pdId" : "PD440",
"eventId" : 2,
"alertMetadata" : {
"severity" : -2,
"description" : "Rule 42"
}
}
return result
'''
| 48.6957
| 124
| 0.564222
| 4,446
| 44,167
| 5.332659
| 0.075349
| 0.084778
| 0.09701
| 0.034544
| 0.874183
| 0.845881
| 0.820026
| 0.806487
| 0.781728
| 0.769876
| 0
| 0.01419
| 0.315507
| 44,167
| 906
| 125
| 48.749448
| 0.770045
| 0.152648
| 0
| 0.781651
| 0
| 0
| 0.114894
| 0.056666
| 0
| 0
| 0
| 0
| 0.201835
| 0
| null | null | 0.011009
| 0.020183
| null | null | 0.023853
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
696579e8970e3a6d687220052fc7c5b2075612e9
| 141
|
py
|
Python
|
2018.03.08/tennis/scoring.py
|
mobilityhouse/dojo
|
0d2d5fbe1ea240da7343f95a253b2aa61f45aed2
|
[
"BSD-3-Clause"
] | 1
|
2020-02-28T21:35:44.000Z
|
2020-02-28T21:35:44.000Z
|
2018.03.08/tennis/scoring.py
|
mobilityhouse/dojo
|
0d2d5fbe1ea240da7343f95a253b2aa61f45aed2
|
[
"BSD-3-Clause"
] | null | null | null |
2018.03.08/tennis/scoring.py
|
mobilityhouse/dojo
|
0d2d5fbe1ea240da7343f95a253b2aa61f45aed2
|
[
"BSD-3-Clause"
] | null | null | null |
class Score:
    """Tennis score tracker skeleton.

    All methods are stubs: scoring events are accepted but not recorded,
    and get_score() reports no score (None) until scoring is implemented.
    """

    def player_one_scores(self):
        """Record a point for player one (not yet implemented)."""
        # (Fix) Added the missing `self` parameter; calling the method on an
        # instance previously raised TypeError.
        pass

    def player_two_scores(self):
        """Record a point for player two (not yet implemented)."""
        # (Fix) Added the missing `self` parameter.
        pass

    def get_score(self):
        """Return the current score; None until scoring is implemented."""
        # (Fix) Added the missing `self` parameter.
        return None
| 12.818182
| 28
| 0.567376
| 17
| 141
| 4.411765
| 0.647059
| 0.24
| 0.346667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.361702
| 141
| 10
| 29
| 14.1
| 0.833333
| 0
| 0
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.428571
| true
| 0.285714
| 0
| 0.142857
| 0.714286
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 1
| 0
|
0
| 8
|
696b6d7571bd4f2ab71adaba63d946fc7e58d62e
| 226
|
py
|
Python
|
BPt/default/options/__init__.py
|
sahahn/BPt
|
1a2967f4ca3fa070b7417a4f59a218ae171daadd
|
[
"MIT"
] | 6
|
2020-11-06T15:45:28.000Z
|
2022-03-08T19:15:35.000Z
|
BPt/default/options/__init__.py
|
sahahn/BPt
|
1a2967f4ca3fa070b7417a4f59a218ae171daadd
|
[
"MIT"
] | 14
|
2020-10-20T13:55:23.000Z
|
2022-01-25T17:36:07.000Z
|
BPt/default/options/__init__.py
|
sahahn/BPt
|
1a2967f4ca3fa070b7417a4f59a218ae171daadd
|
[
"MIT"
] | 2
|
2020-10-23T19:48:53.000Z
|
2020-11-06T15:46:04.000Z
|
from . import (imputers, models, ensembles, feature_selectors,
loaders, scalers, transformers)
__all__ = ['imputers', 'models', 'ensembles', 'feature_selectors',
'loaders', 'scalers', 'transformers']
| 45.2
| 66
| 0.659292
| 19
| 226
| 7.526316
| 0.578947
| 0.195804
| 0.321678
| 0.41958
| 0.909091
| 0.909091
| 0.909091
| 0.909091
| 0
| 0
| 0
| 0
| 0.199115
| 226
| 5
| 67
| 45.2
| 0.790055
| 0
| 0
| 0
| 0
| 0
| 0.290749
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.25
| 0
| 0.25
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
696fc787012fb675e9a002d5ff43efe89e153b27
| 409
|
py
|
Python
|
Mundo 3/ex109/test1.py
|
RafaelSdm/Curso-de-Python
|
ae933ba80ee00ad5160bd5d05cf4b21007943fd4
|
[
"MIT"
] | 1
|
2021-03-10T21:53:38.000Z
|
2021-03-10T21:53:38.000Z
|
Mundo 3/ex109/test1.py
|
RafaelSdm/Curso-de-Python
|
ae933ba80ee00ad5160bd5d05cf4b21007943fd4
|
[
"MIT"
] | null | null | null |
Mundo 3/ex109/test1.py
|
RafaelSdm/Curso-de-Python
|
ae933ba80ee00ad5160bd5d05cf4b21007943fd4
|
[
"MIT"
] | null | null | null |
# Course exercise script: read a currency amount from the user and print
# derived values formatted by the project-local `moeda` helper module.
from ex109 import moeda

# Prompt text and output are intentionally in Portuguese (course exercise).
# float() raises ValueError on non-numeric input.
dinheiro = float(input("informe o dinheiro"))
# NOTE(review): assumes ex109.moeda provides moeda/dobro/metade/porcentagem/
# reduzindo, and that the True flag requests currency formatting — confirm
# against the module itself.
print(f"o dobro de {moeda.moeda(dinheiro)} é {moeda.dobro(dinheiro,True)}")
print(f"a metade de {moeda.moeda(dinheiro)} é {moeda.metade(dinheiro,True)}")
print(f"aumentando 10% de {moeda.moeda(dinheiro)} temos R${moeda.porcentagem(dinheiro,True)}")
print(f"reduzindo 13% de {moeda.moeda(dinheiro)} temos {moeda.reduzindo(dinheiro, True)}")
| 68.166667
| 94
| 0.748166
| 63
| 409
| 4.857143
| 0.380952
| 0.212418
| 0.156863
| 0.261438
| 0.333333
| 0.169935
| 0
| 0
| 0
| 0
| 0
| 0.018617
| 0.080685
| 409
| 6
| 95
| 68.166667
| 0.795213
| 0
| 0
| 0
| 0
| 0.333333
| 0.765854
| 0.514634
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.166667
| 0
| 0.166667
| 0.666667
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
15f020578759651117795f4bb9446024ac30e11f
| 179,283
|
py
|
Python
|
memsource_cli/api/project_api.py
|
zerodayz/memsource-cli-client
|
c2574f1467539a49e6637c874e88d75c7ef789b3
|
[
"Apache-2.0"
] | 1
|
2020-07-24T16:29:32.000Z
|
2020-07-24T16:29:32.000Z
|
memsource_cli/api/project_api.py
|
zerodayz/memsource-cli-client
|
c2574f1467539a49e6637c874e88d75c7ef789b3
|
[
"Apache-2.0"
] | null | null | null |
memsource_cli/api/project_api.py
|
zerodayz/memsource-cli-client
|
c2574f1467539a49e6637c874e88d75c7ef789b3
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Memsource REST API
Welcome to Memsource's API documentation. To view our legacy APIs please [visit our documentation](https://wiki.memsource.com/wiki/Memsource_API) and for more information about our new APIs, [visit our blog](https://www.memsource.com/blog/2017/10/24/introducing-rest-apis-qa-with-the-memsource-api-team/). If you have any questions, please contact [Memsource Support](<mailto:support@memsource.com>). # noqa: E501
OpenAPI spec version: Latest
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from memsource_cli.api_client import ApiClient
class ProjectApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    # Fall back to a default ApiClient when the caller supplies none.
    self.api_client = ApiClient() if api_client is None else api_client
def add_target_language_to_project(self, project_uid, **kwargs):
    """Add target languages

    Add target languages to project. Synchronous by default; pass
    async_req=True to get the request thread instead of the result.

    >>> thread = api.add_target_language_to_project(project_uid, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str project_uid: (required)
    :param AddTargetLangDto body:
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this convenience wrapper always want just the data.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous: hand back the request thread immediately.
        return self.add_target_language_to_project_with_http_info(project_uid, **kwargs)
    # Synchronous: block and return the response data.
    (data) = self.add_target_language_to_project_with_http_info(project_uid, **kwargs)
    return data
def add_target_language_to_project_with_http_info(self, project_uid, **kwargs):
    """Add target languages

    Add target languages to project. Synchronous by default; pass
    async_req=True to get the request thread instead of the result.

    >>> thread = api.add_target_language_to_project_with_http_info(project_uid, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str project_uid: (required)
    :param AddTargetLangDto body:
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Every keyword the caller may legally pass to this endpoint.
    all_params = ['project_uid', 'body']
    all_params.extend(['async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])

    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_target_language_to_project" % key
            )
        params[key] = val
    del params['kwargs']

    # The project UID is mandatory; reject a missing or None value early.
    if params.get('project_uid') is None:
        raise ValueError("Missing the required parameter `project_uid` when calling `add_target_language_to_project`")

    collection_formats = {}

    path_params = {}
    if 'project_uid' in params:
        path_params['projectUid'] = params['project_uid']

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = params.get('body')

    # Negotiate JSON for both request and response bodies.
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json'])

    # No endpoint-specific authentication settings.
    auth_settings = []

    return self.api_client.call_api(
        '/api2/v1/projects/{projectUid}/targetLangs', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def add_workflow_steps(self, project_uid, **kwargs):
    """Add workflow steps

    Synchronous by default; pass async_req=True to get the request thread
    instead of the result.

    >>> thread = api.add_workflow_steps(project_uid, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str project_uid: (required)
    :param AddWorkflowStepsDto body:
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this convenience wrapper always want just the data.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous: hand back the request thread immediately.
        return self.add_workflow_steps_with_http_info(project_uid, **kwargs)
    # Synchronous: block and return the response data.
    (data) = self.add_workflow_steps_with_http_info(project_uid, **kwargs)
    return data
def add_workflow_steps_with_http_info(self, project_uid, **kwargs):
    """Add workflow steps

    Synchronous by default; pass async_req=True to get the request thread
    instead of the result.

    >>> thread = api.add_workflow_steps_with_http_info(project_uid, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str project_uid: (required)
    :param AddWorkflowStepsDto body:
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Every keyword the caller may legally pass to this endpoint.
    all_params = ['project_uid', 'body']
    all_params.extend(['async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])

    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_workflow_steps" % key
            )
        params[key] = val
    del params['kwargs']

    # The project UID is mandatory; reject a missing or None value early.
    if params.get('project_uid') is None:
        raise ValueError("Missing the required parameter `project_uid` when calling `add_workflow_steps`")

    collection_formats = {}

    path_params = {}
    if 'project_uid' in params:
        path_params['projectUid'] = params['project_uid']

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = params.get('body')

    # Negotiate JSON for both request and response bodies.
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json'])

    # No endpoint-specific authentication settings.
    auth_settings = []

    return self.api_client.call_api(
        '/api2/v1/projects/{projectUid}/workflowSteps', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def assign_linguists_from_template(self, template_id, project_uid, **kwargs):
    """Assigns providers from template

    Synchronous by default; pass async_req=True to get the request thread
    instead of the result.

    >>> thread = api.assign_linguists_from_template(template_id, project_uid, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int template_id: (required)
    :param str project_uid: (required)
    :return: JobPartsDto
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this convenience wrapper always want just the data.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous: hand back the request thread immediately.
        return self.assign_linguists_from_template_with_http_info(template_id, project_uid, **kwargs)
    # Synchronous: block and return the response data.
    (data) = self.assign_linguists_from_template_with_http_info(template_id, project_uid, **kwargs)
    return data
def assign_linguists_from_template_with_http_info(self, template_id, project_uid, **kwargs):
    """Assigns providers from template

    Synchronous by default; pass async_req=True to get the request thread
    instead of the result.

    >>> thread = api.assign_linguists_from_template_with_http_info(template_id, project_uid, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int template_id: (required)
    :param str project_uid: (required)
    :return: JobPartsDto
             If the method is called asynchronously,
             returns the request thread.
    """
    # Every keyword the caller may legally pass to this endpoint.
    all_params = ['template_id', 'project_uid']
    all_params.extend(['async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])

    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method assign_linguists_from_template" % key
            )
        params[key] = val
    del params['kwargs']

    # Both path parameters are mandatory; reject missing or None values early.
    if params.get('template_id') is None:
        raise ValueError("Missing the required parameter `template_id` when calling `assign_linguists_from_template`")
    if params.get('project_uid') is None:
        raise ValueError("Missing the required parameter `project_uid` when calling `assign_linguists_from_template`")

    collection_formats = {}

    path_params = {}
    if 'template_id' in params:
        path_params['templateId'] = params['template_id']
    if 'project_uid' in params:
        path_params['projectUid'] = params['project_uid']

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    # This endpoint takes no request body.
    body_params = None

    # Negotiate JSON for both request and response bodies.
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json'])

    # No endpoint-specific authentication settings.
    auth_settings = []

    return self.api_client.call_api(
        '/api2/v1/projects/{projectUid}/applyTemplate/{templateId}/assignProviders', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='JobPartsDto',
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def assign_linguists_from_template_to_job_parts(self, template_id, project_uid, **kwargs):
    """Assigns providers from template (specific jobs)

    Synchronous by default; pass async_req=True to get the request thread
    instead of the result.

    >>> thread = api.assign_linguists_from_template_to_job_parts(template_id, project_uid, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int template_id: (required)
    :param str project_uid: (required)
    :param JobPartReferences body:
    :return: JobPartsDto
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this convenience wrapper always want just the data.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous: hand back the request thread immediately.
        return self.assign_linguists_from_template_to_job_parts_with_http_info(template_id, project_uid, **kwargs)
    # Synchronous: block and return the response data.
    (data) = self.assign_linguists_from_template_to_job_parts_with_http_info(template_id, project_uid, **kwargs)
    return data
def assign_linguists_from_template_to_job_parts_with_http_info(self, template_id, project_uid, **kwargs):  # noqa: E501
    """Assigns providers from template (specific jobs).

    POST /api2/v1/projects/{projectUid}/applyTemplate/{templateId}/assignProviders/forJobParts

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response.

    :param async_req bool
    :param int template_id: (required)
    :param str project_uid: (required)
    :param JobPartReferences body:
    :return: JobPartsDto
    :raises TypeError: if an unknown keyword argument is supplied
    :raises ValueError: if a required parameter is missing or None
    """
    all_params = ['template_id', 'project_uid', 'body']
    all_params.extend(['async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])
    # Merge the positional arguments and **kwargs into one dict,
    # rejecting anything this endpoint does not accept.
    params = locals()
    # dict.items() iterates on py2 and py3 alike; the six shim is unnecessary.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method assign_linguists_from_template_to_job_parts" % key
            )
        params[key] = val
    del params['kwargs']
    # Verify the required parameters (positional, so always present in
    # locals(); only a None value can be wrong).
    if params.get('template_id') is None:
        raise ValueError("Missing the required parameter `template_id` when calling `assign_linguists_from_template_to_job_parts`")  # noqa: E501
    if params.get('project_uid') is None:
        raise ValueError("Missing the required parameter `project_uid` when calling `assign_linguists_from_template_to_job_parts`")  # noqa: E501

    collection_formats = {}
    path_params = {
        'templateId': params['template_id'],
        'projectUid': params['project_uid'],
    }
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = params.get('body')
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json'])
    # No endpoint-specific authentication settings.
    auth_settings = []
    return self.api_client.call_api(
        '/api2/v1/projects/{projectUid}/applyTemplate/{templateId}/assignProviders/forJobParts', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='JobPartsDto',
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def assign_vendor_to_project(self, project_uid, **kwargs):  # noqa: E501
    """Assign vendor.

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response payload.

    :param async_req bool
    :param str project_uid: (required)
    :param AssignVendorDto body:
    :return: None
    """
    # Only the payload is of interest here, never the raw HTTP response.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant handles both sync and async requests.
    return self.assign_vendor_to_project_with_http_info(project_uid, **kwargs)
def assign_vendor_to_project_with_http_info(self, project_uid, **kwargs):  # noqa: E501
    """Assign vendor.

    POST /api2/v1/projects/{projectUid}/assignVendor

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response.

    :param async_req bool
    :param str project_uid: (required)
    :param AssignVendorDto body:
    :return: None
    :raises TypeError: if an unknown keyword argument is supplied
    :raises ValueError: if `project_uid` is missing or None
    """
    all_params = ['project_uid', 'body']
    all_params.extend(['async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])
    # Merge the positional arguments and **kwargs into one dict,
    # rejecting anything this endpoint does not accept.
    params = locals()
    # dict.items() iterates on py2 and py3 alike; the six shim is unnecessary.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method assign_vendor_to_project" % key
            )
        params[key] = val
    del params['kwargs']
    # Verify the required parameter.
    if params.get('project_uid') is None:
        raise ValueError("Missing the required parameter `project_uid` when calling `assign_vendor_to_project`")  # noqa: E501

    collection_formats = {}
    path_params = {'projectUid': params['project_uid']}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = params.get('body')
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json'])
    # No endpoint-specific authentication settings.
    auth_settings = []
    return self.api_client.call_api(
        '/api2/v1/projects/{projectUid}/assignVendor', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def assignable_templates(self, project_uid, **kwargs):  # noqa: E501
    """List assignable templates.

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response payload.

    :param async_req bool
    :param str project_uid: (required)
    :return: AssignableTemplatesDto
    """
    # Only the payload is of interest here, never the raw HTTP response.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant handles both sync and async requests.
    return self.assignable_templates_with_http_info(project_uid, **kwargs)
def assignable_templates_with_http_info(self, project_uid, **kwargs):  # noqa: E501
    """List assignable templates.

    GET /api2/v1/projects/{projectUid}/assignableTemplates

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response.

    :param async_req bool
    :param str project_uid: (required)
    :return: AssignableTemplatesDto
    :raises TypeError: if an unknown keyword argument is supplied
    :raises ValueError: if `project_uid` is missing or None
    """
    all_params = ['project_uid']
    all_params.extend(['async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])
    # Merge the positional argument and **kwargs into one dict,
    # rejecting anything this endpoint does not accept.
    params = locals()
    # dict.items() iterates on py2 and py3 alike; the six shim is unnecessary.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method assignable_templates" % key
            )
        params[key] = val
    del params['kwargs']
    # Verify the required parameter.
    if params.get('project_uid') is None:
        raise ValueError("Missing the required parameter `project_uid` when calling `assignable_templates`")  # noqa: E501

    collection_formats = {}
    path_params = {'projectUid': params['project_uid']}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None  # GET request: no request body
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json'])
    # No endpoint-specific authentication settings.
    auth_settings = []
    return self.api_client.call_api(
        '/api2/v1/projects/{projectUid}/assignableTemplates', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='AssignableTemplatesDto',
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def clone_project(self, project_uid, **kwargs):  # noqa: E501
    """Clone project.

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response payload.

    :param async_req bool
    :param str project_uid: (required)
    :param CloneProjectDto body:
    :return: AbstractProjectDto
    """
    # Only the payload is of interest here, never the raw HTTP response.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant handles both sync and async requests.
    return self.clone_project_with_http_info(project_uid, **kwargs)
def clone_project_with_http_info(self, project_uid, **kwargs):  # noqa: E501
    """Clone project.

    POST /api2/v1/projects/{projectUid}/clone

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response.

    :param async_req bool
    :param str project_uid: (required)
    :param CloneProjectDto body:
    :return: AbstractProjectDto
    :raises TypeError: if an unknown keyword argument is supplied
    :raises ValueError: if `project_uid` is missing or None
    """
    all_params = ['project_uid', 'body']
    all_params.extend(['async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])
    # Merge the positional arguments and **kwargs into one dict,
    # rejecting anything this endpoint does not accept.
    params = locals()
    # dict.items() iterates on py2 and py3 alike; the six shim is unnecessary.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method clone_project" % key
            )
        params[key] = val
    del params['kwargs']
    # Verify the required parameter.
    if params.get('project_uid') is None:
        raise ValueError("Missing the required parameter `project_uid` when calling `clone_project`")  # noqa: E501

    collection_formats = {}
    path_params = {'projectUid': params['project_uid']}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = params.get('body')
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json'])
    # No endpoint-specific authentication settings.
    auth_settings = []
    return self.api_client.call_api(
        '/api2/v1/projects/{projectUid}/clone', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='AbstractProjectDto',
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def create_note_ref(self, project_uid, **kwargs):  # noqa: E501
    """Create project reference file.

    Accepts application/octet-stream or application/json.<br>
    <b>application/json</b> - Note will be converted to .txt.<br>
    <b>application/octet-stream</b> - Content-Disposition header is required

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response payload.

    :param async_req bool
    :param str project_uid: (required)
    :param CreateReferenceFileNoteDto body:
    :param str content_disposition: <b>Required</b> for application/octet-stream.<br> Example: filename*=UTF-8''YourFileName.txt
    :return: ReferenceFileReference
    """
    # Only the payload is of interest here, never the raw HTTP response.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant handles both sync and async requests.
    return self.create_note_ref_with_http_info(project_uid, **kwargs)
def create_note_ref_with_http_info(self, project_uid, **kwargs):  # noqa: E501
    """Create project reference file.

    POST /api2/v1/projects/{projectUid}/references

    Accepts application/octet-stream or application/json.<br>
    <b>application/json</b> - Note will be converted to .txt.<br>
    <b>application/octet-stream</b> - Content-Disposition header is required

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response.

    :param async_req bool
    :param str project_uid: (required)
    :param CreateReferenceFileNoteDto body:
    :param str content_disposition: <b>Required</b> for application/octet-stream.<br> Example: filename*=UTF-8''YourFileName.txt
    :return: ReferenceFileReference
    :raises TypeError: if an unknown keyword argument is supplied
    :raises ValueError: if `project_uid` is missing or None
    """
    all_params = ['project_uid', 'body', 'content_disposition']
    all_params.extend(['async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])
    # Merge the positional arguments and **kwargs into one dict,
    # rejecting anything this endpoint does not accept.
    params = locals()
    # dict.items() iterates on py2 and py3 alike; the six shim is unnecessary.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_note_ref" % key
            )
        params[key] = val
    del params['kwargs']
    # Verify the required parameter.
    if params.get('project_uid') is None:
        raise ValueError("Missing the required parameter `project_uid` when calling `create_note_ref`")  # noqa: E501

    collection_formats = {}
    path_params = {'projectUid': params['project_uid']}
    query_params = []
    header_params = {}
    # Content-Disposition is only required for octet-stream uploads, so it
    # is forwarded only when explicitly supplied.
    if 'content_disposition' in params:
        header_params['Content-Disposition'] = params['content_disposition']
    form_params = []
    local_var_files = {}
    body_params = params.get('body')
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json', 'application/octet-stream'])
    # No endpoint-specific authentication settings.
    auth_settings = []
    return self.api_client.call_api(
        '/api2/v1/projects/{projectUid}/references', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ReferenceFileReference',
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def create_project(self, **kwargs):  # noqa: E501
    """Create project.

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response payload.

    :param async_req bool
    :param CreateProjectDto body:
    :return: AbstractProjectDto
    """
    # Only the payload is of interest here, never the raw HTTP response.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant handles both sync and async requests.
    return self.create_project_with_http_info(**kwargs)
def create_project_with_http_info(self, **kwargs):  # noqa: E501
    """Create project.

    POST /api2/v1/projects

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response.

    :param async_req bool
    :param CreateProjectDto body:
    :return: AbstractProjectDto
    :raises TypeError: if an unknown keyword argument is supplied
    """
    # 'token' is an accepted (undocumented) query parameter — kept for
    # backward compatibility with existing callers.
    all_params = ['body', 'token']
    all_params.extend(['async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])
    # Merge **kwargs into one dict, rejecting anything this endpoint
    # does not accept.
    params = locals()
    # dict.items() iterates on py2 and py3 alike; the six shim is unnecessary.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_project" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}
    path_params = {}
    query_params = []
    if 'token' in params:
        query_params.append(('token', params['token']))
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = params.get('body')
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json'])
    # No endpoint-specific authentication settings.
    auth_settings = []
    return self.api_client.call_api(
        '/api2/v1/projects', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='AbstractProjectDto',
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def create_project_from_template_v2(self, template_id, **kwargs):  # noqa: E501
    """Create project from template.

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response payload.

    :param async_req bool
    :param int template_id: (required)
    :param CreateProjectFromTemplateV2Dto body:
    :return: AbstractProjectDtoV2
    """
    # Only the payload is of interest here, never the raw HTTP response.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant handles both sync and async requests.
    return self.create_project_from_template_v2_with_http_info(template_id, **kwargs)
def create_project_from_template_v2_with_http_info(self, template_id, **kwargs):  # noqa: E501
    """Create project from template.

    POST /api2/v2/projects/applyTemplate/{templateId}

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response.

    :param async_req bool
    :param int template_id: (required)
    :param CreateProjectFromTemplateV2Dto body:
    :return: AbstractProjectDtoV2
    :raises TypeError: if an unknown keyword argument is supplied
    :raises ValueError: if `template_id` is missing or None
    """
    # 'token' is an accepted (undocumented) query parameter — kept for
    # backward compatibility with existing callers.
    all_params = ['template_id', 'body', 'token']
    all_params.extend(['async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])
    # Merge the positional argument and **kwargs into one dict,
    # rejecting anything this endpoint does not accept.
    params = locals()
    # dict.items() iterates on py2 and py3 alike; the six shim is unnecessary.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_project_from_template_v2" % key
            )
        params[key] = val
    del params['kwargs']
    # Verify the required parameter.
    if params.get('template_id') is None:
        raise ValueError("Missing the required parameter `template_id` when calling `create_project_from_template_v2`")  # noqa: E501

    collection_formats = {}
    path_params = {'templateId': params['template_id']}
    query_params = []
    if 'token' in params:
        query_params.append(('token', params['token']))
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = params.get('body')
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json'])
    # No endpoint-specific authentication settings.
    auth_settings = []
    return self.api_client.call_api(
        '/api2/v2/projects/applyTemplate/{templateId}', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='AbstractProjectDtoV2',
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def delete_project(self, project_uid, **kwargs):  # noqa: E501
    """Delete project.

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response payload.

    :param async_req bool
    :param str project_uid: (required)
    :param bool purge:
    :return: None
    """
    # Only the payload is of interest here, never the raw HTTP response.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant handles both sync and async requests.
    return self.delete_project_with_http_info(project_uid, **kwargs)
def delete_project_with_http_info(self, project_uid, **kwargs):  # noqa: E501
    """Delete project.

    DELETE /api2/v1/projects/{projectUid}

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response.

    :param async_req bool
    :param str project_uid: (required)
    :param bool purge:
    :return: None
    :raises TypeError: if an unknown keyword argument is supplied
    :raises ValueError: if `project_uid` is missing or None
    """
    # 'token' is an accepted (undocumented) query parameter — kept for
    # backward compatibility with existing callers.
    all_params = ['project_uid', 'purge', 'token']
    all_params.extend(['async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])
    # Merge the positional argument and **kwargs into one dict,
    # rejecting anything this endpoint does not accept.
    params = locals()
    # dict.items() iterates on py2 and py3 alike; the six shim is unnecessary.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_project" % key
            )
        params[key] = val
    del params['kwargs']
    # Verify the required parameter.
    if params.get('project_uid') is None:
        raise ValueError("Missing the required parameter `project_uid` when calling `delete_project`")  # noqa: E501

    collection_formats = {}
    path_params = {'projectUid': params['project_uid']}
    query_params = []
    if 'purge' in params:
        query_params.append(('purge', params['purge']))
    if 'token' in params:
        query_params.append(('token', params['token']))
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None  # DELETE request: no request body
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json'])
    # No endpoint-specific authentication settings.
    auth_settings = []
    return self.api_client.call_api(
        '/api2/v1/projects/{projectUid}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def download_reference(self, project_uid, reference_file_id, **kwargs):  # noqa: E501
    """Get project reference.

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response payload.

    :param async_req bool
    :param str project_uid: (required)
    :param str reference_file_id: (required)
    :return: None
    """
    # Only the payload is of interest here, never the raw HTTP response.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant handles both sync and async requests.
    return self.download_reference_with_http_info(
        project_uid, reference_file_id, **kwargs)
def download_reference_with_http_info(self, project_uid, reference_file_id, **kwargs):  # noqa: E501
    """Get project reference.

    GET /api2/v1/projects/{projectUid}/references/{referenceFileId}

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response.

    :param async_req bool
    :param str project_uid: (required)
    :param str reference_file_id: (required)
    :return: None
    :raises TypeError: if an unknown keyword argument is supplied
    :raises ValueError: if a required parameter is missing or None
    """
    all_params = ['project_uid', 'reference_file_id']
    all_params.extend(['async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])
    # Merge the positional arguments and **kwargs into one dict,
    # rejecting anything this endpoint does not accept.
    params = locals()
    # dict.items() iterates on py2 and py3 alike; the six shim is unnecessary.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method download_reference" % key
            )
        params[key] = val
    del params['kwargs']
    # Verify the required parameters.
    if params.get('project_uid') is None:
        raise ValueError("Missing the required parameter `project_uid` when calling `download_reference`")  # noqa: E501
    if params.get('reference_file_id') is None:
        raise ValueError("Missing the required parameter `reference_file_id` when calling `download_reference`")  # noqa: E501

    collection_formats = {}
    path_params = {
        'projectUid': params['project_uid'],
        'referenceFileId': params['reference_file_id'],
    }
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None  # GET request: no request body
    # HTTP header `Accept` — the reference file content is binary.
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/octet-stream'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json'])
    # No endpoint-specific authentication settings.
    auth_settings = []
    return self.api_client.call_api(
        '/api2/v1/projects/{projectUid}/references/{referenceFileId}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def edit_project(self, project_uid, **kwargs):  # noqa: E501
    """Edit project.

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response payload.

    :param async_req bool
    :param str project_uid: (required)
    :param EditProjectDto body:
    :return: AbstractProjectDto
    """
    # Only the payload is of interest here, never the raw HTTP response.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant handles both sync and async requests.
    return self.edit_project_with_http_info(project_uid, **kwargs)
def edit_project_with_http_info(self, project_uid, **kwargs):  # noqa: E501
    """Edit project.

    PUT /api2/v1/projects/{projectUid}

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the response.

    :param async_req bool
    :param str project_uid: (required)
    :param EditProjectDto body:
    :return: AbstractProjectDto
    :raises TypeError: if an unknown keyword argument is supplied
    :raises ValueError: if `project_uid` is missing or None
    """
    all_params = ['project_uid', 'body']
    all_params.extend(['async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])
    # Merge the positional argument and **kwargs into one dict,
    # rejecting anything this endpoint does not accept.
    params = locals()
    # dict.items() iterates on py2 and py3 alike; the six shim is unnecessary.
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method edit_project" % key
            )
        params[key] = val
    del params['kwargs']
    # Verify the required parameter.
    if params.get('project_uid') is None:
        raise ValueError("Missing the required parameter `project_uid` when calling `edit_project`")  # noqa: E501

    collection_formats = {}
    path_params = {'projectUid': params['project_uid']}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = params.get('body')
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json'])
    # No endpoint-specific authentication settings.
    auth_settings = []
    return self.api_client.call_api(
        '/api2/v1/projects/{projectUid}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='AbstractProjectDto',
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def edit_project_access_settings(self, project_uid, **kwargs):  # noqa: E501
    """Edit access and security settings  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.edit_project_access_settings(project_uid, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str project_uid: (required)
    :param EditProjectSecuritySettingsDto body:
    :return: ProjectSecuritySettingsDto
             If the method is called asynchronously,
             returns the request thread.
    """
    # Force the underlying call to hand back only the response data.
    kwargs['_return_http_data_only'] = True
    if not kwargs.get('async_req'):
        # Synchronous call: return the deserialized data directly.
        data = self.edit_project_access_settings_with_http_info(project_uid, **kwargs)  # noqa: E501
        return data
    # Asynchronous call: return the request thread; result via .get().
    return self.edit_project_access_settings_with_http_info(project_uid, **kwargs)  # noqa: E501
def edit_project_access_settings_with_http_info(self, project_uid, **kwargs):  # noqa: E501
    """Edit access and security settings  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.edit_project_access_settings_with_http_info(project_uid, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str project_uid: (required)
    :param EditProjectSecuritySettingsDto body:
    :return: ProjectSecuritySettingsDto
             If the method is called asynchronously,
             returns the request thread.
    """
    # Documented parameters plus the generic transport options accepted
    # by every generated API method.
    all_params = ['project_uid', 'body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot of the current local names; validated kwargs entries are
    # merged into this dict below and read back via params.get(...).
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method edit_project_access_settings" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'project_uid' is set
    if ('project_uid' not in params or
            params['project_uid'] is None):
        raise ValueError("Missing the required parameter `project_uid` when calling `edit_project_access_settings`")  # noqa: E501
    collection_formats = {}
    # Path parameter substituted into {projectUid} in the resource URL.
    path_params = {}
    if 'project_uid' in params:
        path_params['projectUid'] = params['project_uid']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    # Optional JSON request body (EditProjectSecuritySettingsDto).
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    # NOTE(review): no per-call auth settings; authentication appears to
    # be handled by the api_client configuration — confirm.
    auth_settings = []  # noqa: E501
    return self.api_client.call_api(
        '/api2/v1/projects/{projectUid}/accessSettings', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ProjectSecuritySettingsDto',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def enabled_quality_checks(self, project_uid, **kwargs):  # noqa: E501
    """Get QA checks  # noqa: E501

    Returns enabled quality assurance settings.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.enabled_quality_checks(project_uid, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str project_uid: (required)
    :return: EnabledQualityChecksDto
             If the method is called asynchronously,
             returns the request thread.
    """
    # Force the underlying call to hand back only the response data.
    kwargs['_return_http_data_only'] = True
    if not kwargs.get('async_req'):
        # Synchronous call: return the deserialized data directly.
        data = self.enabled_quality_checks_with_http_info(project_uid, **kwargs)  # noqa: E501
        return data
    # Asynchronous call: return the request thread; result via .get().
    return self.enabled_quality_checks_with_http_info(project_uid, **kwargs)  # noqa: E501
def enabled_quality_checks_with_http_info(self, project_uid, **kwargs):  # noqa: E501
    """Get QA checks  # noqa: E501

    Returns enabled quality assurance settings.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.enabled_quality_checks_with_http_info(project_uid, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str project_uid: (required)
    :return: EnabledQualityChecksDto
             If the method is called asynchronously,
             returns the request thread.
    """
    # Documented parameters plus the generic transport options accepted
    # by every generated API method.
    all_params = ['project_uid']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot of the current local names; validated kwargs entries are
    # merged into this dict below and read back via params.get(...).
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method enabled_quality_checks" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'project_uid' is set
    if ('project_uid' not in params or
            params['project_uid'] is None):
        raise ValueError("Missing the required parameter `project_uid` when calling `enabled_quality_checks`")  # noqa: E501
    collection_formats = {}
    # Path parameter substituted into {projectUid} in the resource URL.
    path_params = {}
    if 'project_uid' in params:
        path_params['projectUid'] = params['project_uid']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    # GET request: no body is sent.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    # NOTE(review): no per-call auth settings; authentication appears to
    # be handled by the api_client configuration — confirm.
    auth_settings = []  # noqa: E501
    return self.api_client.call_api(
        '/api2/v1/projects/{projectUid}/qaSettingsChecks', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='EnabledQualityChecksDto',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_analyse_settings_for_project(self, project_uid, **kwargs):  # noqa: E501
    """Get analyse settings  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_analyse_settings_for_project(project_uid, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str project_uid: (required)
    :return: AnalyseSettingsDto
             If the method is called asynchronously,
             returns the request thread.
    """
    # Force the underlying call to hand back only the response data.
    kwargs['_return_http_data_only'] = True
    if not kwargs.get('async_req'):
        # Synchronous call: return the deserialized data directly.
        data = self.get_analyse_settings_for_project_with_http_info(project_uid, **kwargs)  # noqa: E501
        return data
    # Asynchronous call: return the request thread; result via .get().
    return self.get_analyse_settings_for_project_with_http_info(project_uid, **kwargs)  # noqa: E501
def get_analyse_settings_for_project_with_http_info(self, project_uid, **kwargs):  # noqa: E501
    """Get analyse settings  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_analyse_settings_for_project_with_http_info(project_uid, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str project_uid: (required)
    :return: AnalyseSettingsDto
             If the method is called asynchronously,
             returns the request thread.
    """
    # Documented parameters plus the generic transport options accepted
    # by every generated API method.
    all_params = ['project_uid']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot of the current local names; validated kwargs entries are
    # merged into this dict below and read back via params.get(...).
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_analyse_settings_for_project" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'project_uid' is set
    if ('project_uid' not in params or
            params['project_uid'] is None):
        raise ValueError("Missing the required parameter `project_uid` when calling `get_analyse_settings_for_project`")  # noqa: E501
    collection_formats = {}
    # Path parameter substituted into {projectUid} in the resource URL.
    path_params = {}
    if 'project_uid' in params:
        path_params['projectUid'] = params['project_uid']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    # GET request: no body is sent.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    # NOTE(review): no per-call auth settings; authentication appears to
    # be handled by the api_client configuration — confirm.
    auth_settings = []  # noqa: E501
    return self.api_client.call_api(
        '/api2/v1/projects/{projectUid}/analyseSettings', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='AnalyseSettingsDto',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_financial_settings(self, project_uid, **kwargs):  # noqa: E501
    """Get financial settings  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_financial_settings(project_uid, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str project_uid: (required)
    :return: FinancialSettingsDto
             If the method is called asynchronously,
             returns the request thread.
    """
    # Force the underlying call to hand back only the response data.
    kwargs['_return_http_data_only'] = True
    if not kwargs.get('async_req'):
        # Synchronous call: return the deserialized data directly.
        data = self.get_financial_settings_with_http_info(project_uid, **kwargs)  # noqa: E501
        return data
    # Asynchronous call: return the request thread; result via .get().
    return self.get_financial_settings_with_http_info(project_uid, **kwargs)  # noqa: E501
def get_financial_settings_with_http_info(self, project_uid, **kwargs):  # noqa: E501
    """Get financial settings  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_financial_settings_with_http_info(project_uid, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str project_uid: (required)
    :return: FinancialSettingsDto
             If the method is called asynchronously,
             returns the request thread.
    """
    # Documented parameters plus the generic transport options accepted
    # by every generated API method.
    all_params = ['project_uid']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot of the current local names; validated kwargs entries are
    # merged into this dict below and read back via params.get(...).
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_financial_settings" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'project_uid' is set
    if ('project_uid' not in params or
            params['project_uid'] is None):
        raise ValueError("Missing the required parameter `project_uid` when calling `get_financial_settings`")  # noqa: E501
    collection_formats = {}
    # Path parameter substituted into {projectUid} in the resource URL.
    path_params = {}
    if 'project_uid' in params:
        path_params['projectUid'] = params['project_uid']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    # GET request: no body is sent.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    # NOTE(review): no per-call auth settings; authentication appears to
    # be handled by the api_client configuration — confirm.
    auth_settings = []  # noqa: E501
    return self.api_client.call_api(
        '/api2/v1/projects/{projectUid}/financialSettings', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='FinancialSettingsDto',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_import_settings2(self, project_uid, **kwargs):  # noqa: E501
    """Get project's default import settings  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_import_settings2(project_uid, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str project_uid: (required)
    :return: FileImportSettingsDto
             If the method is called asynchronously,
             returns the request thread.
    """
    # Force the underlying call to hand back only the response data.
    kwargs['_return_http_data_only'] = True
    if not kwargs.get('async_req'):
        # Synchronous call: return the deserialized data directly.
        data = self.get_import_settings2_with_http_info(project_uid, **kwargs)  # noqa: E501
        return data
    # Asynchronous call: return the request thread; result via .get().
    return self.get_import_settings2_with_http_info(project_uid, **kwargs)  # noqa: E501
def get_import_settings2_with_http_info(self, project_uid, **kwargs):  # noqa: E501
    """Get project's default import settings  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_import_settings2_with_http_info(project_uid, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str project_uid: (required)
    :return: FileImportSettingsDto
             If the method is called asynchronously,
             returns the request thread.
    """
    # Documented parameters plus the generic transport options accepted
    # by every generated API method.
    all_params = ['project_uid']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot of the current local names; validated kwargs entries are
    # merged into this dict below and read back via params.get(...).
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_import_settings2" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'project_uid' is set
    if ('project_uid' not in params or
            params['project_uid'] is None):
        raise ValueError("Missing the required parameter `project_uid` when calling `get_import_settings2`")  # noqa: E501
    collection_formats = {}
    # Path parameter substituted into {projectUid} in the resource URL.
    path_params = {}
    if 'project_uid' in params:
        path_params['projectUid'] = params['project_uid']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    # GET request: no body is sent.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    # NOTE(review): no per-call auth settings; authentication appears to
    # be handled by the api_client configuration — confirm.
    auth_settings = []  # noqa: E501
    return self.api_client.call_api(
        '/api2/v1/projects/{projectUid}/importSettings', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='FileImportSettingsDto',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_mt_settings_for_project(self, project_uid, **kwargs):  # noqa: E501
    """Get machine translate settings  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_mt_settings_for_project(project_uid, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str project_uid: (required)
    :return: MTSettingsPerLanguageListDto
             If the method is called asynchronously,
             returns the request thread.
    """
    # Force the underlying call to hand back only the response data.
    kwargs['_return_http_data_only'] = True
    if not kwargs.get('async_req'):
        # Synchronous call: return the deserialized data directly.
        data = self.get_mt_settings_for_project_with_http_info(project_uid, **kwargs)  # noqa: E501
        return data
    # Asynchronous call: return the request thread; result via .get().
    return self.get_mt_settings_for_project_with_http_info(project_uid, **kwargs)  # noqa: E501
def get_mt_settings_for_project_with_http_info(self, project_uid, **kwargs):  # noqa: E501
    """Get machine translate settings  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_mt_settings_for_project_with_http_info(project_uid, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str project_uid: (required)
    :return: MTSettingsPerLanguageListDto
             If the method is called asynchronously,
             returns the request thread.
    """
    # Documented parameters plus the generic transport options accepted
    # by every generated API method.
    all_params = ['project_uid']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot of the current local names; validated kwargs entries are
    # merged into this dict below and read back via params.get(...).
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_mt_settings_for_project" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'project_uid' is set
    if ('project_uid' not in params or
            params['project_uid'] is None):
        raise ValueError("Missing the required parameter `project_uid` when calling `get_mt_settings_for_project`")  # noqa: E501
    collection_formats = {}
    # Path parameter substituted into {projectUid} in the resource URL.
    path_params = {}
    if 'project_uid' in params:
        path_params['projectUid'] = params['project_uid']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    # GET request: no body is sent.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    # NOTE(review): no per-call auth settings; authentication appears to
    # be handled by the api_client configuration — confirm.
    auth_settings = []  # noqa: E501
    return self.api_client.call_api(
        '/api2/v1/projects/{projectUid}/mtSettings', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='MTSettingsPerLanguageListDto',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_pre_translate_settings_for_project(self, project_uid, **kwargs):  # noqa: E501
    """Get Pre-translate settings  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_pre_translate_settings_for_project(project_uid, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str project_uid: (required)
    :return: PreTranslateSettingsDto
             If the method is called asynchronously,
             returns the request thread.
    """
    # Force the underlying call to hand back only the response data.
    kwargs['_return_http_data_only'] = True
    if not kwargs.get('async_req'):
        # Synchronous call: return the deserialized data directly.
        data = self.get_pre_translate_settings_for_project_with_http_info(project_uid, **kwargs)  # noqa: E501
        return data
    # Asynchronous call: return the request thread; result via .get().
    return self.get_pre_translate_settings_for_project_with_http_info(project_uid, **kwargs)  # noqa: E501
def get_pre_translate_settings_for_project_with_http_info(self, project_uid, **kwargs):  # noqa: E501
    """Get Pre-translate settings  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_pre_translate_settings_for_project_with_http_info(project_uid, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str project_uid: (required)
    :return: PreTranslateSettingsDto
             If the method is called asynchronously,
             returns the request thread.
    """
    # Documented parameters plus the generic transport options accepted
    # by every generated API method.
    all_params = ['project_uid']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot of the current local names; validated kwargs entries are
    # merged into this dict below and read back via params.get(...).
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_pre_translate_settings_for_project" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'project_uid' is set
    if ('project_uid' not in params or
            params['project_uid'] is None):
        raise ValueError("Missing the required parameter `project_uid` when calling `get_pre_translate_settings_for_project`")  # noqa: E501
    collection_formats = {}
    # Path parameter substituted into {projectUid} in the resource URL.
    path_params = {}
    if 'project_uid' in params:
        path_params['projectUid'] = params['project_uid']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    # GET request: no body is sent.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    # NOTE(review): no per-call auth settings; authentication appears to
    # be handled by the api_client configuration — confirm.
    auth_settings = []  # noqa: E501
    return self.api_client.call_api(
        '/api2/v1/projects/{projectUid}/preTranslateSettings', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='PreTranslateSettingsDto',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_project(self, project_uid, **kwargs):  # noqa: E501
    """Get project  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_project(project_uid, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str project_uid: (required)
    :param str token:
    :return: AbstractProjectDto
             If the method is called asynchronously,
             returns the request thread.
    """
    # Force the underlying call to hand back only the response data.
    kwargs['_return_http_data_only'] = True
    if not kwargs.get('async_req'):
        # Synchronous call: return the deserialized data directly.
        data = self.get_project_with_http_info(project_uid, **kwargs)  # noqa: E501
        return data
    # Asynchronous call: return the request thread; result via .get().
    return self.get_project_with_http_info(project_uid, **kwargs)  # noqa: E501
def get_project_with_http_info(self, project_uid, **kwargs):  # noqa: E501
    """Get project  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_project_with_http_info(project_uid, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str project_uid: (required)
    :param str token:
    :return: AbstractProjectDto
             If the method is called asynchronously,
             returns the request thread.
    """
    # Documented parameters plus the generic transport options accepted
    # by every generated API method.  'token' is an optional query
    # parameter (see query_params below).
    all_params = ['project_uid', 'token']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot of the current local names; validated kwargs entries are
    # merged into this dict below and read back via params.get(...).
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_project" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'project_uid' is set
    if ('project_uid' not in params or
            params['project_uid'] is None):
        raise ValueError("Missing the required parameter `project_uid` when calling `get_project`")  # noqa: E501
    collection_formats = {}
    # Path parameter substituted into {projectUid} in the resource URL.
    path_params = {}
    if 'project_uid' in params:
        path_params['projectUid'] = params['project_uid']  # noqa: E501
    query_params = []
    if 'token' in params:
        query_params.append(('token', params['token']))  # noqa: E501
    header_params = {}
    form_params = []
    local_var_files = {}
    # GET request: no body is sent.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    # NOTE(review): no per-call auth settings; authentication appears to
    # be handled by the api_client configuration — confirm.
    auth_settings = []  # noqa: E501
    return self.api_client.call_api(
        '/api2/v1/projects/{projectUid}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='AbstractProjectDto',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_project_access_settings(self, project_uid, **kwargs):  # noqa: E501
    """Get access and security settings  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_project_access_settings(project_uid, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str project_uid: (required)
    :return: ProjectSecuritySettingsDto
             If the method is called asynchronously,
             returns the request thread.
    """
    # Force the underlying call to hand back only the response data.
    kwargs['_return_http_data_only'] = True
    if not kwargs.get('async_req'):
        # Synchronous call: return the deserialized data directly.
        data = self.get_project_access_settings_with_http_info(project_uid, **kwargs)  # noqa: E501
        return data
    # Asynchronous call: return the request thread; result via .get().
    return self.get_project_access_settings_with_http_info(project_uid, **kwargs)  # noqa: E501
def get_project_access_settings_with_http_info(self, project_uid, **kwargs):  # noqa: E501
    """Get access and security settings  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_project_access_settings_with_http_info(project_uid, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str project_uid: (required)
    :return: ProjectSecuritySettingsDto
             If the method is called asynchronously,
             returns the request thread.
    """
    # Documented parameters plus the generic transport options accepted
    # by every generated API method.
    all_params = ['project_uid']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot of the current local names; validated kwargs entries are
    # merged into this dict below and read back via params.get(...).
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_project_access_settings" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'project_uid' is set
    if ('project_uid' not in params or
            params['project_uid'] is None):
        raise ValueError("Missing the required parameter `project_uid` when calling `get_project_access_settings`")  # noqa: E501
    collection_formats = {}
    # Path parameter substituted into {projectUid} in the resource URL.
    path_params = {}
    if 'project_uid' in params:
        path_params['projectUid'] = params['project_uid']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    # GET request: no body is sent.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    # NOTE(review): no per-call auth settings; authentication appears to
    # be handled by the api_client configuration — confirm.
    auth_settings = []  # noqa: E501
    return self.api_client.call_api(
        '/api2/v1/projects/{projectUid}/accessSettings', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ProjectSecuritySettingsDto',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_project_assignments(self, project_uid, **kwargs):  # noqa: E501
    """List project providers  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_project_assignments(project_uid, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str project_uid: (required)
    :param str provider_name:
    :param int page_number: Page number, starting with 0, default 0
    :param int page_size: Page size, accepts values between 1 and 50, default 50
    :return: PageDtoProviderReference
             If the method is called asynchronously,
             returns the request thread.
    """
    # Force the underlying call to hand back only the response data.
    kwargs['_return_http_data_only'] = True
    if not kwargs.get('async_req'):
        # Synchronous call: return the deserialized data directly.
        data = self.get_project_assignments_with_http_info(project_uid, **kwargs)  # noqa: E501
        return data
    # Asynchronous call: return the request thread; result via .get().
    return self.get_project_assignments_with_http_info(project_uid, **kwargs)  # noqa: E501
def get_project_assignments_with_http_info(self, project_uid, **kwargs):  # noqa: E501
    """List project providers  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_project_assignments_with_http_info(project_uid, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str project_uid: (required)
    :param str provider_name:
    :param int page_number: Page number, starting with 0, default 0
    :param int page_size: Page size, accepts values between 1 and 50, default 50
    :return: PageDtoProviderReference
             If the method is called asynchronously,
             returns the request thread.
    """
    # Documented parameters plus the generic transport options accepted
    # by every generated API method.
    all_params = ['project_uid', 'provider_name', 'page_number', 'page_size']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot of the current local names; validated kwargs entries are
    # merged into this dict below and read back via params.get(...).
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_project_assignments" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'project_uid' is set
    if ('project_uid' not in params or
            params['project_uid'] is None):
        raise ValueError("Missing the required parameter `project_uid` when calling `get_project_assignments`")  # noqa: E501
    # Client-side range checks for the paging parameters (0 <= page_number,
    # 1 <= page_size <= 50), mirroring the limits in the docstring above.
    if 'page_number' in params and params['page_number'] < 0:  # noqa: E501
        raise ValueError("Invalid value for parameter `page_number` when calling `get_project_assignments`, must be a value greater than or equal to `0`")  # noqa: E501
    if 'page_size' in params and params['page_size'] > 50:  # noqa: E501
        raise ValueError("Invalid value for parameter `page_size` when calling `get_project_assignments`, must be a value less than or equal to `50`")  # noqa: E501
    if 'page_size' in params and params['page_size'] < 1:  # noqa: E501
        raise ValueError("Invalid value for parameter `page_size` when calling `get_project_assignments`, must be a value greater than or equal to `1`")  # noqa: E501
    collection_formats = {}
    # Path parameter substituted into {projectUid} in the resource URL.
    path_params = {}
    if 'project_uid' in params:
        path_params['projectUid'] = params['project_uid']  # noqa: E501
    # Optional filters and paging are sent as query-string parameters.
    query_params = []
    if 'provider_name' in params:
        query_params.append(('providerName', params['provider_name']))  # noqa: E501
    if 'page_number' in params:
        query_params.append(('pageNumber', params['page_number']))  # noqa: E501
    if 'page_size' in params:
        query_params.append(('pageSize', params['page_size']))  # noqa: E501
    header_params = {}
    form_params = []
    local_var_files = {}
    # GET request: no body is sent.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    # NOTE(review): no per-call auth settings; authentication appears to
    # be handled by the api_client configuration — confirm.
    auth_settings = []  # noqa: E501
    return self.api_client.call_api(
        '/api2/v1/projects/{projectUid}/providers', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='PageDtoProviderReference',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_project_qa_settings_v2(self, project_uid, **kwargs): # noqa: E501
"""Get quality assurance settings # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_project_qa_settings_v2(project_uid, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str project_uid: (required)
:return: QASettingsDtoV2
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_project_qa_settings_v2_with_http_info(project_uid, **kwargs) # noqa: E501
else:
(data) = self.get_project_qa_settings_v2_with_http_info(project_uid, **kwargs) # noqa: E501
return data
def get_project_qa_settings_v2_with_http_info(self, project_uid, **kwargs): # noqa: E501
"""Get quality assurance settings # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_project_qa_settings_v2_with_http_info(project_uid, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str project_uid: (required)
:return: QASettingsDtoV2
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['project_uid'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_project_qa_settings_v2" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'project_uid' is set
if ('project_uid' not in params or
params['project_uid'] is None):
raise ValueError("Missing the required parameter `project_uid` when calling `get_project_qa_settings_v2`") # noqa: E501
collection_formats = {}
path_params = {}
if 'project_uid' in params:
path_params['projectUid'] = params['project_uid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api2/v2/projects/{projectUid}/qaSettings', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='QASettingsDtoV2', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_project_settings(self, project_uid, **kwargs): # noqa: E501
"""Get LQA settings # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_project_settings(project_uid, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str project_uid: (required)
:param int workflow_level:
:return: LqaSettingsDto
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_project_settings_with_http_info(project_uid, **kwargs) # noqa: E501
else:
(data) = self.get_project_settings_with_http_info(project_uid, **kwargs) # noqa: E501
return data
def get_project_settings_with_http_info(self, project_uid, **kwargs): # noqa: E501
"""Get LQA settings # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_project_settings_with_http_info(project_uid, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str project_uid: (required)
:param int workflow_level:
:return: LqaSettingsDto
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['project_uid', 'workflow_level'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_project_settings" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'project_uid' is set
if ('project_uid' not in params or
params['project_uid'] is None):
raise ValueError("Missing the required parameter `project_uid` when calling `get_project_settings`") # noqa: E501
if 'workflow_level' in params and params['workflow_level'] > 15: # noqa: E501
raise ValueError("Invalid value for parameter `workflow_level` when calling `get_project_settings`, must be a value less than or equal to `15`") # noqa: E501
if 'workflow_level' in params and params['workflow_level'] < 1: # noqa: E501
raise ValueError("Invalid value for parameter `workflow_level` when calling `get_project_settings`, must be a value greater than or equal to `1`") # noqa: E501
collection_formats = {}
path_params = {}
if 'project_uid' in params:
path_params['projectUid'] = params['project_uid'] # noqa: E501
query_params = []
if 'workflow_level' in params:
query_params.append(('workflowLevel', params['workflow_level'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api2/v1/projects/{projectUid}/lqaSettings', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='LqaSettingsDto', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_project_term_bases(self, project_uid, **kwargs): # noqa: E501
"""Get term bases # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_project_term_bases(project_uid, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str project_uid: (required)
:return: ProjectTermBaseListDto
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_project_term_bases_with_http_info(project_uid, **kwargs) # noqa: E501
else:
(data) = self.get_project_term_bases_with_http_info(project_uid, **kwargs) # noqa: E501
return data
def get_project_term_bases_with_http_info(self, project_uid, **kwargs): # noqa: E501
"""Get term bases # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_project_term_bases_with_http_info(project_uid, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str project_uid: (required)
:return: ProjectTermBaseListDto
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['project_uid'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_project_term_bases" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'project_uid' is set
if ('project_uid' not in params or
params['project_uid'] is None):
raise ValueError("Missing the required parameter `project_uid` when calling `get_project_term_bases`") # noqa: E501
collection_formats = {}
path_params = {}
if 'project_uid' in params:
path_params['projectUid'] = params['project_uid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api2/v1/projects/{projectUid}/termBases', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ProjectTermBaseListDto', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_project_trans_memories(self, project_uid, **kwargs): # noqa: E501
"""Get translation memories # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_project_trans_memories(project_uid, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str project_uid: (required)
:return: ProjectTransMemoryListDto
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_project_trans_memories_with_http_info(project_uid, **kwargs) # noqa: E501
else:
(data) = self.get_project_trans_memories_with_http_info(project_uid, **kwargs) # noqa: E501
return data
def get_project_trans_memories_with_http_info(self, project_uid, **kwargs): # noqa: E501
"""Get translation memories # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_project_trans_memories_with_http_info(project_uid, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str project_uid: (required)
:return: ProjectTransMemoryListDto
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['project_uid'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_project_trans_memories" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'project_uid' is set
if ('project_uid' not in params or
params['project_uid'] is None):
raise ValueError("Missing the required parameter `project_uid` when calling `get_project_trans_memories`") # noqa: E501
collection_formats = {}
path_params = {}
if 'project_uid' in params:
path_params['projectUid'] = params['project_uid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api2/v1/projects/{projectUid}/transMemories', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ProjectTransMemoryListDto', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_project_workflow_steps(self, project_uid, **kwargs): # noqa: E501
"""Get workflow steps # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_project_workflow_steps(project_uid, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str project_uid: (required)
:return: ProjectWorkflowStepListDto
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_project_workflow_steps_with_http_info(project_uid, **kwargs) # noqa: E501
else:
(data) = self.get_project_workflow_steps_with_http_info(project_uid, **kwargs) # noqa: E501
return data
def get_project_workflow_steps_with_http_info(self, project_uid, **kwargs): # noqa: E501
"""Get workflow steps # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_project_workflow_steps_with_http_info(project_uid, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str project_uid: (required)
:return: ProjectWorkflowStepListDto
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['project_uid'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_project_workflow_steps" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'project_uid' is set
if ('project_uid' not in params or
params['project_uid'] is None):
raise ValueError("Missing the required parameter `project_uid` when calling `get_project_workflow_steps`") # noqa: E501
collection_formats = {}
path_params = {}
if 'project_uid' in params:
path_params['projectUid'] = params['project_uid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api2/v1/projects/{projectUid}/workflowSteps', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ProjectWorkflowStepListDto', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_quotes_for_project(self, project_uid, **kwargs): # noqa: E501
"""List quotes # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_quotes_for_project(project_uid, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str project_uid: (required)
:param int page_number:
:param int page_size: Page size, accepts values between 1 and 50, default 50
:return: PageDtoQuoteDto
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_quotes_for_project_with_http_info(project_uid, **kwargs) # noqa: E501
else:
(data) = self.get_quotes_for_project_with_http_info(project_uid, **kwargs) # noqa: E501
return data
def get_quotes_for_project_with_http_info(self, project_uid, **kwargs): # noqa: E501
"""List quotes # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_quotes_for_project_with_http_info(project_uid, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str project_uid: (required)
:param int page_number:
:param int page_size: Page size, accepts values between 1 and 50, default 50
:return: PageDtoQuoteDto
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['project_uid', 'page_number', 'page_size'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_quotes_for_project" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'project_uid' is set
if ('project_uid' not in params or
params['project_uid'] is None):
raise ValueError("Missing the required parameter `project_uid` when calling `get_quotes_for_project`") # noqa: E501
if 'page_number' in params and params['page_number'] < 0: # noqa: E501
raise ValueError("Invalid value for parameter `page_number` when calling `get_quotes_for_project`, must be a value greater than or equal to `0`") # noqa: E501
if 'page_size' in params and params['page_size'] > 50: # noqa: E501
raise ValueError("Invalid value for parameter `page_size` when calling `get_quotes_for_project`, must be a value less than or equal to `50`") # noqa: E501
if 'page_size' in params and params['page_size'] < 1: # noqa: E501
raise ValueError("Invalid value for parameter `page_size` when calling `get_quotes_for_project`, must be a value greater than or equal to `1`") # noqa: E501
collection_formats = {}
path_params = {}
if 'project_uid' in params:
path_params['projectUid'] = params['project_uid'] # noqa: E501
query_params = []
if 'page_number' in params:
query_params.append(('pageNumber', params['page_number'])) # noqa: E501
if 'page_size' in params:
query_params.append(('pageSize', params['page_size'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api2/v1/projects/{projectUid}/quotes', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PageDtoQuoteDto', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_assigned_projects(self, user_id, **kwargs): # noqa: E501
"""List assigned projects # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_assigned_projects(user_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int user_id: (required)
:param list[str] status:
:param list[str] target_lang:
:param int workflow_step_id:
:param int due_in_hours: -1 for jobs that are overdue
:param str filename:
:param str project_name:
:param int page_number:
:param int page_size:
:return: PageDtoProjectReference
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.list_assigned_projects_with_http_info(user_id, **kwargs) # noqa: E501
else:
(data) = self.list_assigned_projects_with_http_info(user_id, **kwargs) # noqa: E501
return data
def list_assigned_projects_with_http_info(self, user_id, **kwargs): # noqa: E501
"""List assigned projects # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_assigned_projects_with_http_info(user_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int user_id: (required)
:param list[str] status:
:param list[str] target_lang:
:param int workflow_step_id:
:param int due_in_hours: -1 for jobs that are overdue
:param str filename:
:param str project_name:
:param int page_number:
:param int page_size:
:return: PageDtoProjectReference
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'status', 'target_lang', 'workflow_step_id', 'due_in_hours', 'filename', 'project_name', 'page_number', 'page_size'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_assigned_projects" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params or
params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `list_assigned_projects`") # noqa: E501
if 'due_in_hours' in params and params['due_in_hours'] < -1: # noqa: E501
raise ValueError("Invalid value for parameter `due_in_hours` when calling `list_assigned_projects`, must be a value greater than or equal to `-1`") # noqa: E501
if 'page_number' in params and params['page_number'] < 0: # noqa: E501
raise ValueError("Invalid value for parameter `page_number` when calling `list_assigned_projects`, must be a value greater than or equal to `0`") # noqa: E501
if 'page_size' in params and params['page_size'] > 50: # noqa: E501
raise ValueError("Invalid value for parameter `page_size` when calling `list_assigned_projects`, must be a value less than or equal to `50`") # noqa: E501
if 'page_size' in params and params['page_size'] < 1: # noqa: E501
raise ValueError("Invalid value for parameter `page_size` when calling `list_assigned_projects`, must be a value greater than or equal to `1`") # noqa: E501
collection_formats = {}
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id'] # noqa: E501
query_params = []
if 'status' in params:
query_params.append(('status', params['status'])) # noqa: E501
collection_formats['status'] = 'multi' # noqa: E501
if 'target_lang' in params:
query_params.append(('targetLang', params['target_lang'])) # noqa: E501
collection_formats['targetLang'] = 'multi' # noqa: E501
if 'workflow_step_id' in params:
query_params.append(('workflowStepId', params['workflow_step_id'])) # noqa: E501
if 'due_in_hours' in params:
query_params.append(('dueInHours', params['due_in_hours'])) # noqa: E501
if 'filename' in params:
query_params.append(('filename', params['filename'])) # noqa: E501
if 'project_name' in params:
query_params.append(('projectName', params['project_name'])) # noqa: E501
if 'page_number' in params:
query_params.append(('pageNumber', params['page_number'])) # noqa: E501
if 'page_size' in params:
query_params.append(('pageSize', params['page_size'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api2/v1/users/{userId}/projects', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PageDtoProjectReference', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_by_project_v2(self, project_uid, **kwargs): # noqa: E501
"""List analyses by project # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_by_project_v2(project_uid, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str project_uid: (required)
:param int page_number:
:param int page_size: Page size, accepts values between 1 and 50, default 50
:return: PageDtoAnalyseV2Dto
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.list_by_project_v2_with_http_info(project_uid, **kwargs) # noqa: E501
else:
(data) = self.list_by_project_v2_with_http_info(project_uid, **kwargs) # noqa: E501
return data
def list_by_project_v2_with_http_info(self, project_uid, **kwargs): # noqa: E501
"""List analyses by project # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_by_project_v2_with_http_info(project_uid, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str project_uid: (required)
:param int page_number:
:param int page_size: Page size, accepts values between 1 and 50, default 50
:return: PageDtoAnalyseV2Dto
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['project_uid', 'page_number', 'page_size'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_by_project_v2" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'project_uid' is set
if ('project_uid' not in params or
params['project_uid'] is None):
raise ValueError("Missing the required parameter `project_uid` when calling `list_by_project_v2`") # noqa: E501
if 'page_number' in params and params['page_number'] < 0: # noqa: E501
raise ValueError("Invalid value for parameter `page_number` when calling `list_by_project_v2`, must be a value greater than or equal to `0`") # noqa: E501
if 'page_size' in params and params['page_size'] > 50: # noqa: E501
raise ValueError("Invalid value for parameter `page_size` when calling `list_by_project_v2`, must be a value less than or equal to `50`") # noqa: E501
if 'page_size' in params and params['page_size'] < 1: # noqa: E501
raise ValueError("Invalid value for parameter `page_size` when calling `list_by_project_v2`, must be a value greater than or equal to `1`") # noqa: E501
collection_formats = {}
path_params = {}
if 'project_uid' in params:
path_params['projectUid'] = params['project_uid'] # noqa: E501
query_params = []
if 'page_number' in params:
query_params.append(('pageNumber', params['page_number'])) # noqa: E501
if 'page_size' in params:
query_params.append(('pageSize', params['page_size'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api2/v2/projects/{projectUid}/analyses', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PageDtoAnalyseV2Dto', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_projects(self, **kwargs): # noqa: E501
"""List projects # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_projects(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name:
:param int client_id:
:param str client_name:
:param str business_unit_name:
:param list[str] statuses:
:param list[str] target_langs:
:param str domain_name:
:param str sub_domain_name:
:param int cost_center_id:
:param str cost_center_name:
:param int due_in_hours: -1 for projects that are overdue
:param int created_in_last_hours:
:param list[str] source_langs:
:param int owner_id:
:param list[str] job_statuses: Allowed for linguists only
:param str job_status_group: Allowed for linguists only
:param int buyer_id:
:param int page_number: Page number, starting with 0, default 0
:param int page_size: Page size, accepts values between 1 and 50, default 50
:return: PageDtoAbstractProjectDto
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.list_projects_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.list_projects_with_http_info(**kwargs) # noqa: E501
return data
def list_projects_with_http_info(self, **kwargs):  # noqa: E501
    """List projects (raw HTTP variant).

    Builds and dispatches ``GET /api2/v1/projects``. Synchronous by
    default; pass ``async_req=True`` to receive the request thread.

    :param async_req bool
    :param str name:
    :param int client_id:
    :param str client_name:
    :param str business_unit_name:
    :param list[str] statuses:
    :param list[str] target_langs:
    :param str domain_name:
    :param str sub_domain_name:
    :param int cost_center_id:
    :param str cost_center_name:
    :param int due_in_hours: -1 for projects that are overdue
    :param int created_in_last_hours:
    :param list[str] source_langs:
    :param int owner_id:
    :param list[str] job_statuses: Allowed for linguists only
    :param str job_status_group: Allowed for linguists only
    :param int buyer_id:
    :param int page_number: Page number, starting with 0, default 0
    :param int page_size: Page size, accepts values between 1 and 50, default 50
    :param str token:
    :return: PageDtoAbstractProjectDto
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of keyword arguments this endpoint accepts.
    all_params = ['name', 'client_id', 'client_name', 'business_unit_name', 'statuses', 'target_langs', 'domain_name', 'sub_domain_name', 'cost_center_id', 'cost_center_name', 'due_in_hours', 'created_in_last_hours', 'source_langs', 'owner_id', 'job_statuses', 'job_status_group', 'buyer_id', 'page_number', 'page_size', 'token']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot of locals(); recognized kwargs are merged in, unknown
    # kwargs are rejected with TypeError.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_projects" % key
            )
        params[key] = val
    del params['kwargs']
    # Range validation mirroring the API's documented limits.
    if 'due_in_hours' in params and params['due_in_hours'] < -1:  # noqa: E501
        raise ValueError("Invalid value for parameter `due_in_hours` when calling `list_projects`, must be a value greater than or equal to `-1`")  # noqa: E501
    if 'created_in_last_hours' in params and params['created_in_last_hours'] < 0:  # noqa: E501
        raise ValueError("Invalid value for parameter `created_in_last_hours` when calling `list_projects`, must be a value greater than or equal to `0`")  # noqa: E501
    if 'page_number' in params and params['page_number'] < 0:  # noqa: E501
        raise ValueError("Invalid value for parameter `page_number` when calling `list_projects`, must be a value greater than or equal to `0`")  # noqa: E501
    if 'page_size' in params and params['page_size'] > 50:  # noqa: E501
        raise ValueError("Invalid value for parameter `page_size` when calling `list_projects`, must be a value less than or equal to `50`")  # noqa: E501
    if 'page_size' in params and params['page_size'] < 1:  # noqa: E501
        raise ValueError("Invalid value for parameter `page_size` when calling `list_projects`, must be a value greater than or equal to `1`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    # Map snake_case params onto the API's camelCase query keys;
    # list-valued params are serialized as repeated 'multi' parameters.
    query_params = []
    if 'name' in params:
        query_params.append(('name', params['name']))  # noqa: E501
    if 'client_id' in params:
        query_params.append(('clientId', params['client_id']))  # noqa: E501
    if 'client_name' in params:
        query_params.append(('clientName', params['client_name']))  # noqa: E501
    if 'business_unit_name' in params:
        query_params.append(('businessUnitName', params['business_unit_name']))  # noqa: E501
    if 'statuses' in params:
        query_params.append(('statuses', params['statuses']))  # noqa: E501
        collection_formats['statuses'] = 'multi'  # noqa: E501
    if 'target_langs' in params:
        query_params.append(('targetLangs', params['target_langs']))  # noqa: E501
        collection_formats['targetLangs'] = 'multi'  # noqa: E501
    if 'domain_name' in params:
        query_params.append(('domainName', params['domain_name']))  # noqa: E501
    if 'sub_domain_name' in params:
        query_params.append(('subDomainName', params['sub_domain_name']))  # noqa: E501
    if 'cost_center_id' in params:
        query_params.append(('costCenterId', params['cost_center_id']))  # noqa: E501
    if 'cost_center_name' in params:
        query_params.append(('costCenterName', params['cost_center_name']))  # noqa: E501
    if 'due_in_hours' in params:
        query_params.append(('dueInHours', params['due_in_hours']))  # noqa: E501
    if 'created_in_last_hours' in params:
        query_params.append(('createdInLastHours', params['created_in_last_hours']))  # noqa: E501
    if 'source_langs' in params:
        query_params.append(('sourceLangs', params['source_langs']))  # noqa: E501
        collection_formats['sourceLangs'] = 'multi'  # noqa: E501
    if 'owner_id' in params:
        query_params.append(('ownerId', params['owner_id']))  # noqa: E501
    if 'job_statuses' in params:
        query_params.append(('jobStatuses', params['job_statuses']))  # noqa: E501
        collection_formats['jobStatuses'] = 'multi'  # noqa: E501
    if 'job_status_group' in params:
        query_params.append(('jobStatusGroup', params['job_status_group']))  # noqa: E501
    if 'buyer_id' in params:
        query_params.append(('buyerId', params['buyer_id']))  # noqa: E501
    if 'page_number' in params:
        query_params.append(('pageNumber', params['page_number']))  # noqa: E501
    if 'page_size' in params:
        query_params.append(('pageSize', params['page_size']))  # noqa: E501
    if 'token' in params:
        query_params.append(('token', params['token']))  # noqa: E501
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    return self.api_client.call_api(
        '/api2/v1/projects', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='PageDtoAbstractProjectDto',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def list_providers(self, project_uid, **kwargs):  # noqa: E501
    """Get suggested providers.

    Convenience wrapper around :meth:`list_providers_with_http_info`
    that returns only the deserialized payload. Synchronous by default;
    pass ``async_req=True`` to get the request thread.

    :param async_req bool
    :param str project_uid: (required)
    :return: ProviderListDto
    """
    # This wrapper always strips the HTTP metadata from the response.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.list_providers_with_http_info(project_uid, **kwargs)  # noqa: E501
    return self.list_providers_with_http_info(project_uid, **kwargs)  # noqa: E501
def list_providers_with_http_info(self, project_uid, **kwargs):  # noqa: E501
    """Get suggested providers (raw HTTP variant).

    Builds and dispatches
    ``POST /api2/v1/projects/{projectUid}/providers/suggest``.
    Synchronous by default; pass ``async_req=True`` to receive the
    request thread.

    :param async_req bool
    :param str project_uid: (required)
    :return: ProviderListDto
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of keyword arguments this endpoint accepts.
    all_params = ['project_uid']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot of locals(); recognized kwargs are merged in, unknown
    # kwargs are rejected with TypeError.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_providers" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'project_uid' is set
    if ('project_uid' not in params or
            params['project_uid'] is None):
        raise ValueError("Missing the required parameter `project_uid` when calling `list_providers`")  # noqa: E501
    collection_formats = {}
    # projectUid is interpolated into the request path.
    path_params = {}
    if 'project_uid' in params:
        path_params['projectUid'] = params['project_uid']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    # NOTE(review): POST with no request body — the endpoint apparently
    # takes all its input from the path; confirm against the API spec.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    return self.api_client.call_api(
        '/api2/v1/projects/{projectUid}/providers/suggest', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ProviderListDto',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def search_segment(self, project_uid, **kwargs):  # noqa: E501
    """Search translation memory for segment in the project.

    Returns at most maxSegments records with score >= scoreThreshold and
    at most maxSubsegments records which are subsegment, i.e. the source
    text is substring of the query text.

    Convenience wrapper around :meth:`search_segment_with_http_info`
    that returns only the deserialized payload. Synchronous by default;
    pass ``async_req=True`` to get the request thread.

    :param async_req bool
    :param str project_uid: (required)
    :param SearchTMRequestDto body:
    :return: SearchResponseListTmDto
    """
    # This wrapper always strips the HTTP metadata from the response.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.search_segment_with_http_info(project_uid, **kwargs)  # noqa: E501
    return self.search_segment_with_http_info(project_uid, **kwargs)  # noqa: E501
def search_segment_with_http_info(self, project_uid, **kwargs):  # noqa: E501
    """Search translation memory for segment in the project (raw HTTP variant).

    Returns at most maxSegments records with score >= scoreThreshold and
    at most maxSubsegments records which are subsegment, i.e. the source
    text is substring of the query text.

    Builds and dispatches
    ``POST /api2/v1/projects/{projectUid}/transMemories/searchSegmentInProject``.
    Synchronous by default; pass ``async_req=True`` to receive the
    request thread.

    :param async_req bool
    :param str project_uid: (required)
    :param SearchTMRequestDto body:
    :return: SearchResponseListTmDto
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of keyword arguments this endpoint accepts.
    all_params = ['project_uid', 'body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot of locals(); recognized kwargs are merged in, unknown
    # kwargs are rejected with TypeError.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method search_segment" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'project_uid' is set
    if ('project_uid' not in params or
            params['project_uid'] is None):
        raise ValueError("Missing the required parameter `project_uid` when calling `search_segment`")  # noqa: E501
    collection_formats = {}
    # projectUid is interpolated into the request path.
    path_params = {}
    if 'project_uid' in params:
        path_params['projectUid'] = params['project_uid']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    # Optional request body (SearchTMRequestDto).
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    return self.api_client.call_api(
        '/api2/v1/projects/{projectUid}/transMemories/searchSegmentInProject', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SearchResponseListTmDto',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def set_financial_settings(self, project_uid, **kwargs):  # noqa: E501
    """Edit financial settings.

    Convenience wrapper around
    :meth:`set_financial_settings_with_http_info` that returns only the
    deserialized payload. Synchronous by default; pass
    ``async_req=True`` to get the request thread.

    :param async_req bool
    :param str project_uid: (required)
    :param SetFinancialSettingsDto body:
    :return: FinancialSettingsDto
    """
    # This wrapper always strips the HTTP metadata from the response.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.set_financial_settings_with_http_info(project_uid, **kwargs)  # noqa: E501
    return self.set_financial_settings_with_http_info(project_uid, **kwargs)  # noqa: E501
def set_financial_settings_with_http_info(self, project_uid, **kwargs):  # noqa: E501
    """Edit financial settings (raw HTTP variant).

    Builds and dispatches
    ``PUT /api2/v1/projects/{projectUid}/financialSettings``.
    Synchronous by default; pass ``async_req=True`` to receive the
    request thread.

    :param async_req bool
    :param str project_uid: (required)
    :param SetFinancialSettingsDto body:
    :return: FinancialSettingsDto
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of keyword arguments this endpoint accepts.
    all_params = ['project_uid', 'body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot of locals(); recognized kwargs are merged in, unknown
    # kwargs are rejected with TypeError.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method set_financial_settings" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'project_uid' is set
    if ('project_uid' not in params or
            params['project_uid'] is None):
        raise ValueError("Missing the required parameter `project_uid` when calling `set_financial_settings`")  # noqa: E501
    collection_formats = {}
    # projectUid is interpolated into the request path.
    path_params = {}
    if 'project_uid' in params:
        path_params['projectUid'] = params['project_uid']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    # Optional request body (SetFinancialSettingsDto).
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    return self.api_client.call_api(
        '/api2/v1/projects/{projectUid}/financialSettings', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='FinancialSettingsDto',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def set_mt_settings_for_project(self, project_uid, **kwargs):  # noqa: E501
    """Edit machine translate settings.

    This will erase all mtSettings per language for project. To remove
    all machine translate settings from project call without a
    machineTranslateSettings parameter.

    Convenience wrapper around
    :meth:`set_mt_settings_for_project_with_http_info` that returns only
    the deserialized payload. Synchronous by default; pass
    ``async_req=True`` to get the request thread.

    :param async_req bool
    :param str project_uid: (required)
    :param EditProjectMTSettingsDto body:
    :return: MTSettingsPerLanguageListDto
    """
    # This wrapper always strips the HTTP metadata from the response.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.set_mt_settings_for_project_with_http_info(project_uid, **kwargs)  # noqa: E501
    return self.set_mt_settings_for_project_with_http_info(project_uid, **kwargs)  # noqa: E501
def set_mt_settings_for_project_with_http_info(self, project_uid, **kwargs):  # noqa: E501
    """Edit machine translate settings (raw HTTP variant).

    This will erase all mtSettings per language for project. To remove
    all machine translate settings from project call without a
    machineTranslateSettings parameter.

    Builds and dispatches
    ``PUT /api2/v1/projects/{projectUid}/mtSettings``. Synchronous by
    default; pass ``async_req=True`` to receive the request thread.

    :param async_req bool
    :param str project_uid: (required)
    :param EditProjectMTSettingsDto body:
    :return: MTSettingsPerLanguageListDto
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of keyword arguments this endpoint accepts.
    all_params = ['project_uid', 'body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot of locals(); recognized kwargs are merged in, unknown
    # kwargs are rejected with TypeError.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method set_mt_settings_for_project" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'project_uid' is set
    if ('project_uid' not in params or
            params['project_uid'] is None):
        raise ValueError("Missing the required parameter `project_uid` when calling `set_mt_settings_for_project`")  # noqa: E501
    collection_formats = {}
    # projectUid is interpolated into the request path.
    path_params = {}
    if 'project_uid' in params:
        path_params['projectUid'] = params['project_uid']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    # Optional request body (EditProjectMTSettingsDto).
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    return self.api_client.call_api(
        '/api2/v1/projects/{projectUid}/mtSettings', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='MTSettingsPerLanguageListDto',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def set_mt_settings_per_language_for_project(self, project_uid, **kwargs):  # noqa: E501
    """Edit machine translate settings per language.

    This will erase mtSettings for project.

    Convenience wrapper around
    :meth:`set_mt_settings_per_language_for_project_with_http_info` that
    returns only the deserialized payload. Synchronous by default; pass
    ``async_req=True`` to get the request thread.

    :param async_req bool
    :param str project_uid: (required)
    :param EditProjectMTSettPerLangListDto body:
    :return: MTSettingsPerLanguageListDto
    """
    # This wrapper always strips the HTTP metadata from the response.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.set_mt_settings_per_language_for_project_with_http_info(project_uid, **kwargs)  # noqa: E501
    return self.set_mt_settings_per_language_for_project_with_http_info(project_uid, **kwargs)  # noqa: E501
def set_mt_settings_per_language_for_project_with_http_info(self, project_uid, **kwargs):  # noqa: E501
    """Edit machine translate settings per language (raw HTTP variant).

    This will erase mtSettings for project.

    Builds and dispatches
    ``PUT /api2/v1/projects/{projectUid}/mtSettingsPerLanguage``.
    Synchronous by default; pass ``async_req=True`` to receive the
    request thread.

    :param async_req bool
    :param str project_uid: (required)
    :param EditProjectMTSettPerLangListDto body:
    :return: MTSettingsPerLanguageListDto
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of keyword arguments this endpoint accepts.
    all_params = ['project_uid', 'body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot of locals(); recognized kwargs are merged in, unknown
    # kwargs are rejected with TypeError.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method set_mt_settings_per_language_for_project" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'project_uid' is set
    if ('project_uid' not in params or
            params['project_uid'] is None):
        raise ValueError("Missing the required parameter `project_uid` when calling `set_mt_settings_per_language_for_project`")  # noqa: E501
    collection_formats = {}
    # projectUid is interpolated into the request path.
    path_params = {}
    if 'project_uid' in params:
        path_params['projectUid'] = params['project_uid']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    # Optional request body (EditProjectMTSettPerLangListDto).
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    return self.api_client.call_api(
        '/api2/v1/projects/{projectUid}/mtSettingsPerLanguage', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='MTSettingsPerLanguageListDto',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def set_project_qa_settings_v2(self, project_uid, **kwargs):  # noqa: E501
    """Edit quality assurance settings.

    Convenience wrapper around
    :meth:`set_project_qa_settings_v2_with_http_info` that returns only
    the deserialized payload. Synchronous by default; pass
    ``async_req=True`` to get the request thread.

    :param async_req bool
    :param str project_uid: (required)
    :param EditQASettingsDtoV2 body:
    :return: QASettingsDtoV2
    """
    # This wrapper always strips the HTTP metadata from the response.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.set_project_qa_settings_v2_with_http_info(project_uid, **kwargs)  # noqa: E501
    return self.set_project_qa_settings_v2_with_http_info(project_uid, **kwargs)  # noqa: E501
def set_project_qa_settings_v2_with_http_info(self, project_uid, **kwargs):  # noqa: E501
    """Edit quality assurance settings (raw HTTP variant).

    Builds and dispatches
    ``PUT /api2/v2/projects/{projectUid}/qaSettings``. Synchronous by
    default; pass ``async_req=True`` to receive the request thread.

    :param async_req bool
    :param str project_uid: (required)
    :param EditQASettingsDtoV2 body:
    :return: QASettingsDtoV2
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of keyword arguments this endpoint accepts.
    all_params = ['project_uid', 'body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot of locals(); recognized kwargs are merged in, unknown
    # kwargs are rejected with TypeError.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method set_project_qa_settings_v2" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'project_uid' is set
    if ('project_uid' not in params or
            params['project_uid'] is None):
        raise ValueError("Missing the required parameter `project_uid` when calling `set_project_qa_settings_v2`")  # noqa: E501
    collection_formats = {}
    # projectUid is interpolated into the request path.
    path_params = {}
    if 'project_uid' in params:
        path_params['projectUid'] = params['project_uid']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    # Optional request body (EditQASettingsDtoV2).
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept` — added for consistency: every sibling
    # endpoint in this client advertises JSON, and this endpoint returns
    # a JSON QASettingsDtoV2 as well; it was the only method missing it.
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    return self.api_client.call_api(
        '/api2/v2/projects/{projectUid}/qaSettings', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='QASettingsDtoV2',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def set_project_status(self, project_uid, **kwargs):  # noqa: E501
    """Edit project status.

    Convenience wrapper around
    :meth:`set_project_status_with_http_info` that returns only the
    deserialized payload. Synchronous by default; pass
    ``async_req=True`` to get the request thread.

    :param async_req bool
    :param str project_uid: (required)
    :param SetProjectStatusDto body:
    :return: None
    """
    # This wrapper always strips the HTTP metadata from the response.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.set_project_status_with_http_info(project_uid, **kwargs)  # noqa: E501
    return self.set_project_status_with_http_info(project_uid, **kwargs)  # noqa: E501
def set_project_status_with_http_info(self, project_uid, **kwargs):  # noqa: E501
    """Edit project status (raw HTTP variant).

    Builds and dispatches
    ``POST /api2/v1/projects/{projectUid}/setStatus``. Synchronous by
    default; pass ``async_req=True`` to receive the request thread.

    :param async_req bool
    :param str project_uid: (required)
    :param SetProjectStatusDto body:
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of keyword arguments this endpoint accepts.
    all_params = ['project_uid', 'body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot of locals(); recognized kwargs are merged in, unknown
    # kwargs are rejected with TypeError.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method set_project_status" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'project_uid' is set
    if ('project_uid' not in params or
            params['project_uid'] is None):
        raise ValueError("Missing the required parameter `project_uid` when calling `set_project_status`")  # noqa: E501
    collection_formats = {}
    # projectUid is interpolated into the request path.
    path_params = {}
    if 'project_uid' in params:
        path_params['projectUid'] = params['project_uid']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    # Optional request body (SetProjectStatusDto).
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    # response_type=None: the endpoint returns no payload to deserialize.
    return self.api_client.call_api(
        '/api2/v1/projects/{projectUid}/setStatus', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def set_project_term_bases(self, project_uid, **kwargs):  # noqa: E501
    """Edit term bases.

    Convenience wrapper around
    :meth:`set_project_term_bases_with_http_info` that returns only the
    deserialized payload. Synchronous by default; pass
    ``async_req=True`` to get the request thread.

    :param async_req bool
    :param str project_uid: (required)
    :param SetTermBaseDto body:
    :return: ProjectTermBaseListDto
    """
    # This wrapper always strips the HTTP metadata from the response.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.set_project_term_bases_with_http_info(project_uid, **kwargs)  # noqa: E501
    return self.set_project_term_bases_with_http_info(project_uid, **kwargs)  # noqa: E501
def set_project_term_bases_with_http_info(self, project_uid, **kwargs):  # noqa: E501
    """Edit term bases (raw HTTP variant).

    Builds and dispatches
    ``PUT /api2/v1/projects/{projectUid}/termBases``. Synchronous by
    default; pass ``async_req=True`` to receive the request thread.

    :param async_req bool
    :param str project_uid: (required)
    :param SetTermBaseDto body:
    :return: ProjectTermBaseListDto
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of keyword arguments this endpoint accepts.
    all_params = ['project_uid', 'body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Snapshot of locals(); recognized kwargs are merged in, unknown
    # kwargs are rejected with TypeError.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method set_project_term_bases" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'project_uid' is set
    if ('project_uid' not in params or
            params['project_uid'] is None):
        raise ValueError("Missing the required parameter `project_uid` when calling `set_project_term_bases`")  # noqa: E501
    collection_formats = {}
    # projectUid is interpolated into the request path.
    path_params = {}
    if 'project_uid' in params:
        path_params['projectUid'] = params['project_uid']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    # Optional request body (SetTermBaseDto).
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    return self.api_client.call_api(
        '/api2/v1/projects/{projectUid}/termBases', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ProjectTermBaseListDto',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def set_project_trans_memories_v2(self, project_uid, **kwargs):  # noqa: E501
    """Edit translation memories  # noqa: E501

    Convenience wrapper around set_project_trans_memories_v2_with_http_info
    that returns only the response data. Synchronous by default; pass
    async_req=True to get the request thread instead.
    >>> thread = api.set_project_trans_memories_v2(project_uid, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str project_uid: (required)
    :param SetProjectTransMemoriesV2Dto body:
    :return: ProjectTransMemoryListDtoV2
    If the method is called asynchronously,
    returns the request thread.
    """
    # Callers of this wrapper want the payload only, never the (data, status,
    # headers) triple, so force the flag before delegating.
    kwargs['_return_http_data_only'] = True
    # With async_req=True the helper returns the request thread; otherwise it
    # returns the deserialized data — either way we pass its result through.
    return self.set_project_trans_memories_v2_with_http_info(project_uid, **kwargs)  # noqa: E501
def set_project_trans_memories_v2_with_http_info(self, project_uid, **kwargs):  # noqa: E501
    """Edit translation memories  # noqa: E501

    Issues PUT /api2/v2/projects/{projectUid}/transMemories with the given body.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.set_project_trans_memories_v2_with_http_info(project_uid, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str project_uid: (required)
    :param SetProjectTransMemoriesV2Dto body:
    :return: ProjectTransMemoryListDtoV2
    If the method is called asynchronously,
    returns the request thread.
    """
    # Accepted keyword arguments: the API parameters plus client plumbing flags.
    all_params = ['project_uid', 'body'] # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # locals() snapshots project_uid and the kwargs dict; unknown keyword
    # arguments are rejected, known ones are hoisted into `params`.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method set_project_trans_memories_v2" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'project_uid' is set
    if ('project_uid' not in params or
            params['project_uid'] is None):
        raise ValueError("Missing the required parameter `project_uid` when calling `set_project_trans_memories_v2`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    if 'project_uid' in params:
        path_params['projectUid'] = params['project_uid']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Content-Type`
    # NOTE(review): unlike sibling endpoints (e.g. set_project_term_bases), no
    # Accept header is set here — presumably per the API spec; confirm if the
    # server requires one.
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    return self.api_client.call_api(
        '/api2/v2/projects/{projectUid}/transMemories', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ProjectTransMemoryListDtoV2',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
| 41.176619
| 421
| 0.615016
| 20,454
| 179,283
| 5.11719
| 0.019507
| 0.056407
| 0.021936
| 0.028204
| 0.969131
| 0.960819
| 0.952889
| 0.949449
| 0.946354
| 0.941644
| 0
| 0.019657
| 0.294021
| 179,283
| 4,353
| 422
| 41.186079
| 0.807292
| 0.306672
| 0
| 0.808144
| 0
| 0.008396
| 0.223568
| 0.061174
| 0
| 0
| 0
| 0
| 0
| 1
| 0.034845
| false
| 0
| 0.005038
| 0
| 0.09194
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
15fe78ccce50e76fd43b1deda3345f92aab20cfa
| 6,938
|
py
|
Python
|
tests/test_provider_cloudflare_cloudflare.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 507
|
2017-07-26T02:58:38.000Z
|
2022-01-21T12:35:13.000Z
|
tests/test_provider_cloudflare_cloudflare.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 135
|
2017-07-20T12:01:59.000Z
|
2021-10-04T22:25:40.000Z
|
tests/test_provider_cloudflare_cloudflare.py
|
mjuenema/python-terrascript
|
6d8bb0273a14bfeb8ff8e950fe36f97f7c6e7b1d
|
[
"BSD-2-Clause"
] | 81
|
2018-02-20T17:55:28.000Z
|
2022-01-31T07:08:40.000Z
|
# tests/test_provider_cloudflare_cloudflare.py
# Automatically generated by tools/makecode.py (24-Sep-2021 15:14:07 UTC)
def test_provider_import():
    """Smoke test: the generated cloudflare provider module is importable."""
    import terrascript.provider.cloudflare.cloudflare
def test_resource_import():
from terrascript.resource.cloudflare.cloudflare import cloudflare_access_application
from terrascript.resource.cloudflare.cloudflare import (
cloudflare_access_ca_certificate,
)
from terrascript.resource.cloudflare.cloudflare import cloudflare_access_group
from terrascript.resource.cloudflare.cloudflare import (
cloudflare_access_identity_provider,
)
from terrascript.resource.cloudflare.cloudflare import (
cloudflare_access_keys_configuration,
)
from terrascript.resource.cloudflare.cloudflare import (
cloudflare_access_mutual_tls_certificate,
)
from terrascript.resource.cloudflare.cloudflare import cloudflare_access_policy
from terrascript.resource.cloudflare.cloudflare import cloudflare_access_rule
from terrascript.resource.cloudflare.cloudflare import (
cloudflare_access_service_token,
)
from terrascript.resource.cloudflare.cloudflare import cloudflare_account_member
from terrascript.resource.cloudflare.cloudflare import cloudflare_api_token
from terrascript.resource.cloudflare.cloudflare import cloudflare_argo
from terrascript.resource.cloudflare.cloudflare import cloudflare_argo_tunnel
from terrascript.resource.cloudflare.cloudflare import (
cloudflare_authenticated_origin_pulls,
)
from terrascript.resource.cloudflare.cloudflare import (
cloudflare_authenticated_origin_pulls_certificate,
)
from terrascript.resource.cloudflare.cloudflare import cloudflare_byo_ip_prefix
from terrascript.resource.cloudflare.cloudflare import cloudflare_certificate_pack
from terrascript.resource.cloudflare.cloudflare import cloudflare_custom_hostname
from terrascript.resource.cloudflare.cloudflare import (
cloudflare_custom_hostname_fallback_origin,
)
from terrascript.resource.cloudflare.cloudflare import cloudflare_custom_pages
from terrascript.resource.cloudflare.cloudflare import cloudflare_custom_ssl
from terrascript.resource.cloudflare.cloudflare import (
cloudflare_device_posture_rule,
)
from terrascript.resource.cloudflare.cloudflare import cloudflare_filter
from terrascript.resource.cloudflare.cloudflare import cloudflare_firewall_rule
from terrascript.resource.cloudflare.cloudflare import cloudflare_healthcheck
from terrascript.resource.cloudflare.cloudflare import cloudflare_ip_list
from terrascript.resource.cloudflare.cloudflare import cloudflare_load_balancer
from terrascript.resource.cloudflare.cloudflare import (
cloudflare_load_balancer_monitor,
)
from terrascript.resource.cloudflare.cloudflare import cloudflare_load_balancer_pool
from terrascript.resource.cloudflare.cloudflare import cloudflare_logpull_retention
from terrascript.resource.cloudflare.cloudflare import cloudflare_logpush_job
from terrascript.resource.cloudflare.cloudflare import (
cloudflare_logpush_ownership_challenge,
)
from terrascript.resource.cloudflare.cloudflare import (
cloudflare_magic_firewall_ruleset,
)
from terrascript.resource.cloudflare.cloudflare import (
cloudflare_notification_policy,
)
from terrascript.resource.cloudflare.cloudflare import (
cloudflare_notification_policy_webhooks,
)
from terrascript.resource.cloudflare.cloudflare import (
cloudflare_origin_ca_certificate,
)
from terrascript.resource.cloudflare.cloudflare import cloudflare_page_rule
from terrascript.resource.cloudflare.cloudflare import cloudflare_rate_limit
from terrascript.resource.cloudflare.cloudflare import cloudflare_record
from terrascript.resource.cloudflare.cloudflare import cloudflare_ruleset
from terrascript.resource.cloudflare.cloudflare import (
cloudflare_spectrum_application,
)
from terrascript.resource.cloudflare.cloudflare import cloudflare_static_route
from terrascript.resource.cloudflare.cloudflare import cloudflare_teams_account
from terrascript.resource.cloudflare.cloudflare import cloudflare_teams_list
from terrascript.resource.cloudflare.cloudflare import cloudflare_teams_location
from terrascript.resource.cloudflare.cloudflare import cloudflare_teams_rule
from terrascript.resource.cloudflare.cloudflare import cloudflare_waf_group
from terrascript.resource.cloudflare.cloudflare import cloudflare_waf_override
from terrascript.resource.cloudflare.cloudflare import cloudflare_waf_package
from terrascript.resource.cloudflare.cloudflare import cloudflare_waf_rule
from terrascript.resource.cloudflare.cloudflare import cloudflare_waiting_room
from terrascript.resource.cloudflare.cloudflare import (
cloudflare_worker_cron_trigger,
)
from terrascript.resource.cloudflare.cloudflare import cloudflare_worker_route
from terrascript.resource.cloudflare.cloudflare import cloudflare_worker_script
from terrascript.resource.cloudflare.cloudflare import cloudflare_workers_kv
from terrascript.resource.cloudflare.cloudflare import (
cloudflare_workers_kv_namespace,
)
from terrascript.resource.cloudflare.cloudflare import cloudflare_zone
from terrascript.resource.cloudflare.cloudflare import cloudflare_zone_dnssec
from terrascript.resource.cloudflare.cloudflare import cloudflare_zone_lockdown
from terrascript.resource.cloudflare.cloudflare import (
cloudflare_zone_settings_override,
)
def test_datasource_import():
    """Smoke test: every generated cloudflare data-source class is importable."""
    from terrascript.data.cloudflare.cloudflare import (
        cloudflare_api_token_permission_groups,
        cloudflare_ip_ranges,
        cloudflare_origin_ca_root_certificate,
        cloudflare_waf_groups,
        cloudflare_waf_packages,
        cloudflare_waf_rules,
        cloudflare_zone_dnssec,
        cloudflare_zones,
    )
# TODO: Shortcut imports without namespace for official and supported providers.
# TODO: This has to be moved into a required_providers block.
# def test_version_source():
#
# import terrascript.provider.cloudflare.cloudflare
#
# t = terrascript.provider.cloudflare.cloudflare.cloudflare()
# s = str(t)
#
# assert 'https://github.com/cloudflare/terraform-provider-cloudflare' in s
# assert '3.1.0' in s
| 34.17734
| 88
| 0.80614
| 722
| 6,938
| 7.515235
| 0.192521
| 0.269075
| 0.325839
| 0.451161
| 0.849244
| 0.83192
| 0.827313
| 0.687431
| 0.191854
| 0.030593
| 0
| 0.00253
| 0.145431
| 6,938
| 202
| 89
| 34.346535
| 0.912633
| 0.075238
| 0
| 0.184211
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004951
| 0
| 1
| 0.026316
| true
| 0
| 0.631579
| 0
| 0.657895
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 10
|
d666c426f03a59e1b265e686307b4db544f5090e
| 2,378
|
py
|
Python
|
pyscf/ao2mo/test/test_restore_eri.py
|
nmardirossian/pyscf
|
57c8912dcfcc1157a822feede63df54ed1067115
|
[
"BSD-2-Clause"
] | 1
|
2018-05-02T19:55:30.000Z
|
2018-05-02T19:55:30.000Z
|
pyscf/ao2mo/test/test_restore_eri.py
|
nmardirossian/pyscf
|
57c8912dcfcc1157a822feede63df54ed1067115
|
[
"BSD-2-Clause"
] | null | null | null |
pyscf/ao2mo/test/test_restore_eri.py
|
nmardirossian/pyscf
|
57c8912dcfcc1157a822feede63df54ed1067115
|
[
"BSD-2-Clause"
] | 1
|
2018-12-06T03:10:50.000Z
|
2018-12-06T03:10:50.000Z
|
#!/usr/bin/env python
import unittest
import numpy
from pyscf import ao2mo
class KnownValues(unittest.TestCase):
def test_restore8(self):
n = 10
np = n*(n+1)//2
a8 = numpy.random.random((np*(np+1)//2))
a4 = numpy.empty((np,np))
a1 = numpy.empty((n,n,n,n))
ij = 0
for i in range(np):
for j in range(i+1):
a4[j,i] = a4[i,j] = a8[ij]
ij += 1
ij = 0
for i in range(n):
for j in range(i+1):
kl = 0
for k in range(n):
for l in range(k+1):
a1[i,j,k,l] = \
a1[j,i,k,l] = \
a1[i,j,l,k] = \
a1[j,i,l,k] = a4[ij,kl]
kl += 1
ij += 1
self.assertTrue(numpy.allclose(a1, ao2mo.restore(1, a1, n)))
self.assertTrue(numpy.allclose(a1, ao2mo.restore(1, a4, n)))
self.assertTrue(numpy.allclose(a1, ao2mo.restore(1, a8, n)))
self.assertTrue(numpy.allclose(a4, ao2mo.restore(4, a1, n)))
self.assertTrue(numpy.allclose(a4, ao2mo.restore(4, a4, n)))
self.assertTrue(numpy.allclose(a4, ao2mo.restore(4, a8, n)))
self.assertTrue(numpy.allclose(a8, ao2mo.restore(8, a1, n)))
self.assertTrue(numpy.allclose(a8, ao2mo.restore(8, a4, n)))
self.assertTrue(numpy.allclose(a8, ao2mo.restore(8, a8, n)))
def test_restore4(self):
n = 10
np = n*(n+1)//2
a4 = numpy.random.random((np,np))
a1 = numpy.empty((n,n,n,n))
ij = 0
for i in range(n):
for j in range(i+1):
kl = 0
for k in range(n):
for l in range(k+1):
a1[i,j,k,l] = \
a1[j,i,k,l] = \
a1[i,j,l,k] = \
a1[j,i,l,k] = a4[ij,kl]
kl += 1
ij += 1
self.assertTrue(numpy.allclose(a1, ao2mo.restore(1, a1, n)))
self.assertTrue(numpy.allclose(a1, ao2mo.restore(1, a4, n)))
self.assertTrue(numpy.allclose(a4, ao2mo.restore(4, a1, n)))
self.assertTrue(numpy.allclose(a4, ao2mo.restore(4, a4, n)))
if __name__ == '__main__':
    # Allow running this test module directly: `python test_restore_eri.py`.
    print('Full Tests for ao2mo.restore')
    unittest.main()
| 34.463768
| 68
| 0.470143
| 341
| 2,378
| 3.249267
| 0.143695
| 0.151625
| 0.222924
| 0.316787
| 0.809567
| 0.775271
| 0.756318
| 0.756318
| 0.732852
| 0.602888
| 0
| 0.066712
| 0.375946
| 2,378
| 68
| 69
| 34.970588
| 0.679919
| 0.00841
| 0
| 0.683333
| 0
| 0
| 0.01528
| 0
| 0
| 0
| 0
| 0
| 0.216667
| 1
| 0.033333
| false
| 0
| 0.05
| 0
| 0.1
| 0.016667
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ba8757ad53b97fd55393867096a56a160b19b45c
| 10,824
|
py
|
Python
|
src/porm/parsers/mysql.py
|
DeeeFOX/porm
|
8f0b9798002128e0355508997341f85810258aee
|
[
"BSD-2-Clause"
] | 2
|
2019-08-27T10:32:22.000Z
|
2019-10-21T14:43:56.000Z
|
src/porm/parsers/mysql.py
|
DeeeFOX/porm
|
8f0b9798002128e0355508997341f85810258aee
|
[
"BSD-2-Clause"
] | null | null | null |
src/porm/parsers/mysql.py
|
DeeeFOX/porm
|
8f0b9798002128e0355508997341f85810258aee
|
[
"BSD-2-Clause"
] | 1
|
2019-11-11T14:26:56.000Z
|
2019-11-11T14:26:56.000Z
|
# -*- coding: utf-8 -*-
from porm.errors import OperationalError
class ParsedResult(dict):
    """Result of parsing filter terms: a dict exposing the bind parameters
    (``param``) and the WHERE-clause text (``filter``) both as mapping keys
    and as read-only attributes."""

    def __init__(self, param: dict = None, filter: str = ''):
        super().__init__(param=param, filter=filter)
        self._param = param
        self._filter = filter

    @property
    def param(self) -> dict:
        """Bind-parameter mapping passed alongside the SQL text."""
        return self._param

    @property
    def filter(self) -> str:
        """Rendered SQL filter fragment."""
        return self._filter
def parse_join(**terms) -> ParsedResult:
    """
    Parse JOIN-clause filter terms into a SQL fragment plus bound parameters.

    Each keyword maps a column name to a term:
      * scalar value                   -> ``col=%(param)s``; a ``\\literal\\``
                                          value is inlined verbatim with the
                                          backslashes stripped
      * ``(value, operator)``          -> ``col operator %(param)s``
      * ``([v, ...], 'IN'/'NOT IN')``  -> ``col IN (%(p0)s, ...)``; an empty
                                          list renders ``1<>1`` (never matches)
      * ``([kw, ...], 'LIKE')``        -> LIKE terms; each keyword is wrapped
                                          as ``%kw%``
      * ``((lo, hi), '()'/'[]'/'(]'/'[)')`` -> range query, bracket style
                                          selecting strict/inclusive bounds
    An optional third tuple element overrides the relation (default ``AND``)
    joining the sub-terms of one column.

    :param terms: column-name -> term mapping as described above
    :return: ParsedResult with ``param`` (bind dict) and ``filter`` (SQL text)
    """
    sql_params = {}
    term_sqls = ['1=1']
    for field_name, term in terms.items():
        # Rename the field in the bind-parameter namespace to avoid conflicts
        # with parameters generated elsewhere in the statement.
        f_fname = u'joinfltr_{}'.format(field_name)
        if isinstance(term, (list, tuple)):
            operator = term[1]
            relation = u' {} '.format(term[2]) if len(term) == 3 else u' AND '
            term = term[0]
            if isinstance(term, (list, tuple)):
                # range / set query
                if operator in ('IN', 'NOT IN'):
                    in_field_names = []
                    for idx, s in enumerate(term):
                        in_field_name = f_fname + str(idx)
                        in_field_names.append(u'%({in_field_name})s'.format(in_field_name=in_field_name))
                        sql_params[in_field_name] = s
                    if in_field_names:
                        term_sql = u"{field_name} {operator} ({f_fname})".format(
                            field_name=field_name,
                            operator=operator,
                            f_fname=', '.join(in_field_names)
                        )
                    else:
                        # IN over an empty list can never match
                        term_sql = u'1<>1'
                elif operator == 'LIKE':
                    like_term_sql = []
                    for idx, kw in enumerate(term):
                        field_val_key = u"joinfltrLKE_{idx}_{fn}".format(idx=idx, fn=f_fname)
                        # BUGFIX: the column side of the comparison must use the
                        # real field name, not the renamed bind-parameter name
                        # (f_fname) — mirrors the LIKE branch of parse() below.
                        like_term_sql.append(u"{field_name} {op} %({field_val_key})s".format(
                            field_name=field_name, op=operator, field_val_key=field_val_key
                        ))
                        sql_params[field_val_key] = u'%{}%'.format(kw)
                    _tsql = relation.join(like_term_sql).strip()
                    term_sql = u' ( {} ) '.format(_tsql) if _tsql else _tsql
                else:
                    # range query: operator is a two-character bracket pair
                    left_val, right_val = term[0], term[1]
                    left_op, right_op = operator[0], operator[1]
                    range_term_sql = []
                    if left_val:
                        if left_op == '(':
                            left_op = '>'
                        elif left_op == '[':
                            left_op = '>='
                        else:
                            raise OperationalError(u'Invalid Operator: {}'.format(operator))
                        left_field_name = f_fname + left_op
                        range_term_sql.append(u"{field_name}{op}%({left})s".format(
                            field_name=field_name, op=left_op, left=left_field_name))
                        sql_params[left_field_name] = left_val
                    if right_val:
                        if right_op == ')':
                            right_op = '<'
                        elif right_op == ']':
                            right_op = '<='
                        else:
                            raise OperationalError(u'Invalid Operator: {}'.format(operator))
                        right_field_name = f_fname + right_op
                        range_term_sql.append(u"{field_name}{op}%({right})s".format(
                            field_name=field_name, op=right_op, right=right_field_name))
                        sql_params[right_field_name] = right_val
                    _tsql = relation.join(range_term_sql)
                    term_sql = u' ( {} ) '.format(_tsql) if _tsql else _tsql
            else:
                # operator query on a scalar value
                if term is None:
                    continue
                elif term.startswith("\\") and term.endswith("\\"):
                    # \literal\ values are inlined verbatim; note the supplied
                    # operator is ignored and '=' is used.
                    term_sql = u"{field_name}={term}".format(
                        field_name=field_name, term=term.replace("\\", "")
                    )
                else:
                    term_sql = u"{field_name} {operator} %({filter_field_name})s".format(
                        field_name=field_name, operator=operator, filter_field_name=f_fname)
                    sql_params[f_fname] = term
        else:
            # bare scalar -> equality
            if term is None:
                continue
            elif term.startswith("\\") and term.endswith("\\"):
                term_sql = u"{field_name}={term}".format(
                    field_name=field_name, term=term.replace("\\", "")
                )
            else:
                term_sql = u"{field_name}=%({filter_field_name})s".format(
                    field_name=field_name, filter_field_name=f_fname)
                sql_params[f_fname] = term
        # BUGFIX: skip empty sub-clauses (e.g. a range with both bounds falsy);
        # unconditionally appending '' produced broken SQL such as
        # '1=1 AND  AND ...'. This mirrors the guard already present in parse().
        if term_sql:
            term_sqls.append(term_sql)
    filters = ' AND '.join(term_sqls)
    return ParsedResult(param=sql_params, filter=filters)
def parse(tablename=None, order_by=None, page=None, size=None, **terms) -> ParsedResult:
    """
    Parse SQL filter conditions into a WHERE fragment plus bound parameters.

    Term shapes (per column keyword): a scalar yields ``col=%(param)s``;
    ``(value, operator)`` yields ``col operator %(param)s``;
    ``([...], 'IN'/'NOT IN')`` a set query (empty list -> ``1<>1``);
    ``([...], 'LIKE')`` LIKE terms (values wrapped as ``%kw%``);
    ``((lo, hi), brackets)`` a range query. An optional third tuple element
    overrides the relation (default ``AND``) joining one column's sub-terms.

    :param tablename: optional table name used to prefix every column
    :param order_by: optional ORDER BY expression appended to the filter
    :param page: 1-based page number; with ``size``, adds a LIMIT clause
    :param size: page size (clamped to >= 1)
    :param terms: column-name -> term mapping as described above
    :return: ParsedResult exposing {
        'param': sql_params,
        'filter': filters
    }
    """
    sql_params = {}
    term_sqls = ['1=1']
    for fname, term in list(terms.items()):
        # rename field name in filter part to avoid conflict
        f_fname = u'fltr_{}'.format(fname)
        # Qualify the column with the table name when one was supplied.
        fname = u'{}.{}'.format(tablename, fname) if tablename else fname
        if isinstance(term, (list, tuple)):
            operator = term[1].strip()
            if len(term) == 3:
                relation = u' {} '.format(term[2])
            else:
                relation = u' AND '
            term = term[0]
            term_sql = u''
            if isinstance(term, (list, tuple)):
                # range query
                if operator in ('IN', 'NOT IN'):
                    in_field_names = []
                    for idx, s in enumerate(term):
                        in_field_name = f_fname + str(idx)
                        in_field_names.append(u'%({in_field_name})s'.format(in_field_name=in_field_name))
                        sql_params[in_field_name] = s
                    if in_field_names:
                        term_sql = u"{field_name} {operator} ({f_fname})".format(
                            field_name=fname,
                            operator=operator,
                            f_fname=', '.join(in_field_names)
                        )
                    else:
                        # IF IN an empty list return []
                        term_sql = u'1<>1'
                elif operator == 'LIKE':
                    like_term_sql = []
                    for idx, kw in enumerate(term):
                        # NOTE(review): redundant — kw already holds term[idx]
                        # from enumerate.
                        kw = term[idx]
                        field_val_key = u"LKE_{idx}_{fn}".format(idx=idx, fn=fname)
                        like_term_sql.append(u"{field_name} {op} %({field_val_key})s".format(
                            field_name=fname, op=operator, field_val_key=field_val_key
                        ))
                        sql_params[field_val_key] = u'%{}%'.format(kw)
                    _tsql = relation.join(like_term_sql).strip()
                    if _tsql:
                        term_sql = ' ( {} ) '.format(_tsql)
                    else:
                        term_sql = _tsql
                else:
                    # Range query: operator is a two-character bracket pair,
                    # e.g. '[)' meaning lo <= col < hi.
                    left_val = term[0]
                    left_op = operator[0]
                    right_val = term[1]
                    right_op = operator[1]
                    range_term_sql = []
                    if left_val:
                        if left_op == '(':
                            left_op = '>'
                        elif left_op == '[':
                            left_op = '>='
                        else:
                            raise OperationalError(u'Invalid Operator: {}'.format(operator))
                        left_field_name = f_fname + left_op
                        range_term_sql.append(u"{field_name}{op}%({left})s".format(
                            field_name=fname, op=left_op, left=left_field_name))
                        sql_params[left_field_name] = left_val
                    if right_val:
                        if right_op == ')':
                            right_op = '<'
                        elif right_op == ']':
                            right_op = '<='
                        else:
                            raise OperationalError(u'Invalid Operator: {}'.format(operator))
                        right_field_name = f_fname + right_op
                        range_term_sql.append(u"{field_name}{op}%({right})s".format(
                            field_name=fname, op=right_op, right=right_field_name))
                        sql_params[right_field_name] = right_val
                    _tsql = relation.join(range_term_sql).strip()
                    if _tsql:
                        term_sql = ' ( {} ) '.format(_tsql)
                    else:
                        term_sql = _tsql
            else:
                # operator query
                if term is None:
                    continue
                term_sql = u"{field_name} {operator} %({filter_field_name})s".format(
                    field_name=fname, operator=operator, filter_field_name=f_fname)
                sql_params[f_fname] = term
        else:
            # Bare scalar value -> simple equality test.
            if term is None:
                continue
            term_sql = u"{field_name}=%({filter_field_name})s".format(
                field_name=fname, filter_field_name=f_fname)
            sql_params[f_fname] = term
        # Skip empty sub-clauses so the final ' AND '.join stays valid SQL.
        if term_sql:
            term_sqls.append(term_sql)
        else:
            pass
    filters = ' AND '.join(term_sqls)
    if order_by:
        filters = u'{} ORDER BY {}'.format(filters, order_by)
    if page is not None and size is not None:
        # Page numbers are 1-based; size is clamped to at least 1.
        size = max(1, int(size))
        filters = u'{} LIMIT %(page_from)s, %(page_to)s'.format(filters)
        sql_params['page_from'] = (max(0, int(page) - 1)) * size
        sql_params['page_to'] = size
    return ParsedResult(param=sql_params, filter=filters)
| 43.821862
| 105
| 0.454361
| 1,105
| 10,824
| 4.167421
| 0.092308
| 0.134853
| 0.04886
| 0.035831
| 0.813898
| 0.789359
| 0.766124
| 0.71987
| 0.701194
| 0.697286
| 0
| 0.004614
| 0.439302
| 10,824
| 246
| 106
| 44
| 0.75416
| 0.045639
| 0
| 0.768868
| 0
| 0
| 0.08178
| 0.024007
| 0
| 0
| 0
| 0
| 0
| 1
| 0.023585
| false
| 0.004717
| 0.004717
| 0.009434
| 0.051887
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bae0f932d83f0f837827ee04021df68ec26e032c
| 80
|
py
|
Python
|
python/baseline/dy/seq2seq/__init__.py
|
bjayakumar/test_vendor
|
e32c1a69754cedcec46d3e76e43a72743ebb8ed8
|
[
"Apache-2.0"
] | null | null | null |
python/baseline/dy/seq2seq/__init__.py
|
bjayakumar/test_vendor
|
e32c1a69754cedcec46d3e76e43a72743ebb8ed8
|
[
"Apache-2.0"
] | null | null | null |
python/baseline/dy/seq2seq/__init__.py
|
bjayakumar/test_vendor
|
e32c1a69754cedcec46d3e76e43a72743ebb8ed8
|
[
"Apache-2.0"
] | null | null | null |
# Re-export the DyNet seq2seq trainer and model symbols at package level.
from baseline.dy.seq2seq.train import *
from baseline.dy.seq2seq.model import *
| 26.666667
| 39
| 0.8
| 12
| 80
| 5.333333
| 0.583333
| 0.375
| 0.4375
| 0.65625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.027778
| 0.1
| 80
| 2
| 40
| 40
| 0.861111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
baec76ebfa38141b9922b4a889892afa89c0a005
| 158
|
py
|
Python
|
libterrain/__init__.py
|
echerost/libterrain
|
7ff77231fb296c97bd6009c5bcbe897d6b7adb36
|
[
"MIT"
] | null | null | null |
libterrain/__init__.py
|
echerost/libterrain
|
7ff77231fb296c97bd6009c5bcbe897d6b7adb36
|
[
"MIT"
] | null | null | null |
libterrain/__init__.py
|
echerost/libterrain
|
7ff77231fb296c97bd6009c5bcbe897d6b7adb36
|
[
"MIT"
] | 3
|
2019-05-27T10:18:06.000Z
|
2019-07-13T19:30:38.000Z
|
# Public package API: terrain query interfaces and building-data backends.
from .terrain_interface import SingleTerrainInterface, ParallelTerrainInterface
from .building_interface import OSMInterface, CTRInterface, BuildingInterface
| 52.666667
| 79
| 0.898734
| 13
| 158
| 10.769231
| 0.769231
| 0.214286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.06962
| 158
| 2
| 80
| 79
| 0.952381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
030322ad7e7841740104deb82816a504e218fc16
| 177
|
py
|
Python
|
platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/ocelot/calculators/calc_legacy_vars.py
|
lmnotran/gecko_sdk
|
2e82050dc8823c9fe0e8908c1b2666fb83056230
|
[
"Zlib"
] | 82
|
2016-06-29T17:24:43.000Z
|
2021-04-16T06:49:17.000Z
|
platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/ocelot/calculators/calc_legacy_vars.py
|
lmnotran/gecko_sdk
|
2e82050dc8823c9fe0e8908c1b2666fb83056230
|
[
"Zlib"
] | 6
|
2022-01-12T18:22:08.000Z
|
2022-03-25T10:19:27.000Z
|
platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/ocelot/calculators/calc_legacy_vars.py
|
lmnotran/gecko_sdk
|
2e82050dc8823c9fe0e8908c1b2666fb83056230
|
[
"Zlib"
] | 56
|
2016-08-02T10:50:50.000Z
|
2021-07-19T08:57:34.000Z
|
from pyradioconfig.parts.common.calculators.calc_profile_base_beta1 import CALC_Profile_Base
class Calc_Legacy_Vars_Ocelot(CALC_Profile_Base):
    """Ocelot-part legacy-variable calculator.

    Currently adds nothing of its own; it exists as the part-specific hook
    point and inherits the full behavior of CALC_Profile_Base.
    """
    # Inherit all for now
    pass
| 35.4
| 92
| 0.847458
| 26
| 177
| 5.384615
| 0.730769
| 0.235714
| 0.321429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006329
| 0.107345
| 177
| 5
| 93
| 35.4
| 0.879747
| 0.107345
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 8
|
032fd0f0e7043969e6261cbac8c58c3854182e99
| 6,739
|
py
|
Python
|
dashboard/tests/test_forms.py
|
Tomasz-Kluczkowski/Bnice
|
75eb66a94a3bf3225691ed6802e674fbcf108571
|
[
"MIT"
] | null | null | null |
dashboard/tests/test_forms.py
|
Tomasz-Kluczkowski/Bnice
|
75eb66a94a3bf3225691ed6802e674fbcf108571
|
[
"MIT"
] | 60
|
2018-04-20T21:32:21.000Z
|
2021-09-07T23:53:31.000Z
|
dashboard/tests/test_forms.py
|
Tomasz-Kluczkowski/Bnice
|
75eb66a94a3bf3225691ed6802e674fbcf108571
|
[
"MIT"
] | null | null | null |
# Fixtures are kept in dashboard/tests/conftest.py.
import pytest
from dashboard.models import Oopsy, Smiley
@pytest.mark.django_db
def test_add_smiley_form_with_valid_data(add_smiley_form_set_description):
    """Fixture-built smiley form validates and keeps its manually set help texts."""
    form = add_smiley_form_set_description
    assert form.is_valid() is True
    expected_help_texts = {
        'description': 'Required',
        'new_description': 'Required, Create a new description',
        'points': 'Required, How much was this task worth?',
    }
    for name, text in expected_help_texts.items():
        assert form.fields[name].help_text == text
@pytest.mark.django_db
def test_add_smiley_form_clean(smiley_form_new_description_saved):
    """Confirms new_description overrides description in the cleaned data."""
    cleaned = smiley_form_new_description_saved.clean()
    assert cleaned == {
        'description': 'New description',
        'new_description': 'New description',
        'points': 3,
    }
@pytest.mark.django_db
def test_add_smiley_form_standard_choices(smiley_form_new_description_saved):
    """The description field offers exactly the standard smiley choices."""
    form = smiley_form_new_description_saved
    standard_labels = [
        'Add new',
        'Folded washing',
        'Cleaned bathroom',
        'Mopped floor',
        'Removed cutlery from dishwasher',
    ]
    assert form.fields['description'].choices == [(label, label) for label in standard_labels]
@pytest.mark.django_db
def test_add_smiley_form_adds_choices(smiley_custom_description, smiley_form_new_description_saved):
    """A Smiley saved with a custom description shows up in the field's choices."""
    form = smiley_form_new_description_saved
    expected_labels = [
        'Add new',
        'Folded washing',
        'Cleaned bathroom',
        'Mopped floor',
        'Removed cutlery from dishwasher',
        'Removed rubbish',
    ]
    assert form.fields['description'].choices == [(label, label) for label in expected_labels]
@pytest.mark.django_db
def test_add_smiley_form_adds_only_distinct_choices(smileys_with_same_description, smiley_form_new_description_saved):
    """A description shared by several Smiley objects appears in the choices only once.

    The fixture provides 5 Smiley objects sharing one description and the form
    contributes a sixth object.
    """
    form = smiley_form_new_description_saved
    expected_labels = [
        'Add new',
        'Folded washing',
        'Cleaned bathroom',
        'Mopped floor',
        'Removed cutlery from dishwasher',
        'Removed rubbish',
    ]
    assert form.fields['description'].choices == [(label, label) for label in expected_labels]
    assert Smiley.objects.count() == 6
# Tests for Oopsy forms.
@pytest.mark.django_db
def test_add_oopsy_form_with_valid_data(add_oopsy_form_set_description):
    """Fixture-built oopsy form validates and keeps its manually set help texts."""
    form = add_oopsy_form_set_description
    assert form.is_valid() is True
    expected_help_texts = {
        'description': 'Required',
        'new_description': 'Required, Create a new description',
        'points': 'Required, How many points to take away?',
    }
    for name, text in expected_help_texts.items():
        assert form.fields[name].help_text == text
@pytest.mark.django_db
def test_add_oopsy_form_clean(oopsy_form_new_description_saved):
    """Confirms new_description overrides description in the cleaned data."""
    cleaned = oopsy_form_new_description_saved.clean()
    assert cleaned == {
        'description': 'New description',
        'new_description': 'New description',
        'points': 3,
    }
@pytest.mark.django_db
def test_add_oopsy_form_standard_choices(oopsy_form_new_description_saved):
    """The description field offers exactly the standard oopsy choices."""
    form = oopsy_form_new_description_saved
    standard_labels = [
        'Add new',
        'Was lying',
        'Left mess',
        'Talked back to parent',
        "Didn't do homework",
    ]
    assert form.fields['description'].choices == [(label, label) for label in standard_labels]
@pytest.mark.django_db
def test_add_oopsy_form_adds_choices(oopsy_custom_description, oopsy_form_new_description_saved):
    """An Oopsy saved with a custom description shows up in the field's choices."""
    form = oopsy_form_new_description_saved
    expected_labels = [
        'Add new',
        'Was lying',
        'Left mess',
        'Talked back to parent',
        "Didn't do homework",
        'Was rude',
    ]
    assert form.fields['description'].choices == [(label, label) for label in expected_labels]
@pytest.mark.django_db
def test_add_oopsy_form_adds_only_distinct_choices(oopsies_with_same_description, oopsy_form_new_description_saved):
    """A description shared by several Oopsy objects appears in the choices only once.

    The fixture provides 5 Oopsy objects sharing one description and the form
    contributes a sixth object.
    """
    form = oopsy_form_new_description_saved
    expected_labels = [
        'Add new',
        'Was lying',
        'Left mess',
        'Talked back to parent',
        "Didn't do homework",
        'Was rude',
    ]
    assert form.fields['description'].choices == [(label, label) for label in expected_labels]
    assert Oopsy.objects.count() == 6
| 42.651899
| 118
| 0.585102
| 718
| 6,739
| 5.252089
| 0.149025
| 0.118801
| 0.076372
| 0.097587
| 0.907186
| 0.866614
| 0.846195
| 0.843278
| 0.843278
| 0.798462
| 0
| 0.001318
| 0.324232
| 6,739
| 157
| 119
| 42.923567
| 0.826746
| 0.141267
| 0
| 0.830508
| 0
| 0
| 0.238816
| 0
| 0
| 0
| 0
| 0
| 0.152542
| 1
| 0.084746
| false
| 0
| 0.016949
| 0
| 0.101695
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
03440093aec51b514008b7fa3a10d24e6817ee5a
| 2,694
|
py
|
Python
|
authentication/forms.py
|
cativo23/Proyecto_API
|
6cc93f4d15e4c83aae989fb2b305475a0339a85a
|
[
"MIT"
] | null | null | null |
authentication/forms.py
|
cativo23/Proyecto_API
|
6cc93f4d15e4c83aae989fb2b305475a0339a85a
|
[
"MIT"
] | 4
|
2020-06-06T00:35:21.000Z
|
2021-06-09T18:41:20.000Z
|
authentication/forms.py
|
cativo23/Proyecto_API
|
6cc93f4d15e4c83aae989fb2b305475a0339a85a
|
[
"MIT"
] | null | null | null |
from django import forms
from django.contrib.auth.models import User, Group
from material import Layout, Fieldset, Row
class UsuarioForm(forms.Form):
    """User-creation form.

    Collects username, first/last name, email and an access-profile (group)
    choice; ``clean_email`` lower-cases the address and enforces uniqueness
    across other users.
    """
    username = forms.CharField(error_messages={'required': 'Campo obligatorio'}, min_length=8, max_length=30,
                               label='Nombre de Usuario', help_text='Al menos 8 caracteres.')
    first_name = forms.CharField(error_messages={'required': 'Campo obligatorio'}, label='Nombre')
    last_name = forms.CharField(error_messages={'required': 'Campo obligatorio'}, label='Apellidos')
    email = forms.EmailField(error_messages={'required': 'Campo obligatorio'}, label='Dirección de Correo Electrónico')
    # Group choices are evaluated once, at import time, and the try/except
    # exists so the initial migrations (auth tables not created yet) do not
    # crash. FIX: narrowed the bare `except:` so SystemExit/KeyboardInterrupt
    # are no longer swallowed.
    # NOTE(review): because choices are frozen at import, groups added later
    # will not appear until the process restarts — confirm this is acceptable.
    try:
        groups = forms.ChoiceField(choices=[(x.id, x.name) for x in Group.objects.all()], label='Perfil de Acceso',
                                   help_text='Por favor seleccione el Perfil de Acceso que tendrá este Usuario')
    except Exception:
        pass
    layout = Layout(Fieldset('Agregar Usuario: '), Row('username', 'email'), Row('first_name', 'last_name'),
                    Row('groups'))

    def clean_email(self):
        """Normalise the email to lowercase and reject duplicates.

        Raises:
            forms.ValidationError: if another user already uses this email.
        """
        email = self.cleaned_data['email'].lower()
        username = self.cleaned_data.get('username')
        if email and User.objects.filter(email=email).exclude(username=username).exists():
            raise forms.ValidationError("¡Ya existe un usuario con ese email!")
        return email
class UsuarioUpdateForm(forms.Form):
    """User-update form.

    Same fields as the creation form, but the username is rendered read-only;
    ``clean_email`` lower-cases the address and enforces uniqueness across
    other users.
    """
    username = forms.CharField(error_messages={'required': 'Campo obligatorio'}, min_length=8, max_length=30, label='Nombre de Usuario', widget=forms.TextInput(attrs={'readonly': 'readonly'}))
    first_name = forms.CharField(error_messages={'required': 'Campo obligatorio'}, label='Nombre')
    last_name = forms.CharField(error_messages={'required': 'Campo obligatorio'}, label='Apellidos')
    email = forms.EmailField(error_messages={'required': 'Campo obligatorio'}, label='Dirección de Correo Electrónico')
    # Guard added to work around failures during the initial migrations
    # (the auth tables may not exist yet). FIX: narrowed the bare `except:`
    # so SystemExit/KeyboardInterrupt are no longer swallowed.
    try:
        groups = forms.ChoiceField(choices=[(x.id, x.name) for x in Group.objects.all()], label='Perfil de Acceso', help_text='Por favor seleccione el Perfil de Acceso que tendrá este Usuario')
    except Exception:
        pass
    # NOTE(review): the fieldset title still says 'Agregar Usuario: ' even
    # though this is the update form — runtime string kept as-is; confirm.
    layout = Layout(Fieldset('Agregar Usuario: '), Row('username', 'email'), Row('first_name', 'last_name'), Row('groups'))

    def clean_email(self):
        """Normalise the email to lowercase and reject duplicates.

        Raises:
            forms.ValidationError: if another user already uses this email.
        """
        email = self.cleaned_data['email'].lower()
        username = self.cleaned_data.get('username')
        if email and User.objects.filter(email=email).exclude(username=username).exists():
            raise forms.ValidationError("¡Ya existe un usuario con ese email!")
        return email
| 54.979592
| 193
| 0.685226
| 331
| 2,694
| 5.495468
| 0.302115
| 0.057174
| 0.092358
| 0.114349
| 0.864211
| 0.864211
| 0.864211
| 0.864211
| 0.864211
| 0.864211
| 0
| 0.003177
| 0.182257
| 2,694
| 48
| 194
| 56.125
| 0.821607
| 0.01856
| 0
| 0.631579
| 0
| 0
| 0.277063
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.052632
| false
| 0.052632
| 0.078947
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
0357e6d6b21426987555b08f2d03f5de8ce89baf
| 6,256
|
py
|
Python
|
Python/5kyu/0001rooksAndBishops/rb.py
|
AndrewMichaelPierce/Codewars
|
6e2fa0870753428f5d957331bad07c64f8869281
|
[
"MIT"
] | null | null | null |
Python/5kyu/0001rooksAndBishops/rb.py
|
AndrewMichaelPierce/Codewars
|
6e2fa0870753428f5d957331bad07c64f8869281
|
[
"MIT"
] | null | null | null |
Python/5kyu/0001rooksAndBishops/rb.py
|
AndrewMichaelPierce/Codewars
|
6e2fa0870753428f5d957331bad07c64f8869281
|
[
"MIT"
] | null | null | null |
def main():
    """Solve the Codewars 'Rooks and Bishops' kata on a sample board.

    Returns:
        int: the number of playable squares that are neither occupied nor
        attacked by any piece (also printed, as in the original).
    """
    def bishops_and_rooks(chessboard):
        # 10x12 "mailbox" board: playable squares (rows 2-9, files 1-8)
        # hold their own index; every border/sentinel cell holds 200.
        mailbox = [i if 21 <= i <= 98 and 1 <= i % 10 <= 8 else 200
                   for i in range(120)]
        blankBoard = list(mailbox)      # pristine copy, used for bounds tests
        activePosition = list(mailbox)  # gets "R"/"B" on occupied squares
        whiteInfluence = list(mailbox)  # gets "x" on occupied/attacked squares
        rook = []
        bishop = []

        def findPiece(chessboard):
            # Positive cells are rooks, negative cells are bishops.
            for column in range(8):
                for row in range(8):
                    square = ((column + 2) * 10) + (row + 1)
                    if chessboard[column][row] > 0:
                        rook.append(str(square))
                        activePosition[square] = "R"
                        whiteInfluence[square] = "x"
                    elif chessboard[column][row] < 0:
                        bishop.append(str(square))
                        activePosition[square] = "B"
                        whiteInfluence[square] = "x"

        def castRay(origin, step):
            # March from `origin` in `step` increments, marking empty squares
            # as attacked until a piece or the board edge blocks the ray.
            location = origin
            while location == blankBoard[location]:
                if location + step == activePosition[location + step]:
                    whiteInfluence[location + step] = "x"
                else:
                    break
                location += step

        def rookPositions(rookLocation):
            # Orthogonal rays: down, up, right, left.
            for step in (10, -10, 1, -1):
                castRay(int(rookLocation), step)

        def bishopPositions(bishopLocation):
            # Diagonal rays. BUG FIX: the original never reset `location`
            # before its -9 loop, so that diagonal was cast from wherever the
            # +9 loop had stopped; the shared castRay helper restarts every
            # ray from the bishop's own square.
            for step in (11, -11, 9, -9):
                castRay(int(bishopLocation), step)

        def calculateInfluence():
            for piece in rook:
                rookPositions(piece)
            for piece in bishop:
                bishopPositions(piece)

        def countUnattacked(influenceBoard):
            # Playable squares still holding their own index are safe.
            result = 0
            for squareLocation in range(21, 99):
                if influenceBoard[squareLocation] != "x" and influenceBoard[squareLocation] != 200:
                    result += 1
            return result

        findPiece(chessboard)
        calculateInfluence()
        result = countUnattacked(whiteInfluence)
        print(result)
        return result

    return bishops_and_rooks([
        [1, 0, 0, 0, 0, 0, 0, 0],
        [0, 0, 0, 0, 0, 0, 0, 0],
        [0, -1, 0, 0, 0, 0, 0, 0],
        [0, 0, 0, 0, 0, 0, 0, 0],
        [0, 0, 0, 0, 0, 0, 0, 0],
        [0, 0, 0, 0, 0, 0, 0, 0],
        [0, 0, 0, 0, 0, 0, 0, 0],
        [0, 0, 0, 0, 0, 0, 0, 0]])
# Run the sample board only when executed as a script.
if __name__ == '__main__':
    main()
# Status: Solved
| 39.345912
| 99
| 0.453485
| 737
| 6,256
| 3.833107
| 0.160109
| 0.299469
| 0.363186
| 0.458761
| 0.772035
| 0.759292
| 0.759292
| 0.739469
| 0.739469
| 0.739469
| 0
| 0.283142
| 0.414003
| 6,256
| 158
| 100
| 39.594937
| 0.487452
| 0.002238
| 0
| 0.531469
| 0
| 0
| 0.003365
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.048951
| false
| 0
| 0
| 0
| 0.062937
| 0.006993
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
03652f6bcbfb5491adf02c9c7954b9140165dc05
| 10,491
|
py
|
Python
|
tests/unit/protocols/test_swd.py
|
dia38/pylink
|
f882f71ffdbcafe262036d1ec31e95780cbbb20d
|
[
"Apache-2.0"
] | 217
|
2017-10-04T21:00:13.000Z
|
2022-03-26T07:09:36.000Z
|
tests/unit/protocols/test_swd.py
|
dia38/pylink
|
f882f71ffdbcafe262036d1ec31e95780cbbb20d
|
[
"Apache-2.0"
] | 115
|
2017-09-26T23:23:08.000Z
|
2022-03-29T14:24:39.000Z
|
tests/unit/protocols/test_swd.py
|
dia38/pylink
|
f882f71ffdbcafe262036d1ec31e95780cbbb20d
|
[
"Apache-2.0"
] | 112
|
2017-10-11T18:43:21.000Z
|
2022-03-23T19:13:24.000Z
|
# Copyright 2017 Square, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pylink.protocols.swd as swd
import pylink.util
import mock
import unittest
class TestSerialWireDebug(unittest.TestCase):
    """Tests the `protocols.swd` submodule.

    Covers the `swd.Response` status predicates and the wire exchanges of
    `swd.ReadRequest` / `swd.WriteRequest`, using a `mock.Mock` in place of
    a real J-Link backend (no hardware required).
    """

    def setUp(self):
        """Per-test setup hook; these tests need no shared fixtures."""
        pass

    def tearDown(self):
        """Per-test teardown hook; nothing to clean up."""
        pass

    def test_swd_response_ack(self):
        """An ACK response answers True only to `ack()`."""
        response = swd.Response(swd.Response.STATUS_ACK)
        self.assertTrue(response.ack())
        self.assertFalse(response.wait())
        self.assertFalse(response.fault())
        self.assertFalse(response.invalid())

    def test_swd_response_wait(self):
        """A WAIT response answers True only to `wait()`."""
        response = swd.Response(swd.Response.STATUS_WAIT)
        self.assertTrue(response.wait())
        self.assertFalse(response.ack())
        self.assertFalse(response.fault())
        self.assertFalse(response.invalid())

    def test_swd_response_fault(self):
        """A FAULT response answers True only to `fault()`."""
        response = swd.Response(swd.Response.STATUS_FAULT)
        self.assertTrue(response.fault())
        self.assertFalse(response.wait())
        self.assertFalse(response.ack())
        self.assertFalse(response.invalid())

    def test_swd_response_invalid(self):
        """An INVALID response answers True only to `invalid()`."""
        response = swd.Response(swd.Response.STATUS_INVALID)
        self.assertTrue(response.invalid())
        self.assertFalse(response.fault())
        self.assertFalse(response.wait())
        self.assertFalse(response.ack())

    def test_swd_read_request_initialize(self):
        """A ReadRequest packs (register index, AP/DP flag) into the expected
        request byte for a number of valid bitfields."""
        # Expected request bytes for DP (ap=False) reads of registers 0-4.
        values = [165, 141, 149, 189, 165]
        for (index, value) in enumerate(values):
            request = swd.ReadRequest(index, ap=False)
            self.assertEqual(value, request.value)
        # Expected request bytes for AP (ap=True) reads of registers 0-4.
        values = [135, 175, 183, 159, 135]
        for (index, value) in enumerate(values):
            request = swd.ReadRequest(index, ap=True)
            self.assertEqual(value, request.value)

    def test_swd_read_request_send_nack(self):
        """A read whose status byte is WAIT reports wait(), not ack(),
        and performs exactly one status read and one data read."""
        request = swd.ReadRequest(0, True)
        ack = 1
        status = swd.Response.STATUS_WAIT
        data = 2
        # Mocked J-Link: swd_write returns the bit offset of the ack field.
        mock_jlink = mock.Mock()
        mock_jlink.swd_write.return_value = ack
        mock_jlink.swd_read8.return_value = status
        mock_jlink.swd_read32.return_value = data
        response = request.send(mock_jlink)
        self.assertFalse(response.ack())
        self.assertTrue(response.wait())
        self.assertEqual(2, mock_jlink.swd_write8.call_count)
        mock_jlink.swd_write8.assert_any_call(0xFF, request.value)  # data command
        mock_jlink.swd_write8.assert_any_call(0xFC, 0x0)  # status command
        self.assertEqual(1, mock_jlink.swd_write32.call_count)
        mock_jlink.swd_write32.assert_any_call(0x0, 0x0)
        self.assertEqual(1, mock_jlink.swd_write.call_count)
        mock_jlink.swd_write.assert_any_call(0x0, 0x0, 3)  # ack
        self.assertEqual(1, mock_jlink.swd_read8.call_count)
        mock_jlink.swd_read8.assert_any_call(ack)  # status read
        self.assertEqual(1, mock_jlink.swd_read32.call_count)
        mock_jlink.swd_read32.assert_any_call(ack + 3)  # data read

    def test_swd_read_request_send_ack(self):
        """A read whose status is ACK and whose data passes the parity check
        reports ack() and performs an extra read for the parity bit."""
        request = swd.ReadRequest(0, True)
        ack = 1
        status = swd.Response.STATUS_ACK
        # data = 1 has odd bit-count; the mocked swd_read8 also returns 1 for
        # the parity read, so the parity check passes.
        data = 1
        mock_jlink = mock.Mock()
        mock_jlink.swd_write.return_value = ack
        mock_jlink.swd_read8.return_value = status
        mock_jlink.swd_read32.return_value = data
        response = request.send(mock_jlink)
        self.assertTrue(response.ack())
        self.assertEqual(2, mock_jlink.swd_write8.call_count)
        mock_jlink.swd_write8.assert_any_call(0xFF, request.value)  # data command
        mock_jlink.swd_write8.assert_any_call(0xFC, 0x0)  # status command
        self.assertEqual(1, mock_jlink.swd_write32.call_count)
        mock_jlink.swd_write32.assert_any_call(0x0, 0x0)
        self.assertEqual(1, mock_jlink.swd_write.call_count)
        mock_jlink.swd_write.assert_any_call(0x0, 0x0, 3)  # ack
        self.assertEqual(2, mock_jlink.swd_read8.call_count)
        mock_jlink.swd_read8.assert_any_call(ack)  # status read
        mock_jlink.swd_read8.assert_any_call(ack + 35)  # parity check
        self.assertEqual(1, mock_jlink.swd_read32.call_count)
        mock_jlink.swd_read32.assert_any_call(ack + 3)  # data read

    def test_swd_read_request_send_ack_parity_mismatch(self):
        """A read that is ACK'd but fails the parity check is invalid().

        The parity field accompanying read data must match the data; here
        data = 3 (even bit-count) while the mocked parity read returns the
        status byte, so the check fails and the response becomes invalid.
        """
        request = swd.ReadRequest(0, True)
        ack = 1
        status = swd.Response.STATUS_ACK
        data = 3
        mock_jlink = mock.Mock()
        mock_jlink.swd_write.return_value = ack
        mock_jlink.swd_read8.return_value = status
        mock_jlink.swd_read32.return_value = data
        response = request.send(mock_jlink)
        self.assertFalse(response.ack())
        self.assertTrue(response.invalid())
        self.assertEqual(2, mock_jlink.swd_write8.call_count)
        mock_jlink.swd_write8.assert_any_call(0xFF, request.value)  # data command
        mock_jlink.swd_write8.assert_any_call(0xFC, 0x0)  # status command
        self.assertEqual(1, mock_jlink.swd_write32.call_count)
        mock_jlink.swd_write32.assert_any_call(0x0, 0x0)
        self.assertEqual(1, mock_jlink.swd_write.call_count)
        mock_jlink.swd_write.assert_any_call(0x0, 0x0, 3)  # ack
        self.assertEqual(2, mock_jlink.swd_read8.call_count)
        mock_jlink.swd_read8.assert_any_call(ack)  # status read
        mock_jlink.swd_read8.assert_any_call(ack + 35)  # parity check
        self.assertEqual(1, mock_jlink.swd_read32.call_count)
        mock_jlink.swd_read32.assert_any_call(ack + 3)  # data read

    def test_swd_write_request_initialize(self):
        """A WriteRequest packs (register index, AP/DP flag) into the
        expected request byte for a number of valid bitfields."""
        data = 4
        # Expected request bytes for DP (ap=False) writes of registers 0-3.
        values = [129, 169, 177, 153]
        for (index, value) in enumerate(values):
            request = swd.WriteRequest(index, data=data, ap=False)
            self.assertEqual(value, request.value)
        # Expected request bytes for AP (ap=True) writes of registers 0-3.
        values = [163, 139, 147, 187]
        for (index, value) in enumerate(values):
            request = swd.WriteRequest(index, data=data, ap=True)
            self.assertEqual(value, request.value)

    def test_swd_write_request_send(self):
        """Sending a write emits request byte, data word, and parity byte,
        and reads back a single ack status."""
        data = 2
        parity = pylink.util.calculate_parity(data)
        request = swd.WriteRequest(0, True, data)
        ack = 2
        mock_jlink = mock.Mock()
        mock_jlink.swd_write.return_value = ack
        mock_jlink.swd_read8.return_value = 1
        response = request.send(mock_jlink)
        self.assertTrue(response.ack())
        self.assertEqual(2, mock_jlink.swd_write.call_count)
        mock_jlink.swd_write.assert_any_call(0x0, 0x0, 3)  # Ack
        mock_jlink.swd_write.assert_any_call(0x0, 0x0, 2)  # Turnaround
        self.assertEqual(2, mock_jlink.swd_write8.call_count)
        mock_jlink.swd_write8.assert_any_call(0xFF, request.value)
        mock_jlink.swd_write8.assert_any_call(0xFF, parity)
        self.assertEqual(1, mock_jlink.swd_write32.call_count)
        mock_jlink.swd_write32.assert_called_once_with(0xFFFFFFFF, data)
        self.assertEqual(1, mock_jlink.swd_read8.call_count)
        mock_jlink.swd_read8.assert_called_once_with(ack)
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
| 32.28
| 82
| 0.653131
| 1,296
| 10,491
| 5.085648
| 0.146605
| 0.087392
| 0.101957
| 0.051889
| 0.809437
| 0.781975
| 0.7797
| 0.757093
| 0.734942
| 0.697466
| 0
| 0.027464
| 0.257268
| 10,491
| 324
| 83
| 32.37963
| 0.818403
| 0.27557
| 0
| 0.6875
| 0
| 0
| 0.001156
| 0
| 0
| 0
| 0.0143
| 0
| 0.493056
| 1
| 0.083333
| false
| 0.013889
| 0.027778
| 0
| 0.118056
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cef9bee62fcb483f58130e27dfc3e7f73277745e
| 27,638
|
py
|
Python
|
models/pointnet_cls.py
|
jiameng1010/pointNet
|
17d230f46f64136baba2c3d6cb7f05ab4bbb9f31
|
[
"MIT"
] | null | null | null |
models/pointnet_cls.py
|
jiameng1010/pointNet
|
17d230f46f64136baba2c3d6cb7f05ab4bbb9f31
|
[
"MIT"
] | null | null | null |
models/pointnet_cls.py
|
jiameng1010/pointNet
|
17d230f46f64136baba2c3d6cb7f05ab4bbb9f31
|
[
"MIT"
] | 1
|
2019-02-03T12:19:36.000Z
|
2019-02-03T12:19:36.000Z
|
import tensorflow as tf
import numpy as np
import math
import sys
import os
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
sys.path.append(BASE_DIR)
sys.path.append(os.path.join(BASE_DIR, '../utils'))
import tf_util
from transform_nets import input_transform_net, feature_transform_net, input_transform_net_no_bn
def placeholder_inputs(batch_size, num_point):
    """Create the graph inputs for one classification batch.

    Returns a (point clouds, labels) pair of TF placeholders with shapes
    (batch_size, num_point, 3) and (batch_size,) respectively.
    """
    clouds = tf.placeholder(tf.float32, shape=(batch_size, num_point, 3))
    labels = tf.placeholder(tf.int32, shape=(batch_size))
    return clouds, labels
def get_model_rbf0(point_cloud, is_training, bn_decay=None):
    """ Classification PointNet with an RBF feature layer.

    Input is BxNx3; output is Bx40 class logits, the (empty) end_points
    dict, the pooled RBF features, and the learned centroid variable.
    """
    batch_size = point_cloud.get_shape()[0].value
    num_point = point_cloud.get_shape()[1].value
    end_points = {}
    with tf.variable_scope('transform_net1', reuse=tf.AUTO_REUSE):
        transform = input_transform_net(point_cloud, is_training, bn_decay, K=3)
    point_cloud_transformed = tf.matmul(point_cloud, transform)
    point_cloud_transformed = tf.expand_dims(point_cloud_transformed, 3)
    # 1024 learnable RBF centroids in R^3, initialised near the origin.
    centroids = tf.get_variable('centroids',
                                [1, 1, 3, 1024],
                                initializer=tf.constant_initializer(0.2*np.random.randn(1, 1, 3, 1024)),
                                dtype=tf.float32)
    feature = tf.tile(point_cloud_transformed, [1, 1, 1, 1024])
    # FIX: tile over the actual number of points rather than the hard-coded
    # 1024 (matches get_model_rbf); identical when num_point == 1024.
    bias = tf.tile(centroids, [batch_size, num_point, 1, 1])
    net = tf.subtract(feature, bias)
    net = tf.norm(net, axis=2, keep_dims=True)
    net = tf.exp(-net)
    # Symmetric function: max pooling over the point dimension.
    features = tf_util.max_pool2d(net, [num_point, 1],
                                  padding='VALID', scope='maxpool')
    net = tf.reshape(features, [batch_size, -1])
    net = tf_util.fully_connected(net, 512, bn=True, is_training=is_training,
                                  scope='fc1', bn_decay=bn_decay)
    net = tf_util.dropout(net, keep_prob=0.7, is_training=is_training,
                          scope='dp1')
    net = tf_util.fully_connected(net, 256, bn=True, is_training=is_training,
                                  scope='fc2', bn_decay=bn_decay)
    net = tf_util.dropout(net, keep_prob=0.7, is_training=is_training,
                          scope='dp2')
    net = tf_util.fully_connected(net, 128, bn=True, is_training=is_training,
                                  scope='fc3', bn_decay=bn_decay)
    net = tf_util.dropout(net, keep_prob=0.7, is_training=is_training,
                          scope='dp3')
    net = tf_util.fully_connected(net, 40, activation_fn=None, scope='fc4')
    return net, end_points, features, centroids
def get_model_rbf0_gan(point_cloud, is_training, bn_decay=None):
    """ GAN variant of the RBF classification PointNet.

    Same structure as get_model_rbf0 but without batch norm (uses
    input_transform_net_no_bn and bn=False FC layers) and with 41 output
    logits (40 classes + 1, presumably a fake/real class — TODO confirm).
    """
    batch_size = point_cloud.get_shape()[0].value
    num_point = point_cloud.get_shape()[1].value
    end_points = {}
    with tf.variable_scope('transform_net1', reuse=tf.AUTO_REUSE):
        transform = input_transform_net_no_bn(point_cloud, is_training, bn_decay, K=3)
    point_cloud_transformed = tf.matmul(point_cloud, transform)
    point_cloud_transformed = tf.expand_dims(point_cloud_transformed, 3)
    # 1024 learnable RBF centroids in R^3, initialised near the origin.
    centroids = tf.get_variable('centroids',
                                [1, 1, 3, 1024],
                                initializer=tf.constant_initializer(0.2*np.random.randn(1, 1, 3, 1024)),
                                dtype=tf.float32)
    feature = tf.tile(point_cloud_transformed, [1, 1, 1, 1024])
    # FIX: tile over the actual number of points rather than the hard-coded
    # 1024 (matches get_model_rbf); identical when num_point == 1024.
    bias = tf.tile(centroids, [batch_size, num_point, 1, 1])
    net = tf.subtract(feature, bias)
    net = tf.norm(net, axis=2, keep_dims=True)
    net = tf.exp(-net)
    # Symmetric function: max pooling over the point dimension.
    features = tf_util.max_pool2d(net, [num_point, 1],
                                  padding='VALID', scope='maxpool')
    net = tf.reshape(features, [batch_size, -1])
    net = tf_util.fully_connected(net, 512, bn=False, is_training=is_training,
                                  scope='fc1', bn_decay=bn_decay)
    net = tf_util.dropout(net, keep_prob=0.7, is_training=is_training,
                          scope='dp1')
    net = tf_util.fully_connected(net, 256, bn=False, is_training=is_training,
                                  scope='fc2', bn_decay=bn_decay)
    net = tf_util.dropout(net, keep_prob=0.7, is_training=is_training,
                          scope='dp2')
    net = tf_util.fully_connected(net, 41, activation_fn=None, scope='fc3')
    return net, end_points, features, centroids
def get_model_rbf(point_cloud, is_training, bn_decay=None):
    """ Classification PointNet with a multi-norm RBF layer.

    Input is BxNx3; output is Bx40 class logits, the (empty) end_points
    dict, the pooled features, and the centroid variable. Each point is
    compared against c1 learnable centroids under four norms
    (0.5, 1, 2, inf) whose responses are weighted per centroid and mixed
    by a 1x1 convolution.
    """
    batch_size = point_cloud.get_shape()[0].value
    num_point = point_cloud.get_shape()[1].value
    end_points = {}
    with tf.variable_scope('transform_net1', reuse=tf.AUTO_REUSE):
        transform = input_transform_net(point_cloud, is_training, bn_decay, K=3)
    point_cloud_transformed = tf.matmul(point_cloud, transform)
    point_cloud_transformed = tf.expand_dims(point_cloud_transformed, 3)
    c1 = 1024  # number of learnable centroids
    centroids = tf.get_variable('centroids',
                                [1, 1, 3, c1],
                                initializer=tf.constant_initializer(0.5*np.random.randn(1, 1, 3, c1)),
                                dtype=tf.float32)
    # Per-centroid weights that reshape the multi-norm response.
    # FIX: the initializer array now matches the declared [1, 1, 4, c1]
    # shape (one weight per norm); the original passed a 3-wide array,
    # mismatching both the variable shape and the 4 concatenated norms.
    weights = tf.get_variable('weights',
                              [1, 1, 4, c1],
                              initializer=tf.constant_initializer(0.01 * np.random.randn(1, 1, 4, c1)),)
    feature = tf.tile(point_cloud_transformed, [1, 1, 1, c1])
    bias = tf.tile(centroids, [batch_size, num_point, 1, 1])
    net = tf.subtract(feature, bias)
    net = tf.exp(net)
    # Four norms along the coordinate axis, concatenated on axis 2.
    net = tf.exp(-tf.concat([tf.norm(net, ord=0.5, axis=2, keep_dims=True),
                             tf.norm(net, ord=1, axis=2, keep_dims=True),
                             tf.norm(net, ord=2, axis=2, keep_dims=True),
                             tf.norm(net, ord=np.inf, axis=2, keep_dims=True),
                             ], axis=2))
    net = tf.multiply(net, tf.tile(weights, [batch_size, num_point, 1, 1]))
    # Symmetric function: max pooling over the point dimension.
    features = tf_util.max_pool2d(net, [num_point, 1],
                                  padding='VALID', scope='maxpool')
    # Mix the four norm responses with a small 1x1 conv.
    net = tf.transpose(features, perm=[0, 1, 3, 2])
    net = tf_util.conv2d(net, 3, [1, 1],
                         padding='VALID', stride=[1, 1],
                         bn=True, is_training=is_training,
                         scope='mini_conv1', bn_decay=bn_decay)
    net = tf.reshape(net, [batch_size, -1])
    net = tf_util.fully_connected(net, 512, bn=True, is_training=is_training,
                                  scope='fc1', bn_decay=bn_decay)
    net = tf_util.dropout(net, keep_prob=0.7, is_training=is_training,
                          scope='dp1')
    net = tf_util.fully_connected(net, 256, bn=True, is_training=is_training,
                                  scope='fc2', bn_decay=bn_decay)
    net = tf_util.dropout(net, keep_prob=0.7, is_training=is_training,
                          scope='dp2')
    net = tf_util.fully_connected(net, 40, activation_fn=None, scope='fc3')
    return net, end_points, features, centroids
def get_model_rbf2(point_cloud, is_training, bn_decay=None):
    """ Classification PointNet with a two-level (centroid + sub-centroid) RBF layer.

    Input is BxNx3; output is Bx40 class logits, the (empty) end_points
    dict, the pooled features, and the coarse centroid variable. Each point
    is compared against c1*c2 absolute sub-centroid positions (coarse
    centroid plus learnable local offset).
    """
    batch_size = point_cloud.get_shape()[0].value
    num_point = point_cloud.get_shape()[1].value
    end_points = {}
    with tf.variable_scope('transform_net1', reuse=tf.AUTO_REUSE):
        transform = input_transform_net(point_cloud, is_training, bn_decay, K=3)
    point_cloud_transformed = tf.matmul(point_cloud, transform)
    point_cloud_transformed = tf.expand_dims(point_cloud_transformed, 3)
    c1 = 128  # number of coarse centroids
    c2 = 32   # sub-centroids per coarse centroid
    centroids = tf.get_variable('centroids',
                                [1, 1, 3, c1],
                                initializer=tf.constant_initializer(np.random.randn(1, 1, 3, c1)),
                                dtype=tf.float32)
    sub_centroids = tf.get_variable('sub_centroids',
                                    [1, 1, 3, c2],
                                    initializer=tf.constant_initializer(0.05*np.random.randn(1, 1, 3, c2)),
                                    dtype=tf.float32)
    # Absolute sub-centroid positions: coarse centroid + local offset.
    sub_bias = tf.add(tf.tile(tf.expand_dims(sub_centroids, 4), [1, 1, 1, 1, c1]),
                      tf.tile(tf.expand_dims(centroids, 3), [1, 1, 1, c2, 1]))
    # FIX: use batch_size/num_point instead of the hard-coded 32/1024
    # (matches get_model_rbf3); identical for a 32x1024 batch.
    sub_bias = tf.tile(sub_bias, [batch_size, num_point, 1, 1, 1])
    sub_feature = tf.tile(tf.expand_dims(point_cloud_transformed, 4), [1, 1, 1, c2, c1])
    sub_net = tf.subtract(sub_feature, sub_bias)
    sub_net = tf.norm(sub_net, axis=2, keep_dims=True)
    sub_net = tf.reshape(sub_net, [batch_size, num_point, 1, -1])
    net = tf.exp(-tf.square(sub_net))
    # Symmetric function: max pooling over the point dimension.
    features = tf_util.max_pool2d(net, [num_point, 1],
                                  padding='VALID', scope='maxpool')
    net = tf.reshape(features, [batch_size, -1])
    net = tf_util.fully_connected(net, 1024, bn=True, is_training=is_training,
                                  scope='fc0', bn_decay=bn_decay)
    net = tf_util.dropout(net, keep_prob=0.7, is_training=is_training,
                          scope='dp1')
    net = tf_util.fully_connected(net, 512, bn=True, is_training=is_training,
                                  scope='fc1', bn_decay=bn_decay)
    # NOTE(review): the 'dp1' scope is reused for two dropout layers; dropout
    # creates no variables so this is harmless, but consider renaming.
    net = tf_util.dropout(net, keep_prob=0.7, is_training=is_training,
                          scope='dp1')
    net = tf_util.fully_connected(net, 256, bn=True, is_training=is_training,
                                  scope='fc2', bn_decay=bn_decay)
    net = tf_util.dropout(net, keep_prob=0.7, is_training=is_training,
                          scope='dp2')
    net = tf_util.fully_connected(net, 40, activation_fn=None, scope='fc3')
    return net, end_points, features, centroids
def get_model_rbf3(point_cloud, is_training, bn_decay=None):
    """ Classification PointNet combining coarse and sub-centroid RBF responses.

    Input is BxNx3; output is Bx40 class logits, the (empty) end_points
    dict, the concatenated pooled features, and the coarse centroid
    variable. The sub-centroid branch is reduced by a 1x1 conv and
    concatenated with the coarse-centroid response.
    """
    batch_size = point_cloud.get_shape()[0].value
    num_point = point_cloud.get_shape()[1].value
    end_points = {}
    with tf.variable_scope('transform_net1', reuse=tf.AUTO_REUSE):
        transform = input_transform_net(point_cloud, is_training, bn_decay, K=3)
    point_cloud_transformed = tf.matmul(point_cloud, transform)
    point_cloud_transformed = tf.expand_dims(point_cloud_transformed, 3)
    c1 = 512  # number of coarse centroids
    c2 = 8    # sub-centroids per coarse centroid
    centroids = tf.get_variable('centroids',
                                [1, 1, 3, c1],
                                initializer=tf.constant_initializer(np.random.randn(1, 1, 3, c1)),
                                dtype=tf.float32)
    sub_centroids = tf.get_variable('sub_centroids',
                                    [1, 1, 3, c2],
                                    initializer=tf.constant_initializer(0.05*np.random.randn(1, 1, 3, c2)),
                                    dtype=tf.float32)
    # Absolute sub-centroid positions: coarse centroid + local offset.
    sub_bias = tf.add(tf.tile(tf.expand_dims(sub_centroids, 4), [1, 1, 1, 1, c1]),
                      tf.tile(tf.expand_dims(centroids, 3), [1, 1, 1, c2, 1]))
    # FIX: tile over num_point instead of the hard-coded 1024; identical
    # when num_point == 1024.
    sub_bias = tf.tile(sub_bias, [batch_size, num_point, 1, 1, 1])
    sub_feature = tf.tile(tf.expand_dims(point_cloud_transformed, 4), [1, 1, 1, c2, c1])
    sub_net = tf.exp(-tf.square(tf.norm(tf.subtract(sub_feature, sub_bias), ord=3, axis=2, keep_dims=True)))
    # NOTE(review): tf.squeeze removes every size-1 axis, so a batch of one
    # would also lose its batch dimension here — assumes batch_size > 1.
    sub_net = tf.squeeze(sub_net)
    sub_net = tf.transpose(sub_net, perm=[0, 1, 3, 2])
    sub_net = tf_util.max_pool2d(sub_net, [num_point, 1], stride=[1, 1],
                                 padding='VALID', scope='maxpool')
    sub_net = tf_util.conv2d(sub_net, 2, [1, 1],
                             padding='VALID', stride=[1, 1],
                             bn=True, is_training=is_training,
                             scope='mini_conv2', bn_decay=bn_decay)
    sub_net = tf.squeeze(sub_net)
    # Coarse-centroid RBF branch.
    feature = tf.tile(point_cloud_transformed, [1, 1, 1, c1])
    # FIX: tile over num_point instead of the hard-coded 1024.
    bias = tf.tile(centroids, [batch_size, num_point, 1, 1])
    net = tf.subtract(feature, bias)
    net = tf.norm(net, ord=3, axis=2, keep_dims=True)
    net = tf.exp(-tf.square(net))
    net = tf_util.max_pool2d(net, [num_point, 1],
                             padding='VALID', scope='maxpool')
    net = tf.expand_dims(tf.squeeze(net), 2)
    # Concatenate coarse and sub-centroid responses.
    features = tf.concat([net, sub_net], axis=2)
    net = tf.reshape(features, [batch_size, -1])
    net = tf_util.fully_connected(net, 512, bn=True, is_training=is_training,
                                  scope='fc1', bn_decay=bn_decay)
    net = tf_util.dropout(net, keep_prob=0.7, is_training=is_training,
                          scope='dp1')
    net = tf_util.fully_connected(net, 256, bn=True, is_training=is_training,
                                  scope='fc2', bn_decay=bn_decay)
    net = tf_util.dropout(net, keep_prob=0.7, is_training=is_training,
                          scope='dp2')
    net = tf_util.fully_connected(net, 40, activation_fn=None, scope='fc3')
    return net, end_points, features, centroids
def get_model_elm(point_cloud, is_training, bn_decay=None):
    """ ELM-style classification PointNet.

    Projects each point through a fixed random 3->4096 matrix, max-pools
    over points, then classifies with FC layers. Input is BxNx3; output is
    Bx40 class logits, the (empty) end_points dict, and the pooled features.
    """
    batch_size = point_cloud.get_shape()[0].value
    num_point = point_cloud.get_shape()[1].value
    end_points = {}
    with tf.variable_scope('transform_net1', reuse=tf.AUTO_REUSE):
        transform = input_transform_net(point_cloud, is_training, bn_decay, K=3)
    point_cloud_transformed = tf.matmul(point_cloud, transform)
    # NOTE(review): the transformed cloud is computed but the projection
    # below multiplies the raw `point_cloud`, exactly as in the original —
    # confirm whether point_cloud_transformed was intended here.
    random_weights = tf.constant(np.random.randn(3, 4096), dtype=tf.float32)
    # FIX: tile the random projection once over the batch dimension instead
    # of doubling it via five concats, which hard-coded batch_size == 32.
    random_weights1 = tf.tile(tf.expand_dims(random_weights, 0), [batch_size, 1, 1])
    net = tf.matmul(point_cloud, random_weights1)
    net = tf.expand_dims(net, 2)
    # Symmetric function: max pooling over the point dimension.
    features = tf_util.max_pool2d(net, [num_point, 1],
                                  padding='VALID', scope='maxpool')
    net = tf.reshape(features, [batch_size, -1])
    net = tf_util.fully_connected(net, 1024, bn=True, is_training=is_training,
                                  scope='fc0', bn_decay=bn_decay)
    net = tf_util.dropout(net, keep_prob=0.7, is_training=is_training,
                          scope='dp1')
    net = tf_util.fully_connected(net, 512, bn=True, is_training=is_training,
                                  scope='fc1', bn_decay=bn_decay)
    # NOTE(review): the 'dp1' scope is reused for two dropout layers; dropout
    # creates no variables so this is harmless, but consider renaming.
    net = tf_util.dropout(net, keep_prob=0.7, is_training=is_training,
                          scope='dp1')
    net = tf_util.fully_connected(net, 256, bn=True, is_training=is_training,
                                  scope='fc2', bn_decay=bn_decay)
    net = tf_util.dropout(net, keep_prob=0.7, is_training=is_training,
                          scope='dp2')
    net = tf_util.fully_connected(net, 40, activation_fn=None, scope='fc3')
    return net, end_points, features
def get_model_half(point_cloud, is_training, bn_decay=None):
    """ Classification PointNet, input is BxNx3, output Bx40 """
    # Hybrid variant: standard PointNet conv trunk (reduced to 512 channels)
    # concatenated with an RBF-style branch built from trainable centroids.
    batch_size = point_cloud.get_shape()[0].value
    num_point = point_cloud.get_shape()[1].value
    end_points = {}
    # Learn a 3x3 spatial alignment and apply it to the raw points.
    with tf.variable_scope('transform_net1', reuse=tf.AUTO_REUSE) as sc:
        transform = input_transform_net(point_cloud, is_training, bn_decay, K=3)
    point_cloud_transformed = tf.matmul(point_cloud, transform)
    input_image = tf.expand_dims(point_cloud_transformed, -1)
    # Shared per-point MLP (implemented as 1x3 then 1x1 convs).
    net = tf_util.conv2d(input_image, 64, [1,3],
                         padding='VALID', stride=[1,1],
                         bn=True, is_training=is_training,
                         scope='conv1', bn_decay=bn_decay)
    net = tf_util.conv2d(net, 64, [1,1],
                         padding='VALID', stride=[1,1],
                         bn=True, is_training=is_training,
                         scope='conv2', bn_decay=bn_decay)
    # 64x64 feature-space alignment; exposed via end_points for the
    # orthogonality regularizer in get_loss.
    with tf.variable_scope('transform_net2', reuse=tf.AUTO_REUSE) as sc:
        transform = feature_transform_net(net, is_training, bn_decay, K=64)
    end_points['transform'] = transform
    net_transformed = tf.matmul(tf.squeeze(net, axis=[2]), transform)
    net_transformed = tf.expand_dims(net_transformed, [2])
    net = tf_util.conv2d(net_transformed, 64, [1,1],
                         padding='VALID', stride=[1,1],
                         bn=True, is_training=is_training,
                         scope='conv3', bn_decay=bn_decay)
    net = tf_util.conv2d(net, 128, [1,1],
                         padding='VALID', stride=[1,1],
                         bn=True, is_training=is_training,
                         scope='conv4', bn_decay=bn_decay)
    # "half": 512 output channels here instead of the usual 1024.
    net = tf_util.conv2d(net, 512, [1,1],
                         padding='VALID', stride=[1,1],
                         bn=True, is_training=is_training,
                         scope='conv5', bn_decay=bn_decay)
    c1 = 512
    #centroids = tf.constant(np.random.randn(1, 1, 3, 1024), dtype=tf.float32)
    # Trainable RBF centers in the 3-D point space, initialized ~N(0, 0.5^2).
    centroids = tf.get_variable('centroids',
                                [1, 1, 3, c1],
                                initializer=tf.constant_initializer(0.5*np.random.randn(1, 1, 3, c1)),
                                dtype=tf.float32)
    # Gaussian RBF response exp(-||p - c||) of every point to every centroid.
    net2 = tf.subtract(tf.tile(tf.expand_dims(point_cloud_transformed, 3), [1, 1, 1, c1]), tf.tile(centroids, [batch_size, num_point, 1, 1]))
    net2 = tf.norm(net2, axis=2, keep_dims=True)
    net2 = tf.exp(-net2)
    # Stack conv features and RBF responses before global pooling.
    net = tf.concat([net, net2], axis=2)
    # Symmetric function: max pooling
    features = tf_util.max_pool2d(net, [num_point,1],
                                  padding='VALID', scope='maxpool')
    net = tf.reshape(features, [batch_size, -1])
    # FC classification head: 512 -> 256 -> 40 logits.
    net = tf_util.fully_connected(net, 512, bn=True, is_training=is_training,
                                  scope='fc1', bn_decay=bn_decay)
    net = tf_util.dropout(net, keep_prob=0.7, is_training=is_training,
                          scope='dp1')
    net = tf_util.fully_connected(net, 256, bn=True, is_training=is_training,
                                  scope='fc2', bn_decay=bn_decay)
    net = tf_util.dropout(net, keep_prob=0.7, is_training=is_training,
                          scope='dp2')
    net = tf_util.fully_connected(net, 40, activation_fn=None, scope='fc3')
    return net, end_points, features, centroids
def get_model_rbf_transform(point_cloud, is_training, bn_decay=None):
    """ Classification PointNet, input is BxNx3, output Bx40 """
    # Variant that replaces both conv stages with RBF layers: distances to
    # trainable centroids, first in point space and then in feature space.
    batch_size = point_cloud.get_shape()[0].value
    num_point = point_cloud.get_shape()[1].value
    end_points = {}
    with tf.variable_scope('transform_net1', reuse=tf.AUTO_REUSE) as sc:
        transform = input_transform_net(point_cloud, is_training, bn_decay, K=3)
    point_cloud_transformed = tf.matmul(point_cloud, transform)
    input_image = tf.expand_dims(point_cloud_transformed, -1)
    c1 = 64
    #centroids = tf.constant(np.random.randn(1, 1, 3, 1024), dtype=tf.float32)
    # First RBF layer: 64 trainable centers in the aligned 3-D point space.
    centroids = tf.get_variable('centroids',
                                [1, 1, 3, c1],
                                initializer=tf.constant_initializer(0.5*np.random.randn(1, 1, 3, c1)),
                                dtype=tf.float32)
    net = tf.subtract(tf.tile(tf.expand_dims(point_cloud_transformed, 3), [1, 1, 1, c1]), tf.tile(centroids, [batch_size, num_point, 1, 1]))
    net = tf.norm(net, axis=2, keep_dims=True)
    net = tf.exp(-net)
    # Align the 64-D RBF features; exposed for the get_loss regularizer.
    with tf.variable_scope('transform_net2', reuse=tf.AUTO_REUSE) as sc:
        transform = feature_transform_net(net, is_training, bn_decay, K=64)
    end_points['transform'] = transform
    net_transformed = tf.matmul(tf.squeeze(net, axis=[2]), transform)
    net_transformed = tf.expand_dims(net_transformed, [2])
    c2 = 256
    #centroids = tf.constant(np.random.randn(1, 1, 3, 1024), dtype=tf.float32)
    # Second RBF layer: 256 centers in the 64-D feature space.
    centroids2 = tf.get_variable('centroids2',
                                 [1, 1, c2, 64],
                                 initializer=tf.constant_initializer(0.5*np.random.randn(1, 1, c2, 64)),
                                 dtype=tf.float32)
    net = tf.subtract(tf.tile(net_transformed, [1, 1, c2, 1]), tf.tile(centroids2, [batch_size, num_point, 1, 1]))
    net = tf.norm(net, axis=3, keep_dims=True)
    net = tf.exp(-net)
    # Symmetric function: max pooling
    features = tf_util.max_pool2d(net, [num_point,1],
                                  padding='VALID', scope='maxpool')
    net = tf.reshape(features, [batch_size, -1])
    net = tf_util.fully_connected(net, 512, bn=True, is_training=is_training,
                                  scope='fc1', bn_decay=bn_decay)
    net = tf_util.dropout(net, keep_prob=0.7, is_training=is_training,
                          scope='dp1')
    net = tf_util.fully_connected(net, 256, bn=True, is_training=is_training,
                                  scope='fc2', bn_decay=bn_decay)
    net = tf_util.dropout(net, keep_prob=0.7, is_training=is_training,
                          scope='dp2')
    net = tf_util.fully_connected(net, 40, activation_fn=None, scope='fc3')
    # NOTE(review): only the first-layer `centroids` is returned; `centroids2`
    # is not exposed -- confirm callers do not need it.
    return net, end_points, features, centroids
def get_model(point_cloud, is_training, bn_decay=None):
    """ Classification PointNet, input is BxNx3, output Bx40 """
    # Reference PointNet classifier: input transform -> shared MLP ->
    # feature transform -> shared MLP -> global max pool -> FC head.
    batch_size = point_cloud.get_shape()[0].value
    num_point = point_cloud.get_shape()[1].value
    end_points = {}
    # Learn a 3x3 spatial alignment and apply it to the raw points.
    with tf.variable_scope('transform_net1', reuse=tf.AUTO_REUSE) as sc:
        transform = input_transform_net(point_cloud, is_training, bn_decay, K=3)
    point_cloud_transformed = tf.matmul(point_cloud, transform)
    input_image = tf.expand_dims(point_cloud_transformed, -1)
    # Shared per-point MLP (1x3 conv consumes the xyz triple, then 1x1 convs).
    net = tf_util.conv2d(input_image, 64, [1,3],
                         padding='VALID', stride=[1,1],
                         bn=True, is_training=is_training,
                         scope='conv1', bn_decay=bn_decay)
    net = tf_util.conv2d(net, 64, [1,1],
                         padding='VALID', stride=[1,1],
                         bn=True, is_training=is_training,
                         scope='conv2', bn_decay=bn_decay)
    # 64x64 feature-space alignment; stored for the orthogonality loss term.
    with tf.variable_scope('transform_net2', reuse=tf.AUTO_REUSE) as sc:
        transform = feature_transform_net(net, is_training, bn_decay, K=64)
    end_points['transform'] = transform
    net_transformed = tf.matmul(tf.squeeze(net, axis=[2]), transform)
    net_transformed = tf.expand_dims(net_transformed, [2])
    # Second shared MLP lifting features to 1024 channels.
    net = tf_util.conv2d(net_transformed, 64, [1,1],
                         padding='VALID', stride=[1,1],
                         bn=True, is_training=is_training,
                         scope='conv3', bn_decay=bn_decay)
    net = tf_util.conv2d(net, 128, [1,1],
                         padding='VALID', stride=[1,1],
                         bn=True, is_training=is_training,
                         scope='conv4', bn_decay=bn_decay)
    net = tf_util.conv2d(net, 1024, [1,1],
                         padding='VALID', stride=[1,1],
                         bn=True, is_training=is_training,
                         scope='conv5', bn_decay=bn_decay)
    # Symmetric function: max pooling
    features = tf_util.max_pool2d(net, [num_point,1],
                                  padding='VALID', scope='maxpool')
    net = tf.reshape(features, [batch_size, -1])
    # FC classification head with dropout: 512 -> 256 -> 40 logits.
    net = tf_util.fully_connected(net, 512, bn=True, is_training=is_training,
                                  scope='fc1', bn_decay=bn_decay)
    net = tf_util.dropout(net, keep_prob=0.7, is_training=is_training,
                          scope='dp1')
    net = tf_util.fully_connected(net, 256, bn=True, is_training=is_training,
                                  scope='fc2', bn_decay=bn_decay)
    net = tf_util.dropout(net, keep_prob=0.7, is_training=is_training,
                          scope='dp2')
    net = tf_util.fully_connected(net, 40, activation_fn=None, scope='fc3')
    return net, end_points, features
def get_loss(pred, label, end_points, reg_weight=0.001):
    """Total training loss: classification + transform-orthogonality term.

    pred: B x NUM_CLASSES logits; label: B integer class ids.
    end_points['transform'] holds the BxKxK feature-transform matrix whose
    deviation from orthogonality is penalized with weight ``reg_weight``.
    """
    # Mean softmax cross-entropy over the batch.
    xent = tf.nn.sparse_softmax_cross_entropy_with_logits(logits=pred, labels=label)
    classify_loss = tf.reduce_mean(xent)
    tf.summary.scalar('classify loss', classify_loss)

    # Enforce the transformation as orthogonal matrix: penalize ||T*T^t - I||.
    trans = end_points['transform']  # BxKxK
    k = trans.get_shape()[1].value
    gram = tf.matmul(trans, tf.transpose(trans, perm=[0, 2, 1]))
    residual = gram - tf.constant(np.eye(k), dtype=tf.float32)
    mat_diff_loss = tf.nn.l2_loss(residual)
    tf.summary.scalar('mat loss', mat_diff_loss)

    return classify_loss + mat_diff_loss * reg_weight
if __name__ == '__main__':
    # Smoke test: build the classification graph on a dummy batch
    # (32 clouds x 1024 points x 3 coords) and print the resulting tensors.
    with tf.Graph().as_default():
        dummy_points = tf.zeros((32, 1024, 3))
        model_outputs = get_model(dummy_points, tf.constant(True))
        print(model_outputs)
| 49.44186
| 141
| 0.602106
| 3,780
| 27,638
| 4.166402
| 0.053175
| 0.093339
| 0.042288
| 0.080005
| 0.895358
| 0.880119
| 0.87142
| 0.862785
| 0.85561
| 0.849006
| 0
| 0.04708
| 0.271438
| 27,638
| 558
| 142
| 49.530466
| 0.735052
| 0.118822
| 0
| 0.756098
| 0
| 0
| 0.030955
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.026829
| false
| 0
| 0.017073
| 0
| 0.070732
| 0.002439
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
300da39875fe59cdec6a31fbd6e1a8d4360789d9
| 454
|
py
|
Python
|
CMU-Crowd/main/decorators.py
|
A2Zadeh/CMU-Crowd
|
43949351a1a8534c63355eb819602c1ffc070594
|
[
"MIT"
] | 2
|
2019-10-08T06:39:21.000Z
|
2019-12-19T20:18:27.000Z
|
CMU-Crowd/main/decorators.py
|
A2Zadeh/CMU-Crowd
|
43949351a1a8534c63355eb819602c1ffc070594
|
[
"MIT"
] | 4
|
2020-02-12T00:40:23.000Z
|
2021-06-10T21:35:36.000Z
|
CMU-Crowd/main/decorators.py
|
A2Zadeh/CMU-Crowd
|
43949351a1a8534c63355eb819602c1ffc070594
|
[
"MIT"
] | null | null | null |
from django.contrib.auth.decorators import user_passes_test
def worker_required(function=None):
    """Restrict a view to users whose ``is_worker`` flag is truthy.

    Works both as a bare decorator (``@worker_required``) and as a
    decorator factory (``@worker_required()``).
    """
    decorator = user_passes_test(lambda u: u.is_worker)
    return decorator(function) if function else decorator
def admin_required(function=None):
    """Restrict a view to users whose ``is_admin`` flag is truthy.

    Works both as a bare decorator (``@admin_required``) and as a
    decorator factory (``@admin_required()``).
    """
    decorator = user_passes_test(lambda u: u.is_admin)
    return decorator(function) if function else decorator
| 28.375
| 59
| 0.73348
| 57
| 454
| 5.561404
| 0.385965
| 0.283912
| 0.252366
| 0.365931
| 0.750789
| 0.750789
| 0.750789
| 0.750789
| 0.750789
| 0.37224
| 0
| 0
| 0.207048
| 454
| 16
| 60
| 28.375
| 0.880556
| 0
| 0
| 0.615385
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.153846
| false
| 0.230769
| 0.076923
| 0
| 0.538462
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 9
|
301eb56dcc5885b4cbc474bf5a5514faa7b0cf4b
| 14,283
|
py
|
Python
|
challenge/agoda_cancellation_prediction.py
|
roizhv22/IML.HUJI
|
b6efdf4ca21ef5cf33da222d74330a19e2177527
|
[
"MIT"
] | null | null | null |
challenge/agoda_cancellation_prediction.py
|
roizhv22/IML.HUJI
|
b6efdf4ca21ef5cf33da222d74330a19e2177527
|
[
"MIT"
] | null | null | null |
challenge/agoda_cancellation_prediction.py
|
roizhv22/IML.HUJI
|
b6efdf4ca21ef5cf33da222d74330a19e2177527
|
[
"MIT"
] | null | null | null |
import math
import sklearn.linear_model
import sklearn.neural_network
import tqdm
from sklearn.base import BaseEstimator
from challenge.agoda_cancellation_estimator import AgodaCancellationEstimator
from IMLearn.utils import split_train_test
import numpy as np
import pandas as pd
import time
def load_data(filename: str):
    """
    Load Agoda booking cancellation dataset
    Parameters
    ----------
    filename: str
        Path to the training CSV file
    Returns
    -------
    Tuple of (features, labels):
    features: pandas.DataFrame with datetime columns expanded to numeric
        calendar parts and all remaining strings numerically encoded
    labels: pandas.Series of 0/1 cancellation indicators
    """
    full_data = pd.read_csv(filename).drop_duplicates()

    def _expand_datetime(df, col):
        # Replace a raw datetime column with its numeric calendar components.
        # NOTE(review): Series.dt.week is deprecated in pandas >= 1.1
        # (use .dt.isocalendar().week); kept for behavioral parity.
        df[col] = pd.to_datetime(df[col])
        parts = df[col].dt
        df[col + '_year'] = parts.year
        df[col + '_month'] = parts.month
        df[col + '_week'] = parts.week
        df[col + '_day'] = parts.day
        df[col + '_hour'] = parts.hour
        df[col + '_minute'] = parts.minute
        df[col + '_day_of_week'] = parts.dayofweek
        return df.drop([col], axis=1)

    # Expand every datetime column the model cannot consume directly.
    for date_col in ('booking_datetime', 'checkin_date', 'checkout_date',
                     'hotel_live_date'):
        full_data = _expand_datetime(full_data, date_col)

    # Encode the most frequent cancellation-policy codes and nationalities as
    # distinct powers of two; anything outside the lists maps to 0.
    interesting_policies = ['1D1N_1N', '1D100P', '3D1N_1N', '3D1N_100P',
                            '3D100P', '365D100P_100P',
                            '2D100P', '3D100P_100P', '1D100P_100P',
                            '7D100P_100P', '7D1N_100P',
                            '0D0N', '7D100P', '2D1N_1N', '14D100P_100P',
                            '1D20P_100P', '1D1N_100P',
                            '2D100P_100P', 'UNKNOWN', '14D100P']
    interesting_nationalities = ['South Korea', 'Taiwan', 'Malaysia',
                                 'Hong Kong', 'Japan', 'China',
                                 'UNKNOWN', 'Thailand', 'Philippines',
                                 'United States of America',
                                 'Singapore', 'Indonesia', 'Australia',
                                 'Vietnam', 'United Kingdom',
                                 'India', 'Saudi Arabia', 'Russia', 'Macau',
                                 'France']
    policy_weight = {name: 2 ** i for i, name in enumerate(interesting_policies)}
    nationality_weight = {name: 2 ** i
                          for i, name in enumerate(interesting_nationalities)}
    # Charge option encoded with hand-picked magnitudes; unknown values -> -1.
    charge_weight = {"Pay Later": 100, "Pay Now": 20}

    full_data["relevant_cancel"] = full_data['cancellation_policy_code'].apply(
        lambda code: policy_weight.get(code, 0))
    full_data["relevant_nationality"] = full_data[
        'customer_nationality'].apply(lambda nat: nationality_weight.get(nat, 0))
    full_data['charge_parsed'] = full_data['charge_option'].apply(
        lambda opt: charge_weight.get(opt, -1))

    # Label: 1 iff a cancellation date was recorded (see date_to_bool).
    labels = full_data["cancellation_datetime"].apply(date_to_bool)
    # Drop identifier and already-encoded raw columns.
    features = full_data.drop(
        ["cancellation_datetime", "h_booking_id", "h_customer_id",
         'cancellation_policy_code', 'customer_nationality',
         'hotel_id'],
        axis=1)
    # Numerically encode any remaining string cells (see str_to_ascii).
    features = features.applymap(str_to_ascii)
    return features, labels
def date_to_bool(value):
    """Map a cancellation-date cell to a binary label.

    A string cell (a recorded date) -> 1; NaN -> 0; any other numeric
    value -> 1.
    """
    if isinstance(value, str):
        return 1
    return 0 if math.isnan(value) else 1
def str_to_ascii(cur_str):
    """Numerically encode a cell: strings become the sum of their
    character ordinals; NaN becomes 0; other values pass through unchanged.
    """
    if not isinstance(cur_str, str):
        return 0 if math.isnan(cur_str) else cur_str
    return sum(ord(ch) for ch in cur_str)
def evaluate_and_export(estimator: BaseEstimator, X: np.ndarray,
                        filename: str):
    """
    Export to specified file the prediction results of given estimator on given testset.
    File saved is in csv format with a single column named 'predicted_values' and n_samples rows containing
    predicted values.
    Parameters
    ----------
    estimator: BaseEstimator or any object implementing predict() method as in BaseEstimator (for example sklearn)
        Fitted estimator to use for prediction
    X: ndarray of shape (n_samples, n_features)
        Test design matrix to predict its responses
    filename:
        path to store file at
    """
    predictions = estimator.predict(X)
    frame = pd.DataFrame(predictions, columns=["predicted_values"])
    frame.to_csv(filename, index=False)
def parse_test(df: pd.DataFrame):
    """Extract the 0/1 label fused into the 'h_booking_id|label' column.

    Each cell looks like '<booking_id>|<label>'; the label is the final
    character of the string.
    """
    return df['h_booking_id|label'].apply(lambda cell: int(cell[-1]))
def load_set_label(filename: str):
    """
    Load an Agoda booking cancellation TEST set (no label column).
    Parameters
    ----------
    filename: str
        Path to the test CSV file
    Returns
    -------
    pandas.DataFrame of features preprocessed exactly like load_data's
    output (datetime columns expanded, categorical columns weight-encoded,
    remaining strings numerically encoded), without labels.
    """
    full_data = pd.read_csv(filename).drop_duplicates()

    def _expand_datetime(df, col):
        # Replace a raw datetime column with its numeric calendar components.
        # NOTE(review): Series.dt.week is deprecated in pandas >= 1.1
        # (use .dt.isocalendar().week); kept for behavioral parity.
        df[col] = pd.to_datetime(df[col])
        parts = df[col].dt
        df[col + '_year'] = parts.year
        df[col + '_month'] = parts.month
        df[col + '_week'] = parts.week
        df[col + '_day'] = parts.day
        df[col + '_hour'] = parts.hour
        df[col + '_minute'] = parts.minute
        df[col + '_day_of_week'] = parts.dayofweek
        return df.drop([col], axis=1)

    # Expand every datetime column the model cannot consume directly.
    for date_col in ('booking_datetime', 'checkin_date', 'checkout_date',
                     'hotel_live_date'):
        full_data = _expand_datetime(full_data, date_col)

    # Same encoding tables as load_data; keep the two in sync.
    interesting_policies = ['1D1N_1N', '1D100P', '3D1N_1N', '3D1N_100P',
                            '3D100P', '365D100P_100P',
                            '2D100P', '3D100P_100P', '1D100P_100P',
                            '7D100P_100P', '7D1N_100P',
                            '0D0N', '7D100P', '2D1N_1N', '14D100P_100P',
                            '1D20P_100P', '1D1N_100P',
                            '2D100P_100P', 'UNKNOWN', '14D100P']
    interesting_nationalities = ['South Korea', 'Taiwan', 'Malaysia',
                                 'Hong Kong', 'Japan', 'China',
                                 'UNKNOWN', 'Thailand', 'Philippines',
                                 'United States of America',
                                 'Singapore', 'Indonesia', 'Australia',
                                 'Vietnam', 'United Kingdom',
                                 'India', 'Saudi Arabia', 'Russia', 'Macau',
                                 'France']
    policy_weight = {name: 2 ** i for i, name in enumerate(interesting_policies)}
    nationality_weight = {name: 2 ** i
                          for i, name in enumerate(interesting_nationalities)}
    # Charge option encoded with hand-picked magnitudes; unknown values -> -1.
    charge_weight = {"Pay Later": 100, "Pay Now": 20}

    full_data["relevant_cancel"] = full_data['cancellation_policy_code'].apply(
        lambda code: policy_weight.get(code, 0))
    full_data["relevant_nationality"] = full_data[
        'customer_nationality'].apply(lambda nat: nationality_weight.get(nat, 0))
    full_data['charge_parsed'] = full_data['charge_option'].apply(
        lambda opt: charge_weight.get(opt, -1))

    # Drop identifier and already-encoded raw columns (no label here).
    features = full_data.drop(
        ["h_booking_id", "h_customer_id",
         'cancellation_policy_code', 'customer_nationality',
         'hotel_id'],
        axis=1)
    # Numerically encode any remaining string cells (see str_to_ascii).
    features = features.applymap(str_to_ascii)
    return features
if __name__ == '__main__':
    # Train on the full training CSV, then score the model on each of the
    # four weekly test sets. Paths are relative to the challenge layout.
    np.random.seed(0)
    # Load data
    df, cancellation_labels = load_data(
        "../datasets/agoda_cancellation_train.csv")
    estimator = AgodaCancellationEstimator()
    print("fitting")
    estimator.fit(df, cancellation_labels)
    print("predicting")
    val = []  # per-week loss values
    # Fit model over data
    for i in range(1, 5):
        test_X = load_set_label(f"../challenge/test_set_week_{i}.csv")
        # Labels arrive fused into the booking-id column; parse_test splits them.
        test_Y = parse_test(
            pd.read_csv(f"../challenge/test_set_week_{i}_labels.csv",
                        dtype=str))
        # evaluate_and_export(estimator, test_X, f"results_{i}.csv")
        val.append(estimator.loss(test_X, test_Y))
        # print(f"Loss on test_{i} was {val[-1]}")
    print(val)
    # Store model predictions over test set
    # print("finishing")
| 40.808571
| 114
| 0.641392
| 1,788
| 14,283
| 4.782998
| 0.139262
| 0.15435
| 0.051684
| 0.086062
| 0.812325
| 0.805309
| 0.79689
| 0.793265
| 0.793265
| 0.793265
| 0
| 0.024498
| 0.239796
| 14,283
| 349
| 115
| 40.925501
| 0.763124
| 0.154799
| 0
| 0.783898
| 0
| 0
| 0.299394
| 0.064204
| 0
| 0
| 0
| 0.005731
| 0
| 1
| 0.050847
| false
| 0
| 0.042373
| 0.004237
| 0.190678
| 0.012712
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3020c638591c67dbda3e34c421e49f0c395f469f
| 44
|
py
|
Python
|
hello_kjmerf/say_hello.py
|
kjmerf/hello_pypi
|
0416a59fb80a1bc86dafdfebb457d8bf5ec207bb
|
[
"MIT"
] | null | null | null |
hello_kjmerf/say_hello.py
|
kjmerf/hello_pypi
|
0416a59fb80a1bc86dafdfebb457d8bf5ec207bb
|
[
"MIT"
] | 1
|
2020-04-05T16:54:28.000Z
|
2020-04-05T16:54:28.000Z
|
hello_kjmerf/say_hello.py
|
kjmerf/hello_kjmerf
|
0416a59fb80a1bc86dafdfebb457d8bf5ec207bb
|
[
"MIT"
] | null | null | null |
def say_hello():
    """Return the package's greeting string."""
    return 'hello kjmerf!'
| 14.666667
| 26
| 0.659091
| 6
| 44
| 4.666667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.204545
| 44
| 2
| 27
| 22
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0.295455
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
3055292fcca9448bf364ebae2f3cfab444377aa8
| 164
|
py
|
Python
|
docs/manual/gears/examples/rng_cnt.py
|
bogdanvuk/pygears
|
a0b21d445e1d5c89ad66751447b8253536b835ee
|
[
"MIT"
] | 120
|
2018-04-23T08:29:04.000Z
|
2022-03-30T14:41:52.000Z
|
docs/manual/gears/examples/rng_cnt.py
|
FZP1607152286/pygears
|
a0b21d445e1d5c89ad66751447b8253536b835ee
|
[
"MIT"
] | 12
|
2019-07-09T17:12:58.000Z
|
2022-03-18T09:05:10.000Z
|
docs/manual/gears/examples/rng_cnt.py
|
FZP1607152286/pygears
|
a0b21d445e1d5c89ad66751447b8253536b835ee
|
[
"MIT"
] | 12
|
2019-05-10T19:42:08.000Z
|
2022-03-28T18:26:44.000Z
|
from pygears.lib import rng
from pygears.lib import shred
from pygears.lib.verif import drv
from pygears.typing import Uint

# Example pipeline: drive a single 4-bit value (10) into the `rng` gear and
# discard the generated output with `shred` (graph construction only; no
# simulation is started here).
drv(t=Uint[4], seq=[10]) | rng | shred
| 23.428571
| 38
| 0.762195
| 29
| 164
| 4.310345
| 0.482759
| 0.352
| 0.336
| 0.32
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.021277
| 0.140244
| 164
| 6
| 39
| 27.333333
| 0.865248
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.8
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
307b1de3f6d3fb302b107882901df84bd4d56b93
| 16,285
|
py
|
Python
|
pdb_prep.py
|
continuous-symmetry-measure/pdb_prep
|
6cbd2ca43b4a52c998fdbd7e50a6ac48f991862f
|
[
"BSD-2-Clause"
] | null | null | null |
pdb_prep.py
|
continuous-symmetry-measure/pdb_prep
|
6cbd2ca43b4a52c998fdbd7e50a6ac48f991862f
|
[
"BSD-2-Clause"
] | 4
|
2018-12-06T08:57:24.000Z
|
2019-04-10T19:53:50.000Z
|
pdb_prep.py
|
continuous-symmetry-measure/pdb_prep
|
6cbd2ca43b4a52c998fdbd7e50a6ac48f991862f
|
[
"BSD-2-Clause"
] | 1
|
2021-11-21T10:18:17.000Z
|
2021-11-21T10:18:17.000Z
|
#!/usr/bin/env python
# import sys
# print(f"path: {sys.path}")
import click
from Chemistry.PDB.pdb_utils import *
from PDB_Prep.clean_stages import stages
from PDB_Prep.pdb_prep_functions import copy_data_into_dir, clean_tmp_data_dir_mode, clean_tmp_data_file_mode, \
finish_outputs, validate_options, other_methods_validate_params
from PDB_Prep.pdb_prep_functions import xray_validate_params, nmr_validate_params
from PDB_Prep.pdb_prep_inform import xray_inform, nmr_inform, other_methods_inform
from Utils.cli_utils import cli_utils as cu
from version import __VERSION__
# Root command group; subcommands (nmr, xray, ...) attach via @cli.command().
# NOTE: the docstring below is the exact text `pdb_prep --help` prints, so it
# is left untouched.
@click.group()
def cli():
    """
    pdb preprations
    need help?
    try : pdb_prep COMMAND --help
    """
    pass
# CLI entry point for preparing NMR-derived PDB files. The option decorators
# and the docstring are user-visible help text (click prints them verbatim),
# so both are kept byte-identical.
@cli.command()
@click.option('--pdb-dir', default='.', help='The input pdb directory containing PDB files', show_default=True)
@click.option('--pdb-file', help='Input pdb file (use this or the --pdb-dir option!)', show_default=True)
@click.option('--with-hydrogens/--no-hydrogens', default=False,
              help='Leave hydrogen atoms and hetatms from the files - default --no-hydrogens')  # , show_default=True)
@click.option('--ptype',
              type=click.Choice(['homomer', 'heteromer', 'monomer'], case_sensitive=False),
              help="Protein stoichiometry (defualt: homomer)")
@click.option('--parse-rem350/--ignore-rem350', default=True,
              help='Parse or ignore remark 350 - default --parse-rem350')  # show_default=True)
@click.option('--bio-molecule-chains', type=click.INT, help='Number of peptides in remark 350')
@click.option('--output-dir', default='output.{time}', help='Output dir', show_default=True)
@click.option('--output-text/--output-json', default=True,
              help='Output report in text or json - default --output-text')  # , show_default=True)
@click.option('--verbose', is_flag=True, default=False, help='Verbose mode', show_default=True)
def nmr(pdb_dir, pdb_file, with_hydrogens, ptype, parse_rem350, bio_molecule_chains, output_dir,
        output_text, verbose):
    """
    \b
    This procedure prepares protein files in pdb format from NMR measurements for
    a CSM calculation according to the following stage:
    1.  Removing non-coordinates lines from the atom section.
    2.  Removing ligands and solvent lines at the end of peptides.
        HETATOM lines in the middle of a peptide are retained.
    3.  Cleaning gaps in the sequence according to REMARK 470 (missing residues)
        and REMARK 465 (missing atoms):
        a.  If a backbone atom is missing - the whole amino acid is deleted.
        b.  If a side chain atom is missing – the side chain is removed.
        c.  For homomers – gap on one peptide causes the removal of the related
            atoms from all other peptides.
    4.  Retaining the first location in cases of alternate location.
    5.  Removing hydrogen atoms (optional).
    6.  Ignoring pdb files for which the asymmetric unit does not represent a
        biological structure (e.g., non unit matrix in REMARK 350).
    7.  For homomers, checking that all peptides are of the same length.
    """
    print("Version: {}".format(__VERSION__))
    # All real work happens in func_nmr; its return value is the process
    # exit code.
    ret_val = func_nmr(pdb_dir, pdb_file, with_hydrogens, ptype, parse_rem350, bio_molecule_chains, output_dir,
                       output_text, verbose)
    exit(ret_val)
def func_nmr(pdb_dir, pdb_file, with_hydrogens, ptype, parse_rem350, bio_molecule_chains, output_dir,
             output_text, verbose):
    # Implementation behind the `nmr` CLI command. Validates the options,
    # builds an NMR informer, and delegates to the shared non-X-ray pipeline.
    # Returns a process exit code (0 on success, -1 on invalid options, or
    # whatever func_process_non_xray returns).
    caller = func_nmr.__name__
    is_homomer = True
    # A single chain in REMARK 350 implies a monomer unless stated otherwise.
    if ptype is None and bio_molecule_chains == 1:
        ptype = 'monomer'
        cu(click).msg("The option '--ptype' was set to monomer since you set '--bio-molecule-chains' to 1")
    if ptype == 'heteromer':
        is_homomer = False
    # --ignore-rem350 is expressed downstream as "remark 350 in ignore list".
    ignore_remarks = []
    if not parse_rem350:
        ignore_remarks.append(350)
    cliutils = nmr_validate_params(pdb_dir=pdb_dir, pdb_file=pdb_file, output_dir=output_dir, verbose=verbose)
    if not validate_options(parse_rem350, bio_molecule_chains, ptype, cliutils):
        return -1
    cliutils.verbose("options are valid", caller=caller)
    informer = nmr_inform(cliutils, is_verbose=verbose, include_hetatm=True, ignore_remarks=ignore_remarks,
                          bio_molecule_chains=bio_molecule_chains)
    return func_process_non_xray(pdb_dir, pdb_file, with_hydrogens, bio_molecule_chains, output_text, is_homomer,
                                 ignore_remarks,
                                 informer, cliutils, verbose)
def func_process_non_xray(pdb_dir, pdb_file, with_hydrogens, bio_molecule_chains, output_text, is_homomer,
                          ignore_remarks, informer, cliutils, verbose):
    # Shared pipeline for non-X-ray inputs: gather file info (single file or
    # whole directory), filter, then copy or clean each output bucket.
    # Returns a process exit code (0 on success, 31 when the input directory
    # holds no PDB files).
    caller = func_process_non_xray.__name__
    report = ""
    cliutils.verbose("Start", caller=caller)
    mode_file_or_dir = None
    short_file_name = None
    if pdb_file:
        # Single-file mode: --pdb-file wins over --pdb-dir.
        mode_file_or_dir = "file"
        # NOTE(review): parse_rem350 is assigned here but never read in this
        # function -- confirm whether it was meant to be passed downstream.
        parse_rem350 = False
        pdb_dir, short_file_name = os.path.split(pdb_file)
        informer.process_one_file(pdb_dir, short_file_name, click)
        cliutils.verbose("{} - informer.process_one_file - finished".format(short_file_name), caller=caller)
        # informer.ignore_remarks.append(2) # remark 2 is resolution - ignore it
        # informer.ignore_remarks.append(3) # remark 3 is r_free - ignore it
    elif pdb_dir:
        # Directory mode: scan every PDB file under --pdb-dir.
        mode_file_or_dir = "dir"
        try:
            informer.process_complete_dir(pdb_dir, click)
            cliutils.verbose("informer.process_complete_dir ended", caller=caller)
        except IndexError as e:
            cliutils.error_msg("I did not find any PDB files in the input folder", caller=caller)
            return 31
    # limit_r_free_grade_text = r_free_grade_values.from_value(limit_r_free_grade)
    informer.filter_data(click=click, test_is_homomer=is_homomer)
    stager = stages(cliutils, informer, is_homomer=is_homomer)
    # Each entry is (output subdirectory, its files, 'copy' or 'clean').
    for directory, data, copy_or_clean in sorted(informer.output_data_config):
        if len(data) == 0:
            cliutils.verbose("{} - no items to process - I will continue".format(directory), caller=caller)
            continue
        dest_path = stager.set_dest_path(directory)
        cliutils.verbose("creating: {} ".format(dest_path), caller=caller)
        rv = cliutils.mkdir(dirname=dest_path, raise_error_if_exists=False)
        if rv != 0:
            # cliutils.exit terminates the process with the given code.
            cliutils.exit(rv, 'ERROR', "could not mkdir {} retval is {} ".format(directory, rv))
        if copy_or_clean == 'copy':
            copy_data_into_dir(source_path=pdb_dir, dest_path=dest_path, data=data, cliutils=cliutils)
        else:
            # 'clean' buckets go through the full multi-stage cleaner.
            informer.data, report = stager.run_clean_stages(
                directory=directory,
                dest_path=dest_path,
                data=data,
                with_hydrogens=with_hydrogens,
                ignore_remarks=ignore_remarks,
                bio_molecule_chains=bio_molecule_chains
            )
    # Temp-data cleanup differs between single-file and directory runs.
    if mode_file_or_dir == "file":
        clean_tmp_data_file_mode(stager, pdb_dir, short_file_name, informer, cliutils)
    else:
        clean_tmp_data_dir_mode(stager, pdb_dir, informer, cliutils)
    if output_text:
        output_type = "text"
    else:
        output_type = "json"
    finish_outputs(mode_file_or_dir, informer, cliutils, stager, report, output_type)
    return 0
@cli.command()
@click.option('--pdb-dir', default='.', help='Input pdb directory containing PDB files', show_default=True)
@click.option('--pdb-file', help='Input pdb file (use this or the --pdb-dir option!)', show_default=True)
@click.option('--max-resolution', default=2.0, type=float, help='Maximum allowed resolution', show_default=True)
@click.option('--limit-r-free-grade', default='C', type=click.Choice(['A', 'B', 'C', 'D', 'E']),
              help='Limit for R_free_grade:\n' +
                   'A - MUCH BETTER THAN AVERAGE at this resolution\n' +
                   'B - BETTER THAN AVERAGE at this resolution\n' +
                   'C - AVERAGE at this resolution\n' +
                   'D - WORSE THAN AVERAGE at this resolution\n' +
                   'E - UNRELIABLE\n',
              show_default=True)
@click.option('--with-hydrogens/--no-hydrogens', default=False,
              help='Leave hydrogen atoms and hetatms from the files - default --no-hydrogens')  # , show_default=True)
@click.option('--ptype',
              type=click.Choice(['homomer', 'heteromer', 'monomer'], case_sensitive=False),
              help="Protein stoichiometry (defualt: homomer)")
@click.option('--parse-rem350/--ignore-rem350', default=True,
              help='Parse or ignore remark 350 - default --parse-rem350')  # show_default=True)
@click.option('--bio-molecule-chains', type=click.INT, help='Number of peptides in remark 350')
@click.option('--output-dir', default='output.{time}', help='Output dir', show_default=True)
@click.option('--output-text/--output-json', default=True,
              help='Output report in text or json - default --output-text')  # , show_default=True)
@click.option('--verbose', is_flag=True, default=False, help='Verbose mode', show_default=True)
def xray(pdb_dir, pdb_file, max_resolution, limit_r_free_grade, with_hydrogens, ptype,
         parse_rem350, bio_molecule_chains, output_dir, output_text, verbose):
    """
    \b
    This procedure prepares protein files in pdb format from X-RAY measurements for a
    CSM calculation according.
    At first, the files are split into three categories according to their resolution
    and R_free grade:
    a. Reliable – PDB files with a resolution of up to 2.0 and an R_free grade of C
    (Average at this resolution). Thresholds can be changed.
    b. Reliable_r_grade – PDB files with a resolution of up to 2.0 and no R_free data
    c. Others – PDB files with bad resolution or R_free grade
    Reliable files are further processed according to the following stages:
    1. Removing non-coordinates lines from the atom section.
    2. Removing ligands and solvent lines at the end of peptides. HETATOM lines in the
    middle of a peptide are retained.
    3. Cleaning gaps in the sequence according to REMARK 470 (missing residues) and REMARK
    465 (missing atoms):
    a. If a backbone atom is missing - the whole amino acid is deleted.
    b. If a side chain atom is missing – the side chain is removed.
    c. For homomers – gap on one peptide causes the removal of the related atoms from
    all other peptides.
    4. Retaining the first location in cases of alternate location.
    5. Removing hydrogen atoms (optional).
    6. Ignoring pdb files for which the asymmetric unit does not represent a biological structure
    (e.g., non unit matrix in REMARK 350).
    7. For homomers, checking that all peptides are of the same length.
    """
    # Local import keeps the fix self-contained; the top of the file is not
    # guaranteed to import sys.
    import sys
    print("Version: {}".format(__VERSION__))
    ret_val = func_xray(pdb_dir, pdb_file, max_resolution, limit_r_free_grade, with_hydrogens, ptype,
                        parse_rem350, bio_molecule_chains, output_dir, output_text, verbose)
    # sys.exit() instead of the site-module exit() builtin: exit() is meant for
    # the interactive interpreter and is absent when Python runs with -S.
    sys.exit(ret_val)
def func_xray(pdb_dir, pdb_file, max_resolution, limit_r_free_grade, with_hydrogens, ptype,
              parse_rem350, bio_molecule_chains, output_dir, output_text, verbose):
    """Prepare X-RAY PDB input (a single file or a whole directory) for a CSM run.

    Filters structures by resolution / R_free grade, sorts them into output
    directories, runs the cleaning stages on the "clean" category, and writes
    a final report.

    Returns:
        0 on success, -1 on invalid option combinations, 31 when the input
        directory contains no PDB files.
    """
    caller = func_xray.__name__
    report = ""
    is_homomer = True
    # --bio-molecule-chains 1 implies a monomer when the user did not say otherwise.
    if ptype is None and bio_molecule_chains == 1:
        ptype = 'monomer'
        cu(click).msg("The option '--ptype' was set to monomer since you set '--bio-molecule-chains' to 1")
    if ptype == 'heteromer':
        is_homomer = False
    ignore_remarks = []
    if not parse_rem350:
        ignore_remarks.append(350)
    cliutils = xray_validate_params(pdb_dir, pdb_file, max_resolution, limit_r_free_grade, output_dir, verbose)
    if not validate_options(parse_rem350, bio_molecule_chains, ptype, cliutils):
        return -1
    informer = xray_inform(cliutils, is_verbose=verbose, include_hetatm=True, ignore_remarks=ignore_remarks,
                           bio_molecule_chains=bio_molecule_chains)
    # Single-file mode takes precedence over directory mode.
    mode_file_or_dir = None
    short_file_name = None
    if pdb_file:
        mode_file_or_dir = "file"
        pdb_dir, short_file_name = os.path.split(pdb_file)
        # print(">>>>>>>>>>{}-{}".format(pdb_file, get_experimental_method(pdb_file)))
        informer.process_one_file(pdb_dir, short_file_name, click)
    elif pdb_dir:
        mode_file_or_dir = "dir"
        try:
            informer.process_complete_dir(pdb_dir, click)
            cliutils.verbose("informer.process_complete_dir ended", caller=caller)
        except IndexError as e:
            # process_complete_dir raises IndexError on an empty folder —
            # presumably by indexing into an empty file list; confirm upstream.
            cliutils.error_msg("I did not find any PDB files in the input folder", caller=caller)
            return 31
    limit_r_free_grade_text = r_free_grade_values.from_value(limit_r_free_grade)
    informer.filter_data(max_resolution=max_resolution, limit_r_free_grade=limit_r_free_grade_text, click=click,
                         test_is_homomer=is_homomer)
    # sort the files into directories
    stager = stages(cliutils, informer, is_homomer=is_homomer)
    # output_data_config yields (directory name, items, 'copy'|'clean') triples.
    for directory, data, copy_or_clean in informer.output_data_config:
        if len(data) == 0:
            cliutils.verbose("{} - no items to process - I will continue".format(directory), caller=caller)
            continue
        dest_path = stager.set_dest_path(directory)
        cliutils.verbose("creating: {}".format(dest_path), caller=caller)
        rv = cliutils.mkdir(dirname=dest_path, raise_error_if_exists=False)
        if rv != 0:
            cliutils.exit(rv, 'ERROR', "could not mkdir {} retval is {} ".format(directory, rv))
        if copy_or_clean == 'copy':
            # Categories that only need sorting are copied verbatim.
            copy_data_into_dir(source_path=pdb_dir, dest_path=dest_path, data=data, cliutils=cliutils)
        else:
            # Cleaning stages rewrite the files and produce the textual report
            # used below; informer.data is replaced with the cleaned set.
            informer.data, report = stager.run_clean_stages(
                directory=directory,
                dest_path=dest_path,
                data=data,
                with_hydrogens=with_hydrogens,
                ignore_remarks=ignore_remarks,
                bio_molecule_chains=bio_molecule_chains
            )
    # clean_missing_residues(data)
    # missing rsidues
    if mode_file_or_dir == "file":
        clean_tmp_data_file_mode(stager, pdb_dir, short_file_name, informer, cliutils)
    else:
        clean_tmp_data_dir_mode(stager, pdb_dir, informer, cliutils)
    if output_text:
        output_type = "text"
    else:
        output_type = "json"
    finish_outputs(mode_file_or_dir, informer, cliutils, stager, report, output_type)
    return 0
def func_others_methods(pdb_dir, pdb_file, with_hydrogens, ptype, parse_rem350, bio_molecule_chains, output_dir,
                        output_text, verbose):
    """Prepare non-X-RAY structures (NMR, EM, ...) for a CSM run.

    Validates the option combination, builds the informer for the
    non-X-RAY pipeline, and delegates the actual work to
    func_process_non_xray.

    Returns:
        -1 on invalid options, otherwise whatever func_process_non_xray returns.
    """
    caller = func_others_methods.__name__
    # --bio-molecule-chains 1 implies a monomer when --ptype was not given.
    if ptype is None and bio_molecule_chains == 1:
        ptype = 'monomer'
        cu(click).msg("The option '--ptype' was set to monomer since you set '--bio-molecule-chains' to 1")
    is_homomer = ptype != 'heteromer'
    ignore_remarks = [] if parse_rem350 else [350]
    cliutils = other_methods_validate_params(pdb_dir=pdb_dir, pdb_file=pdb_file, output_dir=output_dir, verbose=verbose)
    if not validate_options(parse_rem350, bio_molecule_chains, ptype, cliutils):
        return -1
    cliutils.verbose("options are valid", caller=caller)
    informer = other_methods_inform(cliutils, is_verbose=verbose, include_hetatm=True, ignore_remarks=ignore_remarks,
                                    bio_molecule_chains=bio_molecule_chains)
    return func_process_non_xray(pdb_dir, pdb_file, with_hydrogens, bio_molecule_chains, output_text, is_homomer,
                                 ignore_remarks, informer, cliutils, verbose)
# Script entry point: dispatch to the click command group.
if __name__ == '__main__':
    cli()
| 50.417957
| 121
| 0.663678
| 2,160
| 16,285
| 4.760185
| 0.132407
| 0.019257
| 0.051255
| 0.027232
| 0.876386
| 0.855865
| 0.842832
| 0.819101
| 0.81171
| 0.81171
| 0
| 0.011534
| 0.244028
| 16,285
| 322
| 122
| 50.574534
| 0.823085
| 0.194105
| 0
| 0.733333
| 0
| 0.013333
| 0.174335
| 0.029454
| 0
| 0
| 0
| 0
| 0
| 1
| 0.031111
| false
| 0.004444
| 0.035556
| 0
| 0.106667
| 0.008889
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0656cbe245fa87f613856e93c3b4c350b6bc51fe
| 39
|
py
|
Python
|
Other/__init__.py
|
AcudoDev/FinanceToolbox
|
90676e798f2e8eac164ccfcd6708cc717e1911f2
|
[
"MIT"
] | null | null | null |
Other/__init__.py
|
AcudoDev/FinanceToolbox
|
90676e798f2e8eac164ccfcd6708cc717e1911f2
|
[
"MIT"
] | null | null | null |
Other/__init__.py
|
AcudoDev/FinanceToolbox
|
90676e798f2e8eac164ccfcd6708cc717e1911f2
|
[
"MIT"
] | null | null | null |
from .GaussianRandomStockPrice import *
| 39
| 39
| 0.871795
| 3
| 39
| 11.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 39
| 1
| 39
| 39
| 0.944444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
001074359d85d4dce107a92f3f96bbbd67b8a5c9
| 17,102
|
py
|
Python
|
userbot/modules/randoms.py
|
TAMILVIP007/javes-3.0
|
d9238785fa2d79740bbb526aca92455dbccb3838
|
[
"MIT"
] | 1
|
2021-05-06T18:30:50.000Z
|
2021-05-06T18:30:50.000Z
|
userbot/modules/randoms.py
|
hellboi-atul/javes-3.0
|
8777d482bd1ee877a96332a2cd84d880c151fa43
|
[
"MIT"
] | null | null | null |
userbot/modules/randoms.py
|
hellboi-atul/javes-3.0
|
8777d482bd1ee877a96332a2cd84d880c151fa43
|
[
"MIT"
] | null | null | null |
#Made By Sh1vam Donot KANG
# ME MADE MORE THAN ONE AND MORE COMPLEX ONE WAS YT COMMENT
#I REMOVED COLOUR CAUSE ALL NEED TO REMEMBER HEX COLOUR CODES # replaced by %23
import os
import numpy as np
import requests
from PIL import Image
from telegraph import upload_file
from telethon.tl.types import MessageMediaPhoto
import re
from userbot.utils import admin_cmd
from userbot import bot
from userbot import bot as borg
# Working directory for media downloaded from replied-to messages;
# created at import time if it does not already exist.
sedpath = "./shivam/"
if not os.path.isdir(sedpath):
    os.makedirs(sedpath)
#keep CREDIT LINES ELSE GET LOST
# The original file contained 17 copy-pasted handlers (all shadowing the same
# name `lolmetrg`) that differed only in command pattern, API endpoint, output
# file name and caption.  One table + one factory replaces them; every
# runtime string (pattern, endpoint, filename, caption) is unchanged.
# pattern -> (some-random-api canvas endpoint, local output file, caption)
_CANVAS_EFFECTS = {
    "tig": ("triggered", "shivam.gif", "Triggered....😬"),
    "wst": ("wasted", "shivam.png", "⚰️ Wasted... 😵"),
    "rmbow": ("gay", "shivam.png", "🌈 The Rainbow Efect WOW 🏳️🌈"),
    "glass": ("glass", "shivam.png", "You got Into the Glass 😐"),
    "gry": ("greyscale", "shivam.png", "You got grey coloured 😝"),
    "invert": ("invert", "shivam.png", "i made u inverted 🙃"),
    "ig": ("invertgreyscale", "shivam.png", "no reactions found 🙄 "),
    "brght": ("brightness", "shivam.png", "brightness.....seems to be exploited ig 😶"),
    "bow": ("threshold", "shivam.png", "choose your which side 😏"),
    "sepia": ("sepia", "shivam.png", "See this is called sepia 🤐"),
    "red": ("red", "shivam.png", "bloody red u r now 😂😂😂"),
    "green": ("green", "shivam.png", "Go Green....Go Green...😂😂😂 "),
    "blue": ("blue", "shivam.png", "blue huh hmm what can i tell about this 🤔 "),
    "pixlte": ("pixelate", "shivam.png", "pixelate it is u kno 🤣🤣🤣"),
    "blur": ("blur", "shivam.png", "blur..🤓"),
    "brpl": ("blurple", "shivam.png", "blurple...😅"),
}


async def _replied_image(event):
    """Download the image from the message the command replies to.

    Returns (replied_message, local_image_path); the path is None after
    notifying the user when the reply is not an image.
    NOTE(review): like the original code this assumes the command IS a
    reply — event.get_reply_message() returns None otherwise and the
    attribute access raises; confirm that is acceptable.
    """
    sed = await event.get_reply_message()
    if isinstance(sed.media, MessageMediaPhoto):
        return sed, await borg.download_media(sed.media, sedpath)
    if "image" in sed.media.document.mime_type.split("/"):
        return sed, await borg.download_media(sed.media, sedpath)
    await event.edit("Reply To Image")
    return sed, None


async def _run_canvas(event, make_url, outfile, caption):
    """Upload the replied image, fetch the rendered effect, send it, clean up.

    make_url: callable mapping the telegra.ph image link to the full API URL.
    """
    sed, img = await _replied_image(event)
    if img is None:
        return
    url_s = upload_file(img)
    imglink = f"https://telegra.ph{url_s[0]}"
    r = requests.get(make_url(imglink))
    open(outfile, "wb").write(r.content)
    await borg.send_file(
        event.chat_id, outfile, caption=caption, reply_to=sed
    )
    # Remove both the rendered output and the downloaded source image.
    for path in (outfile, img):
        if path and os.path.exists(path):
            os.remove(path)


def _make_canvas_handler(endpoint, outfile, caption):
    # Factory binds per-command values now; a bare closure over the loop
    # variables would be late-bound and every handler would use the last row.
    async def _handler(event):
        await event.delete()
        await _run_canvas(
            event,
            lambda imglink: f"https://some-random-api.ml/canvas/{endpoint}?avatar={imglink}",
            outfile,
            caption,
        )
    return _handler


for _pattern, (_endpoint, _outfile, _caption) in _CANVAS_EFFECTS.items():
    bot.on(admin_cmd(pattern=_pattern))(_make_canvas_handler(_endpoint, _outfile, _caption))


@bot.on(admin_cmd(pattern=r"ytc"))
async def lolmetrg(event):
    """Render the replied image as a fake YouTube comment: `.ytc username.comment`."""
    text = event.text[5:]  # strip the ".ytc " prefix
    try:
        username, comment = text.split(".")
    except ValueError:
        # Wrong syntax (no dot, or more than one): tell the user and stop.
        # The original fell through here and proceeded with undefined
        # (or stale global) username/comment.
        await event.edit("`.ytc username.comment reply to image`")
        return
    await event.delete()
    await _run_canvas(
        event,
        lambda imglink: f"https://some-random-api.ml/canvas/youtube-comment?avatar={imglink}&comment={comment}&username={username}",
        "shivam.png",
        "😁 Guess From Where I Got This Commment 👀 ",
    )

# (The old triple-quoted, commented-out ".clr" handler was dead code and has
# been removed.)
| 37.920177
| 131
| 0.651444
| 2,454
| 17,102
| 4.463325
| 0.08965
| 0.052588
| 0.039441
| 0.065735
| 0.889437
| 0.889437
| 0.884963
| 0.878024
| 0.874372
| 0.870538
| 0
| 0.001846
| 0.208104
| 17,102
| 450
| 132
| 38.004444
| 0.804696
| 0.011285
| 0
| 0.781022
| 0
| 0.002433
| 0.173175
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.024331
| 0
| 0.065693
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cc5038047b3c8eb1437f23354766dbac1311ed35
| 163,599
|
py
|
Python
|
ppgan/models/lapstyle_model.py
|
JackMcCoy/PaddleGAN
|
a89fa7a1d7edd6a0e227c2941f0641700b20fe70
|
[
"Apache-2.0"
] | null | null | null |
ppgan/models/lapstyle_model.py
|
JackMcCoy/PaddleGAN
|
a89fa7a1d7edd6a0e227c2941f0641700b20fe70
|
[
"Apache-2.0"
] | null | null | null |
ppgan/models/lapstyle_model.py
|
JackMcCoy/PaddleGAN
|
a89fa7a1d7edd6a0e227c2941f0641700b20fe70
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserve.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import paddle
import paddle.nn.functional as F
import math,random,os,re
from PIL import Image
import numpy as np
from .base_model import BaseModel
import shutil
from .builder import MODELS
from .generators.builder import build_generator
from .criterions import build_criterion
from .discriminators.builder import build_discriminator
from ..modules.init import init_weights
from ..utils.visual import tensor2img, save_image
from ..utils.filesystem import makedirs, save, load
from itertools import accumulate
def xdog(im, g, g2, morph_conv, gamma=.94, phi=50, eps=-.5, morph_cutoff=8.88, morphs=1, minmax=False):
    """Binary XDoG (eXtended Difference-of-Gaussians) sketch of `im`.

    Args:
        im: input image tensor — assumed (N, C, H, W); TODO confirm with callers.
        g, g2: narrow / wide Gaussian blur convolutions (the DoG pair).
        morph_conv: convolution used as a morphological dilation kernel.
        gamma, phi, eps: standard XDoG soft-threshold parameters.
        morph_cutoff: minimum dilated response for a pixel to survive.
        morphs: requested number of morphology passes (see NOTE below).
        minmax: False to compute (min, max, mean) from this image, or a
            previously returned [min, max, mean] list to reuse the same
            normalization on a related image.

    Returns:
        (binary sketch tensor, [min, max, mean]) so the statistics can be fed
        back in via `minmax`.

    Source    : https://github.com/CemalUnal/XDoG-Filter
    Reference : XDoG: An eXtended difference-of-Gaussians compendium including
                advanced image stylization
    """
    # (The original assigned zeros_like tensors to imf1/imf2 and set
    # stop_gradient on them, then immediately rebound both names — dead code,
    # removed.)
    imdiff = g(im) - gamma * g2(im)
    # Soft threshold: 1 below eps, 1 + tanh(phi * d) at or above it.
    imdiff = (imdiff < eps).astype('float32') * 1.0 \
        + (imdiff >= eps).astype('float32') * (1.0 + paddle.tanh(phi * imdiff))
    # Normalize per image using either this image's statistics or the
    # caller-supplied ones.  `lo`/`hi` avoid shadowing the min/max builtins.
    if isinstance(minmax, bool):
        lo = imdiff.min(axis=[2, 3], keepdim=True)
        hi = imdiff.max(axis=[2, 3], keepdim=True)
    else:
        lo = minmax[0]
        hi = minmax[1]
    imdiff -= paddle.expand_as(lo, imdiff)
    imdiff /= paddle.expand_as(hi, imdiff)
    if isinstance(minmax, bool):
        mean = imdiff.mean(axis=[2, 3], keepdim=True)
    else:
        mean = minmax[2]
    exmean = paddle.expand_as(mean, imdiff)
    # NOTE(review): both loops below re-apply morph_conv to the SAME input on
    # every iteration, so morphs > 1 has no cumulative effect.  Preserved
    # as-is to keep behavior identical; fix would be iterative reassignment.
    for _ in range(morphs):
        morphed = morph_conv(imdiff)
        morphed.stop_gradient = True
    # Keep pixels that are above the per-image mean AND whose dilated
    # neighborhood response clears morph_cutoff.
    passedlow = paddle.multiply((imdiff >= exmean).astype('float32'),
                                (morphed >= morph_cutoff).astype('float32'))
    for _ in range(morphs):
        passed = morph_conv(passedlow)
        passed = (passed > 0).astype('float32')
    return passed, [lo, hi, mean]
def gaussian(kernel_size, sigma, channels=3):
    """Build a normalized 2-D Gaussian kernel of shape (1, 1, k, k).

    `channels` is accepted for API compatibility but unused here; callers
    replicate the kernel per channel themselves.
    """
    coords = paddle.arange(kernel_size)
    xs = paddle.expand(coords, (kernel_size, kernel_size))
    ys = xs.t()
    grid = paddle.stack([xs, ys], axis=-1)
    center = (kernel_size - 1) / 2.
    var = sigma ** 2.
    # Product of two independent 1-D Gaussians evaluated over the grid
    # (same separable form as the original implementation).
    kernel = (1. / (2. * math.pi * var)) * paddle.exp(
        -paddle.sum((grid - center) ** 2., axis=-1) / (2 * var)
    )
    # Normalize so the kernel sums to 1 — pure blur, no gain.
    kernel = kernel / paddle.sum(kernel)
    # Reshape to a depthwise-conv weight layout.
    return kernel.reshape((1, 1, kernel_size, kernel_size))
@MODELS.register()
class LapStyleDraModel(BaseModel):
    """LapStyle drafting network: stylizes a content image with a style image.

    Trains only the decoder; the VGG-style encoder is frozen.  The total loss
    combines perceptual content/style losses, identity losses, rEMD/relative
    losses and the decoder's codebook loss.
    """

    def __init__(self,
                 generator_encode,
                 generator_decode,
                 calc_style_emd_loss=None,
                 calc_content_relt_loss=None,
                 calc_content_loss=None,
                 calc_style_loss=None,
                 content_layers=['r11', 'r21', 'r31', 'r41', 'r51'],
                 style_layers=['r11', 'r21', 'r31', 'r41', 'r51'],
                 content_weight=1.0,
                 style_weight=3.0):
        # NOTE(review): content_layers/style_layers use mutable list defaults —
        # safe only because they are never mutated here; a tuple would be safer.
        super(LapStyleDraModel, self).__init__()
        # define generators
        self.nets['net_enc'] = build_generator(generator_encode)
        self.nets['net_dec'] = build_generator(generator_decode)
        init_weights(self.nets['net_dec'])
        # Encoder is a frozen feature extractor; only the decoder trains.
        self.set_requires_grad([self.nets['net_enc']], False)
        # define loss functions
        self.calc_style_emd_loss = build_criterion(calc_style_emd_loss)
        self.calc_content_relt_loss = build_criterion(calc_content_relt_loss)
        self.calc_content_loss = build_criterion(calc_content_loss)
        self.calc_style_loss = build_criterion(calc_style_loss)
        self.content_layers = content_layers
        self.style_layers = style_layers
        self.content_weight = content_weight
        self.style_weight = style_weight

    def setup_input(self, input):
        # `input` (shadows the builtin) is a dict with 'ci' (content image),
        # 'si' (style image) and 'ci_path' entries.
        self.ci = paddle.to_tensor(input['ci'])
        self.visual_items['ci'] = self.ci
        self.si = paddle.to_tensor(input['si'])
        self.visual_items['si'] = self.si
        self.image_paths = input['ci_path']

    def forward(self):
        """Run forward pass; called by both functions <optimize_parameters> and <test>."""
        # cF/sF: per-layer feature dicts from the frozen encoder.
        self.cF = self.nets['net_enc'](self.ci)
        self.sF = self.nets['net_enc'](self.si)
        self.stylized, self.code_loss = self.nets['net_dec'](self.cF, self.sF)
        self.visual_items['stylized'] = self.stylized

    def backward_Dec(self):
        """Compute the decoder's total loss and record each term in self.losses."""
        # Features of the stylized output, compared against content and style.
        self.tF = self.nets['net_enc'](self.stylized)
        """content loss"""
        self.loss_c = 0
        # [:-1]: the deepest content layer is excluded from the content loss.
        for layer in self.content_layers[:-1]:
            self.loss_c += self.calc_content_loss(self.tF[layer],
                                                  self.cF[layer],
                                                  norm=True)
        self.losses['loss_c'] = self.loss_c
        """style loss"""
        self.loss_s = 0
        for layer in self.style_layers:
            self.loss_s += self.calc_style_loss(self.tF[layer], self.sF[layer])
        self.losses['loss_s'] = self.loss_s
        """IDENTITY LOSSES"""
        # Reconstructing content-from-content (Icc) and style-from-style (Iss)
        # should reproduce the inputs, in pixels and in features.
        self.Icc, book_loss = self.nets['net_dec'](self.cF, self.cF)
        self.l_identity1 = self.calc_content_loss(self.Icc, self.ci)
        self.Fcc = self.nets['net_enc'](self.Icc)
        self.l_identity2 = 0
        for layer in self.content_layers:
            self.l_identity2 += self.calc_content_loss(self.Fcc[layer],
                                                       self.cF[layer])
        self.Iss, book_loss_s = self.nets['net_dec'](self.sF, self.sF)
        self.l_identity3 = self.calc_content_loss(self.Iss, self.si)
        self.Fss = self.nets['net_enc'](self.Iss)
        self.l_identity4 = 0
        for layer in self.content_layers:
            self.l_identity4 += self.calc_content_loss(self.Fss[layer],
                                                       self.sF[layer])
        self.losses['l_identity1'] = self.l_identity1
        self.losses['l_identity2'] = self.l_identity2
        self.losses['l_identity3'] = self.l_identity3
        self.losses['l_identity4'] = self.l_identity4
        """relative loss"""
        # rEMD style loss and relative content loss on the r31/r41 layers.
        self.loss_style_remd = self.calc_style_emd_loss(
            self.tF['r31'], self.sF['r31']) + self.calc_style_emd_loss(
                self.tF['r41'], self.sF['r41'])
        self.loss_content_relt = self.calc_content_relt_loss(
            self.tF['r31'], self.cF['r31']) + self.calc_content_relt_loss(
                self.tF['r41'], self.cF['r41'])
        self.losses['loss_style_remd'] = self.loss_style_remd
        self.losses['loss_content_relt'] = self.loss_content_relt
        self.losses['f_codebook_loss'] = self.code_loss
        self.losses['i_cb_loss'] = book_loss
        self.losses['i_sb_loss'] = book_loss_s
        # Weighted sum; the hard-coded 50/1/10/16 factors follow the LapStyle
        # paper's training recipe — TODO confirm against the config.
        self.loss = self.loss_c * self.content_weight + self.loss_s * self.style_weight +\
                    self.l_identity1 * 50 + self.l_identity2 * 1 +\
                    self.l_identity3 * 50 + self.l_identity4 * 1 +\
                    self.loss_style_remd * 10 + \
                    self.loss_content_relt * 16 + self.code_loss
        return self.loss

    def train_iter(self, optimizers=None):
        """Calculate losses, gradients, and update network weights"""
        self.forward()
        loss = self.backward_Dec()
        loss.backward()
        optimizers['optimG'].step()
        optimizers['optimG'].clear_grad()
@MODELS.register()
class LapStyleDraXDOG(BaseModel):
    """LapStyle draft model trained with a transformer generator ('net_vit'),
    a GAN discriminator ('netD') and extra XDoG edge-consistency ("MXDOG")
    losses, using AMP (auto mixed precision) in train_iter.

    Args:
        generator_encode: config for the frozen feature encoder ('net_enc').
        generator_transformer: config for the trainable generator ('net_vit').
        discriminator: config for the GAN discriminator ('netD').
        calc_style_emd_loss, calc_content_relt_loss, calc_content_loss,
        calc_style_loss, mse_loss, gan_criterion: criterion configs built
            via build_criterion.
        content_layers, style_layers: encoder feature keys used by the
            content / style losses.
        content_weight, style_weight: scalar loss weights.
        morph_cutoff, gamma: parameters forwarded to xdog().
    """
    def __init__(self,
                 generator_encode,
                 generator_transformer,
                 discriminator,
                 calc_style_emd_loss=None,
                 calc_content_relt_loss=None,
                 calc_content_loss=None,
                 calc_style_loss=None,
                 mse_loss=None,
                 gan_criterion=None,
                 content_layers=['r11', 'r21', 'r31', 'r41', 'r51'],
                 style_layers=['r11', 'r21', 'r31', 'r41', 'r51'],
                 content_weight=1.0,
                 style_weight=3.0,
                 morph_cutoff=8,
                 gamma=.96):
        super(LapStyleDraXDOG, self).__init__()
        # AMP loss scaler used by train_iter for both D and G updates.
        self.scaler=paddle.amp.GradScaler(init_loss_scaling=1024)
        self.first = True
        # define generators
        self.nets['net_enc'] = build_generator(generator_encode)
        #self.nets['net_dec'] = build_generator(generator_decode)
        self.nets['net_vit'] = build_generator(generator_transformer)
        #init_weights(self.nets['net_dec'])
        self.nets['netD'] = build_discriminator(discriminator)
        init_weights(self.nets['netD'])
        # the encoder is a fixed feature extractor; only net_vit/netD train
        self.set_requires_grad([self.nets['net_enc']], False)
        #self.set_requires_grad([self.nets['net_dec']], False)
        init_weights(self.nets['net_vit'])
        # define loss functions
        self.calc_style_emd_loss = build_criterion(calc_style_emd_loss)
        self.calc_content_relt_loss = build_criterion(calc_content_relt_loss)
        self.calc_content_loss = build_criterion(calc_content_loss)
        self.calc_style_loss = build_criterion(calc_style_loss)
        self.mse_loss = build_criterion(mse_loss)
        self.gan_criterion = build_criterion(gan_criterion)
        self.content_layers = content_layers
        self.style_layers = style_layers
        self.content_weight = content_weight
        self.style_weight = style_weight
        self.morph_cutoff=morph_cutoff
        self.gamma=gamma
        # Fixed depthwise Gaussian blurs (11x11 sigma=1 and 21x21 sigma=3,
        # repeated per RGB channel) used by xdog(); weights are frozen.
        g = np.repeat(gaussian(11, 1).numpy(), 3, axis=0)
        g2 = np.repeat(gaussian(21, 3).numpy(), 3, axis=0)
        self.gaussian_filter = paddle.nn.Conv2D(3, 3, 11,
                                                groups=3, bias_attr=False,
                                                padding=5, padding_mode='reflect',
                                                weight_attr=paddle.ParamAttr(
                                                    initializer=paddle.fluid.initializer.NumpyArrayInitializer(
                                                        value=g), trainable=False)
                                                )
        self.gaussian_filter_2 = paddle.nn.Conv2D(3, 3, 21,
                                                  groups=3, bias_attr=False,
                                                  padding=10, padding_mode='reflect',
                                                  weight_attr=paddle.ParamAttr(
                                                      initializer=paddle.fluid.initializer.NumpyArrayInitializer(
                                                          value=g2), trainable=False)
                                                  )
        # 3x3 all-ones depthwise conv; used by xdog() as a dilation-like
        # morphology step (frozen constant weights).
        self.morph_conv = paddle.nn.Conv2D(3, 3, 3, padding=1, groups=3,
                                           padding_mode='reflect', bias_attr=False,
                                           weight_attr=paddle.ParamAttr(
                                               initializer=paddle.fluid.initializer.Constant(
                                                   value=1), trainable=False)
                                           )
        self.set_requires_grad([self.morph_conv], False)
        self.set_requires_grad([self.gaussian_filter],False)
        self.set_requires_grad([self.gaussian_filter_2],False)
        self.steps=0
        self.mxdog_weight = 1
        self.nets['net_vit'].freeze_weight(True)

    def setup_input(self, input):
        """Store the content ('ci') / style ('si') batch and register visuals."""
        self.ci = paddle.to_tensor(input['ci'])
        self.visual_items['ci'] = self.ci
        self.si = paddle.to_tensor(input['si'])
        self.visual_items['si'] = self.si
        self.image_paths = input['ci_path']

    def forward(self):
        """Run forward pass; called by both functions <optimize_parameters> and <test>."""
        self.cF = self.nets['net_enc'](self.ci)
        self.sF = self.nets['net_enc'](self.si)
        #dual_tensor = paddle.concat(x=[self.ci, self.si], axis=1)
        # net_vit consumes the two encoder feature dicts and returns the
        # stylized image plus an auxiliary "map" loss term.
        self.stylized, self.map_loss = self.nets['net_vit'](self.cF, self.sF)
        self.visual_items['stylized'] = self.stylized
        #self.stylized = self.nets['net_vit'](self.stylized)
        #self.visual_items['stylized_vit'] = self.stylized

    def backward_Dec(self):
        """Compute the full generator objective and return the total loss."""
        # XDoG edge maps of the (detached) inputs and of the stylized output.
        self.cX,_ = xdog(self.ci.detach(),self.gaussian_filter,self.gaussian_filter_2,self.morph_conv,gamma=self.gamma,morph_cutoff=self.morph_cutoff,morphs=1)
        self.sX,_ = xdog(self.si.detach(),self.gaussian_filter,self.gaussian_filter_2,self.morph_conv,gamma=self.gamma,morph_cutoff=self.morph_cutoff,morphs=1)
        self.cXF = self.nets['net_enc'](self.cX)
        self.sXF = self.nets['net_enc'](self.sX)
        stylized_dog,_ = xdog(paddle.clip(self.stylized,min=0,max=1),self.gaussian_filter,self.gaussian_filter_2,self.morph_conv,gamma=self.gamma,morph_cutoff=self.morph_cutoff,morphs=1)
        self.cdogF = self.nets['net_enc'](stylized_dog)
        self.tF = self.nets['net_enc'](self.stylized)
        """content loss"""
        self.loss_c = 0
        for idx, layer in enumerate(self.content_layers):
            self.loss_c += self.calc_content_loss(self.tF[layer],
                                                  self.cF[layer],
                                                  norm=True)
        self.losses['loss_c'] = self.loss_c
        """style loss"""
        self.loss_s = 0
        for layer in self.style_layers:
            self.loss_s += self.calc_style_loss(self.tF[layer], self.sF[layer])
        self.losses['loss_s'] = self.loss_s
        """IDENTITY LOSSES"""
        #dual_ci = paddle.concat(x=[self.ci, self.ci], axis=1)
        # style->style reconstruction (identity terms 1/2)
        self.Iss, book_loss_s = self.nets['net_vit'](self.sF,self.sF)
        self.l_identity1 = self.calc_content_loss(self.Iss, self.si)
        self.Fss = self.nets['net_enc'](self.Iss)
        self.l_identity2 = 0
        for layer in self.content_layers:
            self.l_identity2 += self.calc_content_loss(self.Fss[layer],
                                                       self.sF[layer])
        self.losses['l_identity1'] = self.l_identity1
        self.losses['l_identity2'] = self.l_identity2
        # content->content reconstruction (identity terms 3/4)
        self.Icc, book_loss = self.nets['net_vit'](self.cF,self.cF)
        self.l_identity3 = self.calc_content_loss(self.Icc, self.ci)
        self.Fcc = self.nets['net_enc'](self.Icc)
        self.l_identity4 = 0
        for layer in self.content_layers:
            self.l_identity4 += self.calc_content_loss(self.Fcc[layer],
                                                       self.cF[layer])
        self.losses['l_identity3'] = self.l_identity3
        self.losses['l_identity4'] = self.l_identity4
        """relative loss"""
        self.loss_style_remd = self.calc_style_emd_loss(
            self.tF['r31'], self.sF['r31']) + self.calc_style_emd_loss(
                self.tF['r41'], self.sF['r41'])
        self.loss_content_relt = self.calc_content_relt_loss(
            self.tF['r31'], self.cF['r31']) + self.calc_content_relt_loss(
                self.tF['r41'], self.cF['r41'])
        self.losses['loss_style_remd'] = self.loss_style_remd
        self.losses['loss_content_relt'] = self.loss_content_relt
        # MXDOG terms: stylized features vs content-edge features, edge-map
        # self-consistency, and stylized-edge vs style-edge features.
        mxdog_content = self.calc_content_loss(self.tF['r31'], self.cXF['r31'])+self.calc_content_loss(self.tF['r41'], self.cXF['r41'])
        mxdog_content_contraint = self.calc_content_loss(self.cdogF['r31'], self.cXF['r31'])+self.calc_content_loss(self.cdogF['r41'], self.cXF['r41'])
        mxdog_style = self.mse_loss(self.cdogF['r31'],self.sXF['r31']) + self.mse_loss(self.cdogF['r41'],self.sXF['r41'])
        self.losses['loss_MD'] = mxdog_content*.3
        self.losses['loss_CnsC'] = mxdog_content_contraint*100
        self.losses['loss_CnsS'] = mxdog_style * 1000
        mxdog_losses = mxdog_content * .3 + mxdog_content_contraint *100 + mxdog_style * 1000
        self.losses['map_loss'] = self.map_loss
        pred_fake = self.nets['netD'](self.stylized)
        self.loss_G_GAN = self.gan_criterion(pred_fake, True)
        self.losses['loss_gan_G'] = self.loss_G_GAN
        # NOTE(review): the per-term weights below are hand-tuned constants.
        self.loss = self.loss_G_GAN + self.loss_c * self.content_weight + self.style_weight * (self.loss_s+self.loss_style_remd*3)+\
                    self.l_identity1 * 25 + self.l_identity2 * .5 + \
                    self.l_identity3 * 50 + self.l_identity4 * 1 + \
                    self.loss_content_relt * 16 +\
                    self.map_loss + book_loss + book_loss_s+\
                    mxdog_losses
        return self.loss

    def backward_D(self):
        """Calculate GAN loss for the discriminator"""
        pred_fake = self.nets['netD'](self.stylized.detach())
        self.loss_D_fake = self.gan_criterion(pred_fake, False)
        # real samples are the content images
        pred_real = self.nets['netD'](self.ci)
        self.loss_D_real = self.gan_criterion(pred_real, True)
        self.loss_D = (self.loss_D_fake + self.loss_D_real) * 0.5
        self.losses['D_fake_loss'] = self.loss_D_fake
        self.losses['D_real_loss'] = self.loss_D_real
        return self.loss_D

    def train_iter(self, optimizers=None):
        """Calculate losses, gradients, and update network weights"""
        self.steps+=1
        # --- discriminator update under AMP autocast ---
        with paddle.amp.auto_cast():
            self.forward()
            self.set_requires_grad(self.nets['netD'], True)
            loss = self.backward_D()
        scaled = self.scaler.scale(loss)
        scaled.backward()
        self.scaler.minimize(optimizers['optimD'], scaled)
        optimizers['optimD'].clear_grad()
        self.set_requires_grad(self.nets['netD'], False)
        # --- generator update under AMP autocast ---
        with paddle.amp.auto_cast():
            self.forward()
            loss = self.backward_Dec()
        scaled = self.scaler.scale(loss)
        scaled.backward()
        self.scaler.minimize(optimizers['optimG'], scaled)
        optimizers['optimG'].clear_grad()
        #self.optimizers['optimG'].step()
        # periodic debug dump of positional-embedding parameters
        if self.steps%200==0:
            for param in self.nets['net_vit'].parameters():
                if 'pos_emb' in param.name:
                    print(param)
        '''
        if self.steps<=1000:
            self.style_weight-= .001
            self.mxdog_weight += .0005
        '''
@MODELS.register()
class LapStyleDraVQGAN(BaseModel):
    """LapStyle draft model with a VQGAN-style transformer generator.

    Like LapStyleDraXDOG but the generator is built with the encoder's
    state dict, the forward pass also yields codebook logits/targets, and a
    cross-entropy loss over those logits is added to the objective.
    """
    def __init__(self,
                 generator_encode,
                 generator_transformer,
                 discriminator,
                 calc_style_emd_loss=None,
                 calc_content_relt_loss=None,
                 calc_content_loss=None,
                 calc_style_loss=None,
                 mse_loss=None,
                 gan_criterion=None,
                 content_layers=['r11', 'r21', 'r31', 'r41', 'r51'],
                 style_layers=['r11', 'r21', 'r31', 'r41', 'r51'],
                 content_weight=1.0,
                 style_weight=3.0,
                 morph_cutoff=8,
                 gamma=.96):
        super(LapStyleDraVQGAN, self).__init__()
        # AMP loss scaler used by train_iter for both D and G updates.
        self.scaler=paddle.amp.GradScaler(init_loss_scaling=1024)
        self.first = True
        # define generators
        self.nets['net_enc'] = build_generator(generator_encode)
        #self.nets['net_dec'] = build_generator(generator_decode)
        # the transformer generator receives the encoder weights at build time
        self.nets['net_vit'] = build_generator(generator_transformer, self.nets['net_enc'].state_dict())
        #init_weights(self.nets['net_dec'])
        self.nets['netD'] = build_discriminator(discriminator)
        init_weights(self.nets['netD'])
        self.set_requires_grad([self.nets['net_enc']], False)
        #self.set_requires_grad([self.nets['net_dec']], False)
        init_weights(self.nets['net_vit'])
        # define loss functions
        self.calc_style_emd_loss = build_criterion(calc_style_emd_loss)
        self.calc_content_relt_loss = build_criterion(calc_content_relt_loss)
        self.calc_content_loss = build_criterion(calc_content_loss)
        self.calc_style_loss = build_criterion(calc_style_loss)
        self.mse_loss = build_criterion(mse_loss)
        self.gan_criterion = build_criterion(gan_criterion)
        self.content_layers = content_layers
        self.style_layers = style_layers
        self.content_weight = content_weight
        self.style_weight = style_weight
        self.morph_cutoff=morph_cutoff
        self.gamma=gamma
        # Fixed depthwise Gaussian blurs (11x11 sigma=1 and 21x21 sigma=3,
        # repeated per RGB channel) used by xdog(); weights are frozen.
        g = np.repeat(gaussian(11, 1).numpy(), 3, axis=0)
        g2 = np.repeat(gaussian(21, 3).numpy(), 3, axis=0)
        self.gaussian_filter = paddle.nn.Conv2D(3, 3, 11,
                                                groups=3, bias_attr=False,
                                                padding=5, padding_mode='reflect',
                                                weight_attr=paddle.ParamAttr(
                                                    initializer=paddle.fluid.initializer.NumpyArrayInitializer(
                                                        value=g), trainable=False)
                                                )
        self.gaussian_filter_2 = paddle.nn.Conv2D(3, 3, 21,
                                                  groups=3, bias_attr=False,
                                                  padding=10, padding_mode='reflect',
                                                  weight_attr=paddle.ParamAttr(
                                                      initializer=paddle.fluid.initializer.NumpyArrayInitializer(
                                                          value=g2), trainable=False)
                                                  )
        # 3x3 all-ones depthwise conv; used by xdog() as a dilation-like
        # morphology step (frozen constant weights).
        self.morph_conv = paddle.nn.Conv2D(3, 3, 3, padding=1, groups=3,
                                           padding_mode='reflect', bias_attr=False,
                                           weight_attr=paddle.ParamAttr(
                                               initializer=paddle.fluid.initializer.Constant(
                                                   value=1), trainable=False)
                                           )
        self.set_requires_grad([self.morph_conv], False)
        self.set_requires_grad([self.gaussian_filter],False)
        self.set_requires_grad([self.gaussian_filter_2],False)
        self.steps=0
        # cross-entropy over the generator's codebook logits (see backward_Dec)
        self.cross_entropy = paddle.nn.CrossEntropyLoss()
        self.mxdog_weight = 1

    def setup_input(self, input):
        """Store the content ('ci') / style ('si') batch and register visuals."""
        self.ci = paddle.to_tensor(input['ci'])
        self.visual_items['ci'] = self.ci
        self.si = paddle.to_tensor(input['si'])
        self.visual_items['si'] = self.si
        self.image_paths = input['ci_path']

    def forward(self):
        """Run forward pass; called by both functions <optimize_parameters> and <test>."""
        self.cF = self.nets['net_enc'](self.ci)
        self.sF = self.nets['net_enc'](self.si)
        #dual_tensor = paddle.concat(x=[self.ci, self.si], axis=1)
        # net_vit takes the raw images here and returns the stylized image,
        # an auxiliary map loss, and codebook logits with their targets.
        self.stylized, self.map_loss, self.logits, self.target = self.nets['net_vit'](self.ci, self.si)
        self.visual_items['stylized'] = self.stylized
        #self.stylized = self.nets['net_vit'](self.stylized)
        #self.visual_items['stylized_vit'] = self.stylized

    def backward_Dec(self):
        """Compute the full generator objective and return the total loss."""
        crossentropy_loss = self.cross_entropy(self.logits, self.target)
        # XDoG edge maps of the (detached) inputs and of the stylized output.
        self.cX,_ = xdog(self.ci.detach(),self.gaussian_filter,self.gaussian_filter_2,self.morph_conv,gamma=self.gamma,morph_cutoff=self.morph_cutoff,morphs=1)
        self.sX,_ = xdog(self.si.detach(),self.gaussian_filter,self.gaussian_filter_2,self.morph_conv,gamma=self.gamma,morph_cutoff=self.morph_cutoff,morphs=1)
        self.cXF = self.nets['net_enc'](self.cX)
        self.sXF = self.nets['net_enc'](self.sX)
        stylized_dog,_ = xdog(paddle.clip(self.stylized,min=0,max=1),self.gaussian_filter,self.gaussian_filter_2,self.morph_conv,gamma=self.gamma,morph_cutoff=self.morph_cutoff,morphs=1)
        self.cdogF = self.nets['net_enc'](stylized_dog)
        self.tF = self.nets['net_enc'](self.stylized)
        """content loss"""
        self.loss_c = 0
        for idx, layer in enumerate(self.content_layers):
            self.loss_c += self.calc_content_loss(self.tF[layer],
                                                  self.cF[layer],
                                                  norm=True)
        self.losses['loss_c'] = self.loss_c
        """style loss"""
        self.loss_s = 0
        for layer in self.style_layers:
            self.loss_s += self.calc_style_loss(self.tF[layer], self.sF[layer])
        self.losses['loss_s'] = self.loss_s
        """IDENTITY LOSSES"""
        #dual_ci = paddle.concat(x=[self.ci, self.ci], axis=1)
        # NOTE(review): forward() calls net_vit with images and unpacks 4
        # values, but here it is called with feature dicts and unpacks 2 —
        # verify net_vit really supports both call signatures.
        self.Iss, book_loss_s = self.nets['net_vit'](self.sF,self.sF)
        self.l_identity1 = self.calc_content_loss(self.Iss, self.si)
        self.Fss = self.nets['net_enc'](self.Iss)
        self.l_identity2 = 0
        for layer in self.content_layers:
            self.l_identity2 += self.calc_content_loss(self.Fss[layer],
                                                       self.sF[layer])
        self.losses['l_identity1'] = self.l_identity1
        self.losses['l_identity2'] = self.l_identity2
        self.Icc, book_loss = self.nets['net_vit'](self.cF,self.cF)
        self.l_identity3 = self.calc_content_loss(self.Icc, self.ci)
        self.Fcc = self.nets['net_enc'](self.Icc)
        self.l_identity4 = 0
        for layer in self.content_layers:
            self.l_identity4 += self.calc_content_loss(self.Fcc[layer],
                                                       self.cF[layer])
        self.losses['l_identity3'] = self.l_identity3
        self.losses['l_identity4'] = self.l_identity4
        """relative loss"""
        self.loss_style_remd = self.calc_style_emd_loss(
            self.tF['r31'], self.sF['r31']) + self.calc_style_emd_loss(
                self.tF['r41'], self.sF['r41'])
        self.loss_content_relt = self.calc_content_relt_loss(
            self.tF['r31'], self.cF['r31']) + self.calc_content_relt_loss(
                self.tF['r41'], self.cF['r41'])
        self.losses['loss_style_remd'] = self.loss_style_remd
        self.losses['loss_content_relt'] = self.loss_content_relt
        # MXDOG terms: stylized features vs content-edge features, edge-map
        # self-consistency, and stylized-edge vs style-edge features.
        mxdog_content = self.calc_content_loss(self.tF['r31'], self.cXF['r31'])+self.calc_content_loss(self.tF['r41'], self.cXF['r41'])
        mxdog_content_contraint = self.calc_content_loss(self.cdogF['r31'], self.cXF['r31'])+self.calc_content_loss(self.cdogF['r41'], self.cXF['r41'])
        mxdog_style = self.mse_loss(self.cdogF['r31'],self.sXF['r31']) + self.mse_loss(self.cdogF['r41'],self.sXF['r41'])
        self.losses['loss_MD'] = mxdog_content*.3
        self.losses['loss_CnsC'] = mxdog_content_contraint*100
        self.losses['loss_CnsS'] = mxdog_style * 1000
        mxdog_losses = mxdog_content * .3 + mxdog_content_contraint *100 + mxdog_style * 1000
        self.losses['map_loss'] = self.map_loss
        self.losses['cross_entropy'] = crossentropy_loss
        pred_fake = self.nets['netD'](self.stylized)
        self.loss_G_GAN = self.gan_criterion(pred_fake, True)
        self.losses['loss_gan_G'] = self.loss_G_GAN
        # NOTE(review): the per-term weights below are hand-tuned constants.
        self.loss = self.loss_G_GAN + self.loss_c * self.content_weight + self.style_weight * (self.loss_s+self.loss_style_remd*3)+\
                    self.l_identity1 * 25 + self.l_identity2 * .5 + \
                    self.l_identity3 * 50 + self.l_identity4 * 1 + \
                    self.loss_content_relt * 16 +\
                    self.map_loss + book_loss + book_loss_s+\
                    mxdog_losses + crossentropy_loss
        return self.loss

    def backward_D(self):
        """Calculate GAN loss for the discriminator"""
        pred_fake = self.nets['netD'](self.stylized.detach())
        self.loss_D_fake = self.gan_criterion(pred_fake, False)
        # real samples are the content images
        pred_real = self.nets['netD'](self.ci)
        self.loss_D_real = self.gan_criterion(pred_real, True)
        self.loss_D = (self.loss_D_fake + self.loss_D_real) * 0.5
        self.losses['D_fake_loss'] = self.loss_D_fake
        self.losses['D_real_loss'] = self.loss_D_real
        return self.loss_D

    def train_iter(self, optimizers=None):
        """Calculate losses, gradients, and update network weights"""
        self.steps+=1
        # --- discriminator update under AMP autocast ---
        with paddle.amp.auto_cast():
            self.forward()
            self.set_requires_grad(self.nets['netD'], True)
            loss = self.backward_D()
        scaled = self.scaler.scale(loss)
        scaled.backward()
        self.scaler.minimize(optimizers['optimD'], scaled)
        optimizers['optimD'].clear_grad()
        self.set_requires_grad(self.nets['netD'], False)
        # --- generator update under AMP autocast ---
        with paddle.amp.auto_cast():
            self.forward()
            loss = self.backward_Dec()
        scaled = self.scaler.scale(loss)
        scaled.backward()
        self.scaler.minimize(optimizers['optimG'], scaled)
        optimizers['optimG'].clear_grad()
        #self.optimizers['optimG'].step()
        # periodic debug dump of positional-embedding parameters
        if self.steps%200==0:
            for param in self.nets['net_vit'].parameters():
                if 'pos_emb' in param.name:
                    print(param)
def tensor_resample(tensor, dst_size, mode='bilinear'):
    """Resize an NCHW tensor to ``dst_size`` (H, W) via F.interpolate."""
    resized = F.interpolate(tensor, dst_size, mode=mode, align_corners=False)
    return resized
def laplacian(x):
    """Return the Laplacian residual of ``x``.

    Computes ``x - upsample(downsample(x))`` with bilinear resampling.
    """
    full_h, full_w = x.shape[2], x.shape[3]
    coarse = tensor_resample(x, [full_h // 2, full_w // 2])
    return x - tensor_resample(coarse, [full_h, full_w])
def laplacian_conv(x, kernel):
    """Apply ``kernel`` (a fixed Laplacian conv layer) to ``x``."""
    return kernel(x)
def make_laplace_pyramid(x, levels):
    """Build a Laplacian pyramid of ``x``.

    Returns ``levels`` residual images (fine to coarse) followed by the
    final low-resolution base image.
    """
    pyramid = []
    current = x
    for _ in range(levels):
        pyramid.append(laplacian(current))
        half_h = max(current.shape[2] // 2, 1)
        half_w = max(current.shape[3] // 2, 1)
        current = tensor_resample(current, (half_h, half_w))
    pyramid.append(current)
    return pyramid
def make_laplace_conv_pyramid(x, levels, kernel):
    """Build a pyramid whose residual levels are produced by ``kernel``.

    Like make_laplace_pyramid, but each residual is ``kernel(current)``
    (e.g. a fixed Laplacian convolution) instead of a resample difference.
    """
    pyramid = []
    current = x
    for _ in range(levels):
        pyramid.append(kernel(current))
        half_h = max(current.shape[2] // 2, 1)
        half_w = max(current.shape[3] // 2, 1)
        current = tensor_resample(current, (half_h, half_w))
    pyramid.append(current)
    return pyramid
def fold_laplace_pyramid(pyramid):
    """Collapse a Laplacian pyramid back into a full-resolution image.

    Starts from the coarsest (last) entry and repeatedly upsamples the
    running reconstruction to the next level's size, adding its residual.
    """
    reconstructed = pyramid[-1]
    for level in reversed(pyramid[:-1]):
        target_h, target_w = level.shape[2], level.shape[3]
        reconstructed = level + tensor_resample(reconstructed, (target_h, target_w))
    return reconstructed
def fold_laplace_patch(pyramid, patch=False):
    """Fold a Laplacian pyramid back into an image, optionally adding extras.

    Args:
        pyramid: list ordered fine-to-coarse; the last entry is the
            low-resolution base image.
        patch: ``False`` (default) to skip, otherwise an iterable of
            tensors that are summed onto the reconstructed image.

    Returns:
        The reconstructed image at the resolution of ``pyramid[0]``.
    """
    current = pyramid[-1]
    # iterate from len-2 down to 0: upsample and add each residual level
    for i in range(len(pyramid) - 2, -1, -1):
        up_h, up_w = pyramid[i].shape[2], pyramid[i].shape[3]
        current = pyramid[i] + tensor_resample(current, (up_h, up_w))
    # idiomatic isinstance check (was `type(patch) == bool`); bool cannot
    # be subclassed, so the behavior is identical.
    if not isinstance(patch, bool):
        for extra in patch:
            current = current + extra
    return current
@MODELS.register()
class LapStyleRevFirstModel(BaseModel):
def __init__(self,
revnet_generator,
revnet_discriminator,
draftnet_encode,
draftnet_decode,
calc_style_emd_loss=None,
calc_content_relt_loss=None,
calc_content_loss=None,
calc_style_loss=None,
gan_criterion=None,
content_layers=['r11', 'r21', 'r31', 'r41', 'r51'],
style_layers=['r11', 'r21', 'r31', 'r41', 'r51'],
content_weight=1.0,
style_weight=3.0):
super(LapStyleRevFirstModel, self).__init__()
# define draftnet params
self.nets['net_enc'] = build_generator(draftnet_encode)
self.nets['net_dec'] = build_generator(draftnet_decode)
self.set_requires_grad([self.nets['net_enc']], False)
self.set_requires_grad([self.nets['net_enc']], False)
# define revision-net params
self.nets['net_rev'] = build_generator(revnet_generator)
init_weights(self.nets['net_rev'])
self.nets['netD'] = build_discriminator(revnet_discriminator)
init_weights(self.nets['netD'])
# define loss functions
self.calc_style_emd_loss = build_criterion(calc_style_emd_loss)
self.calc_content_relt_loss = build_criterion(calc_content_relt_loss)
self.calc_content_loss = build_criterion(calc_content_loss)
self.calc_style_loss = build_criterion(calc_style_loss)
self.gan_criterion = build_criterion(gan_criterion)
self.content_layers = content_layers
self.style_layers = style_layers
self.content_weight = content_weight
self.style_weight = style_weight
def setup_input(self, input):
self.ci = paddle.to_tensor(input['ci'])
self.visual_items['ci'] = self.ci
self.si = paddle.to_tensor(input['si'])
self.visual_items['si'] = self.si
self.image_paths = input['ci_path']
self.pyr_ci = make_laplace_pyramid(self.ci, 1)
self.pyr_si = make_laplace_pyramid(self.si, 1)
self.pyr_ci.append(self.ci)
self.pyr_si.append(self.si)
def forward(self):
"""Run forward pass; called by both functions <optimize_parameters> and <test>."""
cF = self.nets['net_enc'](self.pyr_ci[1])
sF = self.nets['net_enc'](self.pyr_si[1])
stylized_small = self.nets['net_dec'](cF, sF)
self.visual_items['stylized_small'] = stylized_small
stylized_up = F.interpolate(stylized_small, scale_factor=2)
revnet_input = paddle.concat(x=[self.pyr_ci[0], stylized_up], axis=1)
stylized_rev_lap = self.nets['net_rev'](revnet_input)
stylized_rev = fold_laplace_pyramid([stylized_rev_lap, stylized_small])
self.stylized = stylized_rev
self.visual_items['stylized'] = self.stylized
def backward_G(self):
self.tF = self.nets['net_enc'](self.stylized)
self.cF = self.nets['net_enc'](self.pyr_ci[2])
self.sF = self.nets['net_enc'](self.pyr_si[2])
"""content loss"""
self.loss_c = 0
for layer in self.content_layers:
self.loss_c += self.calc_content_loss(self.tF[layer],
self.cF[layer],
norm=True)
self.losses['loss_c'] = self.loss_c
"""style loss"""
self.loss_s = 0
for layer in self.style_layers:
self.loss_s += self.calc_style_loss(self.tF[layer], self.sF[layer])
self.losses['loss_s'] = self.loss_s
"""relative loss"""
self.loss_style_remd = self.calc_style_emd_loss(
self.tF['r31'], self.sF['r31']) + self.calc_style_emd_loss(
self.tF['r41'], self.sF['r41'])
self.loss_content_relt = self.calc_content_relt_loss(
self.tF['r31'], self.cF['r31']) + self.calc_content_relt_loss(
self.tF['r41'], self.cF['r41'])
self.losses['loss_style_remd'] = self.loss_style_remd
self.losses['loss_content_relt'] = self.loss_content_relt
"""gan loss"""
pred_fake = self.nets['netD'](self.stylized)
self.loss_G_GAN = self.gan_criterion(pred_fake, True)
self.losses['loss_gan_G'] = self.loss_G_GAN
self.loss = self.loss_G_GAN + self.loss_c * self.content_weight + self.loss_s * self.style_weight +\
self.loss_style_remd * 10 + self.loss_content_relt * 16
self.loss.backward()
return self.loss
def backward_D(self):
"""Calculate GAN loss for the discriminator"""
pred_fake = self.nets['netD'](self.stylized.detach())
self.loss_D_fake = self.gan_criterion(pred_fake, False)
pred_real = self.nets['netD'](self.pyr_si[2])
self.loss_D_real = self.gan_criterion(pred_real, True)
self.loss_D = (self.loss_D_fake + self.loss_D_real) * 0.5
self.loss_D.backward()
self.losses['D_fake_loss'] = self.loss_D_fake
self.losses['D_real_loss'] = self.loss_D_real
def train_iter(self, optimizers=None):
# compute fake images: G(A)
self.forward()
# update D
self.set_requires_grad(self.nets['netD'], True)
optimizers['optimD'].clear_grad()
self.backward_D()
optimizers['optimD'].step()
# update G
self.set_requires_grad(self.nets['netD'], False)
optimizers['optimG'].clear_grad()
self.backward_G()
optimizers['optimG'].step()
@MODELS.register()
class LapStyleRevFirstMXDOG(BaseModel):
def __init__(self,
revnet_generator,
revnet_first_discriminator,
draftnet_encode,
draftnet_decode,
calc_style_emd_loss=None,
calc_content_relt_loss=None,
calc_content_loss=None,
calc_style_loss=None,
gram_errors=None,
gan_criterion=None,
content_layers=['r11', 'r21', 'r31', 'r41', 'r51'],
style_layers=['r11', 'r21', 'r31', 'r41', 'r51'],
content_weight=1.0,
style_weight=3.0):
super(LapStyleRevFirstMXDOG, self).__init__()
# define draftnet params
self.nets['net_enc'] = build_generator(draftnet_encode)
self.nets['net_dec'] = build_generator(draftnet_decode)
self.set_requires_grad([self.nets['net_enc']], False)
self.set_requires_grad([self.nets['net_enc']], False)
# define revision-net params
self.nets['net_rev'] = build_generator(revnet_generator)
init_weights(self.nets['net_rev'])
self.nets['netD_first'] = build_discriminator(revnet_first_discriminator)
init_weights(self.nets['netD_first'])
# define loss functions
self.calc_style_emd_loss = build_criterion(calc_style_emd_loss)
self.calc_content_relt_loss = build_criterion(calc_content_relt_loss)
self.calc_content_loss = build_criterion(calc_content_loss)
self.calc_style_loss = build_criterion(calc_style_loss)
self.gram_errors = build_criterion(gram_errors)
self.gan_criterion = build_criterion(gan_criterion)
self.content_layers = content_layers
self.style_layers = style_layers
self.content_weight = content_weight
self.style_weight = style_weight
g=np.repeat(gaussian(7, 1).numpy(),3,axis=0)
g2=np.repeat(gaussian(19, 3).numpy(),3,axis=0)
self.gaussian_filter = paddle.nn.Conv2D(3, 3,7,
groups=3, bias_attr=False,
padding=3, padding_mode='reflect',
weight_attr=paddle.ParamAttr(
initializer=paddle.fluid.initializer.NumpyArrayInitializer(
value=g), trainable=False)
)
self.gaussian_filter_2 = paddle.nn.Conv2D(3, 3,19,
groups=3, bias_attr=False,
padding=9, padding_mode='reflect',
weight_attr = paddle.ParamAttr(
initializer=paddle.fluid.initializer.NumpyArrayInitializer(value=g2), trainable=False)
)
self.morph_conv = paddle.nn.Conv2D(3,3,3,padding=1,groups=3,
padding_mode='reflect',bias_attr=False,
weight_attr = paddle.ParamAttr(
initializer=paddle.fluid.initializer.Constant(
value=1), trainable=False)
)
l = np.repeat(np.array([[[[-8,-8,-8],[-8,1,-8],[-8,-8,-8]]]]),3,axis=0)
self.lap_filter = paddle.nn.Conv2D(3,3,(3,3),stride=1,bias_attr=False,
padding=1, groups=3,padding_mode='reflect',
weight_attr = paddle.ParamAttr(
initializer=paddle.fluid.initializer.NumpyArrayInitializer(
value=l), trainable=False)
)
def setup_input(self, input):
self.ci = paddle.to_tensor(input['ci'])
self.visual_items['ci'] = self.ci
self.si = paddle.to_tensor(input['si'])
self.visual_items['si'] = self.si
self.image_paths = input['ci_path']
self.pyr_ci = make_laplace_conv_pyramid(self.ci, 1,self.lap_filter)
self.pyr_si = make_laplace_conv_pyramid(self.si, 1,self.lap_filter)
self.pyr_ci.append(self.ci)
self.pyr_si.append(self.si)
def forward(self):
"""Run forward pass; called by both functions <optimize_parameters> and <test>."""
cF = self.nets['net_enc'](self.pyr_ci[1])
sF = self.nets['net_enc'](self.pyr_si[1])
stylized_small = self.nets['net_dec'](cF, sF)
self.visual_items['stylized_small'] = stylized_small
stylized_up = F.interpolate(stylized_small, scale_factor=2)
revnet_input = paddle.concat(x=[self.pyr_ci[0], stylized_up], axis=1)
stylized_rev_lap,_ = self.nets['net_rev'](revnet_input)
stylized_rev = fold_laplace_pyramid([stylized_rev_lap, stylized_small])
self.stylized = stylized_rev
self.visual_items['stylized'] = self.stylized
def backward_G(self):
self.tF = self.nets['net_enc'](self.stylized)
self.cF = self.nets['net_enc'](self.pyr_ci[2])
self.sF = self.nets['net_enc'](self.pyr_si[2])
"""content loss"""
self.loss_c = 0
for layer in self.content_layers:
self.loss_c += self.calc_content_loss(self.tF[layer],
self.cF[layer],
norm=True)
self.losses['loss_c'] = self.loss_c
"""style loss"""
self.loss_s = 0
for layer in self.style_layers:
self.loss_s += self.calc_style_loss(self.tF[layer], self.sF[layer])
self.losses['loss_s'] = self.loss_s
"""relative loss"""
self.loss_style_remd = self.calc_style_emd_loss(
self.tF['r31'], self.sF['r31']) + self.calc_style_emd_loss(
self.tF['r41'], self.sF['r41'])
self.loss_content_relt = self.calc_content_relt_loss(
self.tF['r31'], self.cF['r31']) + self.calc_content_relt_loss(
self.tF['r41'], self.cF['r41'])
self.losses['loss_style_remd'] = self.loss_style_remd
self.losses['loss_content_relt'] = self.loss_content_relt
"""gan loss"""
pred_fake = self.nets['netD_first'](self.stylized)
self.loss_G_GAN = self.gan_criterion(pred_fake, True)
self.losses['loss_gan_G'] = self.loss_G_GAN
self.cX,_ = xdog(self.ci.detach(),self.gaussian_filter,self.gaussian_filter_2,self.morph_conv,morphs=2)
self.sX,_ = xdog(self.si.detach(),self.gaussian_filter,self.gaussian_filter_2,self.morph_conv,morphs=2)
self.cXF = self.nets['net_enc'](self.cX)
self.sXF = self.nets['net_enc'](self.sX)
self.visual_items['cx'] = self.cX
self.visual_items['sx'] = self.sX
stylized_dog,_ = xdog(self.stylized,self.gaussian_filter,self.gaussian_filter_2,self.morph_conv,morphs=2)
self.cdogF = self.nets['net_enc'](stylized_dog)
mxdog_content = self.calc_content_loss(self.tF['r31'], self.cXF['r31'])+self.calc_content_loss(self.tF['r41'], self.cXF['r41'])
mxdog_content_contraint = self.calc_content_loss(self.cdogF['r31'], self.cXF['r31'])+self.calc_content_loss(self.cdogF['r41'], self.cXF['r41'])
mxdog_content_img = self.gram_errors(self.cdogF['r31'],self.sXF['r31'])+self.gram_errors(self.cdogF['r41'],self.sXF['r41'])
self.losses['loss_MD_p'] = mxdog_content*.3
self.losses['loss_CnsC_p'] = mxdog_content_contraint*100
self.losses['loss_CnsS_p'] = mxdog_content_img*1000
mxdogloss=mxdog_content * .3 + mxdog_content_contraint *100 + mxdog_content_img * 1000
self.loss = self.loss_G_GAN*1.5 + self.loss_c * self.content_weight + self.style_weight * (self.loss_s +\
self.loss_style_remd * 3) + self.loss_content_relt * 20 + mxdogloss
self.loss.backward()
return self.loss
def backward_D(self):
"""Calculate GAN loss for the discriminator"""
pred_fake = self.nets['netD_first'](self.stylized.detach())
self.loss_D_fake = self.gan_criterion(pred_fake, False)
pred_real = self.nets['netD_first'](self.pyr_si[2])
self.loss_D_real = self.gan_criterion(pred_real, True)
self.loss_D = (self.loss_D_fake + self.loss_D_real) * 0.5
self.loss_D.backward()
self.losses['D_fake_loss'] = self.loss_D_fake
self.losses['D_real_loss'] = self.loss_D_real
def train_iter(self, optimizers=None):
# compute fake images: G(A)
self.forward()
# update D
self.set_requires_grad(self.nets['netD_first'], True)
optimizers['optimD'].clear_grad()
self.backward_D()
optimizers['optimD'].step()
# update G
self.set_requires_grad(self.nets['netD_first'], False)
optimizers['optimG'].clear_grad()
self.backward_G()
optimizers['optimG'].step()
@MODELS.register()
class LapStyleRevSecondModel(BaseModel):
def __init__(self,
revnet_generator,
revnet_discriminator,
draftnet_encode,
draftnet_decode,
calc_style_emd_loss=None,
calc_content_relt_loss=None,
calc_content_loss=None,
calc_style_loss=None,
gan_criterion=None,
content_layers=['r11', 'r21', 'r31', 'r41', 'r51'],
style_layers=['r11', 'r21', 'r31', 'r41', 'r51'],
content_weight=1.0,
style_weight=3.0):
super(LapStyleRevSecondModel, self).__init__()
# define draftnet params
self.nets['net_enc'] = build_generator(draftnet_encode)
self.nets['net_dec'] = build_generator(draftnet_decode)
self.set_requires_grad([self.nets['net_enc']], False)
self.set_requires_grad([self.nets['net_enc']], False)
# define the first revnet params
self.nets['net_rev'] = build_generator(revnet_generator)
self.set_requires_grad([self.nets['net_rev']], False)
# define the second revnet params
self.nets['net_rev_2'] = build_generator(revnet_generator)
init_weights(self.nets['net_rev_2'])
self.nets['netD'] = build_discriminator(revnet_discriminator)
init_weights(self.nets['netD'])
# define loss functions
self.calc_style_emd_loss = build_criterion(calc_style_emd_loss)
self.calc_content_relt_loss = build_criterion(calc_content_relt_loss)
self.calc_content_loss = build_criterion(calc_content_loss)
self.calc_style_loss = build_criterion(calc_style_loss)
self.gan_criterion = build_criterion(gan_criterion)
self.content_layers = content_layers
self.style_layers = style_layers
self.content_weight = content_weight
self.style_weight = style_weight
def setup_input(self, input):
self.ci = paddle.to_tensor(input['ci'])
self.visual_items['ci'] = self.ci
self.si = paddle.to_tensor(input['si'])
self.visual_items['si'] = self.si
self.image_paths = input['ci_path']
self.pyr_ci = make_laplace_pyramid(self.ci, 2)
self.pyr_si = make_laplace_pyramid(self.si, 2)
self.pyr_ci.append(self.ci)
self.pyr_si.append(self.si)
def forward(self):
"""Run forward pass; called by both functions <optimize_parameters> and <test>."""
cF = self.nets['net_enc'](self.pyr_ci[2])
sF = self.nets['net_enc'](self.pyr_si[2])
stylized_small = self.nets['net_dec'](cF, sF)
self.visual_items['stylized_small'] = stylized_small
stylized_up = F.interpolate(stylized_small, scale_factor=2)
revnet_input = paddle.concat(x=[self.pyr_ci[1], stylized_up], axis=1)
stylized_rev_lap = self.nets['net_rev'](revnet_input)
stylized_rev = fold_laplace_pyramid([stylized_rev_lap, stylized_small])
self.visual_items['stylized_rev_first'] = stylized_rev
stylized_up = F.interpolate(stylized_rev, scale_factor=2)
revnet_input = paddle.concat(x=[self.pyr_ci[0], stylized_up], axis=1)
stylized_rev_lap_second = self.nets['net_rev_2'](revnet_input)
stylized_rev_second = fold_laplace_pyramid(
[stylized_rev_lap_second, stylized_rev_lap, stylized_small])
self.stylized = stylized_rev_second
self.visual_items['stylized'] = self.stylized
def backward_G(self):
self.tF = self.nets['net_enc'](self.stylized)
self.cF = self.nets['net_enc'](self.pyr_ci[3])
self.sF = self.nets['net_enc'](self.pyr_si[3])
"""content loss"""
self.loss_c = 0
for layer in self.content_layers:
self.loss_c += self.calc_content_loss(self.tF[layer],
self.cF[layer],
norm=True)
self.losses['loss_c'] = self.loss_c
"""style loss"""
self.loss_s = 0
for layer in self.style_layers:
self.loss_s += self.calc_style_loss(self.tF[layer], self.sF[layer])
self.losses['loss_s'] = self.loss_s
"""relative loss"""
self.loss_style_remd = self.calc_style_emd_loss(self.tF['r41'],
self.sF['r41'])
self.loss_content_relt = self.calc_content_relt_loss(
self.tF['r41'], self.cF['r41'])
self.losses['loss_style_remd'] = self.loss_style_remd
self.losses['loss_content_relt'] = self.loss_content_relt
"""gan loss"""
pred_fake = self.nets['netD'](self.stylized)
self.loss_G_GAN = self.gan_criterion(pred_fake, True)
self.losses['loss_gan_G'] = self.loss_G_GAN
self.loss = self.loss_G_GAN + self.loss_c * self.content_weight + self.loss_s * self.style_weight +\
self.loss_style_remd * 10 + self.loss_content_relt * 16
self.loss.backward()
return self.loss
def backward_D(self):
"""Calculate GAN loss for the discriminator"""
pred_fake = self.nets['netD'](self.stylized.detach())
self.loss_D_fake = self.gan_criterion(pred_fake, False)
pred_real = self.nets['netD'](self.pyr_si[3])
self.loss_D_real = self.gan_criterion(pred_real, True)
self.loss_D = (self.loss_D_fake + self.loss_D_real) * 0.5
self.loss_D.backward()
self.losses['D_fake_loss'] = self.loss_D_fake
self.losses['D_real_loss'] = self.loss_D_real
    def train_iter(self, optimizers=None):
        """One training iteration: forward, then update D, then update G.

        Args:
            optimizers: dict with keys 'optimD' and 'optimG' (required at
                call time despite the ``None`` default).
        """
        # compute fake images: G(A)
        self.forward()
        # update D
        self.set_requires_grad(self.nets['netD'], True)
        optimizers['optimD'].clear_grad()
        self.backward_D()
        optimizers['optimD'].step()
        # update G (discriminator frozen so G gradients skip it)
        self.set_requires_grad(self.nets['netD'], False)
        optimizers['optimG'].clear_grad()
        self.backward_G()
        optimizers['optimG'].step()
@MODELS.register()
class LapStyleDraThumbModel(BaseModel):
def __init__(self,
generator_encode,
generator_decode,
calc_style_emd_loss=None,
calc_content_relt_loss=None,
calc_content_loss=None,
calc_style_loss=None,
content_layers=['r11', 'r21', 'r31', 'r41', 'r51'],
style_layers=['r11', 'r21', 'r31', 'r41', 'r51'],
content_weight=1.0,
style_weight=3.0):
super(LapStyleDraThumbModel, self).__init__()
# define generators
self.nets['net_enc'] = build_generator(generator_encode)
self.nets['net_dec'] = build_generator(generator_decode)
init_weights(self.nets['net_dec'])
self.set_requires_grad([self.nets['net_enc']], False)
# define loss functions
self.calc_style_emd_loss = build_criterion(calc_style_emd_loss)
self.calc_content_relt_loss = build_criterion(calc_content_relt_loss)
self.calc_content_loss = build_criterion(calc_content_loss)
self.calc_style_loss = build_criterion(calc_style_loss)
self.content_layers = content_layers
self.style_layers = style_layers
self.content_weight = content_weight
self.style_weight = style_weight
def setup_input(self, input):
self.ci = paddle.to_tensor(input['ci'])
self.visual_items['ci'] = self.ci
self.si = paddle.to_tensor(input['si'])
self.cp = paddle.to_tensor(input['cp'])
self.sp = paddle.to_tensor(input['sp'])
self.visual_items['cp'] = self.cp
self.position = input['position']
def forward(self):
"""Run forward pass; called by both functions <optimize_parameters> and <test>."""
self.cF = self.nets['net_enc'](self.ci)
self.sF = self.nets['net_enc'](self.si)
self.cpF = self.nets['net_enc'](self.cp)
self.stylized_thumb,self.stylized_thumb_feat = self.nets['net_dec'](self.cF, self.sF, self.cpF, 'thumb')
#self.stylized_patch,self.stylized_patch_feat = self.nets['net_dec'](self.cF, self.sF, self.cpF, 'patch')
self.visual_items['stylized_thumb'] = self.stylized_thumb
#self.visual_items['stylized_patch'] = self.stylized_patch
self.visual_items['style']=self.si
def backward_Dec(self):
with paddle.no_grad():
g_t_thumb_up = F.interpolate(self.visual_items['stylized_thumb'], scale_factor=2, mode='bilinear', align_corners=False)
g_t_thumb_crop = paddle.slice(g_t_thumb_up,axes=[2,3],starts=[self.position[0],self.position[2]],ends=[self.position[1],self.position[3]])
self.tt_cropF = self.nets['net_enc'](g_t_thumb_crop)
#style_patch = F.interpolate(self.visual_items['si'], scale_factor=2, mode='bilinear', align_corners=False)
#style_patch_crop = paddle.slice(style_patch,axes=[2,3],starts=[self.position[0],self.position[2]],ends=[self.position[1],self.position[3]])
#self.spCrop = self.nets['net_enc'](self.sp)
self.ttF = self.nets['net_enc'](self.stylized_thumb)
self.tpF = self.nets['net_enc'](self.stylized_patch)
"""content loss"""
self.loss_content=0
for layer in self.content_layers:
self.loss_content += self.calc_content_loss(self.ttF[layer],
self.cF[layer],
norm=True)
self.losses['loss_content'] = self.loss_content
self.loss_s = 0
for layer in self.style_layers:
self.loss_s += self.calc_style_loss(self.ttF[layer], self.sF[layer])
self.losses['loss_s'] = self.loss_s
self.Icc,_ = self.nets['net_dec'](self.cF, self.cF, self.cpF,'thumb')
self.l_identity1 = self.calc_content_loss(self.Icc, self.ci)
self.Fcc = self.nets['net_enc'](self.Icc)
self.l_identity2 = 0
for layer in self.content_layers:
self.l_identity2 += self.calc_content_loss(self.Fcc[layer],
self.cF[layer])
self.loss_style_remd = self.calc_style_emd_loss(
self.ttF['r31'], self.sF['r31']) + self.calc_style_emd_loss(
self.ttF['r41'], self.sF['r41'])
self.loss_content_relt = self.calc_content_relt_loss(
self.ttF['r31'], self.cF['r31']) + self.calc_content_relt_loss(
self.ttF['r41'], self.cF['r41'])
self.losses['loss_style_remd'] = self.loss_style_remd
self.losses['loss_content_relt'] = self.loss_content_relt
"""patch loss"""
self.loss_patch = 0
#self.loss_patch= self.calc_content_loss(self.tpF['r41'],self.tt_cropF['r41'])#+\
# self.calc_content_loss(self.tpF['r51'],self.tt_cropF['r51'])
for layer in [self.content_layers[-2]]:
self.loss_patch += self.calc_content_loss(self.tpF[layer],
self.tt_cropF[layer])
self.losses['loss_patch'] = self.loss_patch
self.losses['l_identity1'] = self.l_identity1
self.losses['l_identity2'] = self.l_identity2
self.loss = self.loss_s * self.style_weight +\
self.l_identity1 * 50 + self.l_identity2 * 1 +\
self.loss_content * self.content_weight+\
self.loss_style_remd * 18 +\
self.loss_content_relt * 24 +self.loss_patch * 18 * self.content_weight
self.loss.backward()
return self.loss
def train_iter(self, optimizers=None):
"""Calculate losses, gradients, and update network weights"""
self.forward()
optimizers['optimG'].clear_grad()
self.backward_Dec()
self.optimizers['optimG'].step()
@MODELS.register()
class LapStyleRevFirstThumb(BaseModel):
def __init__(self,
revnet_generator,
revnet_discriminator,
draftnet_encode,
draftnet_decode,
calc_style_emd_loss=None,
calc_content_relt_loss=None,
calc_content_loss=None,
calc_style_loss=None,
gan_criterion=None,
content_layers=['r11', 'r21', 'r31', 'r41', 'r51'],
style_layers=['r11', 'r21', 'r31', 'r41', 'r51'],
content_weight=1.0,
style_weight=3.0,
ada_alpha=1.0,
style_patch_alpha=.5,
use_mxdog=0):
super(LapStyleRevFirstThumb, self).__init__()
# define draftnet params
self.nets['net_enc'] = build_generator(draftnet_encode)
self.nets['net_dec'] = build_generator(draftnet_decode)
self.set_requires_grad([self.nets['net_enc']], False)
self.set_requires_grad([self.nets['net_enc']], False)
# define revision-net params
self.nets['net_rev'] = build_generator(revnet_generator)
init_weights(self.nets['net_rev'])
self.nets['netD'] = build_discriminator(revnet_discriminator)
init_weights(self.nets['netD'])
self.nets['netD_patch'] = build_discriminator(revnet_discriminator)
init_weights(self.nets['netD_patch'])
# define loss functions
self.calc_style_emd_loss = build_criterion(calc_style_emd_loss)
self.calc_content_relt_loss = build_criterion(calc_content_relt_loss)
self.calc_content_loss = build_criterion(calc_content_loss)
self.calc_style_loss = build_criterion(calc_style_loss)
self.gan_criterion = build_criterion(gan_criterion)
self.content_layers = content_layers
self.style_layers = style_layers
self.content_weight = content_weight
self.style_weight = style_weight
self.ada_alpha = ada_alpha
self.style_patch_alpha = style_patch_alpha
self.use_mxdog = use_mxdog
if self.use_mxdog==1:
g=np.repeat(gaussian(7, 1).numpy(),3,axis=0)
g2=np.repeat(gaussian(19, 3).numpy(),3,axis=0)
self.gaussian_filter = paddle.nn.Conv2D(3, 3,7,
groups=3, bias_attr=False,
padding=3, padding_mode='reflect',
weight_attr=paddle.ParamAttr(
initializer=paddle.fluid.initializer.NumpyArrayInitializer(
value=g), trainable=False)
)
self.gaussian_filter_2 = paddle.nn.Conv2D(3, 3,19,
groups=3, bias_attr=False,
padding=9, padding_mode='reflect',
weight_attr = paddle.ParamAttr(
initializer=paddle.fluid.initializer.NumpyArrayInitializer(value=g2), trainable=False)
)
self.morph_conv = paddle.nn.Conv2D(3,3,3,padding=1,groups=3,
padding_mode='reflect',bias_attr=False,
weight_attr = paddle.ParamAttr(
initializer=paddle.fluid.initializer.Constant(
value=1), trainable=False)
)
l = np.repeat(np.array([[[[-8,-8,-8],[-8,1,-8],[-8,-8,-8]]]]),3,axis=0)
self.lap_filter = paddle.nn.Conv2D(3,3,(3,3),stride=1,bias_attr=False,
padding=1, groups=3,padding_mode='reflect',
weight_attr = paddle.ParamAttr(
initializer=paddle.fluid.initializer.NumpyArrayInitializer(
value=l), trainable=False)
)
def setup_input(self, input):
self.position = input['position']
self.ci = paddle.to_tensor(input['ci'])
self.visual_items['ci'] = self.ci
self.si = paddle.to_tensor(input['si'])
self.cp = input['cp']
self.sp = input['sp']
self.visual_items['cp'] = self.cp
self.pyr_ci = make_laplace_conv_pyramid(self.ci, 1,self.lap_filter)
self.pyr_si = make_laplace_conv_pyramid(self.si, 1,self.lap_filter)
self.pyr_cp = make_laplace_conv_pyramid(self.cp, 1,self.lap_filter)
self.pyr_ci.append(self.ci)
self.pyr_si.append(self.si)
self.pyr_cp.append(self.cp)
def forward(self):
"""Run forward pass; called by both functions <optimize_parameters> and <test>."""
cF = self.nets['net_enc'](self.pyr_ci[1])
sF = self.nets['net_enc'](self.pyr_si[1])
transformed = paddle.slice(self.sp, axes=[2, 3], starts=[self.position[0], self.position[2]],
ends=[self.position[1], self.position[3]])
self.spF = self.nets['net_enc'](transformed)
self.cpF = self.nets['net_enc'](self.cp)
stylized_small = self.nets['net_dec'](cF, sF)
self.visual_items['stylized_small'] = stylized_small
stylized_up = F.interpolate(stylized_small, scale_factor=2)
revnet_input = paddle.concat(x=[self.pyr_ci[0], stylized_up], axis=1)
stylized_rev_lap,stylized_feats = self.nets['net_rev'](revnet_input.detach())
#self.ttF_res=self.ttF_res.detach()
stylized_rev = fold_laplace_pyramid([stylized_rev_lap, stylized_small])
stylized_up = F.interpolate(stylized_rev, scale_factor=2)
p_stylized_up = paddle.slice(stylized_up,axes=[2,3],starts=[self.position[0],self.position[2]],ends=[self.position[1],self.position[3]])
p_revnet_input = paddle.concat(x=[self.pyr_cp[0], p_stylized_up], axis=1)
p_stylized_rev_lap,stylized_feats = self.nets['net_rev'](p_revnet_input.detach(),stylized_feats.detach(),self.ada_alpha)
p_stylized_rev = fold_laplace_pyramid([p_stylized_rev_lap, p_stylized_up.detach()])
self.stylized = stylized_rev
self.p_stylized = p_stylized_rev
self.visual_items['stylized_up'] = stylized_up
self.visual_items['stylized'] = self.stylized
self.visual_items['stylized_patch'] = self.p_stylized
def backward_G(self, optimizer):
self.cF = self.nets['net_enc'](self.ci)
self.sF = self.nets['net_enc'](self.si)
with paddle.no_grad():
g_t_thumb_up = F.interpolate(self.visual_items['stylized'], scale_factor=2, mode='bilinear', align_corners=False)
g_t_thumb_crop = paddle.slice(g_t_thumb_up,axes=[2,3],starts=[self.position[0],self.position[2]],ends=[self.position[1],self.position[3]])
self.tt_cropF = self.nets['net_enc'](g_t_thumb_crop)
self.ttF = self.nets['net_enc'](self.stylized)
self.tpF = self.nets['net_enc'](self.p_stylized)
self.loss_content = 0
for layer in self.content_layers:
self.loss_content += self.calc_content_loss(self.ttF[layer],
self.cF[layer],
norm=True)
self.losses['loss_content'] = self.loss_content
"""style loss"""
self.loss_s = 0
for layer in self.style_layers:
self.loss_s += self.calc_style_loss(self.ttF[layer], self.sF[layer])
self.losses['loss_s'] = self.loss_s
"""relative loss"""
self.loss_style_remd = self.calc_style_emd_loss(
self.ttF['r31'], self.sF['r31']) + self.calc_style_emd_loss(
self.ttF['r41'], self.sF['r41'])
self.loss_content_relt = self.calc_content_relt_loss(
self.ttF['r31'], self.cF['r31']) + self.calc_content_relt_loss(
self.ttF['r41'], self.cF['r41'])
self.losses['loss_style_remd'] = self.loss_style_remd
self.losses['loss_content_relt'] = self.loss_content_relt
pred_fake = self.nets['netD'](self.stylized)
self.loss_G_GAN = self.gan_criterion(pred_fake, True)
self.losses['loss_gan_G'] = self.loss_G_GAN
if self.use_mxdog==1:
self.cX = xdog(self.ci.detach(),self.gaussian_filter,self.gaussian_filter_2,self.morph_conv)
self.sX = xdog(self.si.detach(),self.gaussian_filter,self.gaussian_filter_2,self.morph_conv)
self.cXF = self.nets['net_enc'](self.cX)
self.sXF = self.nets['net_enc'](self.sX)
self.visual_items['cx'] = self.cX
self.visual_items['sx'] = self.sX
stylized_dog = xdog(self.stylized,self.gaussian_filter,self.gaussian_filter_2,self.morph_conv)
self.cdogF = self.nets['net_enc'](stylized_dog)
mxdog_content = self.calc_content_loss(self.ttF['r31'], self.cXF['r31'])
mxdog_content_contraint = self.calc_content_loss(self.cdogF['r31'], self.cXF['r31'])
mxdog_content_img = self.calc_style_loss(self.cdogF['r31'],self.sXF['r31'])
self.losses['loss_MD_p'] = mxdog_content*.05
self.losses['loss_CnsC_p'] = mxdog_content_contraint*100
self.losses['loss_CnsS_p'] = mxdog_content_img*500
mxdogloss=mxdog_content * .0125 + mxdog_content_contraint *25 + mxdog_content_img * 125
else:
mxdogloss=0
self.loss = self.loss_G_GAN + self.loss_s * self.style_weight +\
self.loss_content * self.content_weight+\
self.loss_style_remd * 16 +\
self.loss_content_relt * 16 + mxdogloss
self.loss.backward()
optimizer.step()
"""patch loss"""
self.loss_patch = 0
# self.loss_patch= self.calc_content_loss(self.tpF['r41'],self.tt_cropF['r41'])#+\
# self.calc_content_loss(self.tpF['r51'],self.tt_cropF['r51'])
for layer in [self.content_layers[-2]]:
self.loss_patch += self.calc_content_loss(self.tpF[layer],
self.tt_cropF[layer])
self.losses['loss_patch'] = self.loss_patch
self.loss_content_p = 0
for layer in self.content_layers:
self.loss_content_p += self.calc_content_loss(self.tpF[layer],
self.cpF[layer],
norm=True)
self.losses['loss_content_p'] = self.loss_content_p
self.loss_ps = 0
for layer in self.style_layers:
self.loss_ps += self.calc_style_loss(self.tpF[layer],
self.sF[layer])
self.losses['loss_ps'] = self.loss_ps
self.loss_psp = 0
'''
for layer in self.content_layers:
self.loss_psp += self.calc_style_loss(self.tpF[layer],
self.spF[layer])
self.losses['loss_psp'] = self.loss_psp
'''
style_mix_loss = self.loss_psp * self.style_patch_alpha + (1-self.style_patch_alpha)*self.loss_ps
self.p_loss_style_remd = self.calc_style_emd_loss(
self.tpF['r31'], self.spF['r31']) + self.calc_style_emd_loss(
self.tpF['r41'], self.spF['r41'])
self.p_loss_content_relt = self.calc_content_relt_loss(
self.tpF['r31'], self.cpF['r31']) + self.calc_content_relt_loss(
self.tpF['r41'], self.cpF['r41'])
self.losses['p_loss_style_remd'] = self.p_loss_style_remd
self.losses['p_loss_content_relt'] = self.p_loss_content_relt
"""gan loss"""
pred_fake_p = self.nets['netD_patch'](self.p_stylized)
self.loss_Gp_GAN = self.gan_criterion(pred_fake_p, True)
self.losses['loss_gan_Gp'] = self.loss_Gp_GAN
if self.use_mxdog==1:
self.cX = xdog(self.cp.detach(),self.gaussian_filter,self.gaussian_filter_2,self.morph_conv)
self.cXF = self.nets['net_enc'](self.cX)
self.visual_items['cx'] = self.cX
stylized_dog = xdog(self.p_stylized,self.gaussian_filter,self.gaussian_filter_2,self.morph_conv)
self.cdogF = self.nets['net_enc'](stylized_dog)
mxdog_content = self.calc_content_loss(self.tpF['r31'], self.cXF['r31'])
mxdog_content_contraint = self.calc_content_loss(self.cdogF['r31'], self.cXF['r31'])
mxdog_content_img = self.calc_style_loss(self.cdogF['r31'],self.sXF['r31'])
self.losses['loss_MD'] = mxdog_content*.05
self.losses['loss_CnsC'] = mxdog_content_contraint*100
self.losses['loss_CnsS'] = mxdog_content_img*500
mxdogloss=mxdog_content * .0125 + mxdog_content_contraint *25 + mxdog_content_img * 125
else:
mxdogloss=0
self.loss = self.loss_Gp_GAN * 2 +style_mix_loss * self.style_weight +\
self.loss_content_p * self.content_weight +\
self.loss_content_p * self.content_weight +\
self.loss_patch * self.content_weight * 20 +\
self.p_loss_style_remd * 26 + self.p_loss_content_relt * 26 + mxdogloss
self.loss.backward()
return self.loss
def backward_D(self):
"""Calculate GAN loss for the discriminator"""
pred_fake = self.nets['netD'](self.stylized.detach())
self.loss_D_fake = self.gan_criterion(pred_fake, False)
pred_real = self.nets['netD'](self.pyr_si[2])
self.loss_D_real = self.gan_criterion(pred_real, True)
self.loss_D = (self.loss_D_fake + self.loss_D_real) * 0.5
self.loss_D.backward()
self.losses['D_fake_loss'] = self.loss_D_fake
self.losses['D_real_loss'] = self.loss_D_real
def backward_Dpatch(self):
"""Calculate GAN loss for the patch discriminator"""
pred_p_fake = self.nets['netD_patch'](self.p_stylized.detach())
self.loss_Dp_fake = self.gan_criterion(pred_p_fake, False)
pred_Dp_real = 0
reshaped = paddle.slice(self.sp, axes=[2, 3], starts=[self.position[0],self.position[2]],ends=[self.position[1],self.position[3]])
self.loss_Dp_real = self.nets['netD_patch'](reshaped)
pred_Dp_real += self.gan_criterion(self.loss_Dp_real, True)
self.loss_D_patch = (self.loss_Dp_fake + pred_Dp_real) * 0.5
self.loss_D_patch.backward()
self.losses['Dp_fake_loss'] = self.loss_Dp_fake
self.losses['Dp_real_loss'] = pred_Dp_real
def train_iter(self, optimizers=None):
# compute fake images: G(A)
self.forward()
# update D
self.set_requires_grad(self.nets['netD'], True)
optimizers['optimD'].clear_grad()
self.backward_D()
optimizers['optimD'].step()
self.set_requires_grad(self.nets['netD_patch'], True)
optimizers['optimD_patch'].clear_grad()
self.backward_Dpatch()
optimizers['optimD_patch'].step()
# update G
self.set_requires_grad(self.nets['netD'], False)
self.set_requires_grad(self.nets['netD_patch'], False)
optimizers['optimG'].clear_grad()
self.backward_G(optimizers['optimG'])
optimizers['optimG'].step()
@MODELS.register()
class LapStyleRevSecondThumb(BaseModel):
def __init__(self,
revnet_generator,
revnet_discriminator,
draftnet_encode,
draftnet_decode,
calc_style_emd_loss=None,
calc_content_relt_loss=None,
calc_content_loss=None,
calc_style_loss=None,
gan_criterion=None,
content_layers=['r11', 'r21', 'r31', 'r41', 'r51'],
style_layers=['r11', 'r21', 'r31', 'r41', 'r51'],
content_weight=1.0,
style_weight=3.0):
super(LapStyleRevSecondThumb, self).__init__()
# define draftnet params
self.nets['net_enc'] = build_generator(draftnet_encode)
self.nets['net_dec'] = build_generator(draftnet_decode)
self.set_requires_grad([self.nets['net_enc']], False)
self.set_requires_grad([self.nets['net_enc']], False)
# define the first revnet params
self.nets['net_rev'] = build_generator(revnet_generator)
self.set_requires_grad([self.nets['net_rev']], False)
# define the second revnet params
self.nets['net_rev_2'] = build_generator(revnet_generator)
init_weights(self.nets['net_rev_2'])
self.nets['netD'] = build_discriminator(revnet_discriminator)
init_weights(self.nets['netD'])
# define loss functions
self.calc_style_emd_loss = build_criterion(calc_style_emd_loss)
self.calc_content_relt_loss = build_criterion(calc_content_relt_loss)
self.calc_content_loss = build_criterion(calc_content_loss)
self.calc_style_loss = build_criterion(calc_style_loss)
self.gan_criterion = build_criterion(gan_criterion)
self.content_layers = content_layers
self.style_layers = style_layers
self.content_weight = content_weight
self.style_weight = style_weight
def setup_input(self, input):
self.ci = paddle.to_tensor(input['ci'])
self.visual_items['ci'] = self.ci
self.si = paddle.to_tensor(input['si'])
self.visual_items['si'] = self.si
self.sp = paddle.to_tensor(input['sp'])
self.image_paths = input['ci_path']
self.cp = paddle.to_tensor(input['cp'])
self.visual_items['cp'] = self.cp
self.position = input['position']
self.pyr_ci = make_laplace_pyramid(self.ci, 2)
self.pyr_si = make_laplace_pyramid(self.si, 2)
self.pyr_cp = make_laplace_pyramid(self.cp, 2)
self.pyr_ci.append(self.ci)
self.pyr_si.append(self.si)
self.pyr_cp.append(self.cp)
def forward(self):
"""Run forward pass; called by both functions <optimize_parameters> and <test>."""
cF = self.nets['net_enc'](self.pyr_ci[2])
sF = self.nets['net_enc'](self.pyr_si[2])
cpF = self.nets['net_enc'](self.pyr_cp[2])
self.spCrop = self.nets['net_enc'](self.sp)
stylized_small, _ = self.nets['net_dec'](cF, sF, cpF, 'thumb')
self.visual_items['stylized_small'] = stylized_small
stylized_up = F.interpolate(stylized_small, scale_factor=2)
revnet_input = paddle.concat(x=[self.pyr_ci[1], stylized_up], axis=1)
#rev_net thumb only calcs as patch if second parameter is passed
stylized_rev_lap, self.stylized_thumb_feat = self.nets['net_rev'](revnet_input)
stylized_rev = fold_laplace_pyramid([stylized_rev_lap, stylized_small])
self.visual_items['stylized_rev_first'] = stylized_rev
stylized_up = F.interpolate(stylized_rev, scale_factor=2)
revnet_input = paddle.concat(x=[self.pyr_ci[0], stylized_up], axis=1)
stylized_rev_lap_second, self.stylized_thumb_large = self.nets['net_rev_2'](revnet_input)
stylized_rev_second = fold_laplace_pyramid(
[stylized_rev_lap_second, stylized_rev_lap, stylized_small])
self.stylized = stylized_rev_second
self.visual_items['stylized'] = self.stylized
stylized_small, _ = self.nets['net_dec'](cF, sF, cpF, 'patch')
self.visual_items['p_stylized_small'] = stylized_small
stylized_up = F.interpolate(stylized_small, scale_factor=2)
revnet_input = paddle.concat(x=[self.pyr_cp[1], stylized_up], axis=1)
stylized_rev_lap, _ = self.nets['net_rev'](revnet_input, self.stylized_thumb_feat)
stylized_rev = fold_laplace_pyramid([stylized_rev_lap, stylized_small])
self.visual_items['p_stylized_rev_first'] = stylized_rev
stylized_up = F.interpolate(stylized_rev, scale_factor=2)
revnet_input = paddle.concat(x=[self.pyr_cp[0], stylized_up], axis=1)
stylized_rev_lap_second,_ = self.nets['net_rev_2'](revnet_input,self.stylized_thumb_large)
stylized_rev_second = fold_laplace_pyramid(
[stylized_rev_lap_second, stylized_rev_lap, stylized_small])
self.p_stylized = stylized_rev_second
self.visual_items['p_stylized'] = self.p_stylized
def backward_G(self):
cF = self.nets['net_enc'](self.ci)
sF = self.nets['net_enc'](self.si)
ttF = self.nets['net_enc'](self.stylized)
loss_content = 0
for layer in self.content_layers:
loss_content += self.calc_content_loss(ttF[layer],
cF[layer],
norm=True)
self.losses['loss_content'] = loss_content
"""style loss"""
loss_s = 0
for layer in self.style_layers:
loss_s += self.calc_style_loss(ttF[layer], sF[layer])
self.losses['loss_s'] = loss_s
"""relative loss"""
loss_style_remd = self.calc_style_emd_loss(
ttF['r31'], sF['r31']) + self.calc_style_emd_loss(
ttF['r41'], sF['r41'])
loss_content_relt = self.calc_content_relt_loss(
ttF['r31'], cF['r31']) + self.calc_content_relt_loss(
ttF['r41'], cF['r41'])
self.losses['loss_style_remd'] = loss_style_remd
self.losses['loss_content_relt'] = loss_content_relt
pred_fake = self.nets['netD'](self.stylized)
loss_G_GAN = self.gan_criterion(pred_fake, True)
self.losses['loss_gan_G'] = loss_G_GAN
loss = loss_G_GAN + loss_s * self.style_weight + \
loss_content * self.content_weight + \
loss_style_remd * 20 + \
loss_content_relt * 24
loss.backward()
return loss
def backward_G_p(self):
spCrop = self.nets['net_enc'](self.sp)
tpF = self.nets['net_enc'](self.p_stylized)
cpF = self.nets['net_enc'](self.cp)
with paddle.no_grad():
g_t_thumb_up = F.interpolate(self.visual_items['stylized'], scale_factor=2, mode='bilinear',
align_corners=False)
g_t_thumb_crop = paddle.slice(g_t_thumb_up, axes=[2, 3], starts=[self.position[0], self.position[2]],
ends=[self.position[1], self.position[3]])
tt_cropF = self.nets['net_enc'](g_t_thumb_crop)
"""patch loss"""
loss_patch = 0
# self.loss_patch= self.calc_content_loss(self.tpF['r41'],self.tt_cropF['r41'])#+\
# self.calc_content_loss(self.tpF['r51'],self.tt_cropF['r51'])
for layer in [self.content_layers[3]]:
loss_patch += self.calc_content_loss(tpF[layer],
tt_cropF[layer])
self.losses['loss_patch'] = loss_patch
loss_content_p = 0
for layer in self.content_layers:
loss_content_p += self.calc_content_loss(tpF[layer],
cpF[layer],
norm=True)
self.losses['loss_content_p'] = loss_content_p
loss_ps = 0
for layer in self.style_layers:
loss_ps += self.calc_style_loss(tpF[layer], spCrop[layer])
self.losses['loss_ps'] = loss_ps
p_loss_style_remd = self.calc_style_emd_loss(
tpF['r31'], tt_cropF['r31']) + self.calc_style_emd_loss(
tpF['r41'], tt_cropF['r41'])
p_loss_content_relt = self.calc_content_relt_loss(
tpF['r31'], cpF['r31']) + self.calc_content_relt_loss(
tpF['r41'], cpF['r41'])
self.losses['p_loss_style_remd'] = p_loss_style_remd
self.losses['p_loss_content_relt'] = p_loss_content_relt
patch_loss = loss_ps * self.style_weight + \
loss_content_p * self.content_weight + \
loss_patch * self.content_weight + \
p_loss_style_remd * 20 + p_loss_content_relt * 24
patch_loss.backward()
return patch_loss
def backward_D(self):
"""Calculate GAN loss for the discriminator"""
pred_fake = self.nets['netD'](self.stylized.detach())
loss_D_fake = self.gan_criterion(pred_fake, False)
pred_p_fake = self.nets['netD'](self.p_stylized.detach())
loss_Dp_fake = self.gan_criterion(pred_p_fake, False)
pred_real = self.nets['netD'](self.pyr_si[3])
loss_D_real = self.gan_criterion(pred_real, True)
pred_p_real = self.nets['netD'](self.sp)
loss_Dp_real = self.gan_criterion(pred_p_real, True)
self.loss_D = (loss_D_fake + loss_Dp_fake + loss_Dp_real + loss_D_real) * 0.5
self.loss_D.backward()
self.losses['D_fake_loss'] = loss_D_fake
self.losses['D_real_loss'] = loss_D_real
def train_iter(self, optimizers=None):
# compute fake images: G(A)
self.forward()
# update D
self.set_requires_grad(self.nets['netD'], True)
optimizers['optimD'].clear_grad()
self.backward_D()
optimizers['optimD'].step()
# update G
self.set_requires_grad(self.nets['netD'], False)
optimizers['optimG'].clear_grad()
self.backward_G()
optimizers['optimG'].step()
self.set_requires_grad(self.nets['netD'], False)
optimizers['optimG'].clear_grad()
self.backward_G_p()
optimizers['optimG'].step()
def crop_upsized(stylized_up,positions,orig_size):
ratio = orig_size/512
stylized_up=paddle.slice(stylized_up,axes=[2,3],starts=[(positions[1]/ratio).astype('int32'),(positions[0]/ratio).astype('int32')],\
ends=[(positions[3]/ratio).astype('int32'),(positions[2]/ratio).astype('int32')])
return stylized_up
def adjust(inp, size):
    """Return the tiling stride for an extent of ``inp`` pixels.

    Equivalent to ``floor(inp / (ceil(inp / size) + 2))``: the extent is
    divided into two more segments than a naive ``size``-wide tiling,
    so consecutive tiles overlap.
    """
    segments = math.ceil(inp / size) + 1
    return math.floor(inp / (segments + 1))
def positions_list(axis, length):
    """Yield nested crop positions sweeping ``length`` along ``axis``.

    Each yielded value is a list of five entries: the outer 256-px window,
    a quadruple for each of three nested 64-px-stepped offsets (draft and
    two revision levels), and the accumulated absolute [x, y] (or [y, x])
    position of the innermost crop.

    Args:
        axis: 'x' to sweep horizontally, anything else sweeps vertically.
        length: total extent in pixels along the swept axis.
    """
    def coords(axis, start):
        # Outer 256-px window anchored at ``start`` along the chosen axis.
        if axis == 'x':
            return [0, start, start + 256, 256]
        else:
            return [start, 0, 256, start + 256]
    # (the original had a no-op ``length = length`` here; removed)
    move = adjust(length, 256)
    for i in range(0, length - move, move):
        for j in range(0, 256 - 64, 64):
            for k in range(0, 256 - 64, 64):
                #rev 1
                for l in range(0, 192, 64):
                    for m in range(0, 192, 64):
                        #rev 2
                        for n in range(0, 192, 64):
                            for o in range(0, 192, 64):
                                curr_ = [n + m + k + i, o + l + j] if axis == 'x' else [n + m + k, i + o + l + j]
                                lol = [coords(axis, i),
                                       [k, j, j + 128, k + 128],
                                       [m, l, l + 128, m + 128],
                                       [n, o, o + 128, n + 128],
                                       curr_]
                                yield lol
@MODELS.register()
class LapStyleRevSecondPatch(BaseModel):
def __init__(self,
revnet_generator,
revnet_discriminator,
draftnet_encode,
draftnet_decode,
revnet_deep_generator,
calc_style_emd_loss=None,
calc_content_relt_loss=None,
calc_content_loss=None,
calc_style_loss=None,
gan_criterion=None,
content_layers=['r11', 'r21', 'r31', 'r41', 'r51'],
style_layers=['r11', 'r21', 'r31', 'r41', 'r51'],
content_weight=1.0,
style_weight=3.0,
ada_alpha=1.0,
ada_alpha_2=1.0,
gan_thumb_weight=1.0,
gan_patch_weight=1.0):
super(LapStyleRevSecondPatch, self).__init__()
self.scaler = paddle.amp.GradScaler(init_loss_scaling=1024)
# define draftnet params
self.nets['net_enc'] = build_generator(draftnet_encode)
self.nets['net_dec'] = build_generator(draftnet_decode)
self.set_requires_grad([self.nets['net_enc']], False)
self.set_requires_grad([self.nets['net_enc']], False)
# define the first revnet params
self.nets['net_rev'] = build_generator(revnet_generator)
self.set_requires_grad([self.nets['net_rev']], False)
# define the second revnet params
self.nets['net_rev_2'] = build_generator(revnet_deep_generator)
init_weights(self.nets['net_rev_2'])
self.nets['net_rev_3'] = build_generator(revnet_deep_generator)
init_weights(self.nets['net_rev_3'])
self.nets['net_rev_4'] = build_generator(revnet_deep_generator)
init_weights(self.nets['net_rev_4'])
if self.is_train:
self.nets['netD'] = build_discriminator(revnet_discriminator)
init_weights(self.nets['netD'])
self.nets['netD_patch'] = build_discriminator(revnet_discriminator)
init_weights(self.nets['netD_patch'])
# define loss functions
self.calc_style_emd_loss = build_criterion(calc_style_emd_loss)
self.calc_content_relt_loss = build_criterion(calc_content_relt_loss)
self.calc_content_loss = build_criterion(calc_content_loss)
self.calc_style_loss = build_criterion(calc_style_loss)
self.gan_criterion = build_criterion(gan_criterion)
self.content_layers = content_layers
self.style_layers = style_layers
self.content_weight = content_weight
self.style_weight = style_weight
self.ada_alpha = ada_alpha
self.ada_alpha_2 = ada_alpha_2
self.gan_thumb_weight = gan_thumb_weight
self.gan_patch_weight = gan_patch_weight
l = np.repeat(np.array([[[[-8, -8, -8], [-8, 1, -8], [-8, -8, -8]]]]), 3, axis=0)
self.lap_kernel = paddle.nn.Conv2D(3, 3, (3, 3), stride=1, bias_attr=False,
padding=1, groups=3, padding_mode='reflect',
weight_attr=paddle.ParamAttr(
initializer=paddle.fluid.initializer.NumpyArrayInitializer(
value=l), trainable=False)
)
    def test_iter(self, output_dir=None,metrics=None):
        """Full-image inference by overlapping tiles.

        Stylizes a coarse thumbnail, then sweeps overlapping windows over the
        2x-upsampled result (via self.test_forward, defined elsewhere), and
        finally blends the collected tiles into one output image saved under
        ``output_dir/visual_test``.

        NOTE(review): relies on attributes set elsewhere (self.content,
        self.content_stack, self.style_stack, self.labels, self.out_images,
        self.path) — presumably populated by setup_input/test_forward; confirm.
        """
        self.eval()
        self.output_dir=output_dir
        # Laplacian bands of the content at 1/4, 1/2 and full resolution.
        self.laplacians=[laplacian_conv(self.content_stack[0],self.lap_kernel)]
        self.out_images=[]
        print('content_size='+str(self.content.shape))
        for i in [.25,.5,1]:
            if i==1:
                self.laplacians.append(laplacian_conv(self.content,self.lap_kernel))
            else:
                self.laplacians.append(laplacian_conv(F.interpolate(self.content, scale_factor=i),self.lap_kernel))
        with paddle.no_grad():
            # Draft at half the thumbnail resolution, then one frozen revision.
            cF = self.nets['net_enc'](F.interpolate(self.content_stack[0],scale_factor=.5))
            sF = self.nets['net_enc'](F.interpolate(self.style_stack[0], scale_factor=.5))
            stylized_small= self.nets['net_dec'](cF, sF)
            self.stylized_up = F.interpolate(stylized_small, scale_factor=2)
            revnet_input = paddle.concat(x=[self.laplacians[0], self.stylized_up], axis=1)
            # rev_net thumb only calcs as patch if second parameter is passed
            stylized_rev_lap, self.stylized_feats = self.nets['net_rev'](revnet_input)
            stylized_rev = fold_laplace_pyramid([stylized_rev_lap, stylized_small])
            self.stylized_slice = F.interpolate(stylized_rev, scale_factor=2)
            print('stylized_slice.shape = '+str(self.stylized_slice.shape))
            # Tile grid: half-size windows with extra offset rows/columns at
            # +/- one third of a window, so tiles overlap for blending.
            size_x = self.stylized_slice.shape[-2]
            self.in_size_x = math.floor(size_x / 2)
            move_x = adjust(size_x, self.in_size_x)
            ranges_x=list(range(0,size_x,self.in_size_x))
            size_y = self.stylized_slice.shape[-1]
            self.in_size_y = math.floor(size_y / 2)
            move_y = adjust(size_y, self.in_size_y)
            ranges_y = list(range(0,size_y,self.in_size_y))
            orig_len_y = len(ranges_y)
            orig_len_x = len(ranges_x)
            curr_last_x=ranges_x[-1]
            curr_last_y=ranges_y[-1]
            ranges_x = ranges_x + [i+math.floor(self.in_size_x/3) for i in ranges_x[:-1]]
            ranges_y = ranges_y + [i+math.floor(self.in_size_y/3) for i in ranges_y[:-1]]
            ranges_x.append(curr_last_x-math.floor(self.in_size_x/3))
            ranges_y.append(curr_last_y-math.floor(self.in_size_y/3))
            self.counter=1
            # Shared deep-revnet features for all tiles.
            self.stylized_feats = self.nets['net_rev_2'].DownBlock(revnet_input.detach())
            self.stylized_feats = self.nets['net_rev_2'].resblock(self.stylized_feats)
            for idx,i in enumerate(ranges_x):
                self.counter+=1
                for idx2,j in enumerate(ranges_y):
                    # 'b' marks tiles from the extra (offset) grid, 'a' the base grid.
                    self.second_set = 'b' if idx+1>orig_len_x or idx2+1>orig_len_y else 'a'
                    self.outer_loop=(i,j)
                    self.positions=[[i,j,i+self.in_size_x,j+self.in_size_y]]#!
                    self.test_forward(self.stylized_slice,self.stylized_feats)
        # Labels encode "x_y_set"; recover tile origins and grid letter.
        positions = [(int(re.split('_|\.',i)[0]),int(re.split('_|\.',i)[1])) for i in self.labels]
        set_letter = [re.split('_|\.',i)[2] for i in self.labels]
        max_x = 0
        max_y = 0
        for a,b in positions:
            if a>max_x:
                max_x=a
            if b>max_y:
                max_y=b
        max_x = max_x+self.in_size_x
        max_y = max_y+self.in_size_y
        print('max_x = '+str(max_x))
        print('max_y = ' + str(max_y))
        # Accumulators: output canvas, per-pixel blend weights, and a
        # visited mask (NaN = untouched) so first-writers take full weight.
        tiles_1 = np.zeros((max_x,max_y,3), dtype=np.uint8)
        weights = np.zeros((max_x, max_y), dtype=np.uint8)
        not_visited = np.empty((max_x,max_y))
        not_visited[:,:]=np.nan
        # Blend kernel: flat center with 32-px linear ramps at the borders.
        kernel = np.ones((self.in_size_x-64,self.in_size_y-64))
        kernel = np.pad(kernel,(32,32),'linear_ramp', end_values=(0, 0))
        #tiles_2 = np.zeros((max_x, max_y,3), dtype=np.uint8)
        for image,b,c in zip(self.out_images,positions,set_letter):
            # Weighted per-channel blend of this tile into the canvas.
            # NOTE(review): tiles_1/weights are uint8, so the float blend is
            # truncated on store and weights may wrap — confirm intended.
            empty = np.isnan(not_visited[b[0]:b[0]+image.shape[0],b[1]:b[1]+image.shape[1]])
            k = kernel.copy()
            k = np.maximum(k,empty)
            w = weights[b[0]:b[0]+image.shape[0],b[1]:b[1]+image.shape[1]]+k
            tiles_1[b[0]:b[0]+image.shape[0],b[1]:b[1]+image.shape[1],0] = (image[:,:,0]*k +\
                (tiles_1[b[0]:b[0]+image.shape[0],b[1]:b[1]+image.shape[1],0]*(w-k)))/w
            tiles_1[b[0]:b[0] + image.shape[0], b[1]:b[1] + image.shape[1], 1] = (image[:,:,1]*k +\
                (tiles_1[b[0]:b[0]+image.shape[0],b[1]:b[1]+image.shape[1],1]*(w-k)))/w
            tiles_1[b[0]:b[0] + image.shape[0], b[1]:b[1] + image.shape[1], 2] = (image[:,:,2]*k +\
                (tiles_1[b[0]:b[0]+image.shape[0],b[1]:b[1]+image.shape[1],2]*(w-k)))/w
            not_visited[b[0]:b[0]+image.shape[0],b[1]:b[1]+image.shape[1]]=1
            weights[b[0]:b[0] + image.shape[0], b[1]:b[1] + image.shape[1]] = weights[b[0]:b[0] + image.shape[0], b[1]:b[1] + image.shape[1]]+w
        for a,b in zip([tiles_1],['tiled1']):
            im = Image.fromarray(a,'RGB')
            label = self.path[0]+' '+b
            makedirs(os.path.join(self.output_dir, 'visual_test'))
            img_path = os.path.join(self.output_dir, 'visual_test',
                                    '%s.png' % (label))
            im.save(img_path)
        # Remove intermediate per-tile dumps and restore training mode.
        shutil.rmtree(os.path.join(self.output_dir, 'visual_test','tiles'))
        self.paths=[]
        self.train()
def setup_input(self, input):
if self.is_train:
self.content_stack = []
self.style_stack = [paddle.to_tensor(input['style_stack_1']),paddle.to_tensor(input['style_stack_2']),paddle.to_tensor(input['style_stack_3'])]
self.laplacians=[]
for i in range(1,6):
if 'content_stack_'+str(i) in input:
self.content_stack.append(paddle.to_tensor(input['content_stack_'+str(i)]))
self.visual_items['ci'] = self.content_stack[0]
self.positions = input['position_stack']
self.size_stack = input['size_stack']
self.laplacians.append(laplacian(self.content_stack[0]).detach())
self.laplacians.append(laplacian(self.content_stack[1]).detach())
self.laplacians.append(laplacian(self.content_stack[2]).detach())
self.laplacians.append(laplacian(self.content_stack[3]).detach())
else:
self.labels=[]
self.content_stack=[input['ci']]
self.content=input['content']
self.style_stack = [input['si']]
self.path=input['ci_path']
    def test_forward(self,stylized_slice,stylized_feats):
        """Stylize one window of the slice and recursively tile it deeper.

        Crops the window given by ``self.positions[0]`` out of
        ``stylized_slice``, refines it with ``net_rev_2``, upsamples, then
        walks a grid of sub-tiles through ``net_rev_3`` and ``net_rev_4``.
        Final patches are accumulated in ``self.out_images`` with matching
        ``self.labels`` (``x_y_counter`` strings) for later blending by the
        caller.  Relies on ``self.outer_loop``, ``self.second_set`` and
        ``self.counter`` being set by the calling loop.

        Args:
            stylized_slice: stylized image at the current pyramid level
                (assumed NCHW tensor — TODO confirm with caller).
            stylized_feats: encoder features conditioning ``net_rev_2``.
        """
        # crop the current working window and its laplacian band
        stylized_up = paddle.slice(stylized_slice,axes=[2,3],starts=[self.positions[0][0],self.positions[0][1]],\
            ends=[self.positions[0][2],self.positions[0][3]])
        lap = paddle.slice(self.laplacians[1],axes=[2,3],starts=[self.positions[0][0],self.positions[0][1]],\
            ends=[self.positions[0][2],self.positions[0][3]])
        revnet_input = paddle.concat(x=[lap, stylized_up], axis=1)
        stylized_rev_lap_second,stylized_feats = self.nets['net_rev_2'](revnet_input.detach(),stylized_feats,self.ada_alpha)
        stylized_rev_second = fold_laplace_pyramid([stylized_rev_lap_second, stylized_up])
        stylized_up = F.interpolate(stylized_rev_second, scale_factor=2)
        # tiling geometry of the upsampled result: half-size tiles plus an
        # adjusted stride so the grid covers the full extent
        size_x = stylized_up.shape[-2]
        in_size_x = math.floor(size_x / 2)
        move_x = adjust(size_x, in_size_x)
        size_y = stylized_up.shape[-1]
        in_size_y = math.floor(size_y / 2)
        move_y = adjust(size_y, in_size_y)
        for i in range(0,size_x,self.in_size_x):
            for j in range(0,size_y,self.in_size_y):
                label = str(self.outer_loop[0]*4+i*2)+'_'+str(self.outer_loop[1]*4+j*2)+'_'+self.second_set
                if label in self.labels:
                    # skip this tile only when every sub-label of it has
                    # already been produced
                    notin=True
                    for k in range(0,size_x,move_x):
                        for l in range(0,size_y,move_y):
                            label = str(self.outer_loop[0]*4+i*2+k)+'_'+str(self.outer_loop[1]*4+j*2+l)
                            if not label in self.labels:
                                notin=False
                    if notin:
                        continue
                stylized_up_2 = paddle.slice(stylized_up,axes=[2,3],starts=[i,j],\
                    ends=[i+in_size_x,j+in_size_y])
                self.first_patch_in = stylized_up_2.detach()
                lap_2 = paddle.slice(self.laplacians[2],axes=[2,3],starts=[self.outer_loop[0]*2+i,self.outer_loop[1]*2+j],
                    ends=[self.outer_loop[0]*2+i+in_size_x,self.outer_loop[1]*2+j+in_size_y])
                # edge tiles can come out short of the nominal size; skip
                # instead of padding
                if lap_2.shape[-2]!=in_size_x or lap_2.shape[-1]!=in_size_y:
                    print('continue, line 1311')
                    continue
                if stylized_up_2.shape[-2]!=in_size_x or stylized_up_2.shape[-1]!=in_size_y:
                    print('continue, line 1314')
                    continue
                revnet_input_2 = paddle.concat(x=[lap_2, stylized_up_2.detach()], axis=1)
                # NOTE(review): feats are primed from the level-1 window input
                # (revnet_input), not from revnet_input_2 — confirm this is
                # intentional and mirrors the training-time forward().
                stylized_feats = self.nets['net_rev_3'].DownBlock(revnet_input.detach())
                stylized_feats = self.nets['net_rev_3'].resblock(stylized_feats)
                stylized_rev_patch,stylized_feats = self.nets['net_rev_3'](revnet_input_2.detach(),stylized_feats.detach(),self.ada_alpha_2)
                stylized_rev_patch = fold_laplace_patch(
                    [stylized_rev_patch, stylized_up_2.detach()])
                stylized_up_3 = F.interpolate(stylized_rev_patch, scale_factor=2)
                # deepest level: tile the refined patch once more for net_rev_4
                for k in range(0,size_x,self.in_size_x):
                    for l in range(0,size_y,self.in_size_y):
                        label = str(self.outer_loop[0]*4+i*2+k)+'_'+str(self.outer_loop[1]*4+j*2+l)+'_'+str(self.counter)
                        if label in self.labels:
                            continue
                        if k+in_size_x>stylized_up_3.shape[-2] or l+in_size_y>stylized_up_3.shape[-1]:
                            print('continue, line 1331')
                            continue
                        stylized_up_4 = paddle.slice(stylized_up_3,axes=[2,3],starts=[k,l],\
                            ends=[k+in_size_x,l+in_size_y])
                        lap_3 = paddle.slice(self.laplacians[3],axes=[2,3],starts=[self.outer_loop[0]*4+i*2+k,self.outer_loop[1]*4+j*2+l*1],
                            ends=[self.outer_loop[0]*4+i*2+k+in_size_x,self.outer_loop[1]*4+j*2+l+in_size_y])
                        if lap_3.shape[-2]!=in_size_x or lap_3.shape[-1]!=in_size_y:
                            print('continue, line 1338')
                            continue
                        if stylized_up_4.shape[-2]!=in_size_x or stylized_up_4.shape[-1]!=in_size_y:
                            print('continue, line 1341')
                            continue
                        revnet_input_3 = paddle.concat(x=[lap_3, stylized_up_4.detach()], axis=1)
                        stylized_feats = self.nets['net_rev_4'].DownBlock(revnet_input_2.detach())
                        stylized_feats = self.nets['net_rev_4'].resblock(stylized_feats)
                        stylized_rev_patch_second,_ = self.nets['net_rev_4'](revnet_input_3.detach(),stylized_feats.detach(),self.ada_alpha_2)
                        stylized_rev_patch_second = fold_laplace_patch(
                            [stylized_rev_patch_second, stylized_up_4.detach()])
                        image_numpy=tensor2img(stylized_rev_patch_second,min_max=(0., 1.))
                        makedirs(os.path.join(self.output_dir, 'visual_test','tiles'))
                        # NOTE(review): img_path is computed but the tile is
                        # never saved to disk here — confirm whether an
                        # im.save() call was intended.
                        img_path = os.path.join(self.output_dir, 'visual_test','tiles',
                            '%s.png' % (label))
                        self.out_images.append(image_numpy)
                        self.labels.append(label)
                        self.counter+=1
    def forward(self):
        """Run forward pass; called by both functions <optimize_parameters> and <test>.

        Builds the stylization level by level: draftnet thumbnail ->
        ``net_rev`` -> ``net_rev_2`` applied twice (patch levels 1 and 2).
        Leaves the two patch outputs in ``self.stylized`` and
        ``self.p_stylized`` and fills ``self.visual_items`` for logging.
        """
        if self.is_train:
            # thumbnail level: draftnet encode/decode at half resolution
            self.cF = self.nets['net_enc'](F.interpolate(self.content_stack[0],scale_factor=.5))
            self.sF = self.nets['net_enc'](F.interpolate(self.style_stack[0], scale_factor=.5))
            stylized_small= self.nets['net_dec'](self.cF, self.sF)
            self.visual_items['stylized_small'] = stylized_small
            stylized_up = F.interpolate(stylized_small, scale_factor=2)
            revnet_input = paddle.concat(x=[self.laplacians[0], stylized_up], axis=1)
            #rev_net thumb only calcs as patch if second parameter is passed
            stylized_rev_lap,stylized_feats = self.nets['net_rev'](revnet_input)
            stylized_rev = fold_laplace_pyramid([stylized_rev_lap, stylized_small])
            self.visual_items['stylized_rev_first'] = stylized_rev
            stylized_up = F.interpolate(stylized_rev, scale_factor=2)
        else:
            # at test time the caller has already produced the upsampled
            # thumbnail and its features
            stylized_up = self.stylized_up
            stylized_feats = self.stylized_feats
        # level 1: crop to the first training window and refine with net_rev
        stylized_up = crop_upsized(stylized_up,self.positions[0],self.size_stack[0])
        revnet_input = paddle.concat(x=[self.laplacians[1], stylized_up], axis=1)
        stylized_rev_lap_second,stylized_feats = self.nets['net_rev'](revnet_input.detach(),stylized_feats,self.ada_alpha)
        stylized_rev_second = fold_laplace_pyramid([stylized_rev_lap_second, stylized_up])
        self.visual_items['ci_2'] = self.content_stack[1]
        self.stylized= stylized_rev_second
        self.visual_items['stylized_rev_second'] = stylized_rev_second
        # level 2: first patch pass through net_rev_2
        stylized_up = F.interpolate(stylized_rev_second, scale_factor=2)
        stylized_up = crop_upsized(stylized_up,self.positions[1],self.size_stack[1])
        self.first_patch_in = stylized_up.detach()
        # NOTE(review): feats for net_rev_2 are primed from the *level-1*
        # revnet_input (the concat above), not from the level-2 input built
        # two lines below — confirm this ordering is intentional.
        stylized_feats = self.nets['net_rev_2'].DownBlock(revnet_input.detach())
        stylized_feats = self.nets['net_rev_2'].resblock(stylized_feats)
        revnet_input = paddle.concat(x=[self.laplacians[2], stylized_up.detach()], axis=1)
        stylized_rev_patch,stylized_feats = self.nets['net_rev_2'](revnet_input.detach(),stylized_feats.detach(),self.ada_alpha_2)
        stylized_rev_patch = fold_laplace_patch(
            [stylized_rev_patch, stylized_up.detach()])
        self.visual_items['ci_3'] = self.content_stack[2]
        self.visual_items['stylized_rev_third'] = stylized_rev_patch
        # level 3: second patch pass, reusing net_rev_2 with the new feats
        stylized_up = F.interpolate(stylized_rev_patch, scale_factor=2)
        stylized_up = crop_upsized(stylized_up,self.positions[2],self.size_stack[2])
        self.second_patch_in = stylized_up.detach()
        revnet_input = paddle.concat(x=[self.laplacians[3], stylized_up.detach()], axis=1)
        stylized_rev_patch_second,_ = self.nets['net_rev_2'](revnet_input.detach(),stylized_feats.detach(),self.ada_alpha_2)
        stylized_rev_patch_second = fold_laplace_patch(
            [stylized_rev_patch_second, stylized_up.detach()])
        self.visual_items['ci_4'] = self.content_stack[3]
        self.visual_items['stylized_rev_fourth'] = stylized_rev_patch_second
        # outputs consumed by backward_G / backward_G_p
        self.stylized = stylized_rev_patch
        self.p_stylized = stylized_rev_patch_second
    def backward_G(self):
        """Generator losses for the first patch output (``self.stylized``).

        Computes patch-identity, content, style (summed over the four
        quadrants of the style image), rEMD/relaxed-content and GAN terms,
        clips each to [1e-5, 1e5], records them in ``self.losses`` and
        backpropagates the weighted sum.  Returns the total loss.
        """
        self.cF = self.nets['net_enc'](self.content_stack[-2])
        with paddle.no_grad():
            # features of the crop that was fed into the revnet; serves as a
            # gradient-free identity target
            self.tt_cropF = self.nets['net_enc'](self.first_patch_in)
        self.tpF = self.nets['net_enc'](self.stylized)
        """patch loss"""
        self.loss_patch = 0
        # self.loss_patch= self.calc_content_loss(self.tpF['r41'],self.tt_cropF['r41'])#+\
        # self.calc_content_loss(self.tpF['r51'],self.tt_cropF['r51'])
        for layer in [self.content_layers[-2]]:
            self.loss_patch += paddle.clip(self.calc_content_loss(self.tpF[layer],
                self.tt_cropF[layer]), 1e-5, 1e5)
        self.losses['loss_patch'] = self.loss_patch
        # content loss against the encoder features of the content crop
        self.loss_content_p = 0
        for layer in self.content_layers[:-1]:
            self.loss_content_p += paddle.clip(self.calc_content_loss(self.tpF[layer],
                self.cF[layer],
                norm=True), 1e-5, 1e5)
        self.losses['loss_content_p'] = self.loss_content_p
        self.loss_ps = 0
        self.p_loss_style_remd = 0
        # style image split into 2x2 quadrants; per-quadrant losses are
        # summed here and divided by 4 in the total below
        reshaped = paddle.split(self.style_stack[2], 2, 2)
        for i in reshaped:
            for j in paddle.split(i, 2, 3):
                spF = self.nets['net_enc'](j.detach())
                for layer in self.content_layers[:-1]:
                    self.loss_ps += paddle.clip(self.calc_style_loss(self.tpF[layer],
                        spF[layer]), 1e-5, 1e5)
                self.p_loss_style_remd += self.calc_style_emd_loss(
                    self.tpF['r31'], spF['r31']) + self.calc_style_emd_loss(
                    self.tpF['r41'], spF['r41'])
        self.losses['loss_ps'] = self.loss_ps
        self.p_loss_content_relt = self.calc_content_relt_loss(
            self.tpF['r31'], self.cF['r31']) + self.calc_content_relt_loss(
            self.tpF['r41'], self.cF['r41'])
        self.p_loss_style_remd = paddle.clip(self.p_loss_style_remd, 1e-5, 1e5)
        self.p_loss_content_relt = paddle.clip(self.p_loss_content_relt, 1e-5, 1e5)
        self.losses['p_loss_style_remd'] = self.p_loss_style_remd
        self.losses['p_loss_content_relt'] = self.p_loss_content_relt
        """gan loss"""
        pred_fake_p = self.nets['netD'](self.stylized)
        self.loss_Gp_GAN = paddle.clip(self.gan_criterion(pred_fake_p, True), 1e-5, 1e5)
        self.losses['loss_gan_Gp'] = self.loss_Gp_GAN
        self.loss = self.loss_Gp_GAN *self.gan_thumb_weight +self.loss_ps/4 * self.style_weight +\
            self.loss_content_p * self.content_weight +\
            self.loss_patch * self.content_weight +\
            self.p_loss_style_remd/4 * 18 + self.p_loss_content_relt * 18
        self.loss.backward()
        return self.loss
def backward_G_p(self):
cF = self.nets['net_enc'](self.content_stack[-1])
with paddle.no_grad():
tt_cropF = self.nets['net_enc'](self.second_patch_in)
tpF = self.nets['net_enc'](self.p_stylized)
"""patch loss"""
loss_patch = 0
# self.loss_patch= self.calc_content_loss(self.tpF['r41'],self.tt_cropF['r41'])#+\
# self.calc_content_loss(self.tpF['r51'],self.tt_cropF['r51'])
for layer in [self.content_layers[-2]]:
loss_patch += paddle.clip(self.calc_content_loss(tpF[layer],
tt_cropF[layer]), 1e-5, 1e5)
self.losses['loss_patch2'] = loss_patch
loss_content_p = 0
for layer in self.content_layers[:-1]:
loss_content_p += paddle.clip(self.calc_content_loss(tpF[layer],
cF[layer],
norm=True), 1e-5, 1e5)
self.losses['loss_content_p2'] = loss_content_p
loss_ps = 0
p_loss_style_remd = 0
reshaped = paddle.split(self.style_stack[1], 2, 2)
for i in reshaped:
for j in paddle.split(i, 2, 3):
spF = self.nets['net_enc'](j.detach())
for layer in self.content_layers[:-1]:
loss_ps += paddle.clip(self.calc_style_loss(tpF[layer],
spF[layer]), 1e-5, 1e5)
p_loss_style_remd += self.calc_style_emd_loss(
tpF['r31'], spF['r31']) + self.calc_style_emd_loss(
tpF['r41'], spF['r41'])
self.losses['loss_ps2'] = loss_ps
p_loss_content_relt = self.calc_content_relt_loss(
tpF['r31'], cF['r31']) + self.calc_content_relt_loss(
tpF['r41'], cF['r41'])
p_loss_style_remd = paddle.clip(p_loss_style_remd, 1e-5, 1e5)
p_loss_content_relt = paddle.clip(p_loss_content_relt, 1e-5, 1e5)
self.losses['p_loss_style_remd2'] = self.p_loss_style_remd
self.losses['p_loss_content_relt2'] = self.p_loss_content_relt
"""gan loss"""
pred_fake_p = self.nets['netD_patch'](self.p_stylized)
loss_Gp_GAN = paddle.clip(self.gan_criterion(pred_fake_p, True), 1e-5, 1e5)
self.losses['loss_gan_Gp2'] = loss_Gp_GAN
loss_patch = loss_Gp_GAN * self.gan_patch_weight +loss_ps/4 * self.style_weight +\
loss_content_p * self.content_weight*4 +\
loss_patch * self.content_weight * 4+\
p_loss_style_remd/4 *26 + p_loss_content_relt * 26
loss_patch.backward()
return loss_patch
def backward_D(self):
"""Calculate GAN loss for the discriminator"""
pred_p_fake = self.nets['netD'](self.stylized.detach())
self.loss_Dp_fake = paddle.clip(self.gan_criterion(pred_p_fake, False), 1e-5, 1e5)
pred_Dp_real = 0
reshaped = paddle.split(self.style_stack[2], 2, 2)
for i in reshaped:
for j in paddle.split(i, 2, 3):
self.loss_Dp_real = self.nets['netD'](j.detach())
pred_Dp_real += paddle.clip(self.gan_criterion(self.loss_Dp_real, True), 1e-5, 1e5)
self.loss_D_patch = (self.loss_Dp_fake + pred_Dp_real/4) * 0.5
self.loss_D_patch.backward()
self.losses['D_fake_loss'] = self.loss_Dp_fake
self.losses['D_real_loss'] = pred_Dp_real
def backward_Dpatch(self):
"""Calculate GAN loss for the discriminator"""
pred_p_fake = self.nets['netD_patch'](self.p_stylized.detach())
self.loss_Dp_fake = paddle.clip(self.gan_criterion(pred_p_fake, False), 1e-5, 1e5)
pred_Dp_real = 0
reshaped = paddle.split(self.style_stack[1], 2, 2)
for i in reshaped:
for j in paddle.split(i, 2, 3):
self.loss_Dp_real = self.nets['netD_patch'](j.detach())
pred_Dp_real += paddle.clip(self.gan_criterion(self.loss_Dp_real, True), 1e-5, 1e5)
self.loss_D_patch = (self.loss_Dp_fake + pred_Dp_real/4) * 0.5
self.loss_D_patch.backward()
self.losses['Dp_fake_loss'] = self.loss_Dp_fake
self.losses['Dp_real_loss'] = pred_Dp_real
def train_iter(self, optimizers=None):
# compute fake images: G(A)
self.forward()
# update D
self.set_requires_grad(self.nets['netD'], True)
self.set_requires_grad(self.nets['netD_patch'], True)
optimizers['optimD'].clear_grad()
self.backward_D()
optimizers['optimD'].step()
self.set_requires_grad(self.nets['netD_patch'], True)
optimizers['optimD_patch'].clear_grad()
self.backward_Dpatch()
optimizers['optimD_patch'].step()
# update G
self.set_requires_grad(self.nets['netD_patch'], False)
self.set_requires_grad(self.nets['netD'], False)
optimizers['optimG'].clear_grad()
self.backward_G()
optimizers['optimG'].step()
optimizers['optimG'].clear_grad()
self.backward_G_p()
optimizers['optimG'].step()
@MODELS.register()
class LapStyleRevSecondMXDOG(BaseModel):
    """Multi-level LapStyle revision model with XDOG edge guidance.

    Trains up to four revision generators (one per laplacian-pyramid level,
    selected by ``train_layer``), each with its own discriminator, plus an
    optional spectral discriminator.  XDOG edge maps of content and style
    drive the extra "MXDOG" consistency losses in :meth:`backward_G`.
    Only the deepest configured level is optimized per iteration; shallower
    levels are frozen.
    """
    def __init__(self,
                 revnet_generator,
                 revnet_discriminator_1,
                 revnet_discriminator_2,
                 revnet_discriminator_3,
                 revnet_discriminator_4,
                 spectral_discriminator,
                 draftnet_encode,
                 draftnet_decode,
                 revnet_deep_generator,
                 calc_style_emd_loss=None,
                 calc_content_relt_loss=None,
                 calc_content_loss=None,
                 calc_style_loss=None,
                 mse_loss=None,
                 gan_criterion=None,
                 content_layers=['r11', 'r21', 'r31', 'r41', 'r51'],
                 style_layers=['r11', 'r21', 'r31', 'r41', 'r51'],
                 content_weight=1.0,
                 style_weight=3.0,
                 train_layer=1,
                 train_spectral=0,
                 ada_alpha=1.0,
                 ada_alpha_2=1.0,
                 gan_thumb_weight=1.0,
                 gan_patch_weight=1.0,
                 use_mdog=0,
                 morph_cutoff=47.9):
        """Build the network stack for the requested ``train_layer``.

        Levels above ``train_layer`` are not built; levels below it are
        built but frozen.  ``self.o`` / ``self.go`` hold optimizer names,
        ``self.discriminators`` / ``self.generator`` the net names, with
        the *last* entry always being the level currently trained.
        """
        super(LapStyleRevSecondMXDOG, self).__init__()
        self.train_spectral=train_spectral
        self.train_layer=train_layer
        # define draftnet params
        self.nets['net_enc'] = build_generator(draftnet_encode)
        self.nets['net_dec'] = build_generator(draftnet_decode)
        self.set_requires_grad([self.nets['net_enc']], False)
        self.set_requires_grad([self.nets['net_dec']], False)
        #init_weights(self.nets['net_dec'])
        print(train_layer)
        if train_layer>0:
            # define the first revnet params
            self.nets['net_rev'] = build_generator(revnet_generator)
            self.nets['netD_1'] = build_discriminator(revnet_discriminator_1)
            self.discriminators=['netD_1']
            self.o = ['optimD1']
            self.go = ['optimG1']
            self.generator = ['net_rev']
            if train_layer>1:
                # a deeper level is being trained: freeze this one
                self.set_requires_grad([self.nets['net_rev']], False)
                self.set_requires_grad([self.nets['netD_1']], False)
            else:
                print('init weights')
                init_weights(self.nets['net_rev'])
                init_weights(self.nets['netD_1'])
        if train_layer>1:
            # second revision level
            self.nets['net_rev_2'] = build_generator(revnet_generator)
            self.nets['netD_2'] = build_discriminator(revnet_discriminator_2)
            self.discriminators.append('netD_2')
            self.o.append('optimD2')
            self.generator.append('net_rev_2')
            self.go.append('optimG2')
            if train_layer>2:
                self.set_requires_grad([self.nets['net_rev_2']], False)
                self.set_requires_grad([self.nets['netD_2']], False)
            else:
                init_weights(self.nets['net_rev_2'])
                init_weights(self.nets['netD_2'])
        if train_layer>2:
            # third level uses the "deep" generator variant
            self.nets['net_rev_3'] = build_generator(revnet_deep_generator)
            self.nets['netD_3'] = build_discriminator(revnet_discriminator_3)
            self.discriminators.append('netD_3')
            self.o.append('optimD3')
            self.generator.append('net_rev_3')
            self.go.append('optimG3')
            if train_layer>3:
                self.set_requires_grad([self.nets['net_rev_3']], False)
                self.set_requires_grad([self.nets['netD_3']], False)
            else:
                init_weights(self.nets['net_rev_3'])
                init_weights(self.nets['netD_3'])
        if train_layer>3:
            # fourth (deepest) level
            self.nets['net_rev_4'] = build_generator(revnet_deep_generator)
            self.nets['netD_4'] = build_discriminator(revnet_discriminator_4)
            self.discriminators.append('netD_4')
            self.generator.append('net_rev_4')
            self.o.append('optimD4')
            self.go.append('optimG4')
            if train_layer>4:
                self.set_requires_grad([self.nets['net_rev_4']], False)
                self.set_requires_grad([self.nets['netD_4']], False)
            else:
                init_weights(self.nets['net_rev_4'])
                init_weights(self.nets['netD_4'])
        if self.train_spectral==1:
            self.nets['spectral_D'] = build_discriminator(spectral_discriminator)
            init_weights(self.nets['spectral_D'])
        # fixed 3x3 high-pass kernel (per-channel, depthwise) used by
        # laplacian_conv in setup_input
        l = np.repeat(np.array([[[[-8, -8, -8], [-8, 1, -8], [-8, -8, -8]]]]), 3, axis=0)
        self.lap_filter = paddle.nn.Conv2D(3, 3, (3, 3), stride=1, bias_attr=False,
            padding=1, groups=3, padding_mode='reflect',
            weight_attr=paddle.ParamAttr(
                initializer=paddle.fluid.initializer.NumpyArrayInitializer(
                    value=l), trainable=False)
            )
        # define loss functions
        self.calc_style_emd_loss = build_criterion(calc_style_emd_loss)
        self.calc_content_relt_loss = build_criterion(calc_content_relt_loss)
        self.calc_content_loss = build_criterion(calc_content_loss)
        self.calc_style_loss = build_criterion(calc_style_loss)
        self.mse_loss = build_criterion(mse_loss)
        self.gan_criterion = build_criterion(gan_criterion)
        self.content_layers = content_layers
        self.style_layers = style_layers
        self.content_weight = content_weight
        self.style_weight = style_weight
        self.ada_alpha = ada_alpha
        self.ada_alpha_2 = ada_alpha_2
        self.gan_thumb_weight = gan_thumb_weight
        self.gan_patch_weight = gan_patch_weight
        self.morph_cutoff = morph_cutoff
        # fixed gaussian blurs of two scales for the XDOG difference, plus
        # all-ones "morph" convs used for dilation-style thresholding.
        # NOTE(review): gaussian_filter declares kernel size 9 but g is
        # built from gaussian(7, 1), and gaussian_filter_2 uses padding=10
        # for a 21-kernel — confirm sizes/paddings are as intended.
        g = np.repeat(gaussian(7, 1).numpy(), 3, axis=0)
        g2 = np.repeat(gaussian(21, 3).numpy(), 3, axis=0)
        self.gaussian_filter = paddle.nn.Conv2D(3, 3, 9,
            groups=3, bias_attr=False,
            padding=3, padding_mode='reflect',
            weight_attr=paddle.ParamAttr(
                initializer=paddle.fluid.initializer.NumpyArrayInitializer(
                    value=g), trainable=False)
            )
        self.gaussian_filter_2 = paddle.nn.Conv2D(3, 3, 21,
            groups=3, bias_attr=False,
            padding=10, padding_mode='reflect',
            weight_attr=paddle.ParamAttr(
                initializer=paddle.fluid.initializer.NumpyArrayInitializer(
                    value=g2), trainable=False)
            )
        self.morph_conv = paddle.nn.Conv2D(3, 3, 3, padding=1, groups=3,
            padding_mode='reflect', bias_attr=False,
            weight_attr=paddle.ParamAttr(
                initializer=paddle.fluid.initializer.Constant(
                    value=1), trainable=False)
            )
        self.morph_conv_2 = paddle.nn.Conv2D(3, 3, 7, padding=3, groups=3,
            padding_mode='reflect', bias_attr=False,
            weight_attr=paddle.ParamAttr(
                initializer=paddle.fluid.initializer.Constant(
                    value=1), trainable=False)
            )
        self.steps=0
    def setup_input(self, input):
        """Stash one training batch; builds one laplacian band per active
        level via the fixed high-pass ``lap_filter``."""
        if self.is_train:
            self.content_stack = []
            self.style_stack = [paddle.to_tensor(input['style_stack_1']),paddle.to_tensor(input['style_stack_2'])]
            self.laplacians=[]
            for i in range(1,6):
                if 'content_stack_'+str(i) in input:
                    self.content_stack.append(paddle.to_tensor(input['content_stack_'+str(i)]))
            self.visual_items['ci'] = self.content_stack[0]
            self.visual_items['si'] = self.style_stack[0]
            self.content=input['content']
            self.positions = input['position_stack']
            self.size_stack = input['size_stack']
            if self.train_layer>0:
                self.laplacians.append(laplacian_conv(self.content_stack[0],self.lap_filter).detach())
            if self.train_layer>1:
                self.laplacians.append(laplacian_conv(self.content_stack[1],self.lap_filter).detach())
            if self.train_layer>2:
                self.laplacians.append(laplacian_conv(self.content_stack[2],self.lap_filter).detach())
            if self.train_layer>3:
                self.laplacians.append(laplacian_conv(self.content_stack[3],self.lap_filter).detach())
        # reset per-batch XDOG caches (backward_G recomputes when these are bool)
        self.cX = False
        self.sX = False
    def forward(self):
        """Run forward pass; called by both functions <optimize_parameters> and <test>.

        Produces one stylization per active level; ``self.stylized`` is a
        list indexed by level (0 = draft thumbnail).  All inputs to deeper
        levels are detached, so gradients stay within the level trained.
        """
        cF = self.nets['net_enc'](F.interpolate(self.content_stack[0],scale_factor=.5))
        sF = self.nets['net_enc'](F.interpolate(self.style_stack[0], scale_factor=.5))
        stylized_small= self.nets['net_dec'](cF, sF)
        self.stylized=[stylized_small]
        self.visual_items['stylized_small'] = stylized_small
        stylized_up = F.interpolate(stylized_small, scale_factor=2)
        revnet_input = paddle.concat(x=[self.laplacians[0].detach(), stylized_up.detach()], axis=1)
        #rev_net thumb only calcs as patch if second parameter is passed
        stylized_rev_lap = self.nets['net_rev'](revnet_input.detach())
        stylized_rev = fold_laplace_pyramid([stylized_rev_lap, stylized_small.detach()])
        self.stylized.append(stylized_rev)
        self.visual_items['stylized_rev_first'] = stylized_rev
        if self.train_layer>1:
            # level 2: crop to the training window and refine
            stylized_up = F.interpolate(stylized_rev, scale_factor=2)
            stylized_up = crop_upsized(stylized_up,self.positions[0],self.size_stack[0])
            self.patches_in = [stylized_up.detach()]
            # NOTE(review): feats primed from the previous level's
            # revnet_input, not the one concatenated below — confirm.
            stylized_feats = self.nets['net_rev_2'].DownBlock(revnet_input.detach())
            stylized_feats = self.nets['net_rev_2'].resblock(stylized_feats)
            revnet_input = paddle.concat(x=[self.laplacians[1].detach(), stylized_up.detach()], axis=1)
            stylized_rev_lap_second,stylized_feats = self.nets['net_rev_2'](revnet_input.detach(),stylized_feats,self.ada_alpha)
            stylized_rev_second = fold_laplace_pyramid([stylized_rev_lap_second, stylized_up.detach()])
            self.visual_items['ci_2'] = self.content_stack[1]
            self.stylized.append(stylized_rev_second)
            self.visual_items['stylized_rev_second'] = stylized_rev_second
            if self.train_layer>2:
                # level 3
                stylized_up = F.interpolate(stylized_rev_second, scale_factor=2)
                stylized_up = crop_upsized(stylized_up,self.positions[1],self.size_stack[1])
                self.patches_in.append(stylized_up.detach())
                stylized_feats = self.nets['net_rev_3'].DownBlock(revnet_input.detach())
                stylized_feats = self.nets['net_rev_3'].resblock(stylized_feats)
                revnet_input = paddle.concat(x=[self.laplacians[2], stylized_up.detach()], axis=1)
                stylized_rev_patch,stylized_feats = self.nets['net_rev_3'](revnet_input.detach(),stylized_feats,self.ada_alpha)
                stylized_rev_patch = fold_laplace_patch(
                    [stylized_rev_patch, stylized_up.detach()])
                self.visual_items['ci_3'] = self.content_stack[2]
                self.visual_items['stylized_rev_third'] = stylized_rev_patch
                self.stylized.append(stylized_rev_patch)
                if self.train_layer>3:
                    # level 4 (deepest)
                    stylized_up = F.interpolate(stylized_rev_patch.detach(), scale_factor=2)
                    stylized_up = crop_upsized(stylized_up.detach(),self.positions[2],self.size_stack[2])
                    self.patches_in.append(stylized_up.detach())
                    stylized_feats = self.nets['net_rev_4'].DownBlock(revnet_input.detach())
                    stylized_feats = self.nets['net_rev_4'].resblock(stylized_feats)
                    revnet_input = paddle.concat(x=[self.laplacians[3].detach(), stylized_up.detach()], axis=1)
                    stylized_rev_patch_second,_ = self.nets['net_rev_4'](revnet_input.detach(),stylized_feats,self.ada_alpha)
                    stylized_rev_patch_second = fold_laplace_patch(
                        [stylized_rev_patch_second, stylized_up.detach()])
                    self.visual_items['ci_4'] = self.content_stack[3]
                    self.visual_items['stylized_rev_fourth'] = stylized_rev_patch_second
                    self.stylized.append(stylized_rev_patch_second)
    def backward_G(self,i):
        """Generator losses for level ``i`` (output ``self.stylized[i+1]``).

        Adds XDOG-based consistency terms (MXDOG): content/style edge maps
        are compared in encoder-feature space.  Level-dependent weights
        a/b/c/d/f/g scale the rEMD, relt, GAN, patch, style and mxdog terms.
        Returns the total loss (caller backpropagates).
        """
        cF = self.nets['net_enc'](self.content_stack[i].detach())
        if i>2:
            # deepest level: larger morphology kernel and matching cutoff
            style_conv = self.morph_conv_2
            morph_cutoff= 49*.9445
            morph_num=2
        else:
            style_conv = self.morph_conv
            morph_cutoff= 8.5
            morph_num=2
        tpF = self.nets['net_enc'](self.stylized[i+1])
        """patch loss"""
        self.loss_patch = 0
        if i!=0:
            # identity term against the patch that was fed in
            tt_cropF = self.nets['net_enc'](self.patches_in[i-1].detach())
            for layer in [self.content_layers[-2]]:
                self.loss_patch += self.calc_content_loss(tpF[layer],
                    tt_cropF[layer])
        self.loss_content_p = 0
        for layer in self.content_layers:
            self.loss_content_p += self.calc_content_loss(tpF[layer],
                cF[layer],
                norm=True)
        self.losses['loss_content_'+str(i+1)] = self.loss_content_p
        self.loss_ps = 0
        self.p_loss_style_remd = 0
        if type(self.cX)==bool:
            # XDOG edge maps; self.cX/self.sX are reset to False every batch
            # in setup_input, so this branch is taken each call
            cx,cxminmax = xdog(self.content.detach(),self.gaussian_filter,self.gaussian_filter_2,self.morph_conv,morphs=2,)
            sx,sxminmax = xdog(self.style_stack[1].detach(),self.gaussian_filter,self.gaussian_filter_2,style_conv,morphs=2,morph_cutoff=morph_cutoff)
            # crop the content edge map down to the current patch window
            for j in range(i):
                cx = paddle.slice(cx,axes=[2,3],starts=[(self.positions[j][1]).astype('int32'),(self.positions[j][0]).astype('int32')],\
                    ends=[(self.positions[j][3]).astype('int32'),(self.positions[j][2]).astype('int32')])
            if cx.shape[-1]!=256:
                cx=F.interpolate(cx,size=(256,256))
            cXF = self.nets['net_enc'](cx.detach())
            stylized_dog,_ = xdog(self.stylized[i+1],self.gaussian_filter,self.gaussian_filter_2,self.morph_conv,morphs=2,minmax=cxminmax)
            cdogF = self.nets['net_enc'](stylized_dog)
            # MXDOG: stylized output should keep the content's edge structure
            mxdog_content = self.calc_content_loss(tpF['r31'], cXF['r31'])+self.calc_content_loss(tpF['r41'], cXF['r41'])
            mxdog_content_contraint = self.calc_content_loss(cdogF['r31'], cXF['r31'])+self.calc_content_loss(cdogF['r41'], cXF['r41'])
        if i>0:
            # deeper levels: compare against random crops of the style image
            reshaped = self.style_stack[1].detach()
            for j in range(i):
                k = random_crop_coords(reshaped.shape[-1])
                reshaped=paddle.slice(reshaped,axes=[2,3],starts=[k[0],k[2]],ends=[k[1],k[3]])
                sx = paddle.slice(sx,axes=[2,3],starts=[k[0],k[2]],ends=[k[1],k[3]])
            if not reshaped.shape[-1]==256:
                reshaped = F.interpolate(reshaped,size=(256,256))
            spF = self.nets['net_enc'](reshaped.detach())
            for layer in self.content_layers:
                self.loss_ps += self.calc_style_loss(tpF[layer],
                    spF[layer])
            self.p_loss_style_remd += self.calc_style_emd_loss(
                tpF['r31'], spF['r31']) + self.calc_style_emd_loss(
                tpF['r41'], spF['r41'])
            sXF = self.nets['net_enc'](sx)
            mxdog_style=0
            mxdog_style+=self.mse_loss(cdogF['r31'], sXF['r31'])+self.mse_loss(cdogF['r41'], sXF['r41'])
            self.loss_ps = self.loss_ps
            self.p_loss_style_remd=self.p_loss_style_remd
            #mxdog_style=mxdog_style
        else:
            # thumbnail level: compare against the full style image
            spF = self.nets['net_enc'](self.style_stack[0].detach())
            sXF = self.nets['net_enc'](sx)
            for layer in self.content_layers:
                self.loss_ps += self.calc_style_loss(tpF[layer],
                    spF[layer])
            self.p_loss_style_remd += self.calc_style_emd_loss(
                tpF['r31'], spF['r31']) + self.calc_style_emd_loss(
                tpF['r41'], spF['r41'])
            mxdog_style=self.mse_loss(cdogF['r31'], sXF['r31'])+self.mse_loss(cdogF['r41'], sXF['r41'])
        #self.visual_items['cX']=cx
        #self.visual_items['sX']=sx
        self.losses['loss_ps_'+str(i+1)] = self.loss_ps
        self.p_loss_content_relt = self.calc_content_relt_loss(
            tpF['r31'], cF['r31']) + self.calc_content_relt_loss(
            tpF['r41'], cF['r41'])
        self.p_loss_content_relt = self.p_loss_content_relt
        self.losses['p_loss_style_remd_'+str(i+1)] = self.p_loss_style_remd
        self.losses['p_loss_content_relt_'+str(i+1)] = self.p_loss_content_relt
        self.losses['loss_MD_'+str(i+1)] = mxdog_content*.3
        self.losses['loss_CnsC_'+str(i+1)] = mxdog_content_contraint*100
        """gan loss"""
        self.loss_Gp_GAN=0
        pred_fake_p = self.nets[self.discriminators[-1]](self.stylized[i+1])
        self.loss_Gp_GAN += self.gan_criterion(pred_fake_p, True)
        self.loss_Gs_GAN = 0
        if self.train_spectral==1:
            pred_fake_ps = self.nets['spectral_D'](self.stylized[i+1])
            self.loss_Gs_GAN += self.gan_criterion(pred_fake_ps, True)
        # level-dependent loss weights:
        # a = rEMD, b = relt, c = GAN, d = patch, f = style multiplier,
        # g = mxdog multiplier
        f=1+i
        g=1
        if i==0:
            a=10
            b=16
            c=1
            d=1
        elif i>0 and i<2:
            a=16
            b=16
            c=(2*(i+1))
            d=1
        else:
            a=26
            b=26
            c=(2*(i+1))
            d=1
            f=1.5+i
            g=2.5
        self.losses['loss_CnsS_'+str(i+1)] = mxdog_style*1000
        mxdogloss=mxdog_content * .3 + mxdog_content_contraint *100 + mxdog_style * 1000
        self.losses['loss_Gp_GAN']=self.loss_Gp_GAN*c
        if i!=0:
            self.losses['loss_patch_' + str(i + 1)] = self.loss_patch*d
        self.loss = self.loss_Gp_GAN *c+self.loss_Gs_GAN*c+self.loss_ps * self.style_weight*f +\
            self.loss_content_p * self.content_weight +\
            self.loss_patch*d +\
            self.p_loss_style_remd * a + self.p_loss_content_relt * b + mxdogloss*g
        return self.loss
    def backward_D(self,dec,i,name):
        """Calculate GAN loss for the discriminator ``dec`` at level ``i``.

        Fake = detached level output; real = the style image, randomly
        cropped ``i`` times for deeper levels and resized to 256.  Returns
        the loss (caller backpropagates); logs under ``name``-prefixed keys.
        """
        fake = self.stylized[i+1].detach()
        pred_p_fake = dec(fake)
        loss_Dp_fake = self.gan_criterion(pred_p_fake, False)
        pred_Dp_real = 0
        reshaped = self.style_stack[1]
        if i>0:
            for j in range(i):
                k = random_crop_coords(reshaped.shape[-1])
                reshaped=paddle.slice(reshaped,axes=[2,3],starts=[k[0],k[2]],ends=[k[1],k[3]])
            if not reshaped.shape[-1]==256:
                reshaped = F.interpolate(reshaped,size=(256,256))
            loss_Dp_real = dec(reshaped.detach())
            pred_Dp_real += self.gan_criterion(loss_Dp_real, True)
            pred_Dp_real=pred_Dp_real
        else:
            reshaped = F.interpolate(reshaped,size=(256,256))
            loss_Dp_real = dec(reshaped.detach())
            pred_Dp_real += self.gan_criterion(loss_Dp_real, True)
        self.loss_D_patch = (loss_Dp_fake + pred_Dp_real) * 0.5
        self.losses[name+'_fake_loss_'+str(i)] = loss_Dp_fake
        self.losses[name+'_real_loss_'+str(i)] = pred_Dp_real
        return self.loss_D_patch
    def train_iter(self, optimizers=None):
        """One step: forward, update the deepest level's discriminator
        (plus the spectral D if enabled), then its generator."""
        self.forward()
        # update D
        optimizers[self.o[-1]].clear_grad()
        self.set_requires_grad(self.nets[self.discriminators[-1]],True)
        loss = self.backward_D(self.nets[self.discriminators[-1]],self.train_layer-1,str(self.train_layer))
        loss.backward()
        optimizers[self.o[-1]].step()
        self.set_requires_grad(self.nets[self.discriminators[-1]],False)
        optimizers[self.o[-1]].clear_grad()
        if self.train_spectral==1:
            self.set_requires_grad(self.nets['spectral_D'],True)
            optimizers['optimSD'].clear_grad()
            loss=self.backward_D(self.nets['spectral_D'],self.train_layer-1,str(self.train_layer-1)+'s')
            loss.backward()
            optimizers['optimSD'].step()
            self.set_requires_grad(self.nets['spectral_D'],False)
        # update G
        optimizers[self.go[-1]].clear_grad()
        loss = self.backward_G(self.train_layer-1)
        loss.backward()
        optimizers[self.go[-1]].step()
        optimizers[self.go[-1]].clear_grad()
@MODELS.register()
class LapStyleRevSecondMiddle(BaseModel):
    """LapStyle model with a draft net, a frozen first revision net and a
    trainable second (deep) revision net applied at two pyramid levels.

    Losses combine content/style losses, EMD/relative losses, GAN losses
    and an XDoG ("MXDoG") edge-consistency term.
    """
    def __init__(self,
                 revnet_generator,
                 revnet_discriminator,
                 draftnet_encode,
                 draftnet_decode,
                 revnet_deep_generator,
                 calc_style_emd_loss=None,
                 calc_content_relt_loss=None,
                 calc_content_loss=None,
                 calc_style_loss=None,
                 gan_criterion=None,
                 content_layers=['r11', 'r21', 'r31', 'r41', 'r51'],
                 style_layers=['r11', 'r21', 'r31', 'r41', 'r51'],
                 content_weight=1.0,
                 style_weight=3.0,
                 ada_alpha=1.0,
                 ada_alpha_2=1.0,
                 gan_thumb_weight=1.0,
                 gan_patch_weight=1.0,
                 use_mdog=0,
                 morph_cutoff=47.9,
                 rev3_iter=0,
                 rev4_iter=0):
        super(LapStyleRevSecondMiddle, self).__init__()
        # AMP loss scaler (created but not visibly used in this chunk).
        self.scaler = paddle.amp.GradScaler(init_loss_scaling=1024)
        # define draftnet params
        self.nets['net_enc'] = build_generator(draftnet_encode)
        self.nets['net_dec'] = build_generator(draftnet_decode)
        self.set_requires_grad([self.nets['net_enc']], False)
        # NOTE(review): duplicate of the line above — presumably the second
        # call was meant to freeze net_dec instead; confirm before changing.
        self.set_requires_grad([self.nets['net_enc']], False)
        # define the first revnet params (frozen)
        self.nets['net_rev'] = build_generator(revnet_generator)
        self.set_requires_grad([self.nets['net_rev']], False)
        # define the second revnet params (trainable)
        self.nets['net_rev_2'] = build_generator(revnet_deep_generator)
        init_weights(self.nets['net_rev_2'])
        self.nets['netD'] = build_discriminator(revnet_discriminator)
        init_weights(self.nets['netD'])
        self.discriminators=[self.nets['netD']]
        # Fixed 3x3 Laplacian-style high-pass kernel, one per RGB channel.
        l = np.repeat(np.array([[[[-8, -8, -8], [-8, 1, -8], [-8, -8, -8]]]]), 3, axis=0)
        self.lap_filter = paddle.nn.Conv2D(3, 3, (3, 3), stride=1, bias_attr=False,
                                           padding=1, groups=3, padding_mode='reflect',
                                           weight_attr=paddle.ParamAttr(
                                               initializer=paddle.fluid.initializer.NumpyArrayInitializer(
                                                   value=l), trainable=False)
                                           )
        # define loss functions
        self.calc_style_emd_loss = build_criterion(calc_style_emd_loss)
        self.calc_content_relt_loss = build_criterion(calc_content_relt_loss)
        self.calc_content_loss = build_criterion(calc_content_loss)
        self.calc_style_loss = build_criterion(calc_style_loss)
        self.gan_criterion = build_criterion(gan_criterion)
        self.content_layers = content_layers
        self.style_layers = style_layers
        self.content_weight = content_weight
        self.style_weight = style_weight
        self.ada_alpha = ada_alpha
        self.ada_alpha_2 = ada_alpha_2
        self.gan_thumb_weight = gan_thumb_weight
        self.gan_patch_weight = gan_patch_weight
        self.morph_cutoff = morph_cutoff
        # Fixed Gaussian blur kernels used by the XDoG edge extractor.
        g = np.repeat(gaussian(7, 1).numpy(), 3, axis=0)
        g2 = np.repeat(gaussian(19, 3).numpy(), 3, axis=0)
        self.gaussian_filter = paddle.nn.Conv2D(3, 3, 7,
                                                groups=3, bias_attr=False,
                                                padding=3, padding_mode='reflect',
                                                weight_attr=paddle.ParamAttr(
                                                    initializer=paddle.fluid.initializer.NumpyArrayInitializer(
                                                        value=g), trainable=False)
                                                )
        # NOTE(review): kernel size is declared as 25 but the weights come
        # from gaussian(19, 3) and padding=9 matches a 19x19 kernel — the
        # kernel size looks like it should be 19; confirm.
        self.gaussian_filter_2 = paddle.nn.Conv2D(3, 3, 25,
                                                  groups=3, bias_attr=False,
                                                  padding=9, padding_mode='reflect',
                                                  weight_attr=paddle.ParamAttr(
                                                      initializer=paddle.fluid.initializer.NumpyArrayInitializer(
                                                          value=g2), trainable=False)
                                                  )
        # All-ones convolutions used as morphological dilation operators.
        self.morph_conv = paddle.nn.Conv2D(3, 3, 3, padding=1, groups=3,
                                           padding_mode='reflect', bias_attr=False,
                                           weight_attr=paddle.ParamAttr(
                                               initializer=paddle.fluid.initializer.Constant(
                                                   value=1), trainable=False)
                                           )
        self.morph_conv_2 = paddle.nn.Conv2D(3, 3, 11, padding=5, groups=3,
                                             padding_mode='reflect', bias_attr=False,
                                             weight_attr=paddle.ParamAttr(
                                                 initializer=paddle.fluid.initializer.Constant(
                                                     value=1), trainable=False)
                                             )
    def setup_input(self, input):
        """Unpack one training batch: content pyramid, style images, crop
        positions/sizes, and precompute Laplacian residuals per level."""
        if self.is_train:
            self.content_stack = []
            self.style_stack = [paddle.to_tensor(input['style_stack_1']),paddle.to_tensor(input['style_stack_2'])]
            self.laplacians=[]
            for i in range(1,5):
                if 'content_stack_'+str(i) in input:
                    self.content_stack.append(paddle.to_tensor(input['content_stack_'+str(i)]))
            self.visual_items['ci'] = self.content_stack[0]
            self.content=input['content']
            self.positions = input['position_stack']
            self.size_stack = input['size_stack']
            # High-frequency residual of each content level, detached — the
            # laplacian filter itself must not receive gradients.
            self.laplacians.append(laplacian_conv(self.content_stack[0],self.lap_filter).detach())
            self.laplacians.append(laplacian_conv(self.content_stack[1],self.lap_filter).detach())
            self.laplacians.append(laplacian_conv(self.content_stack[2],self.lap_filter).detach())
            # Sentinel False values: backward_G checks `type(self.cX)==bool`
            # to decide whether the XDoG terms must be (re)computed.
            self.sX=False
            self.cX = False
    def forward(self):
        """Run forward pass; called by both functions <optimize_parameters> and <test>."""
        # Draft stage at half resolution.
        cF = self.nets['net_enc'](F.interpolate(self.content_stack[0],scale_factor=.5))
        sF = self.nets['net_enc'](F.interpolate(self.style_stack[0], scale_factor=.5))
        stylized_small= self.nets['net_dec'](cF, sF)
        self.visual_items['stylized_small'] = stylized_small
        stylized_up = F.interpolate(stylized_small, scale_factor=2)
        revnet_input = paddle.concat(x=[self.laplacians[0], stylized_up], axis=1)
        #rev_net thumb only calcs as patch if second parameter is passed
        stylized_rev_lap,stylized_feats = self.nets['net_rev'](revnet_input)
        stylized_rev = fold_laplace_pyramid([stylized_rev_lap, stylized_small])
        self.visual_items['stylized_rev_first'] = stylized_rev
        # Second level: upsample, crop to the recorded position, revise.
        stylized_up = F.interpolate(stylized_rev, scale_factor=2)
        stylized_up = crop_upsized(stylized_up,self.positions[0],self.size_stack[0])
        self.patches_in = [stylized_up.detach()]
        revnet_input = paddle.concat(x=[self.laplacians[1], stylized_up], axis=1)
        stylized_rev_lap_second,stylized_feats = self.nets['net_rev_2'](revnet_input.detach(),stylized_feats,self.ada_alpha)
        stylized_rev_second = fold_laplace_pyramid([stylized_rev_lap_second, stylized_up])
        self.visual_items['ci_2'] = self.content_stack[1]
        self.stylized= [stylized_rev_second]
        self.visual_items['stylized_rev_second'] = stylized_rev_second
        # Third level: same pattern, with detached inputs/features and the
        # second AdaIN mixing coefficient.
        stylized_up = F.interpolate(stylized_rev_second, scale_factor=2)
        stylized_up = crop_upsized(stylized_up,self.positions[1],self.size_stack[1])
        self.patches_in.append(stylized_up)
        revnet_input = paddle.concat(x=[self.laplacians[2], stylized_up.detach()], axis=1)
        stylized_rev_patch,stylized_feats = self.nets['net_rev_2'](revnet_input.detach(),stylized_feats.detach(),self.ada_alpha_2)
        stylized_rev_patch = fold_laplace_patch(
            [stylized_rev_patch, stylized_up.detach()])
        self.visual_items['ci_3'] = self.content_stack[2]
        self.visual_items['stylized_rev_third'] = stylized_rev_patch
        self.stylized.append(stylized_rev_patch)
        stylized_up = F.interpolate(stylized_rev_patch, scale_factor=2)
        stylized_up = crop_upsized(stylized_up,self.positions[2],self.size_stack[2])
        self.patches_in.append(stylized_up)
    def backward_G(self,i):
        """Compute the full generator loss for pyramid level `i` and return it."""
        cF = self.nets['net_enc'](self.content_stack[i])
        # Encoder features of the *input* patch are targets only — no grad.
        with paddle.no_grad():
            tt_cropF = self.nets['net_enc'](self.patches_in[i])
        tpF = self.nets['net_enc'](self.stylized[i])
        """patch loss"""
        self.loss_patch = 0
        # self.loss_patch= self.calc_content_loss(self.tpF['r41'],self.tt_cropF['r41'])#+\
        # self.calc_content_loss(self.tpF['r51'],self.tt_cropF['r51'])
        for layer in [self.content_layers[-2]]:
            self.loss_patch += self.calc_content_loss(tpF[layer],
                                                      tt_cropF[layer])
        self.losses['loss_patch_'+str(i+1)] = self.loss_patch
        self.loss_content_p = 0
        for layer in self.content_layers:
            self.loss_content_p += self.calc_content_loss(tpF[layer],
                                                          cF[layer],
                                                          norm=True)
        self.losses['loss_content_'+str(i+1)] = self.loss_content_p
        self.loss_ps = 0
        self.p_loss_style_remd = 0
        mxdog_style=0
        style_counter=0
        # self.cX is reset to the bool False in setup_input, so this branch
        # recomputes the XDoG edge maps each iteration.
        if type(self.cX)==bool:
            _,cxminmax = xdog(self.content.detach(),self.gaussian_filter,self.gaussian_filter_2,self.morph_conv,morphs=2)
            _,sxminmax = xdog(self.style_stack[1].detach(),self.gaussian_filter,self.gaussian_filter_2,self.morph_conv,morphs=2)
            cX,_ = xdog(self.content_stack[i].detach(),self.gaussian_filter,self.gaussian_filter_2,self.morph_conv,morphs=2,minmax=cxminmax)
            cXF = self.nets['net_enc'](cX.detach())
            stylized_dog,_ = xdog(self.stylized[i],self.gaussian_filter,self.gaussian_filter_2,self.morph_conv,morphs=2,minmax=cxminmax)
            cdogF = self.nets['net_enc'](stylized_dog)
            mxdog_content = self.calc_content_loss(tpF['r31'], cXF['r31'])
            mxdog_content_contraint = self.calc_content_loss(cdogF['r31'], cXF['r31'])
        # Random-crop the style image `i` times, normalize to 512, then
        # split it into four quadrants used as style references.
        reshaped = self.style_stack[1]
        for j in range(i):
            k = random_crop_coords(reshaped.shape[-1])
            reshaped=paddle.slice(reshaped,axes=[2,3],starts=[k[0],k[2]],ends=[k[1],k[3]])
        if not reshaped.shape[-1]==512:
            reshaped = F.interpolate(reshaped,size=(512,512))
        reshaped = paddle.split(reshaped, 2, 2)
        for idx,k in enumerate(reshaped):
            for itx,j in enumerate(paddle.split(k, 2, 3)):
                spF = self.nets['net_enc'](j.detach())
                for layer in self.content_layers:
                    self.loss_ps += paddle.clip(self.calc_style_loss(tpF[layer],
                                                                     spF[layer]), 1e-5, 1e5)
                self.p_loss_style_remd += self.calc_style_emd_loss(
                    tpF['r31'], spF['r31']) + self.calc_style_emd_loss(
                        tpF['r41'], spF['r41'])
                sX,_ = xdog(j.detach(),self.gaussian_filter,self.gaussian_filter_2,self.morph_conv,morphs=2,minmax=sxminmax)
                sXF = self.nets['net_enc'](sX.detach())
                mxdog_style+=self.calc_style_loss(cdogF['r31'], sXF['r31'])
                style_counter += 1
                # Log the last (4th) quadrant's edge map for visualization.
                if style_counter==4:
                    self.visual_items['sX_'+str(i)]=sX
        # /4 averages over the four style quadrants.
        self.losses['loss_ps_'+str(i+1)] = self.loss_ps/4
        self.p_loss_content_relt = self.calc_content_relt_loss(
            tpF['r31'], cF['r31']) + self.calc_content_relt_loss(
                tpF['r41'], cF['r41'])
        self.p_loss_style_remd = paddle.clip(self.p_loss_style_remd, 1e-5, 1e5)
        self.p_loss_content_relt = paddle.clip(self.p_loss_content_relt, 1e-5, 1e5)
        self.losses['p_loss_style_remd_'+str(i+1)] = self.p_loss_style_remd/4
        self.losses['p_loss_content_relt_'+str(i+1)] = self.p_loss_content_relt
        """gan loss"""
        pred_fake_p = self.discriminators[0](self.stylized[i])
        self.loss_Gp_GAN = self.gan_criterion(pred_fake_p, True)
        self.losses['loss_gan_Gp_'+str(i+1)] = self.loss_Gp_GAN*self.gan_thumb_weight
        self.losses['loss_MD_'+str(i+1)] = mxdog_content*.0125
        self.losses['loss_CnsC_'+str(i+1)] = mxdog_content_contraint*25
        self.losses['loss_CnsS_'+str(i+1)] = mxdog_style*125/4
        mxdogloss=mxdog_content * .0125 + mxdog_content_contraint *25 + (mxdog_style/4) * 125
        # Weighted sum of all terms; mxdog contribution decays with depth.
        self.loss = self.loss_Gp_GAN *self.gan_thumb_weight +self.loss_ps/4 * self.style_weight +\
            self.loss_content_p * self.content_weight +\
            self.loss_patch +\
            self.p_loss_style_remd/4 * 22 + self.p_loss_content_relt * 22 + mxdogloss/(i+1)
        return self.loss
    def backward_D(self,dec,i):
        """Calculate GAN loss for the discriminator"""
        pred_p_fake = dec(self.stylized[i].detach())
        loss_Dp_fake = self.gan_criterion(pred_p_fake, False)
        pred_Dp_real = 0
        # Same crop/resize/quadrant scheme as backward_G's style branch.
        reshaped = self.style_stack[1]
        for j in range(i):
            k = random_crop_coords(reshaped.shape[-1])
            reshaped=paddle.slice(reshaped,axes=[2,3],starts=[k[0],k[2]],ends=[k[1],k[3]])
        if not reshaped.shape[-1]==512:
            reshaped = F.interpolate(reshaped,size=(512,512))
        reshaped = paddle.split(reshaped, 2, 2)
        for k in reshaped:
            for j in paddle.split(k, 2, 3):
                loss_Dp_real = dec(j.detach())
                pred_Dp_real += self.gan_criterion(loss_Dp_real, True)
        # /4 averages the real loss over the four quadrants.
        self.loss_D_patch = (loss_Dp_fake + pred_Dp_real/4) * 0.5
        self.losses['Dp_fake_loss_'+str(i)] = loss_Dp_fake
        self.losses['Dp_real_loss_'+str(i)] = pred_Dp_real/4
        return self.loss_D_patch
    def train_iter(self, optimizers=None):
        """One training step: update D on both levels, then G on both levels."""
        loops=2
        # compute fake images: G(A)
        self.forward()
        # update D
        self.set_requires_grad(self.nets['netD'], True)
        optimizers['optimD'].clear_grad()
        l1=self.backward_D(self.nets['netD'],0)
        l2=self.backward_D(self.nets['netD'],1)
        (l1+l2).backward()
        optimizers['optimD'].step()
        self.set_requires_grad(self.nets['netD'], False)
        # update G
        optimizers['optimG'].clear_grad()
        l1=self.backward_G(0)
        l2=self.backward_G(1)
        (l1+l2).backward()
        optimizers['optimG'].step()
        optimizers['optimG'].clear_grad()
def random_crop_coords(size):
    """Pick a random half-size square crop window inside a `size`-px square.

    Args:
        size (int): side length of the source square.

    Returns:
        tuple: ``(top, bottom, left, right)`` where both spans equal
        ``size // 2`` and both offsets are even values in
        ``[0, size - size // 2)``.
    """
    halfsize = size // 2
    # randrange(0, stop, 2) draws uniformly from the same even offsets the
    # original random.choice(list(range(...))) did, without building a list.
    bottommost = random.randrange(0, size - halfsize, 2)
    leftmost = random.randrange(0, size - halfsize, 2)
    return (bottommost, bottommost + halfsize, leftmost, leftmost + halfsize)
@MODELS.register()
class LapStyleRevFirstPatch(BaseModel):
    """LapStyle variant that trains a second revision net (`net_rev_2`) and a
    patch discriminator on random crops of the stylized output, with the
    draft net and first revision net frozen.
    """
    def __init__(self,
                 revnet_generator,
                 revnet_discriminator,
                 draftnet_encode,
                 draftnet_decode,
                 calc_style_emd_loss=None,
                 calc_content_relt_loss=None,
                 calc_content_loss=None,
                 calc_style_loss=None,
                 gan_criterion=None,
                 content_layers=['r11', 'r21', 'r31', 'r41', 'r51'],
                 style_layers=['r11', 'r21', 'r31', 'r41', 'r51'],
                 content_weight=1.0,
                 style_weight=3.0):
        super(LapStyleRevFirstPatch, self).__init__()
        # define draftnet params
        self.nets['net_enc'] = build_generator(draftnet_encode)
        self.nets['net_dec'] = build_generator(draftnet_decode)
        self.set_requires_grad([self.nets['net_enc']], False)
        # NOTE(review): duplicate of the line above — presumably the second
        # call was meant to freeze net_dec instead; confirm before changing.
        self.set_requires_grad([self.nets['net_enc']], False)
        # define revision-net params (first one frozen, second trainable)
        self.nets['net_rev'] = build_generator(revnet_generator)
        self.set_requires_grad([self.nets['net_rev']], False)
        self.nets['net_rev_2'] = build_generator(revnet_generator)
        init_weights(self.nets['net_rev_2'])
        self.nets['netD_patch'] = build_discriminator(revnet_discriminator)
        init_weights(self.nets['netD_patch'])
        # define loss functions
        self.calc_style_emd_loss = build_criterion(calc_style_emd_loss)
        self.calc_content_relt_loss = build_criterion(calc_content_relt_loss)
        self.calc_content_loss = build_criterion(calc_content_loss)
        self.calc_style_loss = build_criterion(calc_style_loss)
        self.gan_criterion = build_criterion(gan_criterion)
        self.content_layers = content_layers
        self.style_layers = style_layers
        self.content_weight = content_weight
        self.style_weight = style_weight
    def setup_input(self, input):
        """Unpack one batch: full content/style images, their patch versions,
        crop positions, and build Laplacian pyramids for each."""
        self.position = input['position']
        self.half_position = input['half_position']
        self.ci = paddle.to_tensor(input['ci'])
        self.visual_items['ci'] = self.ci
        self.si = paddle.to_tensor(input['si'])
        self.sp = paddle.to_tensor(input['sp'])
        self.cp = paddle.to_tensor(input['cp'])
        self.visual_items['cp'] = self.cp
        self.pyr_ci = make_laplace_pyramid(self.ci, 2)
        self.pyr_si = make_laplace_pyramid(self.si, 2)
        self.pyr_cp = make_laplace_pyramid(self.cp, 2)
        # Append the full-resolution image as the last pyramid level.
        self.pyr_ci.append(self.ci)
        self.pyr_si.append(self.si)
        self.pyr_cp.append(self.cp)
    def forward(self):
        """Run forward pass; called by both functions <optimize_parameters> and <test>."""
        # Draft stage on the coarsest pyramid level.
        cF = self.nets['net_enc'](self.pyr_ci[2])
        sF = self.nets['net_enc'](self.pyr_si[2])
        stylized_small = self.nets['net_dec'](cF, sF)
        self.visual_items['stylized_small'] = stylized_small
        stylized_up = F.interpolate(stylized_small, scale_factor=2)
        revnet_input = paddle.concat(x=[self.pyr_ci[1], stylized_up], axis=1)
        # Frozen first revision net; its outputs are detached everywhere below.
        stylized_rev_lap,stylized_feats = self.nets['net_rev'](revnet_input.detach())
        #self.ttF_res=self.ttF_res.detach()
        stylized_rev = fold_laplace_pyramid([stylized_rev_lap.detach(), stylized_small.detach()])
        stylized_up = F.interpolate(stylized_rev, scale_factor=2)
        # Cut the half-resolution patch matching the content patch position.
        p_stylized_up = paddle.slice(stylized_up,axes=[2,3],starts=[self.half_position[0],self.half_position[2]],ends=[self.half_position[1],self.half_position[3]])
        p_revnet_input = paddle.concat(x=[self.pyr_cp[1], p_stylized_up], axis=1)
        p_stylized_rev_lap,stylized_feats = self.nets['net_rev'](p_revnet_input.detach(),stylized_feats.detach())
        p_stylized_rev = fold_laplace_pyramid([p_stylized_rev_lap.detach(), p_stylized_up.detach()])
        stylized_up = F.interpolate(p_stylized_rev, scale_factor=2)
        # Random half-size crop of the 512px patch for the trainable net.
        patch_origin_size = 512
        i = random_crop_coords(patch_origin_size)
        stylized_feats = self.nets['net_rev_2'].DownBlock(p_revnet_input.detach())
        stylized_feats = self.nets['net_rev_2'].resblock(stylized_feats)
        self.input_crop = paddle.slice(stylized_up.detach(),axes=[2,3],starts=[i[0],i[2]],ends=[i[1],i[3]])
        cp_crop = paddle.slice(self.pyr_cp[0],axes=[2,3],starts=[i[0],i[2]],ends=[i[1],i[3]])
        p_revnet_input = paddle.concat(x=[cp_crop, self.input_crop], axis=1)
        p_stylized_rev_patch,_ = self.nets['net_rev_2'](p_revnet_input.detach(),stylized_feats)
        # Residual connection on top of the (detached) input crop.
        p_stylized_rev_patch = p_stylized_rev_patch+ self.input_crop.detach()
        stylized = stylized_rev
        self.p_stylized = p_stylized_rev_patch
        self.content_patch = paddle.slice(self.cp,axes=[2,3],starts=[i[0],i[2]],ends=[i[1],i[3]])
        self.visual_items['stylized'] = stylized
        self.visual_items['stylized_patch'] = p_stylized_rev
        self.visual_items['stylized_patch_2'] = p_stylized_rev_patch
        self.crop_marks = i
        # Style patch: position crop, then the same random crop as content.
        self.style_patch = paddle.slice(self.sp,axes=[2,3],starts=[self.position[0],self.position[2]],ends=[self.position[1],self.position[3]])
        self.style_patch = paddle.slice(self.style_patch,axes=[2,3],starts=[self.crop_marks[0],self.crop_marks[2]],ends=[self.crop_marks[1],self.crop_marks[3]])
    def backward_G(self, optimizer):
        """Compute the generator loss on the cropped patch, backprop, return it."""
        self.cF = self.nets['net_enc'](self.content_patch)
        # The input crop is a target only — no gradient through the encoder.
        with paddle.no_grad():
            self.tt_cropF = self.nets['net_enc'](self.input_crop)
        self.tpF = self.nets['net_enc'](self.p_stylized)
        """patch loss"""
        self.loss_patch = 0
        # self.loss_patch= self.calc_content_loss(self.tpF['r41'],self.tt_cropF['r41'])#+\
        # self.calc_content_loss(self.tpF['r51'],self.tt_cropF['r51'])
        for layer in [self.content_layers[-2]]:
            self.loss_patch += paddle.clip(self.calc_content_loss(self.tpF[layer],
                                                                  self.tt_cropF[layer]), 1e-5, 1e5)
        self.losses['loss_patch'] = self.loss_patch
        self.loss_content_p = 0
        for layer in self.content_layers:
            self.loss_content_p += paddle.clip(self.calc_content_loss(self.tpF[layer],
                                                                      self.cF[layer],
                                                                      norm=True), 1e-5, 1e5)
        self.losses['loss_content_p'] = self.loss_content_p
        self.loss_ps = 0
        self.p_loss_style_remd = 0
        # Style reference: position crop of the style patch split into four
        # quadrants; style losses are summed over the quadrants.
        style_patches = paddle.slice(self.sp,axes=[2,3],starts=[self.position[0],self.position[2]],ends=[self.position[1],self.position[3]])
        reshaped = paddle.split(style_patches, 2, 2)
        for i in reshaped:
            for j in paddle.split(i, 2, 3):
                spF = self.nets['net_enc'](j.detach())
                for layer in self.content_layers:
                    self.loss_ps += paddle.clip(self.calc_style_loss(self.tpF[layer],
                                                                     spF[layer]), 1e-5, 1e5)
                self.p_loss_style_remd += self.calc_style_emd_loss(
                    self.tpF['r31'], spF['r31']) + self.calc_style_emd_loss(
                        self.tpF['r41'], spF['r41'])
        self.losses['loss_ps'] = self.loss_ps
        self.p_loss_content_relt = self.calc_content_relt_loss(
            self.tpF['r31'], self.cF['r31']) + self.calc_content_relt_loss(
                self.tpF['r41'], self.cF['r41'])
        self.p_loss_style_remd = paddle.clip(self.p_loss_style_remd, 1e-5, 1e5)
        self.p_loss_content_relt = paddle.clip(self.p_loss_content_relt, 1e-5, 1e5)
        self.losses['p_loss_style_remd'] = self.p_loss_style_remd
        self.losses['p_loss_content_relt'] = self.p_loss_content_relt
        """gan loss"""
        pred_fake_p = self.nets['netD_patch'](self.p_stylized)
        self.loss_Gp_GAN = paddle.clip(self.gan_criterion(pred_fake_p, True), 1e-5, 1e5)
        self.losses['loss_gan_Gp'] = self.loss_Gp_GAN
        # /4 averages quadrant sums; fixed weights 20 and 22 scale patch and
        # relative losses.
        self.loss = self.loss_Gp_GAN +self.loss_ps/4 * self.style_weight +\
            self.loss_content_p * self.content_weight +\
            self.loss_patch * self.content_weight * 20 +\
            self.p_loss_style_remd/4 * 22 + self.p_loss_content_relt * 22
        self.loss.backward()
        return self.loss
    def backward_Dpatch(self):
        """Calculate GAN loss for the patch discriminator"""
        pred_p_fake = self.nets['netD_patch'](self.p_stylized.detach())
        self.loss_Dp_fake = paddle.clip(self.gan_criterion(pred_p_fake, False), 1e-5, 1e5)
        pred_Dp_real = 0
        # Real samples: the four quadrants of the position-cropped style patch.
        style_patches = paddle.slice(self.sp,axes=[2,3],starts=[self.position[0],self.position[2]],ends=[self.position[1],self.position[3]])
        reshaped = paddle.split(style_patches, 2, 2)
        for i in reshaped:
            for j in paddle.split(i, 2, 3):
                self.loss_Dp_real = self.nets['netD_patch'](j.detach())
                pred_Dp_real += paddle.clip(self.gan_criterion(self.loss_Dp_real, True), 1e-5, 1e5)
        self.loss_D_patch = (self.loss_Dp_fake + pred_Dp_real/4) * 0.5
        self.loss_D_patch.backward()
        self.losses['Dp_fake_loss'] = self.loss_Dp_fake
        self.losses['Dp_real_loss'] = pred_Dp_real
    def train_iter(self, optimizers=None):
        """One training step: update the patch discriminator, then the generator."""
        # compute fake images: G(A)
        self.forward()
        # update D
        self.set_requires_grad(self.nets['netD_patch'], True)
        optimizers['optimD_patch'].clear_grad()
        self.backward_Dpatch()
        optimizers['optimD_patch'].step()
        # update G
        self.set_requires_grad(self.nets['netD_patch'], False)
        optimizers['optimG'].clear_grad()
        self.backward_G(optimizers['optimG'])
        optimizers['optimG'].step()
@MODELS.register()
class LapStyleDraK(BaseModel):
def __init__(self,
generator_encode,
generator_decode,
calc_style_emd_loss=None,
calc_content_relt_loss=None,
calc_content_loss=None,
calc_style_loss=None,
content_layers=['r11', 'r21', 'r31', 'r41', 'r51'],
style_layers=['r11', 'r21', 'r31', 'r41', 'r51'],
content_weight=1.0,
style_weight=3.0):
super(LapStyleDraK, self).__init__()
# define generators
self.nets['net_enc'] = build_generator(generator_encode)
self.nets['net_dec'] = build_generator(generator_decode)
init_weights(self.nets['net_dec'])
self.set_requires_grad([self.nets['net_enc']], False)
# define loss functions
self.calc_style_emd_loss = build_criterion(calc_style_emd_loss)
self.calc_content_relt_loss = build_criterion(calc_content_relt_loss)
self.calc_content_loss = build_criterion(calc_content_loss)
self.calc_style_loss = build_criterion(calc_style_loss)
self.content_layers = content_layers
self.style_layers = style_layers
self.content_weight = content_weight
self.style_weight = style_weight
def setup_input(self, input):
self.ci = paddle.to_tensor(input['ci'])
self.visual_items['ci'] = self.ci
self.si = paddle.to_tensor(input['si'])
self.visual_items['si'] = self.si
self.image_paths = input['ci_path']
def forward(self):
"""Run forward pass; called by both functions <optimize_parameters> and <test>."""
self.cF = self.nets['net_enc'](self.ci)
self.sF = self.nets['net_enc'](self.si)
self.stylized = self.nets['net_dec'](self.cF, self.sF)
self.visual_items['stylized'] = self.stylized
def backward_Dec(self):
self.tF = self.nets['net_enc'](self.stylized)
"""content loss"""
self.loss_c = 0
for layer in self.content_layers[:-1]:
self.loss_c += self.calc_content_loss(self.tF[layer],
self.cF[layer],
norm=True)
self.losses['loss_c'] = self.loss_c
"""style loss"""
self.loss_s = 0
for layer in self.style_layers:
self.loss_s += self.calc_style_loss(self.tF[layer], self.sF[layer])
self.losses['loss_s'] = self.loss_s
"""IDENTITY LOSSES"""
self.Icc = self.nets['net_dec'](self.cF, self.cF)
self.l_identity1 = self.calc_content_loss(self.Icc, self.ci)
self.Fcc = self.nets['net_enc'](self.Icc)
self.l_identity2 = 0
for layer in self.content_layers:
self.l_identity2 += self.calc_content_loss(self.Fcc[layer],
self.cF[layer])
self.losses['l_identity1'] = self.l_identity1
self.losses['l_identity2'] = self.l_identity2
"""relative loss"""
self.loss_style_remd = self.calc_style_emd_loss(
self.tF['r31'], self.sF['r31']) + self.calc_style_emd_loss(
self.tF['r41'], self.sF['r41'])
self.loss_content_relt = self.calc_content_relt_loss(
self.tF['r31'], self.cF['r31']) + self.calc_content_relt_loss(
self.tF['r41'], self.cF['r41'])
self.losses['loss_style_remd'] = self.loss_style_remd
self.losses['loss_content_relt'] = self.loss_content_relt
self.loss = self.loss_c * self.content_weight + self.loss_s * self.style_weight +\
self.l_identity1 * 50 + self.l_identity2 * 1 + self.loss_style_remd * 10 + \
self.loss_content_relt * 16
self.loss.backward()
return self.loss
def train_iter(self, optimizers=None):
"""Calculate losses, gradients, and update network weights"""
self.forward()
optimizers['optimG'].clear_grad()
self.backward_Dec()
self.optimizers['optimG'].step()
| 47.6826
| 186
| 0.591244
| 21,112
| 163,599
| 4.303619
| 0.028609
| 0.035572
| 0.035594
| 0.023267
| 0.893471
| 0.866143
| 0.849336
| 0.837494
| 0.806698
| 0.785688
| 0
| 0.025161
| 0.284323
| 163,599
| 3,431
| 187
| 47.6826
| 0.750846
| 0.046424
| 0
| 0.722243
| 0
| 0
| 0.050732
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.03701
| false
| 0.001466
| 0.005497
| 0.000366
| 0.060095
| 0.004764
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cc67667f0e0725b3134f72ed579d4e6c52672608
| 146
|
py
|
Python
|
losses/__init__.py
|
Jaskaran197/Red-blood-cell-detection-SSD
|
a33b330ad17454a7425aa7f57818c0a41b4e0ff9
|
[
"MIT"
] | null | null | null |
losses/__init__.py
|
Jaskaran197/Red-blood-cell-detection-SSD
|
a33b330ad17454a7425aa7f57818c0a41b4e0ff9
|
[
"MIT"
] | null | null | null |
losses/__init__.py
|
Jaskaran197/Red-blood-cell-detection-SSD
|
a33b330ad17454a7425aa7f57818c0a41b4e0ff9
|
[
"MIT"
] | null | null | null |
from .smooth_l1_loss import SMOOTH_L1_LOSS
from .softmax_loss import SOFTMAX_LOSS
from .ssd_loss import SSD_LOSS
from .tbpp_loss import TBPP_LOSS
| 29.2
| 42
| 0.863014
| 26
| 146
| 4.461538
| 0.307692
| 0.344828
| 0.206897
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015385
| 0.109589
| 146
| 4
| 43
| 36.5
| 0.876923
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ccb0087eb7509517bb8d58564ad613328cf0798e
| 9,266
|
py
|
Python
|
molo/polls/tests/test_admin.py
|
praekeltfoundation/molo.polls
|
7abd0ff8c16c4dac9ae0eb093559810eda282d7d
|
[
"BSD-2-Clause"
] | 1
|
2016-02-23T19:07:42.000Z
|
2016-02-23T19:07:42.000Z
|
molo/polls/tests/test_admin.py
|
praekelt/molo.polls
|
7abd0ff8c16c4dac9ae0eb093559810eda282d7d
|
[
"BSD-2-Clause"
] | 30
|
2016-02-17T11:39:00.000Z
|
2018-10-15T12:40:02.000Z
|
molo/polls/tests/test_admin.py
|
praekelt/molo.polls
|
7abd0ff8c16c4dac9ae0eb093559810eda282d7d
|
[
"BSD-2-Clause"
] | null | null | null |
import datetime
from django.core.urlresolvers import reverse
from molo.polls.admin import QuestionAdmin, download_as_csv
from molo.polls.models import (
Choice,
Question,
FreeTextQuestion,
)
from molo.polls.tests.base import BasePollsTestCase
class AdminTestCase(BasePollsTestCase):
    """Tests for the polls admin CSV export (`download_as_csv`) covering
    multi-choice, single-choice, short names, free-text questions and
    multi-site separation. The expected values are exact CSV strings, so
    the setup order (create question -> publish -> vote -> export) matters.
    """
    def test_download_csv_question(self):
        """Multi-choice vote exports both choice titles joined with a comma."""
        # make choices
        choice1 = Choice(title='yes')
        choice2 = Choice(title='no')
        # make a question
        question = Question(
            title='is this a test',
            allow_multiple_choice=True, show_results=False)
        self.polls_index.add_child(instance=question)
        question.add_child(instance=choice1)
        question.add_child(instance=choice2)
        question.save_revision().publish()
        # make a vote
        self.client.login(
            username=self.superuser_name,
            password=self.superuser_password
        )
        self.client.post(
            reverse('molo.polls:vote',
                    kwargs={'question_id': question.id}),
            {'choice': [choice1.id, choice2.id]})
        # should automatically create the poll vote
        # test poll vote
        response = download_as_csv(QuestionAdmin(Question, self.site),
                                   None,
                                   Question.objects.all())
        date = str(datetime.datetime.now().date())
        # Multi-choice answers are quoted because they contain a comma.
        expected_output = (('title,date_submitted,user,answer'
                            '\r\nis this a test,{0},{1},'
                            '"yes,no"\r\n').format(
            date,
            self.superuser_name))
        self.assertContains(response, expected_output)
    def test_choice_short_name(self):
        """When choices have short names, the CSV uses them instead of titles."""
        # make choices
        choice1 = Choice(title='yes', short_name='y')
        choice2 = Choice(title='no', short_name='n')
        # make a question
        question = Question(
            title='is this a test',
            allow_multiple_choice=True, show_results=False)
        self.polls_index.add_child(instance=question)
        question.add_child(instance=choice1)
        question.add_child(instance=choice2)
        question.save_revision().publish()
        # make a vote
        self.client.login(
            username=self.superuser_name,
            password=self.superuser_password
        )
        self.client.post(
            reverse('molo.polls:vote',
                    kwargs={'question_id': question.id}),
            {'choice': [choice1.id, choice2.id]})
        # should automatically create the poll vote
        # test poll vote
        response = download_as_csv(QuestionAdmin(Question, self.site),
                                   None,
                                   Question.objects.all())
        date = str(datetime.datetime.now().date())
        expected_output = (('title,date_submitted,user,answer'
                            '\r\nis this a test,{0},{1},'
                            '"y,n"\r\n').format(
            date,
            self.superuser_name))
        self.assertContains(response, expected_output)
    def test_choice_short_name_single_choice(self):
        """A single-choice answer with a short name is exported unquoted."""
        # make choices
        choice1 = Choice(title='yes', short_name='y')
        # make a question
        question = Question(
            title='is this a test',
            allow_multiple_choice=True, show_results=False)
        self.polls_index.add_child(instance=question)
        question.add_child(instance=choice1)
        question.save_revision().publish()
        # make a vote
        self.client.login(
            username=self.superuser_name,
            password=self.superuser_password
        )
        self.client.post(
            reverse('molo.polls:vote',
                    kwargs={'question_id': question.id}),
            {'choice': choice1.id})
        # should automatically create the poll vote
        # test poll vote
        response = download_as_csv(QuestionAdmin(Question, self.site),
                                   None,
                                   Question.objects.all())
        date = str(datetime.datetime.now().date())
        expected_output = (('title,date_submitted,user,answer'
                            '\r\nis this a test,{0},{1},'
                            'y\r\n').format(
            date,
            self.superuser_name))
        self.assertContains(response, expected_output)
    def test_download_csv_free_text_question(self):
        """Free-text answers are exported verbatim in the answer column."""
        question = FreeTextQuestion(
            title='is this a test')
        self.polls_index.add_child(instance=question)
        question.save_revision().publish()
        self.client.login(
            username=self.superuser_name,
            password=self.superuser_password
        )
        response = self.client.get('/')
        self.assertContains(response, 'is this a test')
        self.client.post(
            reverse('molo.polls:free_text_vote',
                    kwargs={'question_id': question.id}),
            {'answer': 'this is an answer'})
        response = download_as_csv(QuestionAdmin(Question, self.site),
                                   None,
                                   Question.objects.all())
        date = str(datetime.datetime.now().date())
        expected_output = (('title,date_submitted,user,answer'
                            '\r\nis this a test,{0},{1},'
                            'this is an answer\r\n').format(
            date,
            self.superuser_name))
        self.assertContains(response, expected_output)
    def test_download_csv_free_text_question_short_name(self):
        """A question's short_name replaces its title in the exported CSV."""
        question = FreeTextQuestion(
            title='is this a test', short_name='short')
        self.polls_index.add_child(instance=question)
        question.save_revision().publish()
        self.client.login(
            username=self.superuser_name,
            password=self.superuser_password
        )
        response = self.client.get('/')
        self.assertContains(response, 'is this a test')
        self.client.post(
            reverse('molo.polls:free_text_vote',
                    kwargs={'question_id': question.id}),
            {'answer': 'this is an answer'})
        response = download_as_csv(QuestionAdmin(Question, self.site),
                                   None,
                                   Question.objects.all())
        date = str(datetime.datetime.now().date())
        expected_output = (('title,date_submitted,user,answer'
                            '\r\nshort,{0},{1},'
                            'this is an answer\r\n').format(
            date,
            self.superuser_name))
        self.assertContains(response, expected_output)
    def test_multisite_download_csv_question(self):
        """Each site's admin results view exports only that site's votes."""
        # make choices
        choice1 = Choice(title='yes')
        choice2 = Choice(title='no')
        # make a question
        question = Question(
            title='poll for main1',
            allow_multiple_choice=True, show_results=False)
        self.polls_index.add_child(instance=question)
        question.add_child(instance=choice1)
        question.add_child(instance=choice2)
        question.save_revision().publish()
        # make a vote
        self.client.login(
            username=self.superuser_name,
            password=self.superuser_password
        )
        self.client.post(
            reverse('molo.polls:vote',
                    kwargs={'question_id': question.id}),
            {'choice': [choice1.id, choice2.id]})
        # should automatically create the poll vote
        # test poll vote
        response = self.client.get(
            ('/admin/polls/question/{0}/'
             'results/?action=download').format(question.pk))
        date = str(datetime.datetime.now().date())
        expected_output = (
            'Submission Date,Answer,User\r\n{0},"yes,no",{1}\r\n').format(
            date,
            self.superuser_name)
        self.assertContains(response, expected_output)
        # test seperation on multi-site
        # make choices
        choice1_main2 = Choice(title='yes')
        # make a question
        question_main2 = Question(
            title='poll for main2',
            allow_multiple_choice=True, show_results=False)
        self.polls_index_main2.add_child(instance=question_main2)
        question_main2.add_child(instance=choice1_main2)
        question_main2.save_revision().publish()
        # Second client targets the second site.
        self.client2.login(
            username=self.superuser_name,
            password=self.superuser_password
        )
        self.client2.post(
            reverse('molo.polls:vote',
                    kwargs={'question_id': question_main2.id}),
            {'choice': [choice1_main2.id]})
        expected_output = (
            'Submission Date,Answer,User\r\n{0},yes,{1}\r\n').format(
            date,
            self.superuser_name)
        response = self.client2.get(
            ('/admin/polls/question/{0}/'
             'results/?action=download').format(question_main2.pk))
        self.assertContains(response, expected_output)
| 38.448133
| 74
| 0.554716
| 940
| 9,266
| 5.310638
| 0.109574
| 0.054688
| 0.048077
| 0.015425
| 0.870192
| 0.862179
| 0.862179
| 0.862179
| 0.832332
| 0.822716
| 0
| 0.008983
| 0.339197
| 9,266
| 240
| 75
| 38.608333
| 0.806304
| 0.048565
| 0
| 0.75
| 0
| 0.005208
| 0.112147
| 0.04379
| 0
| 0
| 0
| 0
| 0.046875
| 1
| 0.03125
| false
| 0.036458
| 0.026042
| 0
| 0.0625
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
aebfaa1522438698db6e3fd0333125715afdad5d
| 5,001
|
py
|
Python
|
src/train_and_eval.py
|
felixbmuller/Pre-Training-is-almost-all-you-Need-for-Commonsense-Reasoning
|
c127ed8fd22688e7c09c6bc19b99c0912ac9bc44
|
[
"MIT"
] | 3
|
2021-07-15T08:14:59.000Z
|
2022-02-11T23:37:21.000Z
|
src/train_and_eval.py
|
felixbmuller/Pre-Training-is-almost-all-you-Need-for-Commonsense-Reasoning
|
c127ed8fd22688e7c09c6bc19b99c0912ac9bc44
|
[
"MIT"
] | null | null | null |
src/train_and_eval.py
|
felixbmuller/Pre-Training-is-almost-all-you-Need-for-Commonsense-Reasoning
|
c127ed8fd22688e7c09c6bc19b99c0912ac9bc44
|
[
"MIT"
] | 3
|
2021-12-13T08:49:05.000Z
|
2022-03-07T19:48:42.000Z
|
from argparse import Namespace
import logging
import os
from pytorch_lightning import Trainer, seed_everything
from pytorch_lightning.callbacks import ModelCheckpoint, EarlyStopping
from pytorch_lightning.loggers import TensorBoardLogger
from models import (
PlausibilityRankingRoBERTaForCQA,
PlausibilityRankingRoBERTaForARCT,
)
def train_and_eval_cqa(
    do_train,
    do_test,
    load_from_checkpoint,
    epochs,
    learning_rate,
    weight_decay,
    warmup_ratio,
    loss_threshold,
    gradient_accumulation_steps,
    seed,
    model_name,
    batch_size,
    forward_pass_size,
    max_seq_len,
    premise_max_len,
    log_path,
    checkpoint_path,
    use_cached_data,
    save_top_k,
    use_early_stop,
    early_stop_metric,
    early_stop_patience,
    early_stop_mode,
):
    """Train and/or test a plausibility-ranking RoBERTa model on CQA.

    Builds the hyperparameter namespace, seeds all RNGs, constructs the
    model (fresh or restored from ``load_from_checkpoint``), wires up
    checkpointing, TensorBoard logging and optional early stopping, and
    finally runs ``trainer.fit`` / ``trainer.test`` depending on the
    ``do_train`` / ``do_test`` flags.
    """
    hparams = Namespace(
        learning_rate=learning_rate,
        gradient_accumulation_steps=gradient_accumulation_steps,
        seed=seed,
        model_name=model_name,
        batch_size=batch_size,
        forward_pass_size=forward_pass_size,
        max_seq_len=max_seq_len,
        premise_max_len=premise_max_len,
        weight_decay=weight_decay,
        warmup_ratio=warmup_ratio,
        loss_threshold=loss_threshold,
    )
    seed_everything(hparams.seed)

    # Restore from a checkpoint when one is given, otherwise start fresh.
    if load_from_checkpoint is not None:
        model = PlausibilityRankingRoBERTaForCQA.load_from_checkpoint(
            load_from_checkpoint,
            hparams=hparams,
            epochs=epochs,
            use_cached_data=use_cached_data,
        )
    else:
        model = PlausibilityRankingRoBERTaForCQA(
            hparams, epochs=epochs, use_cached_data=use_cached_data
        )

    run_name = "Plausibility_RoBERTa_CQA"
    # Keep the best `save_top_k` checkpoints by validation loss.
    ckpt_cb = ModelCheckpoint(
        filepath=os.path.join(checkpoint_path, run_name + "_{epoch:02d}"),
        save_top_k=save_top_k,
        monitor="Loss/Validation",
        mode="min",
    )
    tb_logger = TensorBoardLogger(log_path, name=run_name)

    # Optional Trainer settings, only passed when actually requested.
    extra_trainer_args = {}
    if hparams.gradient_accumulation_steps:
        extra_trainer_args["accumulate_grad_batches"] = (
            hparams.gradient_accumulation_steps
        )
    if use_early_stop:
        extra_trainer_args["callbacks"] = [
            EarlyStopping(
                monitor=early_stop_metric,
                patience=early_stop_patience,
                mode=early_stop_mode,
            )
        ]

    trainer = Trainer(
        max_epochs=epochs,
        logger=tb_logger,
        checkpoint_callback=ckpt_cb,
        gpus=1,
        **extra_trainer_args,
    )

    if do_train:
        trainer.fit(model)
    if do_test:
        trainer.test(model)
def train_and_eval_arct(
    do_train,
    do_test,
    load_from_checkpoint,
    epochs,
    learning_rate,
    weight_decay,
    warmup_ratio,
    loss_threshold,
    gradient_accumulation_steps,
    seed,
    model_name,
    batch_size,
    forward_pass_size,
    max_seq_len,
    premise_max_len,
    data_path,
    log_path,
    checkpoint_path,
    use_cached_data,
    save_top_k,
    use_early_stop,
    early_stop_metric,
    early_stop_patience,
    early_stop_mode,
):
    """Train and/or test a plausibility-ranking RoBERTa model on ARCT.

    Mirrors the CQA pipeline but additionally forwards ``data_path`` to
    the model. Builds the hyperparameter namespace, seeds all RNGs,
    constructs the model (fresh or restored from ``load_from_checkpoint``),
    sets up checkpointing, TensorBoard logging and optional early
    stopping, then runs ``trainer.fit`` / ``trainer.test`` according to
    the ``do_train`` / ``do_test`` flags.
    """
    hparams = Namespace(
        learning_rate=learning_rate,
        gradient_accumulation_steps=gradient_accumulation_steps,
        seed=seed,
        model_name=model_name,
        batch_size=batch_size,
        forward_pass_size=forward_pass_size,
        max_seq_len=max_seq_len,
        premise_max_len=premise_max_len,
        weight_decay=weight_decay,
        warmup_ratio=warmup_ratio,
        loss_threshold=loss_threshold,
    )
    seed_everything(hparams.seed)

    # Restore from a checkpoint when one is given, otherwise start fresh.
    if load_from_checkpoint is not None:
        model = PlausibilityRankingRoBERTaForARCT.load_from_checkpoint(
            load_from_checkpoint,
            hparams=hparams,
            data_path=data_path,
            epochs=epochs,
            use_cached_data=use_cached_data,
        )
    else:
        model = PlausibilityRankingRoBERTaForARCT(
            hparams, data_path=data_path, epochs=epochs, use_cached_data=use_cached_data
        )

    run_name = "Plausibility_RoBERTa_ARCT"
    # Keep the best `save_top_k` checkpoints by validation loss.
    ckpt_cb = ModelCheckpoint(
        filepath=os.path.join(checkpoint_path, run_name + "_{epoch:02d}"),
        save_top_k=save_top_k,
        monitor="Loss/Validation",
        mode="min",
    )
    tb_logger = TensorBoardLogger(log_path, name=run_name)

    # Optional Trainer settings, only passed when actually requested.
    extra_trainer_args = {}
    if hparams.gradient_accumulation_steps:
        extra_trainer_args["accumulate_grad_batches"] = (
            hparams.gradient_accumulation_steps
        )
    if use_early_stop:
        extra_trainer_args["callbacks"] = [
            EarlyStopping(
                monitor=early_stop_metric,
                patience=early_stop_patience,
                mode=early_stop_mode,
            )
        ]

    trainer = Trainer(
        max_epochs=epochs,
        logger=tb_logger,
        checkpoint_callback=ckpt_cb,
        gpus=1,
        **extra_trainer_args,
    )

    if do_train:
        trainer.fit(model)
    if do_test:
        trainer.test(model)
| 25.911917
| 88
| 0.665867
| 539
| 5,001
| 5.753247
| 0.163265
| 0.046437
| 0.080619
| 0.036762
| 0.848114
| 0.848114
| 0.848114
| 0.848114
| 0.820381
| 0.815866
| 0
| 0.001641
| 0.268746
| 5,001
| 192
| 89
| 26.046875
| 0.846322
| 0
| 0
| 0.767442
| 0
| 0
| 0.034593
| 0.018996
| 0
| 0
| 0
| 0
| 0
| 1
| 0.011628
| false
| 0.023256
| 0.040698
| 0
| 0.052326
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
aedf1988d9a387e42b0981198d42a0e4e38a357a
| 111
|
py
|
Python
|
src/bar/first_module.py
|
BarisSari/medium-pre-commit-article
|
7f0107696a11fc1b162c1188152ff615b5f9f8b8
|
[
"MIT"
] | null | null | null |
src/bar/first_module.py
|
BarisSari/medium-pre-commit-article
|
7f0107696a11fc1b162c1188152ff615b5f9f8b8
|
[
"MIT"
] | null | null | null |
src/bar/first_module.py
|
BarisSari/medium-pre-commit-article
|
7f0107696a11fc1b162c1188152ff615b5f9f8b8
|
[
"MIT"
] | null | null | null |
def example():
    """Return a short greeting identifying the ``bar`` package."""
    greeting = "Hi from bar"
    return greeting
def example_2():
    """Return a second, longer greeting from the ``bar`` package."""
    message = "Hi again, this is from bar by the way!"
    return message
| 15.857143
| 51
| 0.63964
| 19
| 111
| 3.684211
| 0.684211
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012048
| 0.252252
| 111
| 6
| 52
| 18.5
| 0.831325
| 0
| 0
| 0
| 0
| 0
| 0.441441
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
aee5f9a3ba0a49eff0a6bbb1c88ca90e79f4a72e
| 158,735
|
py
|
Python
|
ePaperPython3color/imagedata.py
|
winstonma/rpi-magicmirror-eink
|
dc185c1602be277d74bb74e855c8f7200330cf43
|
[
"MIT"
] | 168
|
2018-11-30T03:43:57.000Z
|
2022-03-31T08:28:06.000Z
|
ePaperPython3color/imagedata.py
|
winstonma/rpi-magicmirror-eink
|
dc185c1602be277d74bb74e855c8f7200330cf43
|
[
"MIT"
] | 7
|
2018-11-30T03:44:37.000Z
|
2022-01-15T08:14:58.000Z
|
ePaperPython3color/imagedata.py
|
winstonma/rpi-magicmirror-eink
|
dc185c1602be277d74bb74e855c8f7200330cf43
|
[
"MIT"
] | 23
|
2019-01-26T14:20:17.000Z
|
2022-01-15T08:15:21.000Z
|
##
# @filename : imagedata.py
# @brief : data file for epd demo
#
# Copyright (C) Waveshare August 16 2017
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
MONOCOLOR_BITMAP = [ # 0X00,0X01,0X80,0X02,0X80,0X01, #
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0X80,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X07,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFE,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X03,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFE,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X01,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XC0,
0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF8,0X07,0XFF,0XFF,0XFF,0XFF,0XC0,0X0F,0XFE,0X3F,0XFF,0XFC,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0X80,
0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XE0,0X07,0XFF,0XFF,0XFF,0XF8,0X00,0X0F,0XFE,0X3F,0XFF,0XFC,0X00,
0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0X80,
0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0X80,0X07,0XFF,0XFF,0XFF,0XC0,0X00,0X0F,0XFE,0X3F,0XFF,0XFC,0X00,
0X01,0XFF,0XFF,0XFF,0XF0,0X7F,0XF0,0X7F,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0X9F,0XFF,0XFE,0X0F,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0X80,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFE,0X00,0X07,0XFF,0XFF,0XFF,0X00,0X00,0X0F,0XFE,0X3F,0XFF,0XFC,0X00,
0X00,0XFF,0XFF,0XFF,0X80,0X1F,0XC0,0X1F,0XFF,0XFF,0XFF,0XFC,0X00,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0X07,0XFF,0XFE,0X0F,0XFF,0XFC,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0X80,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFC,0X00,0X07,0XFF,0XFF,0XFC,0X00,0X00,0X0F,0XFE,0X3F,0XFF,0XFC,0X00,
0X00,0X7F,0XFF,0XFF,0X00,0X1F,0X80,0X0F,0XFF,0XFF,0XFF,0XF8,0X00,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0X03,0XFF,0XFE,0X0F,0XFF,0XF8,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XF0,0X00,0X07,0XFF,0XFF,0XF8,0X00,0X00,0X0F,0XFE,0X3F,0XFF,0XFC,0X01,
0X80,0X3F,0XFF,0XFF,0X0F,0X0F,0X07,0X0F,0XFC,0X7F,0XFF,0XF0,0X06,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0X01,0XFF,0XFE,0X1F,0XFF,0XF0,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0X00,
0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XC0,0X00,0X07,0XFF,0XFF,0XF0,0X00,0X00,0X0F,0XFE,0X3F,0XFF,0XFC,0X01,
0XC0,0X0F,0XFF,0XFE,0X1F,0X8F,0X0F,0X87,0XFC,0X7F,0XFF,0XC0,0X0E,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0X80,0XFF,0XFF,0X3F,0XFF,0XE0,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0X00,
0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0X00,0X00,0X07,0XFF,0XFF,0XE0,0X00,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X01,
0XE0,0X07,0XFF,0XFE,0X1F,0X87,0X1F,0XC7,0XFC,0X7F,0XFF,0X80,0X1E,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XC0,0X7F,0XFF,0XFF,0XFF,0XC0,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFE,0X01,
0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFE,0X00,0X00,0X07,0XFF,0XFF,0XE0,0X07,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X01,
0XF0,0X03,0XFF,0XFE,0X1F,0X87,0X1F,0XC7,0XFC,0X7F,0XFF,0X00,0X7E,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XE0,0X3F,0XFF,0XFF,0XFF,0X80,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFE,0X01,
0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0X00,0X10,0X07,0XFF,0XFF,0XC0,0X0F,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X01,
0XF8,0X01,0XFF,0XFE,0X1F,0X87,0X1F,0XC7,0XFC,0X7F,0XFE,0X00,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XF0,0X1F,0XFF,0XFF,0XFF,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFE,0X01,
0X00,0X7F,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0XFF,0X00,0X00,0X00,0X7F,0XFF,0XFF,0XFF,
0XFF,0XFF,0X00,0X30,0X07,0XFF,0XFF,0X80,0X1F,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X01,
0XFE,0X00,0X7F,0XFE,0X0F,0X87,0X0F,0X87,0XFC,0X7F,0XF8,0X01,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XF8,0X1F,0XFC,0X07,0XFE,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X03,
0X80,0X7F,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0XFC,0X00,0X00,0X00,0X7F,0XFF,0XFF,0XFF,
0XFF,0XFF,0X80,0XF0,0X07,0XFF,0XFF,0X80,0X3F,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0X00,0X3F,0XFF,0X00,0X07,0X00,0X07,0X00,0X03,0XF0,0X03,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFC,0X0F,0XC0,0X00,0XFE,0X07,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XF0,0X0F,0XFF,0XFE,0X00,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X03,
0X80,0X7F,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0XF8,0X00,0X00,0X00,0X7F,0XFF,0XFF,0XFF,
0XFF,0XFF,0X83,0XF0,0X07,0XFF,0XFF,0X80,0X7F,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0X80,0X1F,0XFF,0X80,0X07,0X80,0X07,0X00,0X03,0XE0,0X07,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X1F,0X00,0X00,0X3E,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XC0,0X03,0XFF,0XFE,0X00,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X03,
0X80,0X3F,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0XF0,0X00,0X00,0X00,0X7F,0XFF,0XFF,0XFF,
0XFF,0XFF,0X8F,0XF0,0X07,0XFF,0XFF,0X00,0X7F,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XC0,0X0F,0XFF,0XE0,0X87,0XE0,0XC7,0X00,0X03,0XC0,0X0F,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0X3E,0X00,0X00,0X0F,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0X00,0X01,0XFF,0XFE,0X00,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X07,
0X80,0X3F,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0XE0,0X00,0X00,0X00,0X7F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XDF,0XF0,0X07,0XFF,0XFF,0X00,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XF0,0X07,0XFF,0XFF,0X8F,0XFF,0XC7,0XFC,0X7F,0X80,0X3F,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X00,0X07,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,
0X00,0X00,0XFF,0XFE,0X00,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XF8,0X07,
0XC0,0X3F,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0XE0,0X0F,0XC0,0X00,0X7F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFF,0X00,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XF8,0X01,0XFF,0XFF,0X8F,0XFF,0X87,0XFC,0X7E,0X00,0X7F,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X00,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,
0X00,0X00,0X7F,0XFE,0X00,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XF8,0X07,
0XC0,0X1F,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0XC0,0X1F,0XF0,0X0F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFE,0X00,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFC,0X00,0XFF,0XFF,0X0F,0XFF,0X0F,0XFC,0X7C,0X00,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,0X00,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,
0X0F,0XE0,0X7F,0XFE,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XF8,0X07,
0XC0,0X1F,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0XC0,0X3F,0XF8,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFE,0X00,0XF8,0X07,0XFF,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFE,0X00,0X7F,0XFE,0X1F,0XFE,0X0F,0XFC,0X78,0X01,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X00,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,
0X3F,0XF0,0X3F,0XFE,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XF0,0X0F,
0XC0,0X1F,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0X80,0X7F,0XF8,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFE,0X00,0X80,0X00,0XFF,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0X80,0X3F,0X00,0X3F,0X80,0X1F,0XFC,0X70,0X07,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0X00,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,
0X7F,0XF8,0X3F,0XFE,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XF0,0X0F,
0XE0,0X1F,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0X80,0X7F,0XF8,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFE,0X00,0X00,0X00,0X3F,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0XC0,0X0F,0X00,0X7F,0X80,0X3F,0XFC,0X40,0X0F,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0X00,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF8,0X3F,0XFE,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XF0,0X0F,
0XE0,0X0F,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0X80,0X7F,0XFC,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFE,0X00,0X00,0X00,0X0F,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0XE0,0X07,0X03,0XFF,0X81,0XFF,0XFF,0X80,0X1F,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0X00,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF8,0X3F,0XFE,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XF0,0X0F,
0XE0,0X0F,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0X80,0X7F,0XFC,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFE,0X00,0X00,0X00,0X07,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0XF0,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X3F,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFC,0X1F,0XFE,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XE0,0X1F,
0XF0,0X0F,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0X80,0X7F,0XFC,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFE,0X00,0X00,0X00,0X07,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0XFC,0X01,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0XFF,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X00,0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFC,0X1F,0XFE,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XE0,0X1F,
0XF0,0X07,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0X80,0X7F,0XF8,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFE,0X00,0X00,0X00,0X03,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0XFE,0X00,0X7F,0XFF,0XFF,0XFF,0XF8,0X01,0XFF,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X00,0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFC,0X1F,0XFE,0X3F,0XFF,0XFF,0X8F,0XFF,0X01,0XFF,0XFE,0X3F,0XFF,0XE0,0X1F,
0XF0,0X07,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0X80,0X7F,0XF8,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFE,0X00,0X1F,0XE0,0X01,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0XFF,0X00,0X3F,0XFF,0XFF,0XFF,0XF0,0X03,0XFF,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X00,0X00,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFC,0X1F,0XFE,0X3F,0XFF,0XFE,0X07,0XFE,0X00,0X7F,0XFE,0X3F,0XFF,0XC0,0X3F,
0XF0,0X07,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0XC0,0X3F,0XF0,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFE,0X00,0XFF,0XF8,0X01,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0XFF,0X80,0X1F,0XFF,0XFF,0XFF,0XE0,0X07,0XFF,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFE,0X00,0X1F,0X00,0X00,0X00,0X00,0X1F,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFC,0X3F,0XFE,0X3F,0XFF,0XFC,0X03,0XFC,0X00,0X3F,0XFE,0X3F,0XFF,0XC0,0X3F,
0XF8,0X03,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0XC0,0X1F,0XF0,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFE,0X00,0XFF,0XFC,0X01,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0XFF,0XE0,0X0F,0XFF,0XFF,0XFF,0XC0,0X1F,0XFF,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFC,0X00,0X0F,0X00,0X00,0X00,0X00,0X1E,0X00,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF8,0X3F,0XFE,0X3F,0XFF,0XFC,0X61,0XF8,0X7E,0X1F,0XFE,0X3F,0XFF,0XC0,0X3F,
0XF8,0X03,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0XC0,0X0F,0XC0,0X0F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFE,0X00,0XFF,0XFC,0X01,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFF,0XFF,0X00,0X3F,0XFF,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFC,0X00,0X0F,0X00,0X00,0X00,0X00,0X1C,0X00,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF8,0X3F,0XFE,0X3F,0XFF,0XF8,0XF1,0XF0,0XFF,0X9F,0XFE,0X3F,0XFF,0X80,0X3F,
0XF8,0X03,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0XE0,0X00,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFE,0X00,0XFF,0XFE,0X00,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0XFF,0XF8,0X01,0XFF,0XFF,0XFE,0X00,0X7F,0XFF,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFC,0X00,0X0F,0X00,0X00,0X00,0X00,0X1C,0X00,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF8,0X3F,0XFE,0X00,0X1F,0XF9,0XF8,0XF1,0XFF,0X8F,0XFE,0X3F,0XFF,0X80,0X7F,
0XF8,0X03,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0XF0,0X00,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFE,0X00,0XFF,0XFE,0X00,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0XFF,0XFC,0X00,0XFF,0XFF,0XFC,0X00,0XFF,0XFF,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFE,0X00,0X0F,0X00,0X00,0X00,0X00,0X1E,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF0,0X7F,0XFE,0X00,0X07,0XF9,0XF8,0XF3,0XFF,0XCF,0XFE,0X3F,0XFF,0X80,0X00,
0X00,0X01,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0XE0,0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFE,0X00,0XFF,0XFE,0X00,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0XFF,0XFC,0X00,0X7F,0XFF,0XF8,0X00,0XFF,0XFF,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X00,0X00,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF0,0X7F,0XFE,0X00,0X01,0XF9,0XF0,0XF3,0XFF,0XCF,0XFE,0X3F,0XFF,0X80,0X00,
0X00,0X01,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0XE0,0X00,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFE,0X00,0XFF,0XFE,0X00,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XF0,0X00,0X7F,0XFF,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X00,0X00,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XE0,0XFF,0XFE,0X00,0X00,0XFC,0XE1,0XF3,0XFF,0XCF,0XFE,0X3F,0XFF,0X00,0X00,
0X00,0X01,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0XC0,0X00,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFE,0X00,0XFF,0XFE,0X00,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0XFF,0XF0,0X00,0X0F,0XFF,0XC0,0X00,0X3F,0XFF,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X00,0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XC0,0XFF,0XFF,0XFF,0X00,0X7C,0X01,0XF3,0XFF,0XFF,0XFE,0X3F,0XFF,0X00,0X00,
0X00,0X00,0XFF,0X80,0X7F,0XFC,0X01,0XFF,0X80,0X70,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFE,0X00,0XFF,0XFE,0X00,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0XFF,0XE0,0X00,0X07,0XFF,0X80,0X00,0X1F,0XFF,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XC1,0XFF,0XFF,0XFF,0XE0,0X7E,0X03,0XF3,0XFF,0XFF,0XFE,0X3F,0XFF,0X00,0X00,
0X00,0X00,0XFF,0X80,0X7F,0XF8,0X01,0XFF,0X80,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFE,0X00,0XFF,0XFE,0X01,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0XFF,0X80,0X10,0X03,0XFF,0X00,0X20,0X0F,0XFF,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0X83,0XFF,0XFF,0XFF,0XF8,0X3F,0X8F,0XF3,0XFF,0XFF,0XFE,0X3F,0XFE,0X00,0X00,
0X00,0X00,0XFF,0X80,0X7F,0XF8,0X01,0XFF,0X80,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFF,0X00,0X7F,0XFC,0X01,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0XFF,0X00,0X3C,0X01,0XFE,0X00,0XF0,0X03,0XFF,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0X00,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0X03,0XFF,0XFF,0XFF,0XFC,0X3F,0XFF,0XF3,0XFF,0XFF,0XFE,0X3F,0XFE,0X00,0X00,
0X00,0X00,0XFF,0X80,0X3F,0XF0,0X01,0XFF,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFF,0X00,0X7F,0XFC,0X01,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0XFE,0X00,0X7E,0X00,0X78,0X01,0XF8,0X01,0XFF,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0X00,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFE,0X07,0XFF,0XFF,0XFF,0XFC,0X1F,0XFF,0XF3,0XFF,0XFF,0XFE,0X3F,0XFE,0X01,0XFF,
0XFF,0X00,0X7F,0X80,0X3F,0XE0,0X01,0XFF,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFF,0X00,0X3F,0XF8,0X01,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0XFC,0X00,0XFF,0X00,0X30,0X03,0XFC,0X00,0XFF,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X00,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFC,0X0F,0XFF,0XFF,0XFF,0XFE,0X1F,0XFF,0XF3,0XFF,0XFF,0XFE,0X3F,0XFE,0X01,0XFF,
0XFF,0X00,0X7F,0X80,0X3F,0XC0,0X01,0XFF,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0X00,0X00,0X00,0X01,0XFF,0X80,0X1F,0XF0,0X03,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0XF0,0X03,0XFF,0X80,0X00,0X07,0XFF,0X00,0X7F,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X00,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XF8,0X1F,0XFF,0XFF,0XFF,0XFE,0X1F,0XFF,0XF3,0XFF,0XFF,0XFE,0X3F,0XFC,0X03,0XFF,
0XFF,0X00,0X7F,0XC0,0X0F,0X00,0X01,0XFF,0X80,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0X00,0X00,0X00,0X01,0XFF,0X80,0X0F,0XE0,0X03,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0XE0,0X07,0XFF,0XC0,0X00,0X1F,0XFF,0X80,0X1F,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,0X00,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XF8,0X3F,0XFF,0XFF,0XFF,0XFE,0X1F,0XFF,0XF3,0XFF,0XFF,0XFE,0X3F,0XFC,0X03,0XFF,
0XFF,0X00,0X3F,0XC0,0X00,0X00,0X01,0XFF,0X80,0X00,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0X00,0X00,0X00,0X01,0XFF,0XC0,0X00,0X00,0X07,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0XC0,0X0F,0XFF,0XF0,0X00,0X3F,0XFF,0XC0,0X0F,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X00,0X07,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XF0,0X7F,0XFF,0XFF,0XFF,0XFE,0X1F,0XFF,0XF3,0XFF,0XFF,0XFE,0X3F,0XFC,0X03,0XFF,
0XFF,0X80,0X3F,0XC0,0X00,0X03,0X01,0XFF,0XC0,0X00,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0X00,0X00,0X00,0X01,0XFF,0XE0,0X00,0X00,0X0F,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0X80,0X1F,0XFF,0XF8,0X00,0X7F,0XFF,0XE0,0X07,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XE0,0X7F,0XFF,0XFF,0XFF,0XFE,0X1F,0XFF,0XF3,0XFF,0XFF,0XFE,0X3F,0XF8,0X03,0XFF,
0XFF,0X80,0X3F,0XE0,0X00,0X06,0X01,0XFF,0XC0,0X00,0X00,0X03,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0X00,0X00,0X00,0X01,0XFF,0XE0,0X00,0X00,0X1F,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFF,0X00,0X7F,0XFF,0XFC,0X00,0XFF,0XFF,0XF8,0X03,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X1F,0X00,0X00,0X1F,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XC0,0XFF,0XFF,0XFF,0XFF,0XFE,0X1F,0XFF,0XF3,0XFF,0XFF,0XFE,0X3F,0XF8,0X07,0XFF,
0XFF,0X80,0X1F,0XF0,0X00,0X0E,0X01,0XFF,0XF0,0X00,0X00,0X01,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0X00,0X00,0X00,0X01,0XFF,0XF0,0X00,0X00,0X3F,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XFC,0X00,0XFF,0XFF,0XFE,0X01,0XFF,0XFF,0XFC,0X00,0XFF,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFC,0X1F,0XC0,0X00,0X7E,0X07,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0X81,0XFF,0XFF,0XFF,0XFF,0XFC,0X3F,0XFF,0XF3,0XFF,0XCF,0XFE,0X3F,0XF8,0X07,0XFF,
0XFF,0XC0,0X1F,0XF8,0X00,0X1E,0X01,0XFF,0XE0,0X00,0X00,0X00,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0X00,0X00,0X00,0X01,0XFF,0XFC,0X00,0X00,0X7F,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XF8,0X01,0XFF,0XFF,0XFF,0X87,0XFF,0XFF,0XFE,0X00,0X7F,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XF8,0X1F,0XF0,0X01,0XFE,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0X03,0XFF,0XFF,0XFF,0XFF,0XFC,0X3F,0XFF,0XF3,0XFF,0XCF,0XFE,0X3F,0XF0,0X07,0XFF,
0XFF,0XC0,0X1F,0XFC,0X00,0X3E,0X01,0XFF,0XC0,0X00,0X00,0X00,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0X00,0X00,0X00,0X01,0XFF,0XFF,0X00,0X03,0XFF,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XF0,0X03,0XFF,0XFF,0XFF,0XDF,0XFF,0XFF,0XFF,0X00,0X3F,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XF0,0X1F,0XFF,0XFF,0XFF,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,
0X07,0XFF,0XFF,0XFF,0XFF,0XF8,0X7F,0XFF,0XF3,0XFF,0XCF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0X81,0XFF,0XFF,0XFF,0X80,0X3F,0XFC,0X00,0X7F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X1F,0XFF,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0XE0,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X1F,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XE0,0X3F,0XFF,0XFF,0XFF,0X80,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,
0X0F,0XFF,0XFF,0XFF,0XFF,0XF0,0X7F,0XFF,0XF3,0XFF,0X8F,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X7F,0XFE,0X00,0X7F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0X80,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X07,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XC0,0X7F,0XFF,0XFF,0XFF,0XC0,0X7F,0XFF,0XFF,0XFF,0XFF,0XFC,
0X0F,0XFF,0XFF,0XFF,0XFF,0XE0,0XFF,0XFF,0XF1,0XFF,0X8F,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0XFF,0XFF,0X00,0X7F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X01,
0XFF,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X03,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0X80,0XFF,0XFF,0XFF,0XFF,0XE0,0X3F,0XFF,0XFF,0XFF,0XFF,0XF8,
0X00,0X00,0X07,0XF9,0XFF,0X81,0XFF,0XFF,0XF8,0XFF,0X1F,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0XFF,0XFF,0X00,0X7F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X01,
0XFE,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X01,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0X01,0XFF,0XFE,0X1F,0XFF,0XF0,0X3F,0XFF,0XFF,0XFF,0XFF,0XF8,
0X00,0X00,0X07,0XF8,0X00,0X03,0XFF,0XFF,0XF8,0X00,0X3F,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X01,0XFF,0XFF,0X00,0X7F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X01,
0XFC,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0XFE,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0X03,0XFF,0XFE,0X0F,0XFF,0XF8,0X1F,0XFF,0XFF,0XFF,0XFF,0XF8,
0X00,0X00,0X07,0XF8,0X00,0X07,0XFF,0XFF,0XFC,0X00,0X7F,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X01,0XFF,0XFF,0X00,0X7F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X01,
0XF0,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X3E,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0X07,0XFF,0XFE,0X0F,0XFF,0XFC,0X1F,0XFF,0XFF,0XFF,0XFF,0XF8,
0X00,0X00,0X07,0XF8,0X00,0X0F,0XFF,0XFF,0XFF,0X00,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0XFF,0XFE,0X00,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X01,
0XE0,0X07,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X1E,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0X0F,0XFF,0XFE,0X0F,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,0XFF,0XF8,
0X00,0X00,0X07,0XF8,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X7F,0XFC,0X00,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X01,
0XC0,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X0E,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X3F,0XF0,0X01,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X01,
0X80,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X06,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X00,0X00,0X01,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X00,
0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X00,0X00,0X03,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X00,
0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X00,
0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFC,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFE,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X01,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFE,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X01,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0X80,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X07,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X80,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XF0,0X00,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0X00,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X1F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0X80,0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XC0,0X00,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0X03,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,
0X00,0X00,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFC,0X00,0X00,0X00,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X00,0X7F,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,
0X00,0X00,0X00,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XF0,0X00,0X00,0X00,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X00,0X00,0X1F,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,
0X00,0X00,0X00,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XF0,0X00,0X00,0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,0X00,0X00,0X0F,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,
0X00,0X00,0X00,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XF0,0X00,0X00,0X00,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0X00,0X00,0X03,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,
0X00,0X00,0X00,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XF0,0X00,0X00,0X00,0X00,0X07,0XFF,0XFF,0XFF,0XFF,0X00,0X00,0X00,0X00,0X01,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,
0X00,0X00,0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XF0,0X00,0X00,0X00,0X00,0X03,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X00,0X00,0X00,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,
0X00,0X00,0X00,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XF0,0X00,0X00,0X00,0X00,0X01,0XFF,0XFF,0XFF,0XFC,0X00,0X00,0X00,0X00,0X00,0X7F,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,
0X00,0X07,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X7F,0XFF,0XFC,0X00,0X00,0X7F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,
0X00,0X7F,0X80,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XF0,0X00,0X00,0X00,0X00,0X00,0XFF,0XFF,0XFF,0XF8,0X00,0X00,0X00,0X00,0X00,0X3F,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,
0X00,0X03,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X7F,0XFF,0XFC,0X00,0X00,0X7F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,
0X03,0XFF,0XF0,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XF0,0X01,0XFF,0XF0,0X00,0X00,0XFF,0XFF,0XFF,0XF0,0X00,0X03,0XFE,0X00,0X00,0X3F,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,
0X00,0X03,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X3F,0XFF,0XFC,0X00,0X00,0X7F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,
0X07,0XFF,0XF8,0X00,0X07,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XF0,0X3F,0XFF,0XFF,0X00,0X00,0X7F,0XFF,0XFF,0XE0,0X00,0X1F,0XFF,0XC0,0X00,0X1F,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,
0X00,0X03,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X3F,0XFF,0XFC,0X00,0X00,0X7F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,
0X1F,0XFF,0XFE,0X00,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XF3,0XFF,0XFF,0XFF,0XC0,0X00,0X7F,0XFF,0XFF,0XF0,0X00,0X7F,0XFF,0XE0,0X00,0X1F,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,
0X00,0X01,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X3F,0XFF,0XF8,0X00,0X00,0X7F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,
0X3F,0XFF,0XFF,0X00,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X3F,0XFF,0XFF,0XF8,0X01,0XFF,0XFF,0XF8,0X00,0X0F,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,
0X00,0X01,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X1F,0XFF,0XF8,0X00,0X00,0X7F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X00,0X08,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,
0X7F,0XFF,0XFF,0X80,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XF0,0X00,0X3F,0XFF,0XFF,0XF8,0X03,0XFF,0XFF,0XFC,0X00,0X0F,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,
0X00,0X01,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X1F,0XFF,0XF8,0X00,0X00,0X7F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XF0,0X00,0X00,0X38,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,
0XFF,0XFF,0XFF,0X80,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFC,0X07,0XFF,0XFF,0XFC,0X00,0X07,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,
0X00,0X00,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X1F,0XFF,0XF0,0X00,0X00,0X7F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X00,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,
0XFF,0XFF,0XFF,0XC0,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X1F,0XFF,0XFF,0XFE,0X0F,0XFF,0XFF,0XFE,0X00,0X07,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,
0X00,0X00,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X1F,0XFF,0XF0,0X00,0X00,0X7F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X03,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XC0,0X01,
0XFF,0XFF,0XFF,0XE0,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X1F,0XFF,0XFF,0XFF,0X1F,0XFF,0XFF,0XFE,0X00,0X07,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,
0X00,0X00,0XFF,0XFF,0XFF,0XFC,0X00,0X00,0X0F,0XFF,0XF0,0X00,0X00,0X3F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X07,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XC0,0X01,
0XFF,0XFF,0XFF,0XE0,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X1F,0XFF,0XFF,0XFF,0X3F,0XFF,0XFF,0XFF,0X00,0X07,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,
0X00,0X00,0XFF,0XFF,0XFF,0XFC,0X00,0X00,0X0F,0XFF,0XE0,0X00,0X00,0X3F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X1F,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XC0,0X03,
0XFF,0XFF,0XFF,0XF0,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X03,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,
0X00,0X00,0X7F,0XFF,0XFF,0XFC,0X00,0X00,0X0F,0XFF,0XE0,0X00,0X00,0X3F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X7F,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0X80,0X03,
0XFF,0XFF,0XFF,0XF0,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X03,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,
0X00,0X00,0X7F,0XFF,0XFF,0XFC,0X00,0X00,0X07,0XFF,0XE0,0X00,0X00,0X3F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X01,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0X80,0X07,
0XFF,0XFF,0XFF,0XF8,0X00,0X7F,0XFF,0XFF,0XFF,0XFC,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X03,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,
0X00,0X00,0X7F,0XFF,0XFF,0XFC,0X00,0X00,0X07,0XFF,0XE0,0X00,0X00,0X3F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X07,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0X80,0X07,
0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XF0,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X03,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,
0X00,0X00,0X3F,0XFF,0XFF,0XFC,0X00,0X00,0X07,0XFF,0XC0,0X00,0X00,0X3F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0X1F,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0X00,0X07,
0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XE0,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X03,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,
0X40,0X00,0X3F,0XFF,0XFF,0XFC,0X00,0X00,0X07,0XFF,0XC0,0X00,0X00,0X3F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0X7F,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0X00,0X0F,
0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XE0,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X03,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,
0X40,0X00,0X3F,0XFF,0XFF,0XFC,0X00,0X00,0X03,0XFF,0XC0,0X00,0X00,0X3F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0X00,0X0F,
0XFF,0XFF,0XFF,0XE0,0X00,0X3F,0XFF,0XFF,0XFF,0XC0,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X03,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,
0X40,0X00,0X1F,0XFF,0XFF,0XFC,0X00,0X00,0X03,0XFF,0X80,0X00,0X00,0X3F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0X00,0X0F,
0XFF,0XFF,0XFF,0XC0,0X00,0X1F,0XFF,0XFF,0XFF,0XC0,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X03,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X00,
0XE0,0X00,0X1F,0XFF,0XFF,0XFC,0X00,0X00,0X03,0XFF,0X80,0X00,0X00,0X3F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFE,0X00,0X1F,
0XFF,0XFF,0XFF,0X80,0X00,0X1F,0XFF,0XFF,0XFF,0X80,0X00,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X03,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X00,
0XE0,0X00,0X1F,0XFF,0XFF,0XFC,0X00,0X20,0X01,0XFF,0X80,0X08,0X00,0X3F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFE,0X00,0X1F,
0XFF,0XFF,0XFE,0X00,0X00,0X1F,0XFF,0XFF,0XFF,0X80,0X00,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X03,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X00,
0XE0,0X00,0X1F,0XFF,0XFF,0XFC,0X00,0X20,0X01,0XFF,0X00,0X08,0X00,0X3F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFE,0X00,0X1F,
0XFF,0XFF,0XFC,0X00,0X00,0X1F,0XFF,0XFF,0XFF,0X80,0X00,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X03,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X00,
0XE0,0X00,0X0F,0XFF,0XFF,0XFC,0X00,0X20,0X01,0XFF,0X00,0X08,0X00,0X3F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFE,0X00,0X1F,
0XFF,0XFF,0XF8,0X00,0X00,0X1F,0XFF,0XFF,0XFF,0X80,0X00,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X07,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X01,
0XF0,0X00,0X0F,0XFF,0XFF,0XFC,0X00,0X30,0X01,0XFF,0X00,0X08,0X00,0X3F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFE,0X00,0X1F,
0XFF,0XFF,0XE0,0X00,0X00,0X1F,0XFF,0XFF,0XFF,0X80,0X00,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X07,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X01,
0XF0,0X00,0X0F,0XFF,0XFF,0XFC,0X00,0X30,0X00,0XFF,0X00,0X18,0X00,0X3F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFE,0X00,0X1F,
0XFF,0XFF,0XC0,0X00,0X00,0X0F,0XFF,0XFF,0XFF,0X80,0X00,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XF0,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X07,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X01,
0XF0,0X00,0X07,0XFF,0XFF,0XFC,0X00,0X30,0X00,0XFE,0X00,0X18,0X00,0X3F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFC,0X00,0X3F,
0XFF,0XFF,0X80,0X00,0X00,0X0F,0XFF,0XFF,0XFF,0XC0,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XE0,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X07,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X03,
0XF0,0X00,0X07,0XFF,0XFF,0XFC,0X00,0X30,0X00,0XFE,0X00,0X18,0X00,0X3F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFC,0X00,0X3F,
0XFF,0XFE,0X00,0X00,0X00,0X0F,0XFF,0XFF,0XFF,0XC0,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XC0,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X0F,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X03,
0XF8,0X00,0X07,0XFF,0XFF,0XFC,0X00,0X38,0X00,0X7E,0X00,0X38,0X00,0X3F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFC,0X00,0X3F,
0XFF,0XFC,0X00,0X01,0X00,0X0F,0XFF,0XFF,0XFF,0XE0,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0X00,0X07,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X0F,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X03,
0XF8,0X00,0X07,0XFF,0XFF,0XFC,0X00,0X38,0X00,0X7C,0X00,0X38,0X00,0X3F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFC,0X00,0X3F,
0XFF,0XF0,0X00,0X07,0X00,0X0F,0XFF,0XFF,0XFF,0XE0,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFC,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X0F,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X03,
0XF8,0X00,0X03,0XFF,0XFF,0XF8,0X00,0X38,0X00,0X7C,0X00,0X38,0X00,0X1F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFC,0X00,0X3F,
0XFF,0XE0,0X00,0X0F,0X00,0X0F,0XFF,0XFF,0XFF,0XF0,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XE0,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X1F,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X07,
0XFC,0X00,0X03,0XFF,0XFF,0XF8,0X00,0X38,0X00,0X7C,0X00,0X78,0X00,0X1F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFC,0X00,0X3F,
0XFF,0XC0,0X00,0X1F,0X00,0X0F,0XFF,0XFF,0XFF,0XFC,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF0,0X00,0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X1F,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X07,
0XFC,0X00,0X03,0XFF,0XFF,0XF8,0X00,0X3C,0X00,0X3C,0X00,0X78,0X00,0X1F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFC,0X00,0X3F,
0XFF,0X00,0X00,0X7F,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF0,0X00,0X00,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X07,
0XFC,0X00,0X01,0XFF,0XFF,0XF8,0X00,0X3C,0X00,0X38,0X00,0X78,0X00,0X1F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFC,0X00,0X3F,
0XFE,0X00,0X00,0XFF,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF0,0X00,0X00,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,0X3F,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X0F,
0XFC,0X00,0X01,0XFF,0XFF,0XF8,0X00,0X3C,0X00,0X38,0X00,0XF8,0X00,0X1F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFC,0X00,0X3F,
0XFC,0X00,0X01,0XFF,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF0,0X00,0X00,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X7F,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X0F,
0XFE,0X00,0X01,0XFF,0XFF,0XF8,0X00,0X3E,0X00,0X38,0X00,0XFC,0X00,0X1F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFC,0X00,0X3F,
0XF0,0X00,0X07,0XFF,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF0,0X00,0X00,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X0F,
0XFE,0X00,0X00,0XFF,0XFF,0XF8,0X00,0X3E,0X00,0X38,0X00,0XFC,0X00,0X1F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFC,0X00,0X3F,
0XE0,0X00,0X0F,0XFF,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF0,0X00,0X00,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,0X0F,
0XFE,0X00,0X00,0XFF,0XFF,0XF8,0X00,0X3E,0X00,0X10,0X01,0XFC,0X00,0X1F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFC,0X00,0X3F,
0XC0,0X00,0X1F,0XFF,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF0,0X00,0X00,0X00,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X01,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,0X1F,
0XFE,0X00,0X00,0XFF,0XFF,0XF8,0X00,0X3E,0X00,0X10,0X01,0XFC,0X00,0X1F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFC,0X00,0X3F,
0X00,0X00,0X7F,0XFF,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF0,0X00,0X00,0X00,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X03,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,0X1F,
0XFF,0X00,0X00,0XFF,0XFF,0XF8,0X00,0X3F,0X00,0X10,0X01,0XFC,0X00,0X1F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFC,0X00,0X3E,
0X00,0X00,0XFF,0XFF,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF0,0X00,0X00,0X00,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X03,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X1F,
0XFF,0X00,0X00,0X7F,0XFF,0XF8,0X00,0X3F,0X00,0X10,0X03,0XFC,0X00,0X1F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFC,0X00,0X3C,
0X00,0X01,0XFF,0XFF,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XF0,0X00,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X07,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X3F,
0XFF,0X00,0X00,0X7F,0XFF,0XF8,0X00,0X3F,0X00,0X00,0X03,0XFC,0X00,0X1F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFC,0X00,0X30,
0X00,0X07,0XFF,0XFF,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0X80,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X0F,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X3F,
0XFF,0X80,0X00,0X7F,0XFF,0XF8,0X00,0X3F,0X00,0X00,0X03,0XFC,0X00,0X1F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFC,0X00,0X20,
0X00,0X0F,0XFF,0XFF,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X1F,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X3F,
0XFF,0X80,0X00,0X3F,0XFF,0XF8,0X00,0X3F,0X80,0X00,0X03,0XFC,0X00,0X1F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFC,0X00,0X00,
0X00,0X1F,0XFF,0XFF,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,0X3F,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0X3F,
0XFF,0X80,0X00,0X3F,0XFF,0XF8,0X00,0X3F,0X80,0X00,0X07,0XFC,0X00,0X1F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFC,0X00,0X00,
0X00,0X7F,0XFF,0XFF,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X07,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X7F,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0X7F,
0XFF,0X80,0X00,0X3F,0XFF,0XF8,0X00,0X3F,0X80,0X00,0X07,0XFC,0X00,0X1F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFC,0X00,0X00,
0X00,0XFF,0XFF,0XFE,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X07,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0X7F,
0XFF,0XC0,0X00,0X1F,0XFF,0XF8,0X00,0X7F,0XC0,0X00,0X07,0XFC,0X00,0X1F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFE,0X00,0X00,
0X01,0XFF,0XFF,0XFE,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0X7F,
0XFF,0XC0,0X00,0X1F,0XFF,0XF8,0X00,0X7F,0XC0,0X00,0X0F,0XFC,0X00,0X1F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFE,0X00,0X00,
0X07,0XFF,0XFF,0XFE,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X01,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0XFF,
0XFF,0XC0,0X00,0X1F,0XFF,0XF8,0X00,0X7F,0XC0,0X00,0X0F,0XFC,0X00,0X1F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFE,0X00,0X00,
0X0F,0XFF,0XFF,0XFE,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X03,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X03,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0XFF,
0XFF,0XC0,0X00,0X1F,0XFF,0XF0,0X00,0X7F,0XC0,0X00,0X0F,0XFC,0X00,0X0F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFE,0X00,0X00,
0X3F,0XFF,0XFF,0XFE,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X01,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X07,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X00,0XFF,
0XFF,0XE0,0X00,0X0F,0XFF,0XF0,0X00,0X7F,0XE0,0X00,0X1F,0XFC,0X00,0X0F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFE,0X00,0X00,
0X7F,0XFF,0XFF,0XFE,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X01,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X0F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X00,0XFF,
0XFF,0XE0,0X00,0X0F,0XFF,0XF0,0X00,0X7F,0XE0,0X00,0X1F,0XFC,0X00,0X0F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFE,0X00,0X00,
0XFF,0XFF,0XFF,0XFC,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X01,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,0X1F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X01,0XFF,
0XFF,0XE0,0X00,0X0F,0XFF,0XF0,0X00,0X7F,0XE0,0X00,0X1F,0XFC,0X00,0X0F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0X00,0X03,
0XFF,0XFF,0XFF,0XFC,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X01,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X3F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X01,0XFF,
0XFF,0XF0,0X00,0X07,0XFF,0XF0,0X00,0X7F,0XE0,0X00,0X3F,0XFC,0X00,0X0F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0X00,0X07,
0XFF,0XFF,0XFF,0XFC,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X01,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0X7F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X01,0XFF,
0XFF,0XF0,0X00,0X07,0XFF,0XF0,0X00,0X7F,0XF0,0X00,0X3F,0XFC,0X00,0X0F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0X00,0X07,
0XFF,0XFF,0XFF,0XFC,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X01,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X03,0XFF,
0XFF,0XF0,0X00,0X07,0XFF,0XF0,0X00,0X7F,0XF0,0X00,0X3F,0XFC,0X00,0X0F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0X00,0X07,
0XFF,0XFF,0XFF,0XF8,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X01,0XFF,0XFF,0XFF,0XFF,0X00,0X01,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X03,0XFF,
0XFF,0XF0,0X00,0X07,0XFF,0XF0,0X00,0X7F,0XF0,0X00,0X7F,0XFC,0X00,0X0F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0X80,0X07,
0XFF,0XFF,0XFF,0XF8,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X01,0XFF,0XFF,0XFF,0XFE,0X00,0X03,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X00,0X00,
0X00,0X00,0X00,0X03,0XFF,0XF0,0X00,0X7F,0XF8,0X00,0X7F,0XFC,0X00,0X0F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0X80,0X03,
0XFF,0XFF,0XFF,0XF0,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X01,0XFF,0XFF,0XFF,0XFC,0X00,0X07,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X00,0X00,
0X00,0X00,0X00,0X03,0XFF,0XF0,0X00,0X7F,0XF8,0X00,0X7F,0XFC,0X00,0X0F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0X80,0X03,
0XFF,0XFF,0XFF,0XF0,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X03,0XFF,0XFF,0XFF,0XF8,0X00,0X0F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X00,0X00,
0X00,0X00,0X00,0X03,0XFF,0XF0,0X00,0X7F,0XF8,0X00,0X7F,0XFC,0X00,0X0F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XC0,0X01,
0XFF,0XFF,0XFF,0XF0,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X03,0XFF,0XFF,0XFF,0XF0,0X00,0X1F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X00,0X00,
0X00,0X00,0X00,0X01,0XFF,0XF0,0X00,0X7F,0XF8,0X00,0XFF,0XFC,0X00,0X0F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XC0,0X01,
0XFF,0XFF,0XFF,0XE0,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X03,0XFF,0XFF,0XFF,0XE0,0X00,0X3F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X00,0X00,
0X00,0X00,0X00,0X01,0XFF,0XF0,0X00,0X7F,0XFC,0X00,0XFF,0XFC,0X00,0X0F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,
0XFF,0XFF,0XFF,0XC0,0X01,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X03,0XFF,0XFF,0XFF,0XC0,0X00,0X7F,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X00,0X00,
0X00,0X00,0X00,0X01,0XFF,0XF0,0X00,0X7F,0XFF,0XFF,0XFF,0XFE,0X00,0X0F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,
0XFF,0XFF,0XFF,0XC0,0X01,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X07,0XFF,0XFF,0XFF,0X80,0X00,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0XFF,0XF0,0X00,0X7F,0XFF,0XFF,0XFF,0XFE,0X00,0X0F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,
0X7F,0XFF,0XFF,0X80,0X03,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X07,0XFF,0XFF,0XFF,0X00,0X01,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0XFF,0XF0,0X00,0X7F,0XFF,0XFF,0XFF,0XFE,0X00,0X0F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,
0X3F,0XFF,0XFF,0X00,0X03,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X0F,0XFF,0XFF,0XFE,0X00,0X03,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0XFF,0XF0,0X00,0X7F,0XFF,0XFF,0XFF,0XFE,0X00,0X0F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,
0X1F,0XFF,0XFE,0X00,0X07,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X0F,0XFF,0XFF,0XFC,0X00,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0XFF,0XF0,0X00,0X7F,0XFF,0XFF,0XFF,0XFE,0X00,0X0F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,
0X0F,0XFF,0XF8,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XF0,0X00,0X1F,0XFF,0XFF,0XF8,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X7F,0XF0,0X00,0X7F,0XFF,0XFF,0XFF,0XFE,0X00,0X0F,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,
0X03,0XFF,0XF0,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0X3F,0XFF,0XFF,0XF0,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X7F,0XE0,0X00,0X7F,0XFF,0XFF,0XFF,0XFE,0X00,0X07,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X00,0X00,0X00,0X00,0X07,0XFF,0XFF,0XFC,0X00,
0X00,0X7F,0X80,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0X07,0XFF,0XFF,0XFF,0XFF,
0XCF,0XFF,0XFF,0XFF,0X00,0X00,0X3F,0XFF,0XFF,0XE0,0X00,0X00,0X00,0X00,0X00,0X00,
0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X7F,0XE0,0X00,0X7F,0XFF,0XFF,0XFF,0XFE,0X00,0X07,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X00,0X00,0X00,0X00,0X07,0XFF,0XFF,0XFE,0X00,
0X00,0X00,0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0X07,0XFF,0XFF,0XFF,0XFF,
0XC0,0X1F,0XFF,0XE0,0X00,0X00,0X7F,0XFF,0XFF,0XE0,0X00,0X00,0X00,0X00,0X00,0X00,
0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0X3F,0XFF,
0XFF,0XFF,0X00,0X00,0X3F,0XE0,0X00,0X7F,0XFF,0XFF,0XFF,0XFE,0X00,0X07,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X00,0X00,0X00,0X00,0X07,0XFF,0XFF,0XFF,0X00,
0X00,0X00,0X00,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0X07,0XFF,0XFF,0XFF,0XFF,
0XC0,0X00,0X00,0X00,0X00,0X00,0XFF,0XFF,0XFF,0XE0,0X00,0X00,0X00,0X00,0X00,0X00,
0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0X3F,0XFF,
0XFF,0XFF,0X80,0X00,0X3F,0XE0,0X00,0X7F,0XFF,0XFF,0XFF,0XFE,0X00,0X07,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X00,0X00,0X00,0X00,0X07,0XFF,0XFF,0XFF,0X80,
0X00,0X00,0X00,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0X07,0XFF,0XFF,0XFF,0XFF,
0XC0,0X00,0X00,0X00,0X00,0X01,0XFF,0XFF,0XFF,0XE0,0X00,0X00,0X00,0X00,0X00,0X00,
0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0X3F,0XFF,
0XFF,0XFF,0X80,0X00,0X3F,0XE0,0X00,0X7F,0XFF,0XFF,0XFF,0XFE,0X00,0X07,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X00,0X00,0X00,0X00,0X07,0XFF,0XFF,0XFF,0XC0,
0X00,0X00,0X00,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0X07,0XFF,0XFF,0XFF,0XFF,
0XC0,0X00,0X00,0X00,0X00,0X03,0XFF,0XFF,0XFF,0XE0,0X00,0X00,0X00,0X00,0X00,0X00,
0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0X7F,0XFF,
0XFF,0XFF,0X80,0X00,0X1F,0XE0,0X00,0X7F,0XFF,0XFF,0XFF,0XFE,0X00,0X07,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X00,0X00,0X00,0X00,0X07,0XFF,0XFF,0XFF,0XE0,
0X00,0X00,0X00,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,
0XC0,0X00,0X00,0X00,0X00,0X0F,0XFF,0XFF,0XFF,0XE0,0X00,0X00,0X00,0X00,0X00,0X00,
0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0X7F,0XFF,
0XFF,0XFF,0X80,0X00,0X1F,0XE0,0X00,0X7F,0XFF,0XFF,0XFF,0XFE,0X00,0X07,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X00,0X00,0X00,0X00,0X07,0XFF,0XFF,0XFF,0XF0,
0X00,0X00,0X00,0X07,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,
0XC0,0X00,0X00,0X00,0X00,0X1F,0XFF,0XFF,0XFF,0XE0,0X00,0X00,0X00,0X00,0X00,0X00,
0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0X7F,0XFF,
0XFF,0XFF,0XC0,0X00,0X1F,0XE0,0X00,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X07,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X00,0X00,0X00,0X00,0X07,0XFF,0XFF,0XFF,0XFC,
0X00,0X00,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,
0XC0,0X00,0X00,0X00,0X00,0X7F,0XFF,0XFF,0XFF,0XE0,0X00,0X00,0X00,0X00,0X00,0X00,
0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0XFF,0XFF,
0XFF,0XFF,0XC0,0X00,0X1F,0XE0,0X00,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X07,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X00,0X00,0X00,0X00,0X07,0XFF,0XFF,0XFF,0XFF,
0X00,0X00,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,
0XC0,0X00,0X00,0X00,0X03,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X00,0X00,0X00,0X00,0X00,
0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X00,0XFF,0XFF,
0XFF,0XFF,0XC0,0X00,0X0F,0XE0,0X00,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X07,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X00,0X00,0X00,0X00,0X07,0XFF,0XFF,0XFF,0XFF,
0XE0,0X00,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,
0XC0,0X00,0X00,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X00,0X00,0X00,0X00,0X00,
0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X00,0XFF,0XFF,
0XFF,0XFF,0XC0,0X00,0X0F,0XE0,0X00,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X07,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X00,0X00,0X00,0X00,0X07,0XFF,0XFF,0XFF,0XFF,
0XFE,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,
0XF0,0X00,0X00,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X00,0X00,0X00,0X00,0X00,
0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X01,0XFF,0XFF,
0XFF,0XFF,0XE0,0X00,0X0F,0XE0,0X00,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X07,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XC0,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X01,0XFF,0XFF,
0XFF,0XFF,0XE0,0X00,0X07,0XE0,0X00,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X07,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X01,0XFF,0XFF,
0XFF,0XFF,0XE0,0X00,0X07,0XE0,0X00,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X07,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X01,0XFF,0XFF,
0XFF,0XFF,0XF0,0X00,0X07,0XE0,0X00,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X07,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X03,0XFF,0XFF,
0XFF,0XFF,0XF0,0X00,0X07,0XE0,0X00,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X07,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X03,0XFF,0XFF,
0XFF,0XFF,0XF0,0X00,0X03,0XE0,0X00,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X07,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X03,0XFF,0XFF,
0XFF,0XFF,0XF0,0X00,0X03,0XE0,0X00,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X07,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X07,0XFF,0XFF,
0XFF,0XFF,0XF8,0X00,0X03,0XE0,0X00,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X07,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X00,0X00,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X00,0X00,0XFF,0XFF,0XFF,0X00,0X00,
0X7F,0XC0,0X00,0X00,0X00,0X01,0XFF,0X8F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X0F,
0XFF,0XFF,0XFF,0XFF,0X81,0XFE,0X00,0X00,0X00,0X00,0XFE,0X00,0X00,0X7F,0XFF,0XFF,
0X80,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XE0,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X00,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X00,0X07,0XFF,0XFF,0XFF,0XC0,0X00,
0X7F,0XE0,0X00,0X00,0X00,0X01,0XFF,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X7F,
0XFF,0XFF,0XFF,0XFF,0X81,0XFE,0X00,0X00,0X00,0X00,0XFE,0X00,0X03,0XFF,0XFF,0XFF,
0XE0,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X00,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X00,0X1F,0XFF,0XFF,0XFF,0XF0,0X00,
0X3F,0XE0,0X00,0X00,0X00,0X03,0XFF,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X01,0XFF,
0XFF,0XFF,0XFF,0XFF,0X81,0XFC,0X00,0X00,0X00,0X00,0XFE,0X00,0X0F,0XFF,0XFF,0XFF,
0XF8,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X00,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X00,0X3F,0XFF,0XFF,0XFF,0XFC,0X00,
0X3F,0XF0,0X00,0X00,0X00,0X03,0XFE,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X03,0XFF,
0XFF,0XFF,0XFF,0XFF,0X81,0XFC,0X00,0X00,0X00,0X00,0XFE,0X00,0X1F,0XFF,0XFF,0XFF,
0XFE,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X00,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X00,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,
0X1F,0XF0,0X00,0X00,0X00,0X07,0XFE,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X07,0XFF,
0XFF,0XFF,0XFF,0XFF,0X81,0XFC,0X00,0X00,0X00,0X00,0XFE,0X00,0X7F,0XFF,0XFF,0XFF,
0XFF,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF7,0XFF,0XFF,0XC0,0X00,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,
0X1F,0XF8,0X00,0X00,0X00,0X07,0XFC,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X0F,0XFF,
0XFF,0XFF,0XFF,0XFF,0X81,0XFC,0X00,0X00,0X00,0X00,0XFE,0X00,0XFF,0XFF,0XFF,0XFF,
0XFF,0X80,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE7,0XFF,0XFF,0XE0,0X00,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,
0X0F,0XF8,0X00,0X00,0X00,0X0F,0XFC,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X0F,0XFF,
0XFF,0XFF,0XFF,0XFF,0X81,0XFC,0X00,0X00,0X00,0X00,0XFE,0X01,0XFF,0XFF,0XFF,0XFF,
0XFF,0XC0,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC7,0XFF,0XFF,0XF0,0X00,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,
0X0F,0XFC,0X00,0X00,0X00,0X0F,0XF8,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X1F,0XFF,
0XFF,0XFF,0XFF,0XFF,0X81,0XFC,0X00,0X00,0X00,0X00,0XFE,0X01,0XFF,0XFF,0XFF,0XFF,
0XFF,0XE0,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X87,0XFF,0XFF,0XF8,0X00,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X07,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,
0X07,0XFC,0X00,0X00,0X00,0X1F,0XF8,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X1F,0XFF,
0XFF,0XFF,0XFF,0XFF,0X81,0XFC,0X00,0X00,0X00,0X00,0XFE,0X03,0XFF,0XFF,0XFF,0XFF,
0XFF,0XE0,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X07,0XFF,0XFF,0XFC,0X00,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X07,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,
0X07,0XFE,0X00,0X00,0X00,0X1F,0XF0,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X1F,0XFF,
0XFF,0XFF,0XFF,0XFF,0X01,0XFC,0X00,0X00,0X00,0X00,0XFE,0X03,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF0,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X07,0XFF,0XFF,0XFE,0X00,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X07,0XFF,0XC0,0X00,0X01,0XFF,0XE0,
0X03,0XFE,0X00,0X00,0X00,0X3F,0XE0,0X0F,0XF8,0X00,0X00,0X00,0X00,0X00,0X1F,0XF8,
0X00,0X00,0X00,0X00,0X01,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XFF,0XE0,0X00,0X01,
0XFF,0XF0,0X0F,0XE0,0X00,0X00,0X00,0X3F,0XF0,0X7F,0X80,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X07,0XFF,0XFF,0XFE,0X00,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XFF,0X00,0X00,0X00,0X7F,0XF0,
0X03,0XFF,0X00,0X00,0X00,0X3F,0XE0,0X0F,0XF8,0X00,0X00,0X00,0X00,0X00,0X1F,0XE0,
0X00,0X00,0X00,0X00,0X01,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XFF,0X80,0X00,0X00,
0X7F,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X0F,0XF0,0X7F,0X80,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X07,0XFF,0XFF,0XFF,0X00,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XFE,0X00,0X00,0X00,0X3F,0XF0,
0X01,0XFF,0X00,0X00,0X00,0X7F,0XC0,0X0F,0XF8,0X00,0X00,0X00,0X00,0X00,0X1F,0XE0,
0X00,0X00,0X00,0X00,0X01,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XFF,0X00,0X00,0X00,
0X1F,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X0F,0XF0,0X7F,0X80,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X07,0XFF,0XFF,0XFF,0X80,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XFC,0X00,0X00,0X00,0X1F,0XF0,
0X01,0XFF,0X80,0X00,0X00,0X7F,0XC0,0X0F,0XF8,0X00,0X00,0X00,0X00,0X00,0X1F,0XE0,
0X00,0X00,0X00,0X00,0X01,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XFC,0X00,0X00,0X00,
0X0F,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X0F,0XF0,0X7F,0X80,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X07,0XFF,0XFF,0XFF,0XC0,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XF8,0X00,0X00,0X00,0X0F,0XF0,
0X00,0XFF,0X80,0X00,0X00,0XFF,0X80,0X0F,0XF8,0X00,0X00,0X00,0X00,0X00,0X1F,0XF0,
0X00,0X00,0X00,0X00,0X01,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XFC,0X00,0X00,0X00,
0X0F,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X0F,0XF0,0X7F,0X80,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X07,0XFF,0XFF,0XFF,0XC0,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XF0,0X00,0X00,0X00,0X0F,0XF0,
0X00,0XFF,0XC0,0X00,0X00,0XFF,0X80,0X0F,0XF8,0X00,0X00,0X00,0X00,0X00,0X1F,0XF8,
0X00,0X00,0X00,0X00,0X01,0XFE,0X00,0X00,0X00,0X01,0XFE,0X07,0XF8,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X0F,0XF0,0X7F,0X80,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X07,0XFF,0XFF,0XFF,0XE0,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XF0,0X00,0X00,0X00,0X0F,0XF0,
0X00,0X7F,0XC0,0X00,0X01,0XFF,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X1F,0XFF,
0XFF,0XFF,0XFF,0X80,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X07,0XF8,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X0F,0XF0,0X7F,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X07,0XFF,0XFF,0XFF,0XE0,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X07,0XF0,
0X00,0X7F,0XE0,0X00,0X01,0XFF,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X1F,0XFF,
0XFF,0XFF,0XFF,0XF0,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X07,0XF0,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X0F,0XF0,0X7F,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X07,0XFF,0XFF,0XFF,0XF0,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X07,0XF0,
0X00,0X3F,0XE0,0X00,0X03,0XFE,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X0F,0XFF,
0XFF,0XFF,0XFF,0XFC,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X07,0XF0,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X0F,0XF0,0X7F,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X07,0XFF,0XFF,0XFF,0XF0,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X07,0XF0,
0X00,0X3F,0XF0,0X00,0X03,0XFE,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X0F,0XFF,
0XFF,0XFF,0XFF,0XFE,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X07,0XF0,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X0F,0XF0,0X7F,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X07,0XFF,0XFF,0XFF,0XF8,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X07,0XF0,
0X00,0X1F,0XF0,0X00,0X07,0XFC,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X0F,0XFF,
0XFF,0XFF,0XFF,0XFF,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X07,0XF0,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X0F,0XF0,0X7F,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X07,0XFF,0XFF,0XFF,0XF8,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X07,0XF0,
0X00,0X1F,0XF8,0X00,0X07,0XFC,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X07,0XFF,
0XFF,0XFF,0XFF,0XFF,0X81,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X07,0XF0,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X0F,0XF0,0X7F,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X07,0XFF,0XFF,0XFF,0XFC,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X07,0XF0,
0X00,0X0F,0XF8,0X00,0X0F,0XF8,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X03,0XFF,
0XFF,0XFF,0XFF,0XFF,0X81,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X07,0XF0,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X1F,0XF0,0X7F,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X07,0XFF,0XFF,0XFF,0XFC,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X07,0XF0,
0X00,0X07,0XFC,0X00,0X1F,0XF8,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X00,0XFF,
0XFF,0XFF,0XFF,0XFF,0XC1,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X07,0XF0,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X7F,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,
0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X07,0XFF,0XFF,0XFF,0XFC,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XF0,0X00,0X00,0X00,0X0F,0XF0,
0X00,0X07,0XFC,0X00,0X1F,0XF0,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X00,0X3F,
0XFF,0XFF,0XFF,0XFF,0XC1,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X07,0XF8,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X7F,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,
0X01,0XFE,0X00,0X07,0XFF,0X80,0X7F,0XFE,0X00,0X07,0XF8,0X07,0XFF,0XFE,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,
0X00,0X07,0XFE,0X00,0X3F,0XF0,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X00,0X01,
0XFF,0XFF,0XFF,0XFF,0XC1,0XFF,0XFF,0XFF,0XFF,0XFF,0XFE,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF8,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X7F,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,
0X01,0XFE,0X00,0X03,0XFF,0X00,0X3F,0XFE,0X00,0X07,0XF0,0X03,0XFF,0XFE,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,
0X00,0X03,0XFE,0X00,0X3F,0XE0,0X00,0X0F,0XF8,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0XFF,0XC1,0XFE,0X00,0X00,0X00,0X00,0XFE,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF8,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X7F,0X80,0X00,0X00,0X00,0X00,0X00,
0X01,0XFE,0X00,0X03,0XFE,0X00,0X3F,0XFC,0X00,0X27,0XF0,0X03,0XFF,0XFF,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,
0X00,0X03,0XFF,0X00,0X7F,0XE0,0X00,0X0F,0XF8,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X3F,0XC1,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF8,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X7F,0X80,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0X00,0X03,0XFE,0X00,0X3F,0XFC,0X00,0X3F,0XE0,0X01,0XFF,0XFF,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,
0X00,0X01,0XFF,0X00,0X7F,0XC0,0X00,0X0F,0XF8,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X3F,0XC1,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF8,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X7F,0X80,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0X00,0X01,0XFE,0X00,0X1F,0XF8,0X00,0X7F,0XE0,0X01,0XFF,0XFF,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,
0X00,0X00,0XFF,0X80,0XFF,0XC0,0X00,0X0F,0XF8,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X1F,0XC1,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF8,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X7F,0X80,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0X80,0X01,0XFC,0X00,0X1F,0XF8,0X00,0X7F,0XC0,0X00,0XFF,0XFF,0X00,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,
0X00,0X00,0XFF,0X80,0XFF,0X80,0X00,0X0F,0XF8,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X1F,0XC1,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF8,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0X80,0X7F,0X80,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0X80,0X01,0XFC,0X00,0X0F,0XF0,0X00,0X7F,0XC0,0X00,0XFF,0XFF,0X80,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,
0X00,0X00,0XFF,0XC1,0XFF,0X80,0X00,0X0F,0XF8,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X1F,0XC1,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF8,0X0F,0XFF,0XFF,0XFF,0XFF,0XFE,0X00,0X7F,0X80,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0X80,0X00,0XFC,0X00,0X0F,0XF0,0X00,0XFF,0XC0,0X00,0X7F,0XFF,0X80,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,
0X00,0X00,0X7F,0XC1,0XFF,0X00,0X00,0X0F,0XF8,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X1F,0XC1,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF8,0X0F,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X7F,0X80,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0X80,0X00,0XF8,0X00,0X0F,0XF0,0X00,0XFF,0X80,0X00,0X7F,0XFF,0X80,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,
0X00,0X00,0X7F,0XE3,0XFF,0X00,0X00,0X0F,0XF8,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X1F,0XC1,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF8,0X0F,0XF0,0XFF,0XFE,0X00,0X00,0X00,0X7F,0X80,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XC0,0X00,0XF8,0X00,0X07,0XE0,0X01,0XFF,0X80,0X00,0X3F,0XFF,0X80,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,
0X00,0X00,0X3F,0XE3,0XFE,0X00,0X00,0X0F,0XF8,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X1F,0XC1,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XFF,0XFF,0XFF,0XFF,
0XFF,0XF8,0X0F,0XE0,0X7F,0XFF,0X00,0X00,0X00,0X7F,0X80,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XC0,0X00,0X78,0X00,0X07,0XE0,0X01,0XFF,0X00,0X00,0X3F,0XFF,0X80,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X07,0XF0,
0X00,0X00,0X1F,0XF3,0XFC,0X00,0X00,0X0F,0XF8,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X1F,0XC1,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XF0,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X1F,0XFF,0X80,0X00,0X00,0X7F,0X80,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XC0,0X00,0X70,0X00,0X07,0XC0,0X03,0XFF,0X00,0X00,0X1F,0XFF,0XC0,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X07,0XF0,
0X00,0X00,0X1F,0XFF,0XFC,0X00,0X00,0X0F,0XF8,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X1F,0XC1,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XF0,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X0F,0XFF,0XE0,0X00,0X00,0X7F,0X80,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XE0,0X00,0X70,0X00,0X03,0XC0,0X03,0XFE,0X00,0X00,0X1F,0XFF,0XC0,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X07,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X07,0XF0,
0X00,0X00,0X0F,0XFF,0XF8,0X00,0X00,0X0F,0XF8,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X1F,0XC1,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XF0,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X03,0XFF,0XF0,0X00,0X00,0X7F,0X80,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XE0,0X00,0X30,0X00,0X03,0X80,0X03,0XFE,0X00,0X00,0X0F,0XFF,0XC0,0X03,
0XFC,0X00,0X00,0X03,0XFC,0X00,0X00,0X0F,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X07,0XF0,
0X00,0X00,0X0F,0XFF,0XF8,0X00,0X00,0X0F,0XF8,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X3F,0XC1,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XF0,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X01,0XFF,0XFC,0X00,0X00,0X7F,0X80,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XE0,0X00,0X20,0X00,0X01,0X80,0X07,0XFE,0X00,0X00,0X0F,0XFF,0XC0,0X03,
0XFF,0X00,0X00,0X03,0XFC,0X00,0X00,0X1F,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X07,0XF0,
0X00,0X00,0X07,0XFF,0XF0,0X00,0X00,0X0F,0XF8,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X7F,0XC1,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XF0,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X00,0X7F,0XFE,0X00,0X00,0X7F,0X80,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XF0,0X00,0X20,0X00,0X01,0X80,0X07,0XFC,0X00,0X00,0X07,0XFF,0XC0,0X03,
0XFF,0XC0,0X00,0X03,0XFC,0X00,0X00,0XFF,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X07,0XF0,
0X00,0X00,0X07,0XFF,0XF0,0X00,0X00,0X0F,0XF8,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X03,0XFF,0XC1,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XF0,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X00,0X3F,0XFF,0X80,0X00,0X7F,0X80,0X00,0X00,0X00,0X00,0X00,
0X01,0XFF,0XF0,0X00,0X00,0X00,0X01,0X00,0X0F,0XFC,0X00,0X00,0X07,0XFF,0XC0,0X03,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X07,0XF0,
0X00,0X00,0X03,0XFF,0XE0,0X00,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X1F,0XFF,
0XFF,0XFF,0XFF,0XFF,0XC1,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XF0,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X00,0X1F,0XFF,0XC0,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,
0X01,0XFF,0XF0,0X00,0X00,0X00,0X00,0X00,0X0E,0XF8,0X00,0X00,0X07,0XFF,0XC0,0X03,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X0F,0XE0,0X00,0X00,0X00,0X07,0XF0,
0X00,0X00,0X03,0XFF,0XE0,0X00,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X1F,0XFF,
0XFF,0XFF,0XFF,0XFF,0XC1,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XF0,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X00,0X07,0XFF,0XF0,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,
0X01,0XFF,0XF8,0X00,0X00,0X00,0X00,0X00,0X1C,0X78,0X00,0X00,0X03,0XFF,0XC0,0X03,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X0F,0XE0,0X00,0X00,0X00,0X07,0XF0,
0X00,0X00,0X01,0XFF,0XC0,0X00,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X1F,0XFF,
0XFF,0XFF,0XFF,0XFF,0XC1,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XF0,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X00,0X03,0XFF,0XF8,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,
0X01,0XFF,0XF8,0X00,0X00,0X00,0X00,0X00,0X18,0X70,0X00,0X00,0X03,0XFF,0XC0,0X01,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF0,0X0F,0XE0,0X00,0X00,0X00,0X07,0XF0,
0X00,0X00,0X01,0XFF,0XC0,0X00,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X1F,0XFF,
0XFF,0XFF,0XFF,0XFF,0X81,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XF0,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X00,0X00,0XFF,0XFC,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,
0X01,0XFF,0XF8,0X00,0X00,0X00,0X00,0X00,0X18,0XF0,0X00,0X40,0X01,0XFF,0XC0,0X00,
0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X0F,0XE0,0X00,0X00,0X00,0X07,0XF0,
0X00,0X00,0X00,0XFF,0X80,0X00,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X1F,0XFF,
0XFF,0XFF,0XFF,0XFF,0X01,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XF0,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X00,0X00,0X7F,0XFF,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,
0X01,0XFF,0XFC,0X00,0X00,0X10,0X00,0X00,0X38,0X80,0X00,0XC0,0X01,0XFF,0XC0,0X00,
0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XE0,0X0F,0XE0,0X00,0X00,0X00,0X07,0XF0,
0X00,0X00,0X00,0XFF,0X80,0X00,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X1F,0XFF,
0XFF,0XFF,0XFF,0XFE,0X01,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XF0,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X00,0X00,0X3F,0XFF,0X80,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,
0X01,0XFF,0XFC,0X00,0X00,0X18,0X00,0X00,0X31,0X80,0X00,0XE0,0X00,0XFF,0XC0,0X00,
0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X0F,0XE0,0X00,0X00,0X00,0X07,0XF0,
0X00,0X00,0X00,0X7F,0X00,0X00,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X1F,0XFF,
0XFF,0XFF,0XFF,0XFC,0X01,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XF0,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X00,0X00,0X0F,0XFF,0XE0,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,
0X01,0XFF,0XFE,0X00,0X00,0X18,0X00,0X00,0X71,0X00,0X00,0XE0,0X00,0XFF,0XC0,0X00,
0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0X00,0X0F,0XE0,0X00,0X00,0X00,0X07,0XF0,
0X00,0X00,0X00,0X7F,0X00,0X00,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X1F,0XFF,
0XFF,0XFF,0XFF,0XF0,0X01,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XF0,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X00,0X00,0X07,0XFF,0XF0,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,
0X00,0XFF,0XFE,0X00,0X00,0X38,0X00,0X00,0X63,0X80,0X01,0XF0,0X00,0X7F,0XC0,0X00,
0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X00,0X0F,0XE0,0X00,0X00,0X00,0X07,0XF0,
0X00,0X00,0X00,0X3E,0X00,0X00,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFC,0X1F,0XFF,
0XFF,0XFF,0XFF,0XC0,0X01,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XF0,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X00,0X00,0X01,0XFF,0XFC,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,
0X00,0XFF,0XFE,0X00,0X00,0X3C,0X00,0X00,0X63,0X00,0X01,0XF0,0X00,0X7F,0XC0,0X00,
0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,0X0F,0XE0,0X00,0X00,0X00,0X07,0XF0,
0X00,0X00,0X00,0X1C,0X00,0X00,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X1F,0XFF,
0XFF,0XFF,0XFC,0X00,0X01,0XFC,0X00,0X00,0X00,0X00,0XFE,0X07,0XF0,0X00,0X00,0X00,
0X07,0XF8,0X0F,0XE0,0X00,0X00,0X00,0XFF,0XFE,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,
0X00,0XFF,0XFE,0X00,0X00,0X3C,0X00,0X00,0XC3,0X00,0X03,0XF8,0X00,0X3F,0XC0,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0XFF,0XFF,0X00,0X00,0X7C,0X00,0X00,0XC6,0X00,0X03,0XF8,0X00,0X3F,0XC0,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0XFF,0XFF,0X00,0X00,0X7E,0X00,0X00,0XC4,0X00,0X07,0XFC,0X00,0X1F,0XC0,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0XFF,0XFF,0X00,0X00,0X7E,0X00,0X01,0X84,0X00,0X07,0XFC,0X00,0X3F,0XC0,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X7F,0XFF,0X80,0X00,0XFE,0X00,0X01,0X88,0X00,0X07,0XF8,0X00,0X3F,0XC0,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X7F,0XFF,0X80,0X00,0XFF,0X00,0X03,0XCC,0X00,0X0F,0XF8,0X00,0X7F,0XC0,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X7F,0XFF,0X80,0X00,0XFF,0X00,0X03,0XF8,0X00,0X0F,0XF0,0X00,0X7F,0XC0,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X7F,0XFF,0XC0,0X01,0XFF,0X80,0X07,0XF8,0X00,0X1F,0XF0,0X00,0XFF,0XC0,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X3F,0XFF,0XC0,0X01,0XFF,0X80,0X07,0XF0,0X00,0X1F,0XE0,0X00,0XFF,0XC0,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X3F,0XFF,0XC0,0X01,0XFF,0X80,0X07,0XF8,0X00,0X3F,0XE0,0X01,0XFF,0XC0,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X3F,0XFF,0XE0,0X03,0XFF,0XC0,0X0F,0XF8,0X00,0X3F,0XE0,0X01,0XFF,0XC0,0X00,
0X00,0X06,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0XC0,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X38,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X1F,0XFF,0XE0,0X03,0XFF,0XC0,0X0F,0XF8,0X00,0X3F,0XC0,0X03,0XFF,0XC0,0X00,
0X00,0X07,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X01,0XC0,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X38,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X1F,0XFF,0XE0,0X07,0XFF,0XC0,0X1F,0XFC,0X00,0X7F,0XC0,0X03,0XFF,0XC0,0X00,
0X00,0X07,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X01,0XC0,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X38,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X0F,0XFF,0XF0,0X07,0XFF,0XE0,0X1F,0XFC,0X00,0X7F,0X80,0X07,0XFF,0XC0,0X00,
0X00,0X07,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X01,0XC0,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X38,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X0F,0XFF,0XF0,0X07,0XFF,0XE0,0X1F,0XFE,0X00,0XF9,0X80,0X07,0XFF,0XC0,0X00,
0X00,0X07,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X01,0XC0,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X38,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X0F,0XFF,0XC0,0X00,
0X00,0X07,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X01,0XC0,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X38,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X07,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X0F,0XFF,0XC0,0X00,
0X00,0X07,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X01,0XC0,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X38,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X07,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X1F,0XFF,0XC0,0X00,
0X00,0X07,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X01,0XC0,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X38,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X1F,0XFF,0XC0,0X01,
0XF8,0X07,0X07,0XE0,0X00,0X0F,0XE0,0X40,0X43,0XC0,0X1F,0XE0,0X00,0X00,0X00,0X7F,
0X02,0X08,0X00,0X00,0X00,0X60,0X07,0XF8,0X00,0X0F,0XC0,0X00,0XFF,0X00,0X04,0X1F,
0X80,0X3F,0X00,0X00,0XFF,0X00,0X00,0X01,0XC1,0XF8,0X00,0X03,0XFC,0X18,0X10,0XF8,
0X03,0XF8,0X38,0X40,0X00,0X00,0X01,0X00,0X1F,0XC0,0X81,0X87,0X80,0X3F,0XC0,0X00,
0X00,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XC0,0X03,
0XFE,0X07,0X1F,0XF8,0X00,0X3F,0XF8,0XE0,0XEF,0XC0,0X7F,0XF8,0X00,0X00,0X01,0XFF,
0XC7,0X1C,0X00,0X18,0X00,0X70,0X1F,0XFF,0X00,0X1F,0XF0,0X03,0XFF,0XC0,0X0E,0X7F,
0XE0,0XFF,0XC0,0X03,0XFF,0XC0,0X00,0X01,0XC7,0XFE,0X00,0X0F,0XFF,0X38,0X39,0XF8,
0X0F,0XFE,0X38,0X70,0X00,0X80,0X03,0X80,0X7F,0XF1,0XC1,0XDF,0X80,0XFF,0XF0,0X00,
0X00,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X3F,0XFF,0XC0,0X07,
0X9F,0X07,0X7C,0XFC,0X00,0X7E,0X7E,0XE0,0XEE,0X80,0XFC,0XFC,0X00,0X00,0X07,0XE3,
0XE7,0X0E,0X00,0X38,0X00,0XE0,0X3F,0X3F,0XC0,0X3C,0XF8,0X07,0XE7,0XE0,0X0E,0X79,
0XF1,0XF7,0XE0,0X07,0XE7,0XE0,0X00,0X01,0XCF,0X9F,0X00,0X1F,0X9F,0X98,0X3B,0XB0,
0X3F,0X9F,0X38,0X70,0X01,0XC0,0X07,0X01,0XF8,0XF9,0XC1,0XFD,0X01,0XF9,0XF8,0X00,
0X00,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X0F,0XFF,0XC0,0X07,
0X07,0X07,0XF0,0X1C,0X00,0XF0,0X0F,0XE0,0XF8,0X01,0XE0,0X1E,0X00,0X00,0X0F,0X80,
0X7F,0X0E,0X00,0X38,0X00,0XE0,0X78,0X07,0XC0,0X38,0X38,0X0F,0X00,0XF0,0X0F,0XE0,
0X73,0X80,0XE0,0X0F,0X00,0XF0,0X00,0X01,0XFC,0X07,0X80,0X3C,0X03,0XF8,0X3F,0X00,
0X7C,0X03,0XB8,0X70,0X01,0XC0,0X07,0X03,0XE0,0X1D,0XC1,0XF0,0X03,0XC0,0X3C,0X00,
0X00,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X0F,0XFF,0XC0,0X0E,
0X00,0X07,0XC0,0X0E,0X01,0XC0,0X07,0XE0,0XF0,0X03,0XC0,0X07,0X00,0X00,0X1E,0X00,
0X3F,0X07,0X00,0X7C,0X01,0XC0,0XF0,0X01,0XE0,0X70,0X00,0X1E,0X00,0X78,0X0F,0X80,
0X3F,0X00,0XF0,0X1E,0X00,0X38,0X00,0X01,0XF8,0X03,0X80,0X78,0X00,0XF8,0X3E,0X00,
0X70,0X01,0XF8,0X38,0X03,0XE0,0X0E,0X03,0X80,0X0F,0XC1,0XE0,0X07,0X80,0X1E,0X00,
0X00,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X1F,0XFF,0XC0,0X0E,
0X00,0X07,0XC0,0X0E,0X03,0X80,0X03,0XE0,0XF0,0X03,0X80,0X03,0X80,0X00,0X1C,0X00,
0X1F,0X07,0X00,0X7C,0X01,0XC0,0XE0,0X00,0XF0,0X70,0X00,0X3C,0X00,0X3C,0X0F,0X80,
0X3E,0X00,0X70,0X1C,0X00,0X1C,0X00,0X01,0XF0,0X01,0XC0,0XF0,0X00,0X78,0X3C,0X00,
0XE0,0X00,0XF8,0X38,0X03,0XE0,0X0E,0X07,0X00,0X07,0XC1,0XE0,0X0F,0X00,0X0F,0X00,
0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0X7F,0XFF,0XC0,0X0E,
0X00,0X07,0X80,0X0E,0X07,0X80,0X01,0XE0,0XF0,0X07,0X00,0X03,0X80,0X00,0X3C,0X00,
0X0F,0X03,0X00,0XFC,0X01,0X81,0XC0,0X00,0XF0,0X70,0X00,0X38,0X00,0X1C,0X0F,0X00,
0X1C,0X00,0X70,0X38,0X00,0X1C,0X00,0X01,0XE0,0X01,0XC0,0XE0,0X00,0X78,0X3C,0X01,
0XE0,0X00,0X78,0X1C,0X03,0XE0,0X0E,0X0F,0X00,0X03,0XC1,0XC0,0X0E,0X00,0X07,0X00,
0X00,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X00,0XFF,0XFF,0XC0,0X07,
0X00,0X07,0X00,0X06,0X07,0X00,0X00,0XE0,0XE0,0X07,0X00,0X01,0XC0,0X00,0X38,0X00,
0X0F,0X03,0X80,0XE6,0X03,0X81,0XC0,0X00,0X78,0X38,0X00,0X38,0X00,0X0E,0X0F,0X00,
0X1C,0X00,0X70,0X38,0X00,0X0E,0X00,0X01,0XE0,0X01,0XC0,0XE0,0X00,0X38,0X38,0X01,
0XC0,0X00,0X38,0X1C,0X07,0X70,0X1C,0X0E,0X00,0X03,0XC1,0XC0,0X1C,0X00,0X03,0X00,
0X00,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X01,0XFF,0XFF,0XC0,0X07,
0X80,0X07,0X00,0X07,0X07,0X00,0X00,0XE0,0XE0,0X0F,0X00,0X01,0XC0,0X00,0X38,0X00,
0X07,0X03,0X80,0XCE,0X03,0X81,0XC0,0X00,0X78,0X3C,0X00,0X70,0X00,0X0E,0X0E,0X00,
0X1C,0X00,0X70,0X70,0X00,0X0E,0X00,0X01,0XC0,0X01,0XC1,0XC0,0X00,0X38,0X38,0X01,
0XC0,0X00,0X38,0X0C,0X07,0X70,0X1C,0X0E,0X00,0X01,0XC1,0XC0,0X1C,0X00,0X03,0X80,
0X00,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X03,0XFF,0XFF,0XC0,0X03,
0XE0,0X07,0X00,0X07,0X07,0X00,0X00,0XE0,0XE0,0X0F,0XFF,0XFF,0XC0,0X00,0X30,0X00,
0X07,0X01,0XC1,0XC7,0X07,0X03,0XFF,0XFF,0XF8,0X1F,0X00,0X70,0X00,0X0E,0X0E,0X00,
0X1C,0X00,0X70,0X7F,0XFF,0XFE,0X00,0X01,0XC0,0X01,0XC1,0XC0,0X00,0X18,0X38,0X01,
0X80,0X00,0X38,0X0E,0X0E,0X38,0X38,0X1C,0X00,0X01,0XC1,0XC0,0X1F,0XFF,0XFF,0X80,
0X00,0X00,0X07,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X07,0XFF,0XFF,0XC0,0X01,
0XF8,0X07,0X00,0X07,0X06,0X00,0X00,0XE0,0XE0,0X0F,0XFF,0XFF,0XC0,0X00,0X70,0X00,
0X07,0X01,0XC1,0XC7,0X07,0X03,0XFF,0XFF,0XF8,0X0F,0XC0,0X70,0X00,0X06,0X0E,0X00,
0X1C,0X00,0X70,0X7F,0XFF,0XFE,0X00,0X01,0XC0,0X01,0XC1,0XC0,0X00,0X18,0X38,0X03,
0X80,0X00,0X38,0X0E,0X0E,0X38,0X38,0X1C,0X00,0X01,0XC1,0XC0,0X1F,0XFF,0XFF,0X80,
0X00,0X00,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X0F,0XFF,0XFF,0XC0,0X00,
0X7C,0X07,0X00,0X07,0X06,0X00,0X00,0XE0,0XE0,0X0E,0X00,0X00,0X00,0X00,0X70,0X00,
0X07,0X00,0XE3,0X83,0X8E,0X03,0X80,0X00,0X00,0X03,0XE0,0X70,0X00,0X06,0X0E,0X00,
0X1C,0X00,0X70,0X70,0X00,0X00,0X00,0X01,0XC0,0X01,0XC1,0XC0,0X00,0X18,0X38,0X03,
0X80,0X00,0X38,0X07,0X1C,0X1C,0X70,0X1C,0X00,0X01,0XC1,0XC0,0X1C,0X00,0X00,0X00,
0X00,0X00,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X3F,0XFF,0XFF,0XC0,0X00,
0X1E,0X07,0X00,0X07,0X07,0X00,0X00,0XE0,0XE0,0X0E,0X00,0X00,0X00,0X00,0X30,0X00,
0X07,0X00,0XE3,0X83,0X8E,0X03,0X80,0X00,0X00,0X00,0XF0,0X70,0X00,0X0E,0X0E,0X00,
0X1C,0X00,0X70,0X70,0X00,0X00,0X00,0X01,0XC0,0X01,0XC1,0XC0,0X00,0X18,0X38,0X01,
0X80,0X00,0X38,0X07,0X1C,0X1C,0X70,0X1C,0X00,0X01,0XC1,0XC0,0X1C,0X00,0X00,0X00,
0X00,0X00,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0X7F,0XFF,0XFF,0XC0,0X00,
0X0F,0X07,0X00,0X07,0X07,0X00,0X00,0XE0,0XE0,0X0E,0X00,0X00,0X00,0X00,0X38,0X00,
0X07,0X00,0XE7,0X01,0XCC,0X01,0X80,0X00,0X00,0X00,0X78,0X70,0X00,0X0E,0X0E,0X00,
0X1C,0X00,0X70,0X70,0X00,0X00,0X00,0X01,0XC0,0X01,0XC1,0XC0,0X00,0X38,0X38,0X01,
0XC0,0X00,0X38,0X03,0X18,0X0E,0X60,0X0E,0X00,0X01,0XC1,0XC0,0X1C,0X00,0X00,0X00,
0X00,0X00,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF8,0XFF,0XFF,0XFF,0XC0,0X00,
0X07,0X07,0X00,0X07,0X07,0X00,0X00,0XE0,0XE0,0X07,0X00,0X00,0X00,0X00,0X38,0X00,
0X0F,0X00,0X77,0X01,0XCC,0X01,0XC0,0X00,0X00,0X00,0X38,0X70,0X00,0X0E,0X0E,0X00,
0X1C,0X00,0X70,0X38,0X00,0X00,0X00,0X01,0XC0,0X01,0XC0,0XE0,0X00,0X38,0X38,0X01,
0XC0,0X00,0X38,0X03,0XB8,0X0E,0XE0,0X0E,0X00,0X03,0XC1,0XC0,0X1C,0X00,0X00,0X00,
0X00,0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XF9,0XFF,0XFF,0XFF,0XC0,0X00,
0X03,0X07,0X00,0X07,0X07,0X80,0X01,0XE0,0XE0,0X07,0X00,0X01,0X80,0X00,0X38,0X00,
0X0F,0X00,0X7E,0X01,0XFC,0X01,0XC0,0X00,0X70,0X00,0X38,0X38,0X00,0X1C,0X0E,0X00,
0X1C,0X00,0X70,0X38,0X00,0X0C,0X00,0X01,0XC0,0X01,0XC0,0XE0,0X00,0X38,0X38,0X01,
0XE0,0X00,0X78,0X01,0XF8,0X0F,0XE0,0X0E,0X00,0X03,0XC1,0XC0,0X0E,0X00,0X07,0X00,
0X00,0X00,0X00,0X0F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,
0X03,0X07,0X00,0X07,0X03,0X80,0X01,0XE0,0XE0,0X07,0X80,0X03,0X80,0X00,0X1C,0X00,
0X1F,0X00,0X3E,0X00,0XF8,0X00,0XE0,0X00,0XF0,0X00,0X38,0X3C,0X00,0X1C,0X0E,0X00,
0X1C,0X00,0X70,0X3C,0X00,0X1C,0X00,0X01,0XC0,0X01,0XC0,0XF0,0X00,0X78,0X38,0X00,
0XE0,0X00,0XF8,0X01,0XF0,0X07,0XC0,0X07,0X00,0X07,0XC1,0XC0,0X0E,0X00,0X07,0X00,
0X00,0X00,0X00,0X07,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,
0X07,0X07,0X00,0X07,0X01,0XC0,0X07,0XE0,0XE0,0X03,0XC0,0X07,0X00,0X00,0X1E,0X00,
0X3F,0X00,0X3C,0X00,0XF8,0X00,0XF0,0X01,0XF0,0X00,0X38,0X1E,0X00,0X38,0X0E,0X00,
0X1C,0X00,0X70,0X1E,0X00,0X38,0X00,0X01,0XC0,0X01,0XC0,0X78,0X00,0XF8,0X38,0X00,
0X70,0X01,0XF8,0X01,0XF0,0X07,0XC0,0X07,0X80,0X0F,0XC1,0XC0,0X07,0X00,0X0E,0X00,
0X00,0X00,0X00,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X0C,
0X0F,0X07,0X00,0X07,0X01,0XF0,0X0F,0XE0,0XE0,0X01,0XE0,0X1F,0X00,0X00,0X0F,0X00,
0X7F,0X00,0X1C,0X00,0X70,0X00,0X78,0X03,0XE0,0X60,0X78,0X0F,0X00,0XF8,0X0E,0X00,
0X1C,0X00,0X70,0X0F,0X00,0XF8,0X00,0X01,0XC0,0X01,0XC0,0X3C,0X03,0XF8,0X38,0X00,
0X7C,0X03,0XF8,0X00,0XE0,0X03,0X80,0X03,0XC0,0X1F,0XC1,0XC0,0X03,0XC0,0X3C,0X00,
0X00,0X00,0X00,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X0F,
0X1E,0X07,0X00,0X07,0X00,0XFC,0X7E,0XE0,0XE0,0X00,0XF8,0X7E,0X00,0X00,0X07,0XE3,
0XF7,0X00,0X1C,0X00,0X70,0X00,0X3F,0X1F,0XC0,0X7C,0XF0,0X07,0XC3,0XF0,0X0E,0X00,
0X1C,0X00,0X70,0X07,0XC3,0XF0,0X00,0X01,0XC0,0X01,0XC0,0X1F,0X8F,0X98,0X38,0X00,
0X3F,0X1F,0XB8,0X00,0XE0,0X03,0X80,0X01,0XF8,0XFD,0XC1,0XC0,0X03,0XF1,0XFC,0X00,
0X00,0X00,0X00,0X00,0X7F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X07,
0XFC,0X07,0X00,0X07,0X00,0X3F,0XF8,0XE0,0XE0,0X00,0X7F,0XF8,0X00,0X00,0X03,0XFF,
0XC7,0X00,0X18,0X00,0X30,0X00,0X1F,0XFF,0X00,0X3F,0XE0,0X03,0XFF,0XC0,0X0E,0X00,
0X1C,0X00,0X70,0X03,0XFF,0XC0,0X00,0X01,0XC0,0X01,0XC0,0X0F,0XFF,0X38,0X38,0X00,
0X0F,0XFE,0X38,0X00,0X40,0X01,0X00,0X00,0X7F,0XF1,0XC1,0XC0,0X00,0XFF,0XF0,0X00,
0X00,0X00,0X00,0X00,0X1F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X03,
0XF8,0X07,0X00,0X06,0X00,0X1F,0XF0,0XE0,0XE0,0X00,0X1F,0XF0,0X00,0X00,0X00,0XFF,
0X06,0X00,0X00,0X00,0X30,0X00,0X07,0XFE,0X00,0X1F,0XC0,0X00,0XFF,0X00,0X06,0X00,
0X18,0X00,0X30,0X00,0XFF,0X80,0X00,0X00,0XC0,0X01,0XC0,0X03,0XFC,0X18,0X38,0X00,
0X07,0XFC,0X38,0X00,0X00,0X00,0X00,0X00,0X3F,0XC1,0X81,0X80,0X00,0X3F,0XC0,0X00,
0X00,0X00,0X00,0X00,0X07,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X01,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X3F,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X07,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X03,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XFF,0XC0,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,0X00,
]
| 81.402564
| 81
| 0.780023
| 30,906
| 158,735
| 4.006212
| 0.006018
| 0.971829
| 1.325354
| 1.617755
| 0.960942
| 0.944628
| 0.93374
| 0.921109
| 0.910997
| 0.892776
| 0
| 0.340957
| 0.025974
| 158,735
| 1,949
| 82
| 81.44433
| 0.459861
| 0.00732
| 0
| 0.58949
| 0
| 0
| 0
| 0
| 0
| 0
| 0.789753
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
d19062e681500d100909c239d9c227ac6cd5dcaf
| 260,421
|
py
|
Python
|
leetcode/hard/smallest_range/srcs/a.py
|
BillionsRichard/pycharmWorkspace
|
709e2681fc6d85ff52fb25717215a365f51073aa
|
[
"Apache-2.0"
] | null | null | null |
leetcode/hard/smallest_range/srcs/a.py
|
BillionsRichard/pycharmWorkspace
|
709e2681fc6d85ff52fb25717215a365f51073aa
|
[
"Apache-2.0"
] | null | null | null |
leetcode/hard/smallest_range/srcs/a.py
|
BillionsRichard/pycharmWorkspace
|
709e2681fc6d85ff52fb25717215a365f51073aa
|
[
"Apache-2.0"
] | null | null | null |
# encoding: utf-8
"""
@version: v1.0
@author: Richard
@license: Apache Licence
@contact: billions.richard@qq.com
@site:
@software: PyCharm
@time: 2019/9/12 20:37
"""
from pprint import pprint as pp
from operator import itemgetter
class Solution:
    """LeetCode 632 — Smallest Range Covering Elements from K Lists.

    Given k sorted integer lists, find the smallest range [low, high]
    that contains at least one number from every list.

    Example:
        input:  [[4,10,15,24,26], [0,9,12,20], [5,18,22,30]]
        output: [20, 24]
    """

    def smallestRange(self, nums):
        """Return the smallest covering range as ``[low, high]``.

        Keeps one cursor per list in a min-heap of
        ``(value, list_index, element_index)`` tuples.  The heap top is
        always the current window minimum and ``cur_max`` tracks the
        window maximum, so ``[heap top, cur_max]`` is always a valid
        covering range.  Advancing the cursor of the minimum element is
        the only move that can shrink the range; when any list is
        exhausted, no further window can cover it and we stop.
        Runs in O(n log k) for n total elements.

        Because the window minimum is non-decreasing and we only accept
        strictly smaller widths, ties are resolved in favour of the
        smaller ``low`` (the tie-break the old commented-out code at the
        ``min_range_len`` comparison was reaching for).

        NOTE(review): the previous implementation reused ``k`` (the
        number of lists, needed by its "window covers all k lists"
        test) as a scratch loop counter, corrupting every coverage
        check after the first full window — replaced by the standard
        heap sweep.

        :param nums: list of k non-empty sorted integer lists.
        :return: ``[low, high]`` of the smallest covering range, or
                 ``None`` when ``nums`` is empty.
        """
        import heapq  # local import: keeps this fix self-contained

        if not nums:
            return None

        # Seed the heap with the head of every list.
        heap = [(row[0], i, 0) for i, row in enumerate(nums)]
        heapq.heapify(heap)
        cur_max = max(row[0] for row in nums)
        best_low, best_high = heap[0][0], cur_max

        while True:
            low, i, j = heapq.heappop(heap)
            if cur_max - low < best_high - best_low:
                best_low, best_high = low, cur_max
            if j + 1 == len(nums[i]):
                # List i is exhausted: no window can cover it anymore.
                return [best_low, best_high]
            nxt = nums[i][j + 1]
            cur_max = max(cur_max, nxt)
            heapq.heappush(heap, (nxt, i, j + 1))
if __name__ == '__main__':
s = Solution()
nums = [[4, 10, 15, 24, 26], [0, 9, 12, 20], [5, 18, 22, 30]]
# nums = [[10],[11]]
nums = [[11,38,83,
84,84,85,88,89,89,92],[28,61,89],[52,77,79,80,81],[21,25,26,26,26,27],[9,83,85,90],[84,85,87],[26,68,70,71],[36,40,41,42,45],[-34,21],[-28,-28,-23,1,13,21,28,37,37,38],[-74,1,2,22,33,35,43,45],[54,96,98,98,99],[43,54,60,65,71,75],[43,46],[50,50,58,67,69],[7,14,15],[78,80,89,89,90],[35,47,63,69,77,92,94]]
nums = [[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,
99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786
,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,998
68,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,9525
5,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100
000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,1000
00],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952
,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,9
9998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93
698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,8
3723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,1000
00],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89
388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99
998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99
909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,1000
00],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679
,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,
92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957
,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,
99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,9
9896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,999
87,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,2044
8,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,730
12,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,
99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-1946
3,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,9977
1,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,9964
4,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,
99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,6
5266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3
474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,9
9347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99
934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,997
32,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941
,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[
19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432
,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99
955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-6
2512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,
98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,
99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99
802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000
],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],
[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,971
03,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,9901
3,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008
,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,9
9902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980
,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,5
0983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,9981
1,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,1
00000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92
494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99
553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,9867
7,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99
999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,9999
8,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794
,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,
98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,9
8969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,980
91,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,9
9777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17
042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,
99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,999
79,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,8678
5,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,9646
2,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,
98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,9999
8,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,
99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,8
4537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93
245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,979
95,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979
,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,987
18,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[
-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99
182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909
,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,
78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,
95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48
599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,
99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99
955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-969
55,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],
[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641
,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,8
8474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183
,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995
,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,9814
3,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,9
9724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,10000
0],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23
262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000]
,[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99
653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,9914
7,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100
000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,9999
9,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,999
99],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,777
53,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,1000
00],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,9
9995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,
95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,995
74,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,9
9997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,10
0000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,999
92,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,9890
6,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,
98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999
,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,
99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994
,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868
,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,
99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,981
68,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87
898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836
,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,999
95,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,9999
8,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428
,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,
91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95
855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,9
9999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99
925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,9
9944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,6
7836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99
230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790
,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-58466,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000],[95387,95790,97307,98168,99868,99995,99995,100000],[-69454,-17042,8172,50983,63432,72854,73012,80848,83723,85916,91759,99779,99913,99944,99994,99999,99999],[65641,95910,97995,98196,98969,99008,99591,99732,99735,99896,99952,99989,99999,100000],[57459,95855,97360,98320,99147,99865,99955,99989,99997,99998,100000],[-81589,-3474,84141,92981,95255,99192,99962,99970,99994,99998,99999,100000],[-23262,92924,95548,96462,99338,99553,99555,99569,99644,99903,99909,99999,99999,100000],[-5846
6,24432,87898,92795,95701,98143,98163,99182,99351,99746,99811,99943,99955,99978,99997,100000],[-97588,7867,10356,20288,67836,69868,73038,77753,81937,88474,89979,92182,98091,99635,99902,99941,99975,99987,99991,99998,99998,99998,99998,99998,99999,99999,99999,100000],[-96955,41521,84537,89794,96226,97103,97490,99347,99957,99997,100000],[-49247,93963,99006,99428,99964,99992,100000],[46062,48599,95745,98620,98677,99516,99802,99973,99993,100000],[-3786,59724,62870,80033,90471,98836,99395,99574,99682,99724,99909,99963,99979,99999,100000],[-62512,-19463,84187,89388,91368,95524,98987,99085,99230,99809,99978,100000],[18183,83019,98718,99570,99777,99980,100000],[19925,20448,81509,93698,98451,98776,98915,99007,99925,99994,99996,99999,100000],[-96129,93245,95417,98492,99013,99921,99934,99989,99995,100000],[-25468,61948,68372,85478,91239,98906,98988,99653,99915,99957,99998,99999,99999,100000],[36648,65266,95679,98905,99868,99977,99983,99983,99995,99995,99996,99997,100000],[56006,78969,86785,89834,92494,93887,98268,99771,99982,99998,99999,100000]]
min_range = s.smallestRange(nums)
print(min_range)
| 2,456.801887
| 257,052
| 0.802451
| 41,499
| 260,421
| 5.032145
| 0.008097
| 0.070795
| 0.038615
| 0.032179
| 0.99513
| 0.994871
| 0.994857
| 0.994206
| 0.993301
| 0.993301
| 0
| 0.801002
| 0.004865
| 260,421
| 105
| 257,053
| 2,480.2
| 0.004808
| 0.001974
| 0
| 0.294118
| 0
| 0
| 0.000069
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.014706
| false
| 0
| 0.029412
| 0
| 0.073529
| 0.073529
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
d1f4e833f90f28cccc5cdd66d3668356f1c15f93
| 11,381
|
py
|
Python
|
python/binocs/makeiso.py
|
bathompso/BINOCS
|
87bc117931915e6d15d8f455dfcbb5ce8ad91973
|
[
"MIT"
] | 2
|
2020-02-20T16:07:51.000Z
|
2021-02-17T09:21:20.000Z
|
python/binocs/makeiso.py
|
bathompso/BINOCS
|
87bc117931915e6d15d8f455dfcbb5ce8ad91973
|
[
"MIT"
] | null | null | null |
python/binocs/makeiso.py
|
bathompso/BINOCS
|
87bc117931915e6d15d8f455dfcbb5ce8ad91973
|
[
"MIT"
] | 1
|
2015-04-10T19:39:15.000Z
|
2015-04-10T19:39:15.000Z
|
# MAKEISO subroutines
from __future__ import print_function, division
import sys, subprocess
import numpy as np
def padova(path, outpath):
    '''
    SUBROUTINE:  PADOVA
    DESCRIPTION: Converts files downloaded from Padova's CMD web interface [http://stev.oapd.inaf.it/cgi-bin/cmd] to a usable format
    INPUT:       path -- Path of folder containing the downloaded Padova web files
                 outpath -- Path to folder to hold output
    OUTPUT:      NONE
    FILE OUTPUT: '[outpath]/iso_[FeH].pv.syn.dat' -- File holding isochrone star information to be read into BINOCS
                 0: log[Age] of isochrone
                 1: Initial mass
                 2: Actual mass (at specified age)
                 3: log[Luminosity]
                 4: log[g] (surface gravity)
                 5: log[Temperature]
                 6: Bolometric magnitude
                 7-23: UBVRIugrizJHK[3][4][5][8] magnitudes
    '''
    # Detect what files are present in path directory.
    # NOTE(review): shelling out to `ls` is POSIX-only and breaks on paths with
    # spaces; glob.glob(path + '*.dat') would be a portable replacement.
    webfiles = subprocess.check_output("ls "+path+"*.dat", shell=True).splitlines()
    # Loop through detected files and get [Fe/H]
    webfeh, nlines = [], []
    for f in range(len(webfiles)):
        with open(webfiles[f], 'r') as df:
            lines = df.read().splitlines()
        # Determine what line to read in (shifts by one line in SDSS filter files)
        if lines[3].find('SDSS') >= 0: tmp = lines[11].split()
        else: tmp = lines[10].split()
        # Save [Fe/H] value for this file, which is scaled from Z (Z_sun = 0.01886 here)
        webfeh.append(np.log10(float(tmp[4]) / 0.01886))
        nlines.append(len(lines))
    # Find all unique [Fe/H] values, and print out formatted isochrone file
    for u in np.unique(webfeh):
        # Indices of all files belonging to this [Fe/H] (within 0.015 dex)
        thisuni = [x for x in range(len(webfeh)) if np.abs(webfeh[x] - u) < 0.015]
        thisnlines = max(nlines[x] for x in thisuni)
        # Need all three file types (UBVRI, SDSS+JHK, IRAC) to build complete rows.
        # BUG FIX: the original read "if len < 3", comparing the builtin `len`
        # function itself to 3 -- a TypeError on Python 3 and always False on
        # Python 2, so the intended filter never ran.
        if len(thisuni) < 3: continue
        # Determine output file name ([Fe/H] encoded as e.g. m027 / p005)
        if u < 0: outname = "%s/iso_m%03d.pv.syn.dat" % (outpath, -1.0*u*100.0)
        else: outname = "%s/iso_p%03d.pv.syn.dat" % (outpath, u*100.0)
        print("Printing isochrone for [Fe/H] = %5.2f to '%s'" % (u, outname))
        # Loop through all webfiles for this [Fe/H] and read in data
        data = np.zeros([thisnlines, 24])
        for f in thisuni:
            with open(webfiles[f], 'r') as df:
                lines = df.read().splitlines()
            # Determine what file type this is
            if lines[3].find('SDSS') >= 0:
                print(" Reading SDSS+JHK file '%s'" % (webfiles[f]))
                adj = 1     # SDSS files carry one extra header line
                ftype = 2
            elif lines[11].find('V') >= 0:
                print(" Reading UBVRI file '%s'" % (webfiles[f]))
                adj = 0
                ftype = 1
            else:
                print(" Reading IRAC file '%s'" % (webfiles[f]))
                adj = 0
                ftype = 3
            for i in range(len(lines)):
                # Header/comment lines and blank lines contribute no data
                if lines[i].find('#') >= 0: continue
                tmp = lines[i].split()
                if len(tmp) == 0: continue
                # Save parameters to array (columns 0-6)
                for j in range(7): data[i-adj,j] = float(tmp[j])
                # Save magnitudes to this filter set's columns
                if ftype == 1:      # UBVRI -> columns 7-11
                    for j in range(7, 12): data[i-adj,j] = float(tmp[j])
                elif ftype == 2:    # ugriz + JHK -> columns 12-19
                    for j in range(7, 15): data[i-adj,j+5] = float(tmp[j])
                else:               # IRAC [3][4][5][8] -> columns 20-23
                    for j in range(7, 11): data[i-adj,j+13] = float(tmp[j])
        # Print out newly matched file; rows missing any magnitude are dropped
        with open(outname, 'w') as of:
            for s in range(thisnlines):
                # Check to see whether all magnitudes exist (0 means "never filled")
                badmag = [x for x in data[s,:] if x == 0 or x < -9.9]
                if len(badmag) > 0: continue
                # Print out star
                print("%6.3f %7.4f %7.4f %7.4f %7.4f %7.4f %7.4f %6.3f %6.3f %6.3f %6.3f %6.3f %6.3f %6.3f %6.3f %6.3f %6.3f %6.3f %6.3f %6.3f %6.3f %6.3f %6.3f %6.3f" % (data[s,0], data[s,1], data[s,2], data[s,3], data[s,4], data[s,5], data[s,6], data[s,7], data[s,8], data[s,9], data[s,10], data[s,11], data[s,12], data[s,13], data[s,14], data[s,15], data[s,16], data[s,17], data[s,18], data[s,19], data[s,20], data[s,21], data[s,22], data[s,23]), file=of)
def parsec(path, outpath):
'''
SUBROUTINE: PARSEC
DESCRIPTION: Converts files downloaded from PARSEC's CMD web interface [http://stev.oapd.inaf.it/cgi-bin/cmd] to a usable format
INPUT: path -- Path of folder containing the downloaded PARSEC web files
outpath -- Path to folder to hold output
OUTPUT: NONE
FILE OUTPUT: '[outpath]/iso_[FeH].pc.syn.dat' -- File holding isochrone star information to be read into BINOCS
0: log[Age] of isochrone
1: Initial mass
2: Actual mass (at specified age)
3: log[Luminosity]
4: log[g] (surface gravity)
5: log[Temperature]
6: Bolometric magnitude
7-23: UBVRIugrizJHK[3][4][5][8] magnitudes
'''
# Detect what files are present in path directory
webfiles = subprocess.check_output("ls "+path+"*.dat", shell=True).splitlines()
# Loop through detected files and get [Fe/H]
webfeh, nlines = [], []
for f in range(len(webfiles)):
df = open(webfiles[f], 'r')
lines = df.read().splitlines()
df.close()
# Save [Fe/H] value for this file
tmp = lines[11].split()
webfeh.append(float(tmp[10]))
nlines.append(len(lines))
# Find all unique [Fe/H] values, and print out formatted isochrone file
for u in np.unique(webfeh):
thisuni = [x for x in range(len(webfeh)) if np.abs(webfeh[x] - u) < 0.015]
thisnlines = max([nlines[x] for x in range(len(webfeh)) if np.abs(webfeh[x] - u) < 0.015])
# If we have all three types of files, we can print an output for this [Fe/H]
if len < 3: continue
# Determine output file name
if u < 0: outname = "%s/iso_m%03d.pc.syn.dat" % (outpath, -1.0*u*100.0)
else: outname = "%s/iso_p%03d.pc.syn.dat" % (outpath, u*100.0)
print("Printing isochrone for [Fe/H] = %5.2f to '%s'" % (u, outname))
# Loop through all webfiles for this [Fe/H] and read in data
data = np.zeros([thisnlines, 24])
for f in thisuni:
df = open(webfiles[f], 'r')
lines = df.read().splitlines()
df.close()
# Determine what file type this is
if lines[12].find('Ks') >= 0:
print(" Reading SDSS+JHK file '%s'" % (webfiles[f]))
ftype = 2
elif lines[12].find('V') >= 0:
print(" Reading UBVRI file '%s'" % (webfiles[f]))
ftype = 1
else:
print(" Reading IRAC file '%s'" % (webfiles[f]))
ftype = 3
for i in range(len(lines)):
if lines[i].find('#') >= 0: continue
tmp = lines[i].split()
if len(tmp) == 0: continue
# Save parameters to array
for j in range(7): data[i,j] = float(tmp[j+1])
# Save magnitudes to array
if ftype == 1:
for j in range(7, 12): data[i,j] = float(tmp[j+1])
elif ftype == 2:
for j in range(7, 15): data[i,j+5] = float(tmp[j+1])
else:
for j in range(7, 11): data[i,j+13] = float(tmp[j+1])
# Print out newly matched file
of = open(outname, 'w')
for s in range(thisnlines):
# Check to see whether all magnitudes exist
badmag = [x for x in data[s,:] if x == 0 or x < -9.9]
if len(badmag) > 0: continue
# Print out star
print("%6.3f %7.4f %7.4f %7.4f %7.4f %7.4f %7.4f %6.3f %6.3f %6.3f %6.3f %6.3f %6.3f %6.3f %6.3f %6.3f %6.3f %6.3f %6.3f %6.3f %6.3f %6.3f %6.3f %6.3f" % (data[s,0], data[s,1], data[s,2], data[s,3], data[s,4], data[s,5], data[s,6], data[s,7], data[s,8], data[s,9], data[s,10], data[s,11], data[s,12], data[s,13], data[s,14], data[s,15], data[s,16], data[s,17], data[s,18], data[s,19], data[s,20], data[s,21], data[s,22], data[s,23]), file=of)
of.close()
def dartmouth(path, outpath):
    '''
    SUBROUTINE:  DARTMOUTH
    DESCRIPTION: Converts files downloaded from Dartmouth's web interface [http://stellar.dartmouth.edu/models/isolf_new.html] to a usable format
    INPUT:       path -- Path of folder containing the downloaded Dartmouth web files
                 outpath -- Path to folder to hold output
    OUTPUT:      NONE
    FILE OUTPUT: '[outpath]/iso_[FeH].dm.syn.dat' -- File holding isochrone star information to be read into BINOCS
                 0: log[Age] of isochrone
                 1: Initial mass
                 2: Actual mass (at specified age)
                 3: log[Luminosity]
                 4: log[g] (surface gravity)
                 5: log[Temperature]
                 6: Bolometric magnitude
                 7-23: UBVRIugrizJHK[3][4][5][8] magnitudes
    '''
    # Detect what files are present in path directory.
    # NOTE(review): `ls` through a shell is POSIX-only; glob.glob would be portable.
    webfiles = subprocess.check_output("ls "+path+"*.iso", shell=True).splitlines()
    # Loop through detected files and get [Fe/H]
    webfeh, nlines = [], []
    for f in range(len(webfiles)):
        with open(webfiles[f], 'r') as df:
            lines = df.read().splitlines()
        # Save [Fe/H] value for this file (token 5 of header line 3)
        tmp = lines[3].split()
        webfeh.append(float(tmp[5]))
        nlines.append(len(lines))
    # Find all unique [Fe/H] values, and print out formatted isochrone file
    for u in np.unique(webfeh):
        # Indices of all files belonging to this [Fe/H] (within 0.015 dex)
        thisuni = [x for x in range(len(webfeh)) if np.abs(webfeh[x] - u) < 0.015]
        thisnlines = max(nlines[x] for x in thisuni)
        # Need all three file types (SDSS, UBVRI+JHK, IRAC) to build complete rows.
        # BUG FIX: the original read "if len < 3", comparing the builtin `len`
        # to 3 (TypeError on Python 3, always False on Python 2).
        if len(thisuni) < 3: continue
        # Determine output file name ([Fe/H] encoded as e.g. m030 / p005)
        if u < 0: outname = "%s/iso_m%03d.dm.syn.dat" % (outpath, -1.0*u*100.0)
        else: outname = "%s/iso_p%03d.dm.syn.dat" % (outpath, u*100.0)
        print("Printing isochrone for [Fe/H] = %5.2f to '%s'" % (u, outname))
        # Loop through all webfiles for this [Fe/H] and read in data
        data = np.zeros([thisnlines, 24])
        for f in thisuni:
            with open(webfiles[f], 'r') as df:
                lines = df.read().splitlines()
            # Determine what file type this is (from header line 5)
            if lines[5].find('SDSS') >= 0:
                print(" Reading SDSS file '%s'" % (webfiles[f]))
                ftype = 1
            elif lines[5].find('Bessel') >= 0:
                print(" Reading UBVRI+JHK file '%s'" % (webfiles[f]))
                ftype = 2
            else:
                print(" Reading IRAC file '%s'" % (webfiles[f]))
                ftype = 3
            for i in range(len(lines)):
                # '#AGE=x.xxxx' header lines carry the age of the following
                # isochrone block (value in Gyr, converted to log years).
                # A data row before the first AGE line would raise NameError
                # on `thisage` -- preserved from the original.
                if lines[i].find('AGE') == 1: thisage = np.log10(float((lines[i])[5:11])*1E9)
                if lines[i].find('#') >= 0: continue
                tmp = lines[i].split()
                if len(tmp) == 0: continue
                # Save parameters to array; the single mass column fills both
                # the "initial" and "actual" mass slots
                data[i,0] = thisage
                data[i,1], data[i,2] = float(tmp[1]), float(tmp[1])
                data[i,3], data[i,4], data[i,5] = float(tmp[4]), float(tmp[2]), float(tmp[3]) # LogL, LogT, LogG
                # NOTE(review): if tmp[2]=LogT and tmp[3]=LogG as the comment above
                # states, columns 4/5 hold LogT/LogG -- the opposite of the
                # docstring's "4: log[g], 5: log[T]". Confirm against BINOCS readers.
                data[i,6] = -2.5 * data[i,3] + 4.75 # Bolometric magnitude from log[L] (zero point 4.75)
                # Save magnitudes to this filter set's columns
                if ftype == 1:      # SDSS ugriz -> columns 12-16
                    for j in range(5, 10): data[i,j+7] = float(tmp[j])
                elif ftype == 2:    # UBVRI -> columns 7-11, JHK -> columns 17-19
                    for j in range(5, 10): data[i,j+2] = float(tmp[j])
                    for j in range(10,13): data[i,j+7] = float(tmp[j])
                else:               # IRAC -> columns 20-23
                    for j in range(5, 9): data[i,j+15] = float(tmp[j])
        # Print out newly matched file; rows missing any magnitude are dropped
        with open(outname, 'w') as of:
            for s in range(thisnlines):
                # Check to see whether all magnitudes exist (0 means "never filled")
                badmag = [x for x in data[s,:] if x == 0 or x < -9.9]
                if len(badmag) > 0: continue
                # Print out star
                print("%6.3f %7.4f %7.4f %7.4f %7.4f %7.4f %7.4f %6.3f %6.3f %6.3f %6.3f %6.3f %6.3f %6.3f %6.3f %6.3f %6.3f %6.3f %6.3f %6.3f %6.3f %6.3f %6.3f %6.3f" % (data[s,0], data[s,1], data[s,2], data[s,3], data[s,4], data[s,5], data[s,6], data[s,7], data[s,8], data[s,9], data[s,10], data[s,11], data[s,12], data[s,13], data[s,14], data[s,15], data[s,16], data[s,17], data[s,18], data[s,19], data[s,20], data[s,21], data[s,22], data[s,23]), file=of)
| 44.112403
| 445
| 0.597399
| 1,980
| 11,381
| 3.424747
| 0.111616
| 0.055302
| 0.028314
| 0.042472
| 0.894116
| 0.863442
| 0.852972
| 0.837192
| 0.832326
| 0.812859
| 0
| 0.060402
| 0.230472
| 11,381
| 258
| 446
| 44.112403
| 0.713862
| 0.356999
| 0
| 0.705128
| 0
| 0.019231
| 0.139317
| 0.018998
| 0
| 0
| 0
| 0
| 0
| 1
| 0.019231
| false
| 0
| 0.019231
| 0
| 0.038462
| 0.102564
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
06068b8e1aa08769ce5a8b9e2b0696f749f4b5fd
| 11,028
|
py
|
Python
|
lively-lions/MUD/dungeon/test/view/test_muduser.py
|
Zorakinezear/summer-code-jam-2020
|
33b4158bf89f46eed0a6bc8d37e72d904695a15e
|
[
"MIT"
] | 40
|
2020-08-02T07:38:22.000Z
|
2021-07-26T01:46:50.000Z
|
lively-lions/MUD/dungeon/test/view/test_muduser.py
|
Zorakinezear/summer-code-jam-2020
|
33b4158bf89f46eed0a6bc8d37e72d904695a15e
|
[
"MIT"
] | 134
|
2020-07-31T12:15:45.000Z
|
2020-12-13T04:42:19.000Z
|
lively-lions/MUD/dungeon/test/view/test_muduser.py
|
Zorakinezear/summer-code-jam-2020
|
33b4158bf89f46eed0a6bc8d37e72d904695a15e
|
[
"MIT"
] | 101
|
2020-07-31T12:00:47.000Z
|
2021-11-01T09:06:58.000Z
|
# from django.contrib.auth.models import User
# from django.test import TestCase
from django.test import Client
# from django.urls import reverse
import pytest
# from mixer.backend.django import mixer
from .test_base import BaseTestCase
from dungeon.models.character import MudUser
@pytest.mark.django_db
class mudUserTestCase(BaseTestCase):
    """End-to-end tests for the /api/muduser/ endpoint.

    The endpoint multiplexes several actions through the 'view_name' POST
    field (is_working, create_user, login_user, login_check, logout_user,
    get_username), so every test drives it via client.post.

    The previous version copy-pasted the create/login/assert blocks into
    every test; they are factored into the _create_user / _login_user /
    _assert_post helpers below with identical behavior.
    """

    # Single URL under test; all behavior is selected via 'view_name'.
    URL = 'http://localhost:8000/api/muduser/'

    # ------------------------------------------------------------------
    # Shared helpers
    # ------------------------------------------------------------------

    def _assert_post(self, client, payload, expected_body):
        """POST payload and assert a 200 response with the exact body."""
        response = client.post(self.URL, payload)
        assert response.status_code == 200, "Should be same"
        assert response.content == expected_body, "Should be same"

    def _create_user(self, client, username, password, expected_pk):
        """Create a user via the API and assert it was persisted."""
        payload = {'username': username, 'password': password, 'view_name': 'create_user'}
        self._assert_post(client, payload, f"Success create User {username}".encode())
        assert MudUser.objects.count() == expected_pk, "Should be equal"
        assert MudUser.objects.get(pk=expected_pk).username == username, "Should be equal"

    def _login_user(self, client, username, password):
        """Log a user in via the API and assert success."""
        payload = {'username': username, 'password': password, 'view_name': 'login_user'}
        self._assert_post(client, payload, b'Login success')

    # ------------------------------------------------------------------
    # Tests
    # ------------------------------------------------------------------

    def test_create_method_test(self):
        client = Client()
        self._assert_post(client, {'view_name': 'is_working'}, b"Working POST is_working")

    def test_create_method_multiple_data_test(self):
        client = Client()
        payload = {'view_name': 'is_working', 'multiple_data': 'is_multiple'}
        self._assert_post(client, payload, b"Working multiple_data POST is_multiple")

    def test_create_hello_world_muduser(self):
        self._create_user(Client(), 'hello_world', 'hello_world', 1)

    def test_create_two_muduser(self):
        client = Client()
        self._create_user(client, 'hello_world01', 'hello_world01', 1)
        self._create_user(client, 'hello_world02', 'hello_world02', 2)

    def test_create_user_and_get_authenticate_and_login(self):
        self._create_user(Client(), 'hello_world01', 'hello_world01', 1)
        # Login is done with a brand-new client (no carried-over session)
        self._login_user(Client(), 'hello_world01', 'hello_world01')

    def test_create_user_and_login_and_login_Check_test(self):
        self._create_user(Client(), 'hello_world01', 'hello_world01', 1)
        client = Client()
        self._login_user(client, 'hello_world01', 'hello_world01')
        self._assert_post(client, {'view_name': 'login_check'}, b'Logged in')

    def test_create_user_and_login_and_login_Check_test_with_new_client(self):
        self._create_user(Client(), 'hello_world01', 'hello_world01', 1)
        self._login_user(Client(), 'hello_world01', 'hello_world01')
        # A brand-new client carries no session, so the login check fails
        self._assert_post(Client(), {'view_name': 'login_check'}, b'invalid')

    def test_create_user_and_login_and_logout(self):
        self._create_user(Client(), 'hello_world01', 'hello_world01', 1)
        client = Client()
        self._login_user(client, 'hello_world01', 'hello_world01')
        self._assert_post(client, {'view_name': 'login_check'}, b'Logged in')
        # logout
        self._assert_post(client, {'view_name': 'logout_user'}, b'Logout success')
        # fail login check
        self._assert_post(client, {'view_name': 'login_check'}, b'invalid')

    def test_create_user_and_login_and_logout_and_reset_now_connected_character_name(self):
        # create user
        self._create_user(Client(), 'hello_world01', 'hello_world01', 1)
        # login user
        client = Client()
        self._login_user(client, 'hello_world01', 'hello_world01')
        # login check
        self._assert_post(client, {'view_name': 'login_check'}, b'Logged in')
        # logout
        self._assert_post(client, {'view_name': 'logout_user'}, b'Logout success')
        # fail login check
        self._assert_post(client, {'view_name': 'login_check'}, b'invalid')

    def test_create_user_and_login_and_get_username(self):
        self._create_user(Client(), 'hello_world01', 'hello_world01', 1)
        client = Client()
        self._login_user(client, 'hello_world01', 'hello_world01')
        self._assert_post(client, {'view_name': 'get_username'}, b'hello_world01')
| 60.262295
| 110
| 0.667573
| 1,372
| 11,028
| 5.182216
| 0.059038
| 0.078762
| 0.087764
| 0.088608
| 0.91097
| 0.908579
| 0.892546
| 0.892546
| 0.879747
| 0.879747
| 0
| 0.030884
| 0.195502
| 11,028
| 182
| 111
| 60.593407
| 0.770514
| 0.020856
| 0
| 0.825806
| 0
| 0
| 0.35166
| 0.02086
| 0
| 0
| 0
| 0
| 0.451613
| 1
| 0.064516
| false
| 0.096774
| 0.025806
| 0
| 0.096774
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
ae0abc7e1cce858305ba11825bb33b7973ce37e3
| 221
|
py
|
Python
|
FC/First_commit/views.py
|
tinkerhub-org/MyFOSC
|
040eb34d43c64677638322c35e2382269df2196d
|
[
"MIT"
] | 1
|
2020-09-10T03:03:15.000Z
|
2020-09-10T03:03:15.000Z
|
FC/First_commit/views.py
|
allenpbiju/MyFOSC
|
040eb34d43c64677638322c35e2382269df2196d
|
[
"MIT"
] | null | null | null |
FC/First_commit/views.py
|
allenpbiju/MyFOSC
|
040eb34d43c64677638322c35e2382269df2196d
|
[
"MIT"
] | 2
|
2020-09-09T15:34:51.000Z
|
2020-09-10T16:05:34.000Z
|
from django.shortcuts import render
from django.http import HttpResponse
# Create your views here.
def index(request):
    """Render the site's landing page."""
    template_name = "landing.html"
    return render(request, template_name)
def create(request):
    """Render the creation page."""
    template_name = "create.html"
    return render(request, template_name)
| 24.555556
| 41
| 0.778281
| 30
| 221
| 5.733333
| 0.566667
| 0.116279
| 0.22093
| 0.302326
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.122172
| 221
| 9
| 42
| 24.555556
| 0.886598
| 0.104072
| 0
| 0
| 0
| 0
| 0.116751
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
ae6552ad5122452c516a30b07c7e426bebd8cf0e
| 46
|
py
|
Python
|
flask/src/flask_app/shared/exceptions/__init__.py
|
AlTosterino/FlaskVsFastAPI
|
db826b1bd19216ff1ae7bdba518244178d8f59bf
|
[
"MIT"
] | 5
|
2021-04-16T20:00:09.000Z
|
2022-01-23T23:39:03.000Z
|
flask/src/flask_app/shared/exceptions/__init__.py
|
AlTosterino/FlaskVsFastAPI
|
db826b1bd19216ff1ae7bdba518244178d8f59bf
|
[
"MIT"
] | null | null | null |
flask/src/flask_app/shared/exceptions/__init__.py
|
AlTosterino/FlaskVsFastAPI
|
db826b1bd19216ff1ae7bdba518244178d8f59bf
|
[
"MIT"
] | null | null | null |
from .database import DatabaseRepositoryError
| 23
| 45
| 0.891304
| 4
| 46
| 10.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086957
| 46
| 1
| 46
| 46
| 0.97619
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ae9e3ecf33562939549cb46019a8f2e736d4cf53
| 8,768
|
py
|
Python
|
drf_to_s3/tests/test_completion_view.py
|
treyhunner/drf-to-s3
|
2384b7e277da0e795ab9e0241e829bcc4ca4dc77
|
[
"MIT"
] | 28
|
2015-01-15T18:31:24.000Z
|
2018-11-08T07:33:42.000Z
|
drf_to_s3/tests/test_completion_view.py
|
treyhunner/drf-to-s3
|
2384b7e277da0e795ab9e0241e829bcc4ca4dc77
|
[
"MIT"
] | 10
|
2020-01-01T07:26:19.000Z
|
2021-06-25T15:26:53.000Z
|
drf_to_s3/tests/test_completion_view.py
|
treyhunner/drf-to-s3
|
2384b7e277da0e795ab9e0241e829bcc4ca4dc77
|
[
"MIT"
] | 7
|
2015-01-29T20:59:29.000Z
|
2017-04-24T16:05:48.000Z
|
import datetime, json, mock, unittest
from django.conf.urls import patterns, url
from django.test.utils import override_settings
from rest_framework import status
from rest_framework.test import APITestCase
@override_settings(
    AWS_UPLOAD_SECRET_ACCESS_KEY='12345',
    AWS_UPLOAD_BUCKET='my-upload-bucket',
    AWS_UPLOAD_PREFIX_FUNC=lambda x: 'uploads',
    AWS_STORAGE_BUCKET_NAME='my-storage-bucket',
    APPEND_SLASH=False  # Work around a Django bug: https://code.djangoproject.com/ticket/21766
)
class TestCompletionViewWithoutAccessControl(APITestCase):
    '''
    This test suite uses a static prefix function to simply
    test things that don't involve access control.

    FIX: assertEquals (deprecated alias, removed in Python 3.12) replaced
    with assertEqual throughout; the repeated notification payload is built
    by _notification().
    '''
    from drf_to_s3.views import fine_uploader_views
    urls = patterns('',
        url(r'^s3/uploaded$', fine_uploader_views.FineUploadCompletionView.as_view()),
    )

    def _notification(self, name='baz'):
        """Return a complete upload-completion payload; tests tweak fields as needed."""
        return {
            'bucket': 'my-upload-bucket',
            'key': 'uploads/foo/bar/baz',
            'uuid': '12345',
            'name': name,
            'etag': '67890',
        }

    @mock.patch('drf_to_s3.s3.copy')
    def test_that_upload_notification_returns_success(self, copy):
        resp = self.client.post('/s3/uploaded', self._notification())
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        self.assertEqual(len(resp.content), 0)

    @mock.patch('drf_to_s3.s3.copy')
    @mock.patch('uuid.uuid4')
    def test_that_upload_notification_copies_to_new_key(self, uuid4, copy):
        uuid4.return_value = new_key = 'abcde'
        notification = self._notification()
        self.client.post('/s3/uploaded', notification)
        copy.assert_called_once_with(
            src_bucket=notification['bucket'],
            src_key=notification['key'],
            dst_bucket='my-storage-bucket',
            dst_key=new_key
        )

    @mock.patch('drf_to_s3.s3.copy')
    @mock.patch('uuid.uuid4')
    def test_that_upload_notification_preserves_extension_for_new_key(self, uuid4, copy):
        uuid4.return_value = new_key = 'abcde'
        notification = self._notification(name='baz.txt')
        self.client.post('/s3/uploaded', notification)
        copy.assert_called_once_with(
            src_bucket=notification['bucket'],
            src_key=notification['key'],
            dst_bucket='my-storage-bucket',
            dst_key=new_key + '.txt'
        )

    @mock.patch('drf_to_s3.s3.copy')
    def test_that_upload_notification_returns_error_for_nonexistent_key(self, copy):
        from drf_to_s3 import s3
        copy.side_effect = s3.ObjectNotFoundException
        resp = self.client.post('/s3/uploaded', self._notification(name='baz.txt'))
        self.assertEqual(resp.status_code, status.HTTP_200_OK) # for IE9/IE3
        content = json.loads(resp.content)
        self.assertEqual(content['error'], 'Invalid key or bad ETag')

    def test_that_upload_notification_returns_error_for_invalid_data(self):
        # Omitting the bucket makes the payload invalid
        notification = self._notification(name='baz.txt')
        del notification['bucket']
        resp = self.client.post('/s3/uploaded', notification)
        self.assertEqual(resp.status_code, status.HTTP_200_OK) # for IE9/IE3
        content = json.loads(resp.content)
        expected_error = 'Unable to complete your request. Errors with bucket'
        self.assertEqual(content['error'], expected_error)
@override_settings(
    AWS_UPLOAD_SECRET_ACCESS_KEY='12345',
    AWS_UPLOAD_BUCKET='my-upload-bucket',
    AWS_STORAGE_BUCKET_NAME='my-storage-bucket',
    APPEND_SLASH=False  # Work around a Django bug: https://code.djangoproject.com/ticket/21766
)
class TestCompletionViewSessionAuth(APITestCase):
    """Session-authenticated completion endpoint: upload keys must start with the user's prefix.

    Changes from the original: deprecated assertEquals replaced with
    assertEqual, and the unused ``user`` local in setUp dropped.
    """
    from drf_to_s3.views import fine_uploader_views
    urls = patterns('',
        url(r'^s3/uploaded$', fine_uploader_views.FineUploadCompletionView.as_view()),
    )

    def setUp(self):
        """Create a known user; individual tests decide whether to log in."""
        from .util import get_user_model
        self.username = 'frodo'
        self.password = 'shire1234'
        get_user_model().objects.create_user(
            username=self.username,
            password=self.password
        )

    @mock.patch('drf_to_s3.s3.copy')
    def test_that_upload_notification_with_hashed_session_key_returns_success(self, copy):
        """A logged-in user posting a key under their own prefix gets 200 with an empty body."""
        self.client.login(
            username=self.username,
            password=self.password
        )
        prefix = self.username
        notification = {
            'bucket': 'my-upload-bucket',
            'key': prefix + '/foo/bar/baz',
            'uuid': '12345',
            'name': 'baz',
            'etag': '67890',
        }
        resp = self.client.post('/s3/uploaded', notification)
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        self.assertEqual(len(resp.content), 0)

    def test_that_upload_notification_without_prefix_fails(self):
        """A key outside the user's prefix is rejected with an explanatory error."""
        self.client.login(
            username=self.username,
            password=self.password
        )
        notification = {
            'bucket': 'my-upload-bucket',
            'key': 'foo/bar/baz',
            'uuid': '12345',
            'name': 'baz',
            'etag': '67890',
        }
        resp = self.client.post('/s3/uploaded', notification)
        # 200 instead of an error status so old-IE iframe transports can read the body.
        self.assertEqual(resp.status_code, status.HTTP_200_OK)  # for IE9/IE3
        content = json.loads(resp.content)
        self.assertTrue(content['error'].startswith('Key should start with'))

    def test_that_upload_notification_without_login_fails(self):
        """Anonymous completion notifications are rejected with a login prompt."""
        prefix = self.username
        notification = {
            'bucket': 'my-upload-bucket',
            'key': prefix + '/foo/bar/baz',
            'uuid': '12345',
            'name': 'baz',
            'etag': '67890',
        }
        resp = self.client.post('/s3/uploaded', notification)
        # 200 instead of an error status so old-IE iframe transports can read the body.
        self.assertEqual(resp.status_code, status.HTTP_200_OK)  # for IE9/IE3
        content = json.loads(resp.content)
        self.assertEqual(content['error'], 'Log in before uploading')
@override_settings(
    AWS_UPLOAD_SECRET_ACCESS_KEY='12345',
    AWS_UPLOAD_BUCKET='my-upload-bucket',
    AWS_STORAGE_BUCKET_NAME='my-storage-bucket',
    APPEND_SLASH=False  # Work around a Django bug: https://code.djangoproject.com/ticket/21766
)
class TestCompletionViewAPIAuth(APITestCase):
    """API-client completion endpoint tests.

    Renamed from TestCompletionViewSessionAuth: the module already defines a
    class with that exact name, so this second definition silently replaced
    the first and the session-auth tests never ran. Also replaces deprecated
    assertEquals with assertEqual.
    """
    from drf_to_s3.views import api_client_views
    urls = patterns('',
        url(r'^s3/api_uploaded$', api_client_views.APIUploadCompletionView.as_view())
    )

    def setUp(self):
        """Create a user and log the test client in; every test here runs authenticated."""
        from .util import get_user_model
        self.username = 'frodo'
        self.password = 'shire1234'
        get_user_model().objects.create_user(
            username=self.username,
            password=self.password
        )
        self.client.login(
            username=self.username,
            password=self.password
        )

    @mock.patch('drf_to_s3.s3.copy')
    def test_that_api_upload_notification_returns_success(self, copy):
        """A valid notification for a key under the user's prefix returns 200 with an empty body."""
        prefix = self.username
        notification = {
            'key': prefix + '/foo/bar/baz',
            'filename': 'baz',
        }
        resp = self.client.post('/s3/api_uploaded', notification)
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        self.assertEqual(len(resp.content), 0)

    def test_that_api_upload_notification_returns_error_for_invalid_data(self):
        """Missing required fields yields 400 with one error entry per missing field."""
        notification = {
            'name': 'baz.txt',
        }
        resp = self.client.post('/s3/api_uploaded', notification)
        self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
        content = json.loads(resp.content)
        self.assertEqual(len(content.keys()), 2)

    @mock.patch('drf_to_s3.s3.copy')
    def test_that_api_upload_notification_returns_error_for_nonexistent_key(self, copy):
        """ObjectNotFoundException from the S3 copy maps to a 400 with a detail message."""
        from drf_to_s3 import s3
        copy.side_effect = s3.ObjectNotFoundException
        prefix = self.username
        notification = {
            'key': prefix + '/foo/bar/baz',
            'filename': 'baz.txt',
        }
        resp = self.client.post('/s3/api_uploaded', notification)
        self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
        content = json.loads(resp.content)
        self.assertEqual(content['detail'], 'Invalid key or bad ETag')
| 37.152542
| 94
| 0.625
| 1,012
| 8,768
| 5.1917
| 0.159091
| 0.05177
| 0.015988
| 0.033498
| 0.869052
| 0.86182
| 0.83346
| 0.83346
| 0.830034
| 0.782832
| 0
| 0.030539
| 0.253079
| 8,768
| 235
| 95
| 37.310638
| 0.771721
| 0.041172
| 0
| 0.734597
| 0
| 0
| 0.154535
| 0
| 0
| 0
| 0
| 0
| 0.094787
| 1
| 0.061611
| false
| 0.033175
| 0.056872
| 0
| 0.146919
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8816ab54cf36b2c7e6cab0a91386160b0f0f5bb5
| 19,145
|
py
|
Python
|
Archive/use_saved_class+reg.py
|
suriya-jambunathan/2021_FYP_108117096-108117040
|
bf5bd4b84d20e03ee00dbba1886d20d8896a6294
|
[
"MIT"
] | null | null | null |
Archive/use_saved_class+reg.py
|
suriya-jambunathan/2021_FYP_108117096-108117040
|
bf5bd4b84d20e03ee00dbba1886d20d8896a6294
|
[
"MIT"
] | null | null | null |
Archive/use_saved_class+reg.py
|
suriya-jambunathan/2021_FYP_108117096-108117040
|
bf5bd4b84d20e03ee00dbba1886d20d8896a6294
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Sun Feb 28 15:24:03 2021
@author: suriy
"""
from numba import jit
# Fitting the nan values with the average
def avgfit(l):
    """Return a copy of *l* with every NaN/None replaced by the mean of the present values.

    The original implementation walked the list twice, re-collecting the
    non-missing values; this version computes the mean once and builds the
    result in a single comprehension. Raises ZeroDivisionError when every
    entry is missing (same as the original).
    """
    na = pd.isna(l)  # element-wise missing mask (handles None and float NaN)
    present = [v for v, missing in zip(l, na) if not missing]
    avg = sum(present) / len(present)
    return [avg if missing else v for v, missing in zip(l, na)]
#@jit(nopython=True)
# Weighted Mean Absolute Percentage Error
def mean_absolute_percentage_error(y_true, y_pred):
    """Return sum(|pred - true|) / sum(true) * 100 (a WMAPE-style percentage).

    Replaces the manual index loop with sum()/zip(); behavior is unchanged.
    Raises ZeroDivisionError when sum(y_true) is 0 (same as the original).
    """
    y_true, y_pred = list(y_true), list(y_pred)
    num = sum(abs(p - t) for t, p in zip(y_true, y_pred))
    den = sum(y_true)
    return abs(num / den) * 100
# Importing the Libraries
import joblib
import pandas as pd
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import explained_variance_score
from sklearn.metrics import confusion_matrix, accuracy_score
from sklearn.utils import class_weight
from sklearn.ensemble import RandomForestClassifier
from sklearn.ensemble import RandomForestRegressor
import warnings
warnings.simplefilter(action='ignore')
# Importing the Dataset
dataset = pd.read_csv('antenna.csv')
#X
# Features: every column except the three targets (vswr, gain, bandwidth).
X = dataset.loc[:, dataset.columns != 'vswr']
X = X.loc[:, X.columns != 'gain']
X = X.loc[:, X.columns != 'bandwidth']
# NOTE(review): the last three remaining columns are also dropped here —
# presumably additional non-feature columns in antenna.csv; confirm.
Xi = X.iloc[:, :-3]
Xi = pd.DataFrame(Xi)
#y
def _to_class_labels(values, upper_bounds):
    """Bin numeric values into 'Class 1'..'Class N+1' labels.

    *upper_bounds* are the ascending exclusive upper edges of the first N
    bins; any value >= the last bound falls into the final class. This
    replaces three near-identical elif ladders, and closes the gaps the
    originals had (gain >= 3.5 and vswr < 1 were left unlabeled numeric
    values, which would have broken the downstream classifiers).
    """
    labels = []
    for v in values:
        for idx, bound in enumerate(upper_bounds):
            if v < bound:
                labels.append('Class %d' % (idx + 1))
                break
        else:
            labels.append('Class %d' % (len(upper_bounds) + 1))
    return labels

# Fill NaNs with the column mean, keep the numeric values in the dataset
# (pandas copies the list on assignment), then discretize the local lists
# into class labels for classification.
bw = avgfit(list(dataset['bandwidth']))
dataset['bandwidth'] = bw
bw = _to_class_labels(bw, [100, 115, 120, 121, 122])
gain = avgfit(list(dataset['gain']))
dataset['gain'] = gain
gain = _to_class_labels(gain, [1.3, 1.5, 2.4, 2.7, 2.9])
vswr = avgfit(list(dataset['vswr']))
dataset['vswr'] = vswr
vswr = _to_class_labels(vswr, [1.16, 1.32, 1.5, 2, 4])
y1 = pd.DataFrame(bw)
y2 = pd.DataFrame(gain)
y3 = pd.DataFrame(vswr)
# Accuracy list
acc_list = []
params = ['bandwidth','gain','vswr']
y = pd.DataFrame()
y['bandwidth'] = bw
y['vswr'] = vswr
y['gain'] = gain
classes = ['Class 1','Class 2','Class 3','Class 4','Class 5','Class 6']
max_acc = []
acc_conf = []
#BANDWIDTH
# Evaluate every saved (classifier, per-class regressor set) combination for
# the bandwidth target: classify each test row into a class, then score the
# class-specific regressor on the rows predicted into that class.
param = 'bandwidth'
# Defining the Classifier and Regressor
#Classifier
classifiers = joblib.load('class_bw.sav')  # list of candidate classifiers; classifiers[i][0] is the estimator
#Regressor
regressors = joblib.load('reg_bw.sav')  # list of regressor sets; regressors[i][0..5] is one regressor per class
# Splitting into Test and Train set
X_train, X_test, y_train, y_test = train_test_split(Xi, y[param], test_size = 0.3, random_state = 0)
y_train = pd.DataFrame(y_train)
y_test = pd.DataFrame(y_test)
# Per-class sample counts; computed but not used below.
count = [(list(y_train[param])).count(x) for x in list(set(list(y_train[param])))]
class_weights = dict(zip(list(set(list(y_train[param]))),count))
list_1 = []
list_2 = []
list_3 = []
list_4 = []
list_5 = []
list_6 = []
# Splitting the train set into specific labels for Regression Training
# NOTE(review): 572 is presumably the original dataset size (train indices are
# drawn from 0..n-1); indices missing from the shuffled train split raise
# KeyError and are skipped by the bare except — confirm.
for i in range(572):
    try:
        if 'Class 1' in y_train[param][i]:
            list_1.append(i)
        elif 'Class 2' in y_train[param][i]:
            list_2.append(i)
        elif 'Class 3' in y_train[param][i]:
            list_3.append(i)
        elif 'Class 4' in y_train[param][i]:
            list_4.append(i)
        elif 'Class 5' in y_train[param][i]:
            list_5.append(i)
        elif 'Class 6' in y_train[param][i]:
            list_6.append(i)
    except:
        continue
X_train_1 = X_train.loc[list_1]
X_train_2 = X_train.loc[list_2]
X_train_3 = X_train.loc[list_3]
X_train_4 = X_train.loc[list_4]
X_train_5 = X_train.loc[list_5]
X_train_6 = X_train.loc[list_6]
y_train_1 = (dataset[param]).loc[list_1]
y_train_2 = (dataset[param]).loc[list_2]
y_train_3 = (dataset[param]).loc[list_3]
y_train_4 = (dataset[param]).loc[list_4]
y_train_5 = (dataset[param]).loc[list_5]
y_train_6 = (dataset[param]).loc[list_6]
print(param)
for clf in range(len(classifiers)):
    try:
        #CLASSIFICATION
        # Fitting Classifier to the Training set
        classifier = classifiers[clf][0]
        classifier.fit(X_train, y_train)
        # Predicting the Test set results
        y_pred = classifier.predict(X_test)
        y_predl = list(y_pred)
        y_pred = pd.DataFrame(y_predl)
        # Making the Confusion Matrix
        #cm = confusion_matrix(list(y_test[param]), y_predl)
        acc = accuracy_score(list(y_test[param]), y_predl)
        testlist_1 = []
        testlist_2 = []
        testlist_3 = []
        testlist_4 = []
        testlist_5 = []
        testlist_6 = []
        # Splitting the train set into specific labels for Regression Training
        # Map positional predictions back to the original dataset indices.
        xtestix = X_test.index.values.tolist()
        y_pred['actual index'] = xtestix
        # NOTE(review): 172 is presumably the test-set size (0.3 of the data) — confirm.
        for i in range(172):
            try:
                if 'Class 1' in y_pred[0][i]:
                    testlist_1.append(y_pred['actual index'][i])
                elif 'Class 2' in y_pred[0][i]:
                    testlist_2.append(y_pred['actual index'][i])
                elif 'Class 3' in y_pred[0][i]:
                    testlist_3.append(y_pred['actual index'][i])
                elif 'Class 4' in y_pred[0][i]:
                    testlist_4.append(y_pred['actual index'][i])
                elif 'Class 5' in y_pred[0][i]:
                    testlist_5.append(y_pred['actual index'][i])
                elif 'Class 6' in y_pred[0][i]:
                    testlist_6.append(y_pred['actual index'][i])
            except:
                continue
        X_test_1 = X_test.loc[testlist_1]
        X_test_2 = X_test.loc[testlist_2]
        X_test_3 = X_test.loc[testlist_3]
        X_test_4 = X_test.loc[testlist_4]
        X_test_5 = X_test.loc[testlist_5]
        X_test_6 = X_test.loc[testlist_6]
        y_test_1 = (dataset[param]).loc[testlist_1]
        y_test_2 = (dataset[param]).loc[testlist_2]
        y_test_3 = (dataset[param]).loc[testlist_3]
        y_test_4 = (dataset[param]).loc[testlist_4]
        y_test_5 = (dataset[param]).loc[testlist_5]
        y_test_6 = (dataset[param]).loc[testlist_6]
        for reg in range(len(regressors)):
            # REGRESSION
            # Weighted MAPE: each class-regressor's error weighted by how many
            # test rows were predicted into that class.
            wmape = ((mean_absolute_percentage_error(y_test_1, (regressors[reg][0]).predict(X_test_1))*len(testlist_1)) +
                     (mean_absolute_percentage_error(y_test_2, (regressors[reg][1]).predict(X_test_2))*len(testlist_2)) +
                     (mean_absolute_percentage_error(y_test_3, (regressors[reg][2]).predict(X_test_3))*len(testlist_3)) +
                     (mean_absolute_percentage_error(y_test_4, (regressors[reg][3]).predict(X_test_4))*len(testlist_4)) +
                     (mean_absolute_percentage_error(y_test_5, (regressors[reg][4]).predict(X_test_5))*len(testlist_5)) +
                     (mean_absolute_percentage_error(y_test_6, (regressors[reg][5]).predict(X_test_6))*len(testlist_6)))
            wmape = wmape/172
            #acc_list.append([param,['Accuracy',acc],['MAPE',wmape]])
            acc_conf.append([param,clf,reg,acc,wmape])
        # Progress indicator; 0.0107 presumably maps the combination count onto
        # a percentage across all three sections — confirm.
        print(str(round(((clf)*0.0107),2)) + ' %')
    except:
        continue
del classifiers
del regressors
#GAIN
# Evaluate every saved (classifier, per-class regressor set) combination for
# the gain target. Structure is identical to the bandwidth section above.
param = 'gain'
# Defining the Classifier and Regressor
#Classifier
classifiers = joblib.load('class_gain.sav')  # list of candidate classifiers; classifiers[i][0] is the estimator
#Regressor
# FIX: the original loaded 'reg_vswr.sav' here — a copy-paste error given the
# class_<target>/reg_<target> file naming used by the other two sections.
regressors = joblib.load('reg_gain.sav')  # list of regressor sets; regressors[i][0..5] is one regressor per class
# Splitting into Test and Train set
X_train, X_test, y_train, y_test = train_test_split(Xi, y[param], test_size = 0.3, random_state = 0)
y_train = pd.DataFrame(y_train)
y_test = pd.DataFrame(y_test)
# Per-class sample counts; computed but not used below.
count = [(list(y_train[param])).count(x) for x in list(set(list(y_train[param])))]
class_weights = dict(zip(list(set(list(y_train[param]))),count))
list_1 = []
list_2 = []
list_3 = []
list_4 = []
list_5 = []
list_6 = []
# Splitting the train set into specific labels for Regression Training
# NOTE(review): 572 is presumably the original dataset size; indices missing
# from the shuffled train split raise KeyError and are skipped — confirm.
for i in range(572):
    try:
        if 'Class 1' in y_train[param][i]:
            list_1.append(i)
        elif 'Class 2' in y_train[param][i]:
            list_2.append(i)
        elif 'Class 3' in y_train[param][i]:
            list_3.append(i)
        elif 'Class 4' in y_train[param][i]:
            list_4.append(i)
        elif 'Class 5' in y_train[param][i]:
            list_5.append(i)
        elif 'Class 6' in y_train[param][i]:
            list_6.append(i)
    except:
        continue
X_train_1 = X_train.loc[list_1]
X_train_2 = X_train.loc[list_2]
X_train_3 = X_train.loc[list_3]
X_train_4 = X_train.loc[list_4]
X_train_5 = X_train.loc[list_5]
X_train_6 = X_train.loc[list_6]
y_train_1 = (dataset[param]).loc[list_1]
y_train_2 = (dataset[param]).loc[list_2]
y_train_3 = (dataset[param]).loc[list_3]
y_train_4 = (dataset[param]).loc[list_4]
y_train_5 = (dataset[param]).loc[list_5]
y_train_6 = (dataset[param]).loc[list_6]
print(param)
for clf in range(len(classifiers)):
    try:
        #CLASSIFICATION
        # Fitting Classifier to the Training set
        classifier = classifiers[clf][0]
        classifier.fit(X_train, y_train)
        # Predicting the Test set results
        y_pred = classifier.predict(X_test)
        y_predl = list(y_pred)
        y_pred = pd.DataFrame(y_predl)
        # Making the Confusion Matrix
        #cm = confusion_matrix(list(y_test[param]), y_predl)
        acc = accuracy_score(list(y_test[param]), y_predl)
        testlist_1 = []
        testlist_2 = []
        testlist_3 = []
        testlist_4 = []
        testlist_5 = []
        testlist_6 = []
        # Map positional predictions back to the original dataset indices.
        xtestix = X_test.index.values.tolist()
        y_pred['actual index'] = xtestix
        for i in range(172):
            try:
                if 'Class 1' in y_pred[0][i]:
                    testlist_1.append(y_pred['actual index'][i])
                elif 'Class 2' in y_pred[0][i]:
                    testlist_2.append(y_pred['actual index'][i])
                elif 'Class 3' in y_pred[0][i]:
                    testlist_3.append(y_pred['actual index'][i])
                elif 'Class 4' in y_pred[0][i]:
                    testlist_4.append(y_pred['actual index'][i])
                elif 'Class 5' in y_pred[0][i]:
                    testlist_5.append(y_pred['actual index'][i])
                elif 'Class 6' in y_pred[0][i]:
                    testlist_6.append(y_pred['actual index'][i])
            except:
                continue
        X_test_1 = X_test.loc[testlist_1]
        X_test_2 = X_test.loc[testlist_2]
        X_test_3 = X_test.loc[testlist_3]
        X_test_4 = X_test.loc[testlist_4]
        X_test_5 = X_test.loc[testlist_5]
        X_test_6 = X_test.loc[testlist_6]
        y_test_1 = (dataset[param]).loc[testlist_1]
        y_test_2 = (dataset[param]).loc[testlist_2]
        y_test_3 = (dataset[param]).loc[testlist_3]
        y_test_4 = (dataset[param]).loc[testlist_4]
        y_test_5 = (dataset[param]).loc[testlist_5]
        y_test_6 = (dataset[param]).loc[testlist_6]
        for reg in range(len(regressors)):
            # REGRESSION
            # Weighted MAPE: each class-regressor's error weighted by how many
            # test rows were predicted into that class.
            wmape = ((mean_absolute_percentage_error(y_test_1, (regressors[reg][0]).predict(X_test_1))*len(testlist_1)) +
                     (mean_absolute_percentage_error(y_test_2, (regressors[reg][1]).predict(X_test_2))*len(testlist_2)) +
                     (mean_absolute_percentage_error(y_test_3, (regressors[reg][2]).predict(X_test_3))*len(testlist_3)) +
                     (mean_absolute_percentage_error(y_test_4, (regressors[reg][3]).predict(X_test_4))*len(testlist_4)) +
                     (mean_absolute_percentage_error(y_test_5, (regressors[reg][4]).predict(X_test_5))*len(testlist_5)) +
                     (mean_absolute_percentage_error(y_test_6, (regressors[reg][5]).predict(X_test_6))*len(testlist_6)))
            wmape = wmape/172
            #acc_list.append([param,['Accuracy',acc],['MAPE',wmape]])
            acc_conf.append([param,clf,reg,acc,wmape])
        # Progress indicator, offset past the bandwidth section's combinations.
        print(str(round(((clf+3115)*0.0107),2)) + ' %')
    except:
        continue
del classifiers
del regressors
#VSWR
# Evaluate every saved (classifier, per-class regressor set) combination for
# the VSWR target. Structure is identical to the bandwidth section above.
param = 'vswr'
# Defining the Classifier and Regressor
#Classifier
classifiers = joblib.load('class_vswr.sav')  # list of candidate classifiers; classifiers[i][0] is the estimator
#Regressor
regressors = joblib.load('reg_vswr.sav')  # list of regressor sets; regressors[i][0..5] is one regressor per class
# Splitting into Test and Train set
X_train, X_test, y_train, y_test = train_test_split(Xi, y[param], test_size = 0.3, random_state = 0)
y_train = pd.DataFrame(y_train)
y_test = pd.DataFrame(y_test)
# Per-class sample counts; computed but not used below.
count = [(list(y_train[param])).count(x) for x in list(set(list(y_train[param])))]
class_weights = dict(zip(list(set(list(y_train[param]))),count))
list_1 = []
list_2 = []
list_3 = []
list_4 = []
list_5 = []
list_6 = []
# Splitting the train set into specific labels for Regression Training
# NOTE(review): 572 is presumably the original dataset size; indices missing
# from the shuffled train split raise KeyError and are skipped — confirm.
for i in range(572):
    try:
        if 'Class 1' in y_train[param][i]:
            list_1.append(i)
        elif 'Class 2' in y_train[param][i]:
            list_2.append(i)
        elif 'Class 3' in y_train[param][i]:
            list_3.append(i)
        elif 'Class 4' in y_train[param][i]:
            list_4.append(i)
        elif 'Class 5' in y_train[param][i]:
            list_5.append(i)
        elif 'Class 6' in y_train[param][i]:
            list_6.append(i)
    except:
        continue
X_train_1 = X_train.loc[list_1]
X_train_2 = X_train.loc[list_2]
X_train_3 = X_train.loc[list_3]
X_train_4 = X_train.loc[list_4]
X_train_5 = X_train.loc[list_5]
X_train_6 = X_train.loc[list_6]
y_train_1 = (dataset[param]).loc[list_1]
y_train_2 = (dataset[param]).loc[list_2]
y_train_3 = (dataset[param]).loc[list_3]
y_train_4 = (dataset[param]).loc[list_4]
y_train_5 = (dataset[param]).loc[list_5]
y_train_6 = (dataset[param]).loc[list_6]
print(param)
for clf in range(len(classifiers)):
    try:
        #CLASSIFICATION
        # Fitting Classifier to the Training set
        classifier = classifiers[clf][0]
        classifier.fit(X_train, y_train)
        # Predicting the Test set results
        y_pred = classifier.predict(X_test)
        y_predl = list(y_pred)
        y_pred = pd.DataFrame(y_predl)
        # Making the Confusion Matrix
        #cm = confusion_matrix(list(y_test[param]), y_predl)
        acc = accuracy_score(list(y_test[param]), y_predl)
        testlist_1 = []
        testlist_2 = []
        testlist_3 = []
        testlist_4 = []
        testlist_5 = []
        testlist_6 = []
        # Splitting the train set into specific labels for Regression Training
        # Map positional predictions back to the original dataset indices.
        xtestix = X_test.index.values.tolist()
        y_pred['actual index'] = xtestix
        for i in range(172):
            try:
                if 'Class 1' in y_pred[0][i]:
                    testlist_1.append(y_pred['actual index'][i])
                elif 'Class 2' in y_pred[0][i]:
                    testlist_2.append(y_pred['actual index'][i])
                elif 'Class 3' in y_pred[0][i]:
                    testlist_3.append(y_pred['actual index'][i])
                elif 'Class 4' in y_pred[0][i]:
                    testlist_4.append(y_pred['actual index'][i])
                elif 'Class 5' in y_pred[0][i]:
                    testlist_5.append(y_pred['actual index'][i])
                elif 'Class 6' in y_pred[0][i]:
                    testlist_6.append(y_pred['actual index'][i])
            except:
                continue
        X_test_1 = X_test.loc[testlist_1]
        X_test_2 = X_test.loc[testlist_2]
        X_test_3 = X_test.loc[testlist_3]
        X_test_4 = X_test.loc[testlist_4]
        X_test_5 = X_test.loc[testlist_5]
        X_test_6 = X_test.loc[testlist_6]
        y_test_1 = (dataset[param]).loc[testlist_1]
        y_test_2 = (dataset[param]).loc[testlist_2]
        y_test_3 = (dataset[param]).loc[testlist_3]
        y_test_4 = (dataset[param]).loc[testlist_4]
        y_test_5 = (dataset[param]).loc[testlist_5]
        y_test_6 = (dataset[param]).loc[testlist_6]
        for reg in range(len(regressors)):
            # REGRESSION
            # Weighted MAPE: each class-regressor's error weighted by how many
            # test rows were predicted into that class.
            wmape = ((mean_absolute_percentage_error(y_test_1, (regressors[reg][0]).predict(X_test_1))*len(testlist_1)) +
                     (mean_absolute_percentage_error(y_test_2, (regressors[reg][1]).predict(X_test_2))*len(testlist_2)) +
                     (mean_absolute_percentage_error(y_test_3, (regressors[reg][2]).predict(X_test_3))*len(testlist_3)) +
                     (mean_absolute_percentage_error(y_test_4, (regressors[reg][3]).predict(X_test_4))*len(testlist_4)) +
                     (mean_absolute_percentage_error(y_test_5, (regressors[reg][4]).predict(X_test_5))*len(testlist_5)) +
                     (mean_absolute_percentage_error(y_test_6, (regressors[reg][5]).predict(X_test_6))*len(testlist_6)))
            wmape = wmape/172
            #acc_list.append([param,['Accuracy',acc],['MAPE',wmape]])
            acc_conf.append([param,clf,reg,acc,wmape])
        # Progress indicator, offset past the first two sections' combinations.
        print(str(round(((clf+6230)*0.0107),2)) + ' %')
    except:
        continue
del classifiers
del regressors
# Persist every (param, clf index, reg index, accuracy, wmape) row to CSV.
try:
    np.savetxt("acc_clf+reg.csv",
               acc_conf,
               delimiter =", ",
               fmt ='% s')
    print("The file has been saved")
except Exception as exc:
    # Was a bare except (which also swallows KeyboardInterrupt/SystemExit);
    # narrowed to Exception and the reason is now reported.
    print(" ")
    print("The file is not saved yet: %s" % exc)
| 32.614991
| 123
| 0.56913
| 2,769
| 19,145
| 3.69628
| 0.068617
| 0.030777
| 0.05276
| 0.032829
| 0.821397
| 0.812018
| 0.802736
| 0.802736
| 0.802736
| 0.788959
| 0
| 0.039149
| 0.298198
| 19,145
| 586
| 124
| 32.670648
| 0.722611
| 0.085244
| 0
| 0.727494
| 0
| 0
| 0.055786
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.004866
| false
| 0
| 0.029197
| 0
| 0.036496
| 0.021898
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
88305f4fabefdccaf2374f40cefaebce4a8f4711
| 32,470
|
py
|
Python
|
cycle_2018/migrations/0002_auto_20180216_1834.py
|
RobBickel/nyt-fec
|
802df867c3b31fff8e922be00bab6f40a5db2d00
|
[
"Apache-2.0"
] | 17
|
2018-03-27T15:09:58.000Z
|
2020-05-13T11:32:43.000Z
|
cycle_2018/migrations/0002_auto_20180216_1834.py
|
RobBickel/nyt-fec
|
802df867c3b31fff8e922be00bab6f40a5db2d00
|
[
"Apache-2.0"
] | 59
|
2018-03-21T17:08:15.000Z
|
2021-12-13T19:47:37.000Z
|
cycle_2018/migrations/0002_auto_20180216_1834.py
|
RobBickel/nyt-fec
|
802df867c3b31fff8e922be00bab6f40a5db2d00
|
[
"Apache-2.0"
] | 11
|
2018-09-11T23:18:32.000Z
|
2021-12-15T08:43:58.000Z
|
# Generated by Django 2.0.1 on 2018-02-16 18:34
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('cycle_2018', '0001_initial'),
]
operations = [
migrations.RemoveField(
model_name='filing',
name='id',
),
migrations.AddField(
model_name='filing',
name='filing_id',
field=models.IntegerField(default=1, primary_key=True, serialize=False),
preserve_default=False,
),
migrations.AlterField(
model_name='filing',
name='cash_on_hand_beginning_period',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cash_on_hand_close_of_period',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='committee_name',
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AlterField(
model_name='filing',
name='coverage_from_date',
field=models.CharField(blank=True, max_length=10, null=True),
),
migrations.AlterField(
model_name='filing',
name='coverage_through_date',
field=models.CharField(blank=True, max_length=10, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_candidate_contributions',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_candidate_loan_repayments',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_candidate_loans',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_contributions_from_candidate',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_contributions_to_candidates',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_coordinated_expenditures_by_party_committees',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_exempt_legal_accounting_disbursement',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_federal_election_activity_all_federal',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_federal_election_activity_federal_share',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_federal_election_activity_levin_share',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_federal_election_activity_total',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_fundraising',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_fundraising_disbursements',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_independent_expenditures',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_individual_contribution_total',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_individuals_itemized',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_individuals_unitemized',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_items_on_hand_to_be_liquidated',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_legal_and_accounting',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_levin_funds',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_loans_made',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_made_or_guaranteed_by_candidate',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_net_contributions',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_net_operating_expenditures',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_offsets_to_expenditures',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_operating',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_operating_expenditures',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_other_disbursements',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_other_federal_operating_expenditures',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_other_federal_receipts',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_other_loan_repayments',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_other_loans',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_other_political_committees',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_other_political_committees_pacs',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_other_receipts',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_other_repayments',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_pac_contributions',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_political_party_committees_refunds',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_political_party_contributions',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_received_from_or_guaranteed_by_cand',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_refunds_to_individuals',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_refunds_to_other_committees',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_refunds_to_party_committees',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_shared_operating_expenditures_federal',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_shared_operating_expenditures_nonfederal',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_total_contributions',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_total_contributions_refunds',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_total_disbursements',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_total_disbursements_period',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_total_federal_disbursements',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_total_federal_operating_expenditures',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_total_federal_receipts',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_total_individual_contributions',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_total_loan_repayments_made',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_total_loan_repayments_received',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_total_loans',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_total_nonfederal_transfers',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_total_offset_to_operating_expenditures',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_total_operating_expenditures',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_total_receipts',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_total_refunds',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_transfers_from_aff_other_party_cmttees',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_transfers_from_authorized',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_transfers_from_nonfederal_h3',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_transfers_to_affiliated',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='cycle_transfers_to_other_authorized_committees',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='date_signed',
field=models.CharField(blank=True, max_length=10, null=True),
),
migrations.AlterField(
model_name='filing',
name='debts_by_summary',
field=models.DecimalField(blank=True, decimal_places=2, help_text='Current debt owed by the committee', max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='election_district',
field=models.CharField(blank=True, max_length=10, null=True),
),
migrations.AlterField(
model_name='filing',
name='election_state',
field=models.CharField(blank=True, max_length=10, null=True),
),
migrations.AlterField(
model_name='filing',
name='filer_id',
field=models.CharField(blank=True, help_text='FEC id of the filer', max_length=10, null=True),
),
migrations.AlterField(
model_name='filing',
name='form',
field=models.CharField(blank=True, help_text='the base form type (excluding amendment indications)', max_length=20, null=True),
),
migrations.AlterField(
model_name='filing',
name='form_type',
field=models.CharField(blank=True, help_text='the full form type from the filing', max_length=20, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_candidate_contributions',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_candidate_loan_repayments',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_candidate_loans',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_contributions_from_candidate',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_contributions_to_candidates',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_coordinated_expenditures_by_party_committees',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_exempt_legal_accounting_disbursement',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_federal_election_activity_all_federal',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_federal_election_activity_federal_share',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_federal_election_activity_levin_share',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_federal_election_activity_total',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_fundraising',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_fundraising_disbursements',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_independent_expenditures',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_individual_contribution_total',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_individuals_itemized',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_individuals_unitemized',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_items_on_hand_to_be_liquidated',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_legal_and_accounting',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_levin_funds',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_loans_made',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_made_or_guaranteed_by_candidate',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_net_contributions',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_net_operating_expenditures',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_offsets_to_expenditures',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_operating',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_operating_expenditures',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_other_disbursements',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_other_federal_operating_expenditures',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_other_federal_receipts',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_other_loan_repayments',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_other_loans',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_other_political_committees',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_other_political_committees_pacs',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_other_receipts',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_other_repayments',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_pac_contributions',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_political_party_committees_refunds',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_political_party_contributions',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_received_from_or_guaranteed_by_cand',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_refunds_to_individuals',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_refunds_to_other_committees',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_refunds_to_party_committees',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_shared_operating_expenditures_federal',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_shared_operating_expenditures_nonfederal',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_total_contributions',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_total_contributions_refunds',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_total_disbursements',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_total_disbursements_period',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_total_federal_disbursements',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_total_federal_operating_expenditures',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_total_federal_receipts',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_total_individual_contributions',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_total_loan_repayments_made',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_total_loan_repayments_received',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_total_loans',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_total_nonfederal_transfers',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_total_offset_to_operating_expenditures',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_total_operating_expenditures',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_total_receipts',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_total_refunds',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_transfers_from_aff_other_party_cmttees',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_transfers_from_authorized',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_transfers_from_nonfederal_h3',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_transfers_to_affiliated',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
migrations.AlterField(
model_name='filing',
name='period_transfers_to_other_authorized_committees',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=12, null=True),
),
]
| 43.642473
| 142
| 0.619834
| 3,383
| 32,470
| 5.690511
| 0.041679
| 0.07636
| 0.11376
| 0.144096
| 0.973092
| 0.973092
| 0.973092
| 0.969196
| 0.962651
| 0.955067
| 0
| 0.019032
| 0.27182
| 32,470
| 743
| 143
| 43.701211
| 0.79517
| 0.001386
| 0
| 0.78019
| 1
| 0
| 0.168368
| 0.123894
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.001357
| 0
| 0.005427
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
883174ed49fce74a3e45db3e7528b0ae71f8232b
| 270
|
py
|
Python
|
src/teach/modeling/toast/utils.train.py
|
pablokvitca/teach
|
d538de5d5850266ff298099182af6d148f111f03
|
[
"MIT"
] | null | null | null |
src/teach/modeling/toast/utils.train.py
|
pablokvitca/teach
|
d538de5d5850266ff298099182af6d148f111f03
|
[
"MIT"
] | null | null | null |
src/teach/modeling/toast/utils.train.py
|
pablokvitca/teach
|
d538de5d5850266ff298099182af6d148f111f03
|
[
"MIT"
] | null | null | null |
def does_model_exist(model_load_path):
    # Stub: returns None (falsy), so load_or_create_model() always falls back
    # to creating a fresh model. TODO(review): implement, e.g. check whether a
    # checkpoint file exists at model_load_path.
    pass
def load_model(model_load_path):
    # Stub: returns None. TODO(review): implement loading a saved model from
    # model_load_path; currently unreachable while does_model_exist() is falsy.
    pass
def load_or_create_model(model_load_path, model_class, model_params):
    """Load the model stored at ``model_load_path`` when one exists;
    otherwise construct a fresh ``model_class`` from ``model_params``."""
    if does_model_exist(model_load_path):
        return load_model(model_load_path)
    return model_class(**model_params)
| 33.75
| 108
| 0.822222
| 45
| 270
| 4.422222
| 0.311111
| 0.226131
| 0.326633
| 0.271357
| 0.582915
| 0.447236
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107407
| 270
| 8
| 108
| 33.75
| 0.825726
| 0
| 0
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0.333333
| 0
| 0.166667
| 0.666667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 8
|
884164141faa39ff5424efea17b98450d81e4d0f
| 4,366
|
py
|
Python
|
xconfig/__init__.py
|
remorses/xconfig
|
f44e0e3abf9477f95c051065fd8ae51035e507c2
|
[
"MIT"
] | null | null | null |
xconfig/__init__.py
|
remorses/xconfig
|
f44e0e3abf9477f95c051065fd8ae51035e507c2
|
[
"MIT"
] | null | null | null |
xconfig/__init__.py
|
remorses/xconfig
|
f44e0e3abf9477f95c051065fd8ae51035e507c2
|
[
"MIT"
] | null | null | null |
import json
import yaml
import os.path
import aiofiles
class Dumper(yaml.Dumper):
    """yaml.Dumper subclass that always indents block sequences.

    Forcing ``indentless=False`` makes nested lists render indented under
    their parent key instead of flush with it.
    """

    def increase_indent(self, flow=False, indentless=False):
        # Deliberately ignore the caller-supplied ``indentless`` flag.
        return super(Dumper, self).increase_indent(flow, False)
def dumps(x):
    """Serialize ``x`` to a YAML string using the indenting ``Dumper``.

    Converted from ``dumps = lambda x: ...`` (PEP 8 E731): a ``def`` gives the
    function a real name in tracebacks while keeping the same call interface.
    """
    return yaml.dump(x, Dumper=Dumper, default_flow_style=False)
class Config:
    """Synchronous YAML-backed configuration store.

    Reads ``filename`` at construction time, layering the file's contents on
    top of ``default``. Dotted property paths ("a.b.c") address nested keys.
    """

    def __init__(self, filename, default={}):
        # Copy ``default`` so this instance never aliases the caller's dict:
        # previously ``self.content = default`` meant write()/push() mutated
        # the shared default argument across every Config instance.
        # ``self.default`` is now set unconditionally -- the original only set
        # it on the missing-file branch, so __getitem__ raised AttributeError
        # when the file existed at construction time.
        self.default = dict(default)
        if not os.path.exists(filename):
            self.content = dict(self.default)
            # with open(filename, 'w') as f:
            #     f.write(default)
        else:
            with open(filename) as f:
                data = yaml.safe_load(f) or {}
            self.content = {**self.default, **data}
        self.filename = filename

    def _parent(self, prop):
        """Walk (creating dicts as needed) to the container for dotted path
        ``prop``; return (container, final_key)."""
        parts = prop.split('.')
        container = self.content
        for part in parts[:-1]:
            container[part] = container[part] if part in container else {}
            container = container[part]
        return container, parts[-1]

    def _save(self):
        """Serialize the current content back to the YAML file."""
        with open(self.filename, 'w') as f:
            f.write(dumps(self.content))

    def write(self, prop, value):
        """Set the value at dotted path ``prop`` and persist to disk."""
        container, key = self._parent(prop)
        container[key] = value
        self._save()

    def push(self, prop, value):
        """Append ``value`` at dotted path ``prop`` and persist to disk.

        A missing or non-sequence slot is replaced by ``[value]``.
        """
        container, key = self._parent(prop)
        if key in container and isinstance(container[key], (list, tuple)):
            container[key].append(value)
        else:
            container[key] = [value]
        self._save()

    def __getitem__(self, name):
        """Re-read the file (if present) and return ``content[name]``.

        On-disk values override defaults; in-memory edits win over both.
        """
        if os.path.exists(self.filename):
            with open(self.filename) as f:
                data = yaml.safe_load(f) or {}
            self.content = {**self.default, **data, **self.content}
        return self.content[name]

    def delete(self):
        """Best-effort removal of the backing file."""
        try:
            os.remove(self.filename)
        except OSError:
            # Narrowed from bare ``except Exception``: only swallow
            # filesystem errors (missing file, permissions).
            pass

    def __repr__(self):
        return 'Config(filename="' + self.filename + '", data= ' + json.dumps(self.content, indent=4, default=str) + ')\n'
async def AsyncConfig(filename, default={}):
    """Async factory for ``_AsyncConfig`` (async counterpart of ``Config``).

    Reads ``filename`` with aiofiles when it exists, layering its contents on
    top of ``default``.
    """
    self = _AsyncConfig()
    # Copy ``default`` so instances never alias/mutate the shared default
    # argument, and set ``self.default`` unconditionally: the original only
    # set it on the missing-file branch, which made
    # ``_AsyncConfig.__getitem__`` raise AttributeError for existing files.
    self.default = dict(default)
    if not os.path.exists(filename):
        self.content = dict(self.default)
        # with open(filename, 'w') as f:
        #     f.write(default)
    else:
        async with aiofiles.open(filename) as f:
            s = await f.read()
        data = yaml.safe_load(s) or {}
        self.content = {**self.default, **data}
    self.filename = filename
    return self
class _AsyncConfig:
async def write(self, prop, value):
content = self.content
for part in prop.split('.')[:-1]:
content[part] = content[part] if part in content else {}
content = content[part]
content[prop.split('.')[-1]] = value
async with aiofiles.open(self.filename, 'w') as f:
data = dumps(self.content, )
await f.write(data)
async def push(self, prop, value):
content = self.content
for part in prop.split('.')[:-1]:
content[part] = content[part] if part in content else {}
content = content[part]
field = prop.split('.')[-1]
if field in content:
if isinstance(content[field], (list, tuple)):
content[field].append(value)
else:
content[field] = [value]
else:
content[field] = [value]
async with aiofiles.open(self.filename, 'w') as f:
data = dumps(self.content, )
await f.write(data)
def __getitem__(self, name):
filename = self.filename
if os.path.exists(filename):
with open(filename,) as f:
data = yaml.safe_load(f) or {}
self.content = {**self.default, **data, **self.content}
return self.content[name]
async def delete(self):
try:
os.remove(self.filename)
except Exception:
pass
def __repr__(self):
return 'Config(filename="' + self.filename + '", data= ' + json.dumps(self.content, indent=4, default=str) + ')\n'
| 33.075758
| 122
| 0.540311
| 502
| 4,366
| 4.639442
| 0.141434
| 0.094461
| 0.03435
| 0.030915
| 0.829541
| 0.829541
| 0.829541
| 0.829541
| 0.797338
| 0.797338
| 0
| 0.003422
| 0.330738
| 4,366
| 131
| 123
| 33.328244
| 0.793635
| 0.023591
| 0
| 0.788991
| 0
| 0
| 0.016444
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.082569
| false
| 0.018349
| 0.036697
| 0.027523
| 0.201835
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
31f54a86c2f2c8c6690a895b05b79756a139dd5f
| 4,039
|
py
|
Python
|
tests/test_visitors/test_ast/test_functions/test_stop_iteration.py
|
n1kolasM/wemake-python-styleguide
|
f39e87897de89bea1c49d410beb5b1cbaf930807
|
[
"MIT"
] | 1
|
2020-02-21T18:58:44.000Z
|
2020-02-21T18:58:44.000Z
|
tests/test_visitors/test_ast/test_functions/test_stop_iteration.py
|
n1kolasM/wemake-python-styleguide
|
f39e87897de89bea1c49d410beb5b1cbaf930807
|
[
"MIT"
] | 15
|
2020-02-22T11:09:46.000Z
|
2020-02-27T16:36:54.000Z
|
tests/test_visitors/test_ast/test_functions/test_stop_iteration.py
|
n1kolasM/wemake-python-styleguide
|
f39e87897de89bea1c49d410beb5b1cbaf930807
|
[
"MIT"
] | 1
|
2019-12-12T19:18:58.000Z
|
2019-12-12T19:18:58.000Z
|
# -*- coding: utf-8 -*-
import pytest
from wemake_python_styleguide.violations.best_practices import (
StopIterationInsideGeneratorViolation,
)
from wemake_python_styleguide.visitors.ast.functions import (
FunctionDefinitionVisitor,
)
stop_iteration_method = """
class CheckStopIteration():
def check_stop_iteration(self):
{0}
raise {1}
"""
stop_iteration_function = """
def check_stop_iteration():
{0}
raise {1}
"""
@pytest.mark.parametrize('code', [
    stop_iteration_method,
    stop_iteration_function,
])
@pytest.mark.parametrize('statement', [
    'yield',
    'yield some_parameter',
])
@pytest.mark.parametrize('exception', [
    'StopIteration',
    'StopIteration()',
    'StopIteration(1)',
])
def test_stop_iteration_inside_generators(
    assert_errors,
    parse_ast_tree,
    code,
    statement,
    exception,
    default_options,
    mode,
):
    """Testing that `raise StopIteration` is restricted inside generators."""
    source = mode(code.format(statement, exception))
    checker = FunctionDefinitionVisitor(
        default_options,
        tree=parse_ast_tree(source),
    )
    checker.run()
    assert_errors(checker, [StopIterationInsideGeneratorViolation])
@pytest.mark.parametrize('code', [
    stop_iteration_method,
    stop_iteration_function,
])
@pytest.mark.parametrize('statement', [
    'yield from generator()',
])
@pytest.mark.parametrize('exception', [
    'StopIteration',
    'StopIteration()',
    'StopIteration(1)',
])
def test_stop_iteration_in_generators_yield_from(
    assert_errors,
    parse_ast_tree,
    code,
    statement,
    exception,
    default_options,
):
    """Testing that `raise StopIteration` is restricted inside generators."""
    # No ``mode`` fixture here, unlike the plain-yield test above.
    parsed = parse_ast_tree(code.format(statement, exception))
    checker = FunctionDefinitionVisitor(default_options, tree=parsed)
    checker.run()
    assert_errors(checker, [StopIterationInsideGeneratorViolation])
@pytest.mark.parametrize('code', [
    stop_iteration_method,
    stop_iteration_function,
])
@pytest.mark.parametrize('statement', [
    'print("not a generator")',
])
@pytest.mark.parametrize('exception', [
    'StopIteration',
    'StopIteration()',
    'StopIteration(1)',
])
def test_stop_iteration_inside_bare_functions(
    assert_errors,
    parse_ast_tree,
    code,
    statement,
    exception,
    default_options,
    mode,
):
    """Testing that `raise StopIteration` is allowed inside bare functions."""
    source = mode(code.format(statement, exception))
    checker = FunctionDefinitionVisitor(
        default_options,
        tree=parse_ast_tree(source),
    )
    checker.run()
    assert_errors(checker, [])
@pytest.mark.parametrize('code', [
    stop_iteration_method,
    stop_iteration_function,
])
@pytest.mark.parametrize('statement', [
    'yield',
    'yield some_parameter',
])
@pytest.mark.parametrize('exception', [
    'RuntimeError',
    'RuntimeError()',
    'RuntimeError(1)',
])
def test_other_exceptions_inside_generators(
    assert_errors,
    parse_ast_tree,
    code,
    statement,
    exception,
    default_options,
    mode,
):
    """Testing that `raise` of other exceptions is allowed inside generators."""
    checker = FunctionDefinitionVisitor(
        default_options,
        tree=parse_ast_tree(mode(code.format(statement, exception))),
    )
    checker.run()
    # Only StopIteration is restricted; RuntimeError must not be flagged.
    assert_errors(checker, [])
@pytest.mark.parametrize('code', [
    stop_iteration_method,
    stop_iteration_function,
])
@pytest.mark.parametrize('statement', [
    'yield from generator()',
])
@pytest.mark.parametrize('exception', [
    'RuntimeError',
    'RuntimeError()',
    'RuntimeError(1)',
])
def test_other_exc_in_generators_yield_from(
    assert_errors,
    parse_ast_tree,
    code,
    statement,
    exception,
    default_options,
):
    """Testing that `raise` of other exceptions is allowed inside generators."""
    parsed = parse_ast_tree(code.format(statement, exception))
    checker = FunctionDefinitionVisitor(default_options, tree=parsed)
    checker.run()
    # Only StopIteration is restricted; RuntimeError must not be flagged.
    assert_errors(checker, [])
| 23.212644
| 80
| 0.703887
| 407
| 4,039
| 6.739558
| 0.164619
| 0.080569
| 0.114838
| 0.040831
| 0.863288
| 0.863288
| 0.863288
| 0.863288
| 0.863288
| 0.863288
| 0
| 0.002988
| 0.17133
| 4,039
| 173
| 81
| 23.346821
| 0.816552
| 0.091359
| 0
| 0.874126
| 0
| 0
| 0.161361
| 0.019484
| 0
| 0
| 0
| 0
| 0.06993
| 1
| 0.034965
| false
| 0
| 0.020979
| 0
| 0.055944
| 0.006993
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ee0ae15e9b23bab918714b996257759b66188b01
| 327,782
|
py
|
Python
|
CR/CR_eval/models_s.py
|
thunlp/DictSKB
|
ac7c328db4d25cf6bdc3c64aefa9b773854ba525
|
[
"MIT"
] | 2
|
2021-11-21T13:42:06.000Z
|
2022-01-11T05:33:43.000Z
|
CR/CR_eval/models_s.py
|
thunlp/DictSKB
|
ac7c328db4d25cf6bdc3c64aefa9b773854ba525
|
[
"MIT"
] | null | null | null |
CR/CR_eval/models_s.py
|
thunlp/DictSKB
|
ac7c328db4d25cf6bdc3c64aefa9b773854ba525
|
[
"MIT"
] | 3
|
2021-09-14T06:40:51.000Z
|
2021-12-23T09:41:22.000Z
|
import numpy as np
import time
import torch
import torch.nn as nn
from torch.nn import init
class SememeSumLstm(nn.Module):
    """LSTM-style cell applied step by step over a (time, batch, dim) input.

    Each step's i/o/u gates come from the input alone (no recurrent term),
    giving c = i * u and h = o * tanh(c).
    """

    def __init__(self, sememe_dim, mem_dim):
        super(SememeSumLstm, self).__init__()
        self.in_dim = sememe_dim
        self.mem_dim = mem_dim
        # One fused linear producing the three gate pre-activations (i, o, u).
        self.ioux = nn.Linear(self.in_dim, 3 * self.mem_dim)
        self.reset_parameters()

    def node_forward(self, inputs):
        """One step: (batch, in_dim) -> (c, h), each (batch, mem_dim)."""
        iou = self.ioux(inputs)  # fused i/o/u pre-activations (three W x + b)
        i, o, u = torch.split(iou, iou.size(1) // 3, dim=1)
        i, o, u = torch.sigmoid(i), torch.sigmoid(o), torch.tanh(u)
        c = torch.mul(i, u)
        h = torch.mul(o, torch.tanh(c))
        return c, h

    def forward(self, inputs):
        """Run over all steps of ``inputs`` (max_time, batch, in_dim).

        Returns (c, h) stacked along time: each (max_time, batch, mem_dim).
        """
        max_time, batch_size, _ = inputs.size()
        cells = []
        hiddens = []
        # Loop variable renamed from ``time``: the original shadowed the
        # imported ``time`` module.
        for t in range(max_time):
            new_c, new_h = self.node_forward(inputs[t])
            cells.append(new_c)
            hiddens.append(new_h)
        return torch.stack(cells, 0), torch.stack(hiddens, 0)

    def reset_parameters(self):
        """Kaiming-normal init for weights; zero biases."""
        layers = [self.ioux]
        for layer in layers:
            init.kaiming_normal_(layer.weight)
            if layer.bias is not None:
                init.constant_(layer.bias, val=0)
class SememeSumGRU(nn.Module):
    """GRU-style cell applied step by step over a (time, batch, dim) input.

    Each step computes two gates from the input alone: h = sigmoid(i) * tanh(o).
    """

    def __init__(self, sememe_dim, mem_dim):
        super(SememeSumGRU, self).__init__()
        self.in_dim = sememe_dim
        self.mem_dim = mem_dim
        # One fused linear producing the two gate pre-activations (i, o).
        self.ioux = nn.Linear(self.in_dim, 2 * self.mem_dim)
        self.reset_parameters()

    def node_forward(self, inputs):
        """One step: (batch, in_dim) -> h of shape (batch, mem_dim)."""
        # Fixed comment: this is TWO fused W x + b terms, not three as the
        # original comment (copied from the LSTM variant) claimed.
        iou = self.ioux(inputs)
        i, o = torch.split(iou, iou.size(1) // 2, dim=1)
        i, o = torch.sigmoid(i), torch.tanh(o)
        h = torch.mul(i, o)
        return h

    def forward(self, inputs):
        """Run over all steps of ``inputs`` (max_time, batch, in_dim).

        Returns h stacked along time: (max_time, batch, mem_dim).
        """
        max_time, batch_size, _ = inputs.size()
        hiddens = []
        # Loop variable renamed from ``time``: the original shadowed the
        # imported ``time`` module.
        for t in range(max_time):
            hiddens.append(self.node_forward(inputs[t]))
        return torch.stack(hiddens, 0)

    def reset_parameters(self):
        """Kaiming-normal init for weights; zero biases."""
        layers = [self.ioux]
        for layer in layers:
            init.kaiming_normal_(layer.weight)
            if layer.bias is not None:
                init.constant_(layer.bias, val=0)
class LSTM_baseline(nn.Module):
def __init__(self, config, sememe):
    """Build the baseline LSTM sentence encoder.

    config: dict with 'enc_lstm_dim', 'sememe_dim', 'word_emb_dim'.
    sememe: sememe lookup helper; used later by get_batch().
    """
    super(LSTM_baseline, self).__init__()
    self.enc_lstm_dim = config['enc_lstm_dim']
    self.sememe_dim = config['sememe_dim']
    self.sememesumlstm = SememeSumLstm(self.sememe_dim, self.enc_lstm_dim)
    self.sememesumGRU = SememeSumGRU(self.sememe_dim, self.enc_lstm_dim)
    # Embedding over the sememe inventory (size 2186 hard-coded).
    self.emb_sememe = nn.Embedding(2186, self.sememe_dim)
    self.in_dim = config['word_emb_dim']
    self.mem_dim = config['enc_lstm_dim']
    # x3 packs the three gate matrices (i, o, u); split apart in node_forward.
    self.ioux = nn.Linear(self.in_dim, 3 * self.mem_dim)
    self.iouh = nn.Linear(self.mem_dim, 3 * self.mem_dim)
    # ious handles the c and h passed in from the sememe branch; both are
    # mem_dim-dimensional. (Unused by this baseline's forward().)
    self.ious = nn.Linear(self.mem_dim, 3 * self.mem_dim)
    self.fx = nn.Linear(self.in_dim, self.mem_dim)
    self.fx_s = nn.Linear(self.in_dim, self.mem_dim)
    self.fh = nn.Linear(self.mem_dim, self.mem_dim)
    # fs handles the c and h passed in from the sememe branch.
    self.fs = nn.Linear(self.mem_dim, self.mem_dim)
    self.reset_parameters()
    self.sememe = sememe
    self.bos = '<s>'
    self.eos = '</s>'
    self.max_pad = True
    self.moses_tok = False
def reset_parameters(self):
layers = [self.ioux, self.iouh, self.ious, self.fx, self.fx_s, self.fh, self.fs]
for layer in layers:
init.kaiming_normal_(layer.weight)
if layer.bias is not None:
init.constant_(layer.bias, val=0)
def node_forward(self, inputs, hx):
    """One LSTM step.

    inputs: (batch, in_dim) word embeddings for this time step.
    hx: (child_c, child_h) previous cell/hidden state, each (batch, mem_dim).
    Returns the new (c, h).
    """
    child_c = hx[0]
    child_h = hx[1]
    iou = self.ioux(inputs) + self.iouh(child_h)
    i, o, u = torch.split(iou, iou.size(1) // 3, dim=1)
    i, o, u = torch.sigmoid(i), torch.sigmoid(o), torch.tanh(u)
    f = torch.sigmoid(
        self.fh(child_h) + self.fx(inputs)
    )
    fc = torch.mul(f, child_c)  # part of memory cell retained from the previous step
    c = torch.mul(i, u) + fc  # candidate plus retained memory (sum means sigma)
    h = torch.mul(o, torch.tanh(c))
    return (c, h)
def forward(self, inputs, length, sememe_data):
# hx: (child_c, child_h)
max_time, batch_size, _ = inputs.size()
output = []
hx = (inputs[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_(),
inputs[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_())
for time in range(max_time):
next_hx = self.node_forward(inputs[time], hx)
output.append(next_hx[1])
hx = next_hx
return torch.stack([output[length[i]-1][i] for i in range(len(length))], 0)
def set_w2v_path(self, w2v_path):
    # Path to the word2vec/GloVe text file; must be set before build_vocab()
    # or get_w2v*() are called (they assert on this attribute).
    self.w2v_path = w2v_path
def get_word_dict(self, sentences, tokenize=True):
# create vocab of words
word_dict = {}
sentences = [s.split() if not tokenize else self.tokenize(s) for s in sentences]
for sent in sentences:
for word in sent:
if word not in word_dict:
word_dict[word] = ''
word_dict[self.bos] = ''
word_dict[self.eos] = ''
return word_dict
def get_w2v(self, word_dict):
    """Load embedding vectors for exactly the words present in ``word_dict``."""
    assert hasattr(self, 'w2v_path'), 'w2v path not set'
    # create word_vec with w2v vectors
    word_vec = {}
    with open(self.w2v_path, encoding='utf-8') as f:
        for line in f:
            # Each line is "<word> <v1> <v2> ...": split once so the vector
            # text stays intact.
            word, vec = line.split(' ', 1)
            if word in word_dict:
                # NOTE(review): np.fromstring(text, sep=...) is deprecated in
                # modern NumPy; consider np.array(vec.split(), dtype=float).
                word_vec[word] = np.fromstring(vec, sep=' ')
    print('Found %s(/%s) words with w2v vectors' % (len(word_vec), len(word_dict)))
    return word_vec
def get_w2v_k(self, K):
    """Load the first K vectors from the w2v file, plus bos/eos wherever they
    appear later in the file."""
    assert hasattr(self, 'w2v_path'), 'w2v path not set'
    # create word_vec with k first w2v vectors
    k = 0
    word_vec = {}
    with open(self.w2v_path, encoding='utf-8') as f:
        for line in f:
            word, vec = line.split(' ', 1)
            if k <= K:
                word_vec[word] = np.fromstring(vec, sep=' ')
                k += 1
            if k > K:
                # Past the first K entries: still collect only bos/eos tokens.
                if word in [self.bos, self.eos]:
                    word_vec[word] = np.fromstring(vec, sep=' ')
            if k > K and all([w in word_vec for w in [self.bos, self.eos]]):
                # Stop once K words and both sentence markers are loaded.
                break
    return word_vec
def build_vocab(self, sentences, tokenize=True):
    """Build ``self.word_vec`` restricted to words appearing in ``sentences``."""
    assert hasattr(self, 'w2v_path'), 'w2v path not set'
    word_dict = self.get_word_dict(sentences, tokenize)
    self.word_vec = self.get_w2v(word_dict)
    print('Vocab size : %s' % (len(self.word_vec)))
# build w2v vocab with k most frequent words
def build_vocab_k_words(self, K):
    """Build ``self.word_vec`` from the first K entries of the w2v file.

    Assumes the embedding file is ordered by word frequency, so the first K
    entries are the K most frequent words -- TODO confirm for custom files.
    """
    assert hasattr(self, 'w2v_path'), 'w2v path not set'
    self.word_vec = self.get_w2v_k(K)
    print('Vocab size : %s' % (K))
def update_vocab(self, sentences, tokenize=True):
    """Extend ``self.word_vec`` with vectors for words newly seen in
    ``sentences``; already-known words are skipped."""
    assert hasattr(self, 'w2v_path'), 'warning : w2v path not set'
    assert hasattr(self, 'word_vec'), 'build_vocab before updating it'
    word_dict = self.get_word_dict(sentences, tokenize)
    # keep only new words
    for word in self.word_vec:
        if word in word_dict:
            del word_dict[word]
    # update vocabulary
    if word_dict:
        new_word_vec = self.get_w2v(word_dict)
        self.word_vec.update(new_word_vec)
    else:
        new_word_vec = []
    print('New vocab size : %s (added %s words)'% (len(self.word_vec), len(new_word_vec)))
'''
def get_batch(self, batch):
# sent in batch in decreasing order of lengths
# batch: (bsize, max_len, word_dim)
embed = np.zeros((len(batch[0]), len(batch), self.word_emb_dim))
for i in range(len(batch)):
for j in range(len(batch[i])):
embed[j, i, :] = self.word_vec[batch[i][j]]
return torch.FloatTensor(embed)
'''
def is_cuda(self):
    # either all weights are on cpu or they are on gpu
    # Hard-coded to True, so encode() always moves batches to CUDA; the
    # original device probe is kept below for reference.
    return True
    #return self.enc_lstm.bias_hh_l0.data.is_cuda
def tokenize(self, s):
    """Tokenize sentence ``s`` with NLTK; approximate MOSES tokenization
    when ``self.moses_tok`` is set."""
    # Local import keeps nltk an optional dependency.
    from nltk.tokenize import word_tokenize
    if self.moses_tok:
        s = ' '.join(word_tokenize(s))
        s = s.replace(" n't ", "n 't ")  # HACK to get ~MOSES tokenization
        return s.split()
    else:
        return word_tokenize(s)
def prepare_samples(self, sentences, bsize, tokenize, verbose):
sentences = [[self.bos] + s.split() + [self.eos] if not tokenize else
[self.bos] + self.tokenize(s) + [self.eos] for s in sentences]
n_w = np.sum([len(x) for x in sentences])
# filters words without w2v vectors
for i in range(len(sentences)):
s_f = [word for word in sentences[i] if word in self.word_vec]
if not s_f:
import warnings
warnings.warn('No words in "%s" (idx=%s) have w2v vectors. \
Replacing by "</s>"..' % (sentences[i], i))
s_f = [self.eos]
sentences[i] = s_f
lengths = np.array([len(s) for s in sentences])
n_wk = np.sum(lengths)
if verbose:
print('Nb words kept : %s/%s (%.1f%s)' % (
n_wk, n_w, 100.0 * n_wk / n_w, '%'))
# sort by decreasing length
lengths, idx_sort = np.sort(lengths)[::-1], np.argsort(-lengths)
sentences = np.array(sentences)[idx_sort]
return sentences, lengths, idx_sort
def get_batch(self, batch, sememe, emb_dim=300):
embed = np.zeros((len(batch[0]), len(batch), 300))
sememe_data = np.zeros((len(batch[0]), len(batch), 2186), dtype = np.uint8)
for i in range(len(batch)):
for j in range(len(batch[i])):
embed[j, i, :] = self.word_vec[batch[i][j]]
for k in sememe.read_word_sememe(batch[i][j]):
sememe_data[j, i, k] = 1
return torch.from_numpy(embed).float(), torch.from_numpy(sememe_data).cuda()
def encode(self, sentences, bsize=64, tokenize=True, verbose=False):
tic = time.time()
sentences, lengths, idx_sort = self.prepare_samples(
sentences, bsize, tokenize, verbose)
embeddings = []
for stidx in range(0, len(sentences), bsize):
batch, batch_s = self.get_batch(sentences[stidx:stidx + bsize], self.sememe)
if self.is_cuda():
batch = batch.cuda()
with torch.no_grad():
batch = self.forward(batch, lengths[stidx:stidx + bsize], batch_s).data.cpu().numpy()
embeddings.append(batch)
embeddings = np.vstack(embeddings)
# unsort
idx_unsort = np.argsort(idx_sort)
embeddings = embeddings[idx_unsort]
if verbose:
print('Speed : %.1f sentences/s (%s mode, bsize=%s)' % (
len(embeddings)/(time.time()-tic),
'gpu' if self.is_cuda() else 'cpu', bsize))
return embeddings
class LSTM_concat(nn.Module):
    """Sememe-augmented LSTM encoder: each step's input is the word embedding
    concatenated with the summed sememe embedding (hence the ``2 * in_dim``
    input size of the gate projections).  Also bundles InferSent-style
    vocabulary / batching / encoding utilities."""
    def __init__(self, config, sememe):
        super(LSTM_concat, self).__init__()
        self.enc_lstm_dim = config['enc_lstm_dim']
        self.sememe_dim = config['sememe_dim']
        self.sememesumlstm = SememeSumLstm(self.sememe_dim, self.enc_lstm_dim)
        self.sememesumGRU = SememeSumGRU(self.sememe_dim, self.enc_lstm_dim)
        self.sememe_dim = config['sememe_dim']
        self.sememe_size = config['sememe_size']
        self.emb_sememe = nn.Embedding(self.sememe_size, self.sememe_dim)
        self.in_dim = config['word_emb_dim']
        self.mem_dim = config['enc_lstm_dim']
        # x3: the three gate projections (i, o, u) are packed into a single
        # linear layer and split apart in node_forward
        self.ioux = nn.Linear(2 * self.in_dim, 3 * self.mem_dim)
        self.iouh = nn.Linear(self.mem_dim, 3 * self.mem_dim)
        self.fx = nn.Linear(2 * self.in_dim, self.mem_dim)
        self.fh = nn.Linear(self.mem_dim, self.mem_dim)
        self.fs = nn.Linear(self.mem_dim, self.mem_dim)
        self.reset_parameters()
        self.sememe = sememe
        self.bos = '<s>'
        self.eos = '</s>'
        self.max_pad = True
        self.moses_tok = False
    def reset_parameters(self):
        """Kaiming-initialize all gate weights and zero all biases."""
        layers = [self.ioux, self.iouh, self.fx, self.fh, self.fs]
        for layer in layers:
            init.kaiming_normal_(layer.weight)
            if layer.bias is not None:
                init.constant_(layer.bias, val=0)
    def node_forward(self, inputs, hx):
        """One LSTM step: *inputs* is (batch, 2*in_dim), *hx* is the (c, h) pair."""
        child_c = hx[0]
        child_h = hx[1]
        iou = self.ioux(inputs) + self.iouh(child_h)
        i, o, u = torch.split(iou, iou.size(1) // 3, dim=1)
        i, o, u = torch.sigmoid(i), torch.sigmoid(o), torch.tanh(u)
        f = torch.sigmoid(
            self.fh(child_h) + self.fx(inputs)
        )
        fc = torch.mul(f, child_c) #part of memory cell induced by word-child
        c = torch.mul(i, u) + fc
        h = torch.mul(o, torch.tanh(c))
        return (c, h)
    def forward(self, word_emb, length, sememe_data):
        """Run the LSTM over time-major *word_emb* (max_time, batch, dim) and
        return the hidden state at each sentence's last valid position."""
        emb_s_1 = self.sememe_sum(sememe_data)
        inputs = torch.cat([word_emb, emb_s_1], dim = 2)
        max_time, batch_size, _ = inputs.size()
        output = []
        # zero-initialized (c, h) on the same device/dtype as the input
        hx = (inputs[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_(),
              inputs[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_())
        # NOTE(review): the loop variable `time` shadows the `time` module
        # (harmless here since the module is not used inside this method)
        for time in range(max_time):
            next_hx = self.node_forward(inputs[time], hx)
            output.append(next_hx[1])
            hx = next_hx
        return torch.stack([output[length[i]-1][i] for i in range(len(length))], 0)
    def sememe_sum(self, input_s):
        """Project the multi-hot sememe tensor to dense sememe embeddings (sum over sememes)."""
        emb_sememe = self.emb_sememe.weight
        input_sememe = []
        for i in range(input_s.size()[0]):
            input_sememe.append(torch.mm(input_s[i].float(), emb_sememe))
        input_sememe = torch.stack(input_sememe, dim = 0)
        return input_sememe
    def set_w2v_path(self, w2v_path):
        """Remember where the word2vec/GloVe text file lives."""
        self.w2v_path = w2v_path
    def get_word_dict(self, sentences, tokenize=True):
        """Return a dict whose keys are all words of *sentences* plus <s>/</s>."""
        # create vocab of words
        word_dict = {}
        sentences = [s.split() if not tokenize else self.tokenize(s) for s in sentences]
        for sent in sentences:
            for word in sent:
                if word not in word_dict:
                    word_dict[word] = ''
        word_dict[self.bos] = ''
        word_dict[self.eos] = ''
        return word_dict
    def get_w2v(self, word_dict):
        """Load vectors for every word of *word_dict* found in the w2v file."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        # create word_vec with w2v vectors
        word_vec = {}
        with open(self.w2v_path, encoding='utf-8') as f:
            for line in f:
                word, vec = line.split(' ', 1)
                if word in word_dict:
                    # NOTE(review): np.fromstring with sep is deprecated;
                    # consider np.array(vec.split(), dtype=float)
                    word_vec[word] = np.fromstring(vec, sep=' ')
        print('Found %s(/%s) words with w2v vectors' % (len(word_vec), len(word_dict)))
        return word_vec
    def get_w2v_k(self, K):
        """Load the first K vectors (K+1 due to the <= condition) plus <s>/</s>."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        # create word_vec with k first w2v vectors
        k = 0
        word_vec = {}
        with open(self.w2v_path, encoding='utf-8') as f:
            for line in f:
                word, vec = line.split(' ', 1)
                if k <= K:
                    word_vec[word] = np.fromstring(vec, sep=' ')
                    k += 1
                if k > K:
                    # past the quota: only pick up the sentence markers
                    if word in [self.bos, self.eos]:
                        word_vec[word] = np.fromstring(vec, sep=' ')
                if k > K and all([w in word_vec for w in [self.bos, self.eos]]):
                    break
        return word_vec
    def build_vocab(self, sentences, tokenize=True):
        """Build self.word_vec for the vocabulary of *sentences*."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        word_dict = self.get_word_dict(sentences, tokenize)
        self.word_vec = self.get_w2v(word_dict)
        print('Vocab size : %s' % (len(self.word_vec)))
    # build w2v vocab with k most frequent words
    def build_vocab_k_words(self, K):
        """Load vectors for the K most frequent words into self.word_vec."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        self.word_vec = self.get_w2v_k(K)
        print('Vocab size : %s' % (K))
    def update_vocab(self, sentences, tokenize=True):
        """Add vectors for previously unseen words of *sentences* to self.word_vec."""
        assert hasattr(self, 'w2v_path'), 'warning : w2v path not set'
        assert hasattr(self, 'word_vec'), 'build_vocab before updating it'
        word_dict = self.get_word_dict(sentences, tokenize)
        # keep only new words
        for word in self.word_vec:
            if word in word_dict:
                del word_dict[word]
        # update vocabulary
        if word_dict:
            new_word_vec = self.get_w2v(word_dict)
            self.word_vec.update(new_word_vec)
        else:
            new_word_vec = []
        print('New vocab size : %s (added %s words)'% (len(self.word_vec), len(new_word_vec)))
    '''
    def get_batch(self, batch):
        # sent in batch in decreasing order of lengths
        # batch: (bsize, max_len, word_dim)
        embed = np.zeros((len(batch[0]), len(batch), self.word_emb_dim))
        for i in range(len(batch)):
            for j in range(len(batch[i])):
                embed[j, i, :] = self.word_vec[batch[i][j]]
        return torch.FloatTensor(embed)
    '''
    def is_cuda(self):
        """Whether batches should be moved to GPU (hard-coded True here)."""
        # either all weights are on cpu or they are on gpu
        return True
        #return self.enc_lstm.bias_hh_l0.data.is_cuda
    def tokenize(self, s):
        """Tokenize with NLTK; approximate Moses tokenization if self.moses_tok."""
        from nltk.tokenize import word_tokenize
        if self.moses_tok:
            s = ' '.join(word_tokenize(s))
            s = s.replace(" n't ", "n 't ")  # HACK to get ~MOSES tokenization
            return s.split()
        else:
            return word_tokenize(s)
    def prepare_samples(self, sentences, bsize, tokenize, verbose):
        """Wrap with <s>/</s>, drop OOV words, sort by decreasing length.
        Returns (sentences, lengths, idx_sort)."""
        sentences = [[self.bos] + s.split() + [self.eos] if not tokenize else
                     [self.bos] + self.tokenize(s) + [self.eos] for s in sentences]
        n_w = np.sum([len(x) for x in sentences])
        # filters words without w2v vectors
        for i in range(len(sentences)):
            s_f = [word for word in sentences[i] if word in self.word_vec]
            if not s_f:
                import warnings
                warnings.warn('No words in "%s" (idx=%s) have w2v vectors. \
Replacing by "</s>"..' % (sentences[i], i))
                s_f = [self.eos]
            sentences[i] = s_f
        lengths = np.array([len(s) for s in sentences])
        n_wk = np.sum(lengths)
        if verbose:
            print('Nb words kept : %s/%s (%.1f%s)' % (
                n_wk, n_w, 100.0 * n_wk / n_w, '%'))
        # sort by decreasing length
        lengths, idx_sort = np.sort(lengths)[::-1], np.argsort(-lengths)
        sentences = np.array(sentences)[idx_sort]
        return sentences, lengths, idx_sort
    def get_batch(self, batch, sememe, emb_dim=300):
        """Build the (max_len, bsize, 300) embedding tensor plus the multi-hot
        sememe tensor; the latter is moved to GPU unconditionally."""
        embed = np.zeros((len(batch[0]), len(batch), 300))
        # 2186: presumably the total sememe count — confirm against the sememe dict
        sememe_data = np.zeros((len(batch[0]), len(batch), 2186), dtype = np.uint8)
        for i in range(len(batch)):
            for j in range(len(batch[i])):
                embed[j, i, :] = self.word_vec[batch[i][j]]
                for k in sememe.read_word_sememe(batch[i][j]):
                    sememe_data[j, i, k] = 1
        return torch.from_numpy(embed).float(), torch.from_numpy(sememe_data).cuda()
    def encode(self, sentences, bsize=64, tokenize=True, verbose=False):
        """Encode *sentences* into a (n, dim) numpy array, batching by *bsize*."""
        tic = time.time()
        sentences, lengths, idx_sort = self.prepare_samples(
            sentences, bsize, tokenize, verbose)
        embeddings = []
        for stidx in range(0, len(sentences), bsize):
            batch, batch_s = self.get_batch(sentences[stidx:stidx + bsize], self.sememe)
            if self.is_cuda():
                batch = batch.cuda()
            with torch.no_grad():
                batch = self.forward(batch, lengths[stidx:stidx + bsize], batch_s).data.cpu().numpy()
            embeddings.append(batch)
        embeddings = np.vstack(embeddings)
        # unsort back to the caller's original order
        idx_unsort = np.argsort(idx_sort)
        embeddings = embeddings[idx_unsort]
        if verbose:
            print('Speed : %.1f sentences/s (%s mode, bsize=%s)' % (
                len(embeddings)/(time.time()-tic),
                'gpu' if self.is_cuda() else 'cpu', bsize))
        return embeddings
class LSTM_gate(nn.Module):
    """Sememe-augmented LSTM with a dedicated output gate for the sememe
    channel: node_forward splits four gates (f, i, o, o_c) and mixes
    tanh(W_c(sememe_h)) into the hidden state via o_c."""
    def __init__(self, config, sememe):
        super(LSTM_gate, self).__init__()
        self.enc_lstm_dim = config['enc_lstm_dim']
        self.sememe_dim = config['sememe_dim']
        self.sememesumlstm = SememeSumLstm(self.sememe_dim, self.enc_lstm_dim)
        self.sememesumGRU = SememeSumGRU(self.sememe_dim, self.enc_lstm_dim)
        self.sememe_dim = config['sememe_dim']
        self.sememe_size = config['sememe_size']
        self.emb_sememe = nn.Embedding(self.sememe_size, self.sememe_dim)
        self.in_dim = config['word_emb_dim']
        self.mem_dim = config['enc_lstm_dim']
        # x4: four gate projections (f, i, o, o_c) packed into one layer,
        # split apart in node_forward
        self.ioux = nn.Linear(self.in_dim, 4 * self.mem_dim)
        self.iouh = nn.Linear(self.mem_dim, 4 * self.mem_dim)
        # ious handles the h passed in from the sememe side
        self.ious = nn.Linear(self.in_dim, 4 * self.mem_dim)
        self.fx = nn.Linear(self.in_dim, self.mem_dim)
        #self.fx_s = nn.Linear(self.in_dim, self.mem_dim)
        self.fh = nn.Linear(self.mem_dim, self.mem_dim)
        self.W_c = nn.Linear(self.in_dim, self.mem_dim)
        self.reset_parameters()
        self.sememe = sememe
        self.bos = '<s>'
        self.eos = '</s>'
        self.max_pad = True
        self.moses_tok = False
    def reset_parameters(self):
        """Kaiming-initialize all gate weights and zero all biases."""
        layers = [self.ioux, self.iouh, self.ious, self.fx, self.fh, self.W_c]
        for layer in layers:
            init.kaiming_normal_(layer.weight)
            if layer.bias is not None:
                init.constant_(layer.bias, val=0)
    def node_forward(self, inputs, sememe_h, hx):
        """One LSTM step with sememe gating; *hx* is the previous (c, h)."""
        child_c = hx[0]
        child_h = hx[1]
        iou = self.ioux(inputs) + self.iouh(child_h) + self.ious(sememe_h)
        f, i, o, o_c = torch.split(iou, iou.size(1) // 4, dim=1)
        f, i, o, o_c = torch.sigmoid(f), torch.sigmoid(i), torch.sigmoid(o), torch.sigmoid(o_c)
        c_telta = self.fx(inputs) + self.fh(child_h)
        c_telta = torch.tanh(c_telta)
        fc = torch.mul(f, child_c) #part of memory cell induced by word-child
        c = torch.mul(i, c_telta) + fc #sum means sigma
        # the extra o_c gate injects the sememe representation into h
        h = torch.mul(o, torch.tanh(c)) + torch.mul(o_c, torch.tanh(self.W_c(sememe_h)))
        return (c, h)
    def forward(self, inputs, length, sememe_data):
        """Run over time-major *inputs* and return each sentence's last valid hidden state."""
        sememe_h = self.sememe_sum(sememe_data)
        max_time, batch_size, _ = inputs.size()
        output = []
        # zero-initialized (c, h) on the same device/dtype as the input
        hx = (inputs[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_(),
              inputs[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_())
        # NOTE(review): loop variable `time` shadows the `time` module (unused here)
        for time in range(max_time):
            next_hx = self.node_forward(inputs[time], sememe_h[time], hx)
            output.append(next_hx[1])
            hx = next_hx
        return torch.stack([output[length[i]-1][i] for i in range(len(length))], 0)
    def sememe_sum(self, input_s):
        """Project the multi-hot sememe tensor to dense sememe embeddings."""
        emb_sememe = self.emb_sememe.weight
        input_sememe = []
        for i in range(input_s.size()[0]):
            input_sememe.append(torch.mm(input_s[i].float(), emb_sememe))
        input_sememe = torch.stack(input_sememe, dim = 0)
        return input_sememe
    def set_w2v_path(self, w2v_path):
        """Remember where the word2vec/GloVe text file lives."""
        self.w2v_path = w2v_path
    def get_word_dict(self, sentences, tokenize=True):
        """Return a dict whose keys are all words of *sentences* plus <s>/</s>."""
        # create vocab of words
        word_dict = {}
        sentences = [s.split() if not tokenize else self.tokenize(s) for s in sentences]
        for sent in sentences:
            for word in sent:
                if word not in word_dict:
                    word_dict[word] = ''
        word_dict[self.bos] = ''
        word_dict[self.eos] = ''
        return word_dict
    def get_w2v(self, word_dict):
        """Load vectors for every word of *word_dict* found in the w2v file."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        # create word_vec with w2v vectors
        word_vec = {}
        with open(self.w2v_path, encoding='utf-8') as f:
            for line in f:
                word, vec = line.split(' ', 1)
                if word in word_dict:
                    # NOTE(review): np.fromstring with sep is deprecated;
                    # consider np.array(vec.split(), dtype=float)
                    word_vec[word] = np.fromstring(vec, sep=' ')
        print('Found %s(/%s) words with w2v vectors' % (len(word_vec), len(word_dict)))
        return word_vec
    def get_w2v_k(self, K):
        """Load the first K vectors (K+1 due to the <= condition) plus <s>/</s>."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        # create word_vec with k first w2v vectors
        k = 0
        word_vec = {}
        with open(self.w2v_path, encoding='utf-8') as f:
            for line in f:
                word, vec = line.split(' ', 1)
                if k <= K:
                    word_vec[word] = np.fromstring(vec, sep=' ')
                    k += 1
                if k > K:
                    # past the quota: only pick up the sentence markers
                    if word in [self.bos, self.eos]:
                        word_vec[word] = np.fromstring(vec, sep=' ')
                if k > K and all([w in word_vec for w in [self.bos, self.eos]]):
                    break
        return word_vec
    def build_vocab(self, sentences, tokenize=True):
        """Build self.word_vec for the vocabulary of *sentences*."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        word_dict = self.get_word_dict(sentences, tokenize)
        self.word_vec = self.get_w2v(word_dict)
        print('Vocab size : %s' % (len(self.word_vec)))
    # build w2v vocab with k most frequent words
    def build_vocab_k_words(self, K):
        """Load vectors for the K most frequent words into self.word_vec."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        self.word_vec = self.get_w2v_k(K)
        print('Vocab size : %s' % (K))
    def update_vocab(self, sentences, tokenize=True):
        """Add vectors for previously unseen words of *sentences* to self.word_vec."""
        assert hasattr(self, 'w2v_path'), 'warning : w2v path not set'
        assert hasattr(self, 'word_vec'), 'build_vocab before updating it'
        word_dict = self.get_word_dict(sentences, tokenize)
        # keep only new words
        for word in self.word_vec:
            if word in word_dict:
                del word_dict[word]
        # update vocabulary
        if word_dict:
            new_word_vec = self.get_w2v(word_dict)
            self.word_vec.update(new_word_vec)
        else:
            new_word_vec = []
        print('New vocab size : %s (added %s words)'% (len(self.word_vec), len(new_word_vec)))
    '''
    def get_batch(self, batch):
        # sent in batch in decreasing order of lengths
        # batch: (bsize, max_len, word_dim)
        embed = np.zeros((len(batch[0]), len(batch), self.word_emb_dim))
        for i in range(len(batch)):
            for j in range(len(batch[i])):
                embed[j, i, :] = self.word_vec[batch[i][j]]
        return torch.FloatTensor(embed)
    '''
    def is_cuda(self):
        """Whether batches should be moved to GPU (hard-coded True here)."""
        # either all weights are on cpu or they are on gpu
        return True
        #return self.enc_lstm.bias_hh_l0.data.is_cuda
    def tokenize(self, s):
        """Tokenize with NLTK; approximate Moses tokenization if self.moses_tok."""
        from nltk.tokenize import word_tokenize
        if self.moses_tok:
            s = ' '.join(word_tokenize(s))
            s = s.replace(" n't ", "n 't ")  # HACK to get ~MOSES tokenization
            return s.split()
        else:
            return word_tokenize(s)
    def prepare_samples(self, sentences, bsize, tokenize, verbose):
        """Wrap with <s>/</s>, drop OOV words, sort by decreasing length.
        Returns (sentences, lengths, idx_sort)."""
        sentences = [[self.bos] + s.split() + [self.eos] if not tokenize else
                     [self.bos] + self.tokenize(s) + [self.eos] for s in sentences]
        n_w = np.sum([len(x) for x in sentences])
        # filters words without w2v vectors
        for i in range(len(sentences)):
            s_f = [word for word in sentences[i] if word in self.word_vec]
            if not s_f:
                import warnings
                warnings.warn('No words in "%s" (idx=%s) have w2v vectors. \
Replacing by "</s>"..' % (sentences[i], i))
                s_f = [self.eos]
            sentences[i] = s_f
        lengths = np.array([len(s) for s in sentences])
        n_wk = np.sum(lengths)
        if verbose:
            print('Nb words kept : %s/%s (%.1f%s)' % (
                n_wk, n_w, 100.0 * n_wk / n_w, '%'))
        # sort by decreasing length
        lengths, idx_sort = np.sort(lengths)[::-1], np.argsort(-lengths)
        sentences = np.array(sentences)[idx_sort]
        return sentences, lengths, idx_sort
    def get_batch(self, batch, sememe, emb_dim=300):
        """Build the (max_len, bsize, 300) embedding tensor plus the multi-hot
        sememe tensor; the latter is moved to GPU unconditionally."""
        embed = np.zeros((len(batch[0]), len(batch), 300))
        # 2186: presumably the total sememe count — confirm against the sememe dict
        sememe_data = np.zeros((len(batch[0]), len(batch), 2186), dtype = np.uint8)
        for i in range(len(batch)):
            for j in range(len(batch[i])):
                embed[j, i, :] = self.word_vec[batch[i][j]]
                for k in sememe.read_word_sememe(batch[i][j]):
                    sememe_data[j, i, k] = 1
        return torch.from_numpy(embed).float(), torch.from_numpy(sememe_data).cuda()
    def encode(self, sentences, bsize=64, tokenize=True, verbose=False):
        """Encode *sentences* into a (n, dim) numpy array, batching by *bsize*."""
        tic = time.time()
        sentences, lengths, idx_sort = self.prepare_samples(
            sentences, bsize, tokenize, verbose)
        embeddings = []
        for stidx in range(0, len(sentences), bsize):
            batch, batch_s = self.get_batch(sentences[stidx:stidx + bsize], self.sememe)
            if self.is_cuda():
                batch = batch.cuda()
            with torch.no_grad():
                batch = self.forward(batch, lengths[stidx:stidx + bsize], batch_s).data.cpu().numpy()
            embeddings.append(batch)
        embeddings = np.vstack(embeddings)
        # unsort back to the caller's original order
        idx_unsort = np.argsort(idx_sort)
        embeddings = embeddings[idx_unsort]
        if verbose:
            print('Speed : %.1f sentences/s (%s mode, bsize=%s)' % (
                len(embeddings)/(time.time()-tic),
                'gpu' if self.is_cuda() else 'cpu', bsize))
        return embeddings
class LSTM_cell_bert_baseline(nn.Module):
    """LSTM baseline whose extra channel is a 512-d sense/definition vector
    (averaged BERT-like sense tensors) instead of sememe embeddings.

    Fixes over the previous revision (behavior otherwise unchanged):
    - ``dtype=np.float`` replaced with ``np.float64`` (the ``np.float`` alias
      was removed in NumPy 1.24 and crashed at runtime);
    - deprecated ``np.fromstring(vec, sep=' ')`` replaced with explicit parsing;
    - lemma file is closed via a ``with`` block;
    - the ``forward`` loop variable no longer shadows the ``time`` module.
    """
    def __init__(self, config, ):
        super(LSTM_cell_bert_baseline, self).__init__()
        self.enc_lstm_dim = config['enc_lstm_dim']
        # 512 = dimensionality of the sense/definition vectors
        self.sememesumlstm = SememeSumLstm(512, self.enc_lstm_dim)
        self.sememesumGRU = SememeSumGRU(512, self.enc_lstm_dim)
        self.in_dim = config['word_emb_dim']
        self.mem_dim = config['enc_lstm_dim']
        # x3: the three gate projections (i, o, u) are packed into a single
        # linear layer and split apart in node_forward
        self.ioux = nn.Linear(self.in_dim, 3 * self.mem_dim)
        self.iouh = nn.Linear(self.mem_dim, 3 * self.mem_dim)
        # ious handles the c and h passed in from the sense/definition side;
        # both are mem_dim-dimensional
        self.ious = nn.Linear(self.mem_dim, 3 * self.mem_dim)
        self.fx = nn.Linear(self.in_dim, self.mem_dim)
        self.fx_s = nn.Linear(self.in_dim, self.mem_dim)
        self.fh = nn.Linear(self.mem_dim, self.mem_dim)
        # fs handles the c and h passed in from the sense/definition side
        self.fs = nn.Linear(self.mem_dim, self.mem_dim)
        self.reset_parameters()
        self.bos = '<s>'
        self.eos = '</s>'
        self.max_pad = True
        self.moses_tok = False
        self.dic_lemma = self.read_lemmatization('../../NLI/dataset/lemmatization.txt')
        self.sense_tensor_dict = np.load('../../PrepareSememeDict/sense_tensor_dict.npy', allow_pickle=True).item()
    def reset_parameters(self):
        """Kaiming-initialize all gate weights and zero all biases."""
        layers = [self.ioux, self.iouh, self.ious, self.fx, self.fh, self.fs, self.fx_s]
        for layer in layers:
            init.kaiming_normal_(layer.weight)
            if layer.bias is not None:
                init.constant_(layer.bias, val=0)
    def node_forward(self, inputs, sememe_c, sememe_h, hx):
        """One LSTM step with an extra forget path for the sense channel."""
        child_c = hx[0]
        child_h = hx[1]
        iou = self.ioux(inputs) + self.iouh(child_h) + self.ious(sememe_h)
        i, o, u = torch.split(iou, iou.size(1) // 3, dim=1)
        i, o, u = torch.sigmoid(i), torch.sigmoid(o), torch.tanh(u)
        f = torch.sigmoid(
            self.fh(child_h) + self.fx(inputs)
        )
        f_s = torch.sigmoid(
            self.fs(sememe_h) + self.fx_s(inputs)
        )
        fc = torch.mul(f, child_c)  # part of memory cell induced by word-child
        fc_s = torch.mul(f_s, sememe_c)  # part of memory cell induced by sense-child
        c = torch.mul(i, u) + fc + fc_s  # sum means sigma
        h = torch.mul(o, torch.tanh(c))
        return (c, h)
    def forward(self, inputs, length, def_vec):
        """Run over time-major *inputs* and return each sentence's last valid hidden state."""
        sememe_c, sememe_h = self.sememe_sum(def_vec)
        max_time, batch_size, _ = inputs.size()
        output = []
        # zero-initialized (c, h) on the same device/dtype as the input
        hx = (inputs[0][0].detach().new(batch_size, sememe_c.size()[2]).fill_(0.).requires_grad_(),
              inputs[0][0].detach().new(batch_size, sememe_h.size()[2]).fill_(0.).requires_grad_())
        for t in range(max_time):  # `t`, not `time`: avoid shadowing the time module
            next_hx = self.node_forward(inputs[t], sememe_c[t], sememe_h[t], hx)
            output.append(next_hx[1])
            hx = next_hx
        return torch.stack([output[length[i] - 1][i] for i in range(len(length))], 0)
    def sememe_sum(self, input_s):
        """Feed the per-step sense vectors through SememeSumLstm; returns (c, h)."""
        input_sememe = []
        for i in range(input_s.size()[0]):
            input_sememe.append(input_s[i].float())
        input_sememe = torch.stack(input_sememe, dim=0)
        sememe_c, sememe_h = self.sememesumlstm(input_sememe)
        return sememe_c, sememe_h
    def set_w2v_path(self, w2v_path):
        """Remember where the word2vec/GloVe text file lives."""
        self.w2v_path = w2v_path
    def get_word_dict(self, sentences, tokenize=True):
        """Return a dict whose keys are all words of *sentences* plus <s>/</s>."""
        word_dict = {}
        sentences = [s.split() if not tokenize else self.tokenize(s) for s in sentences]
        for sent in sentences:
            for word in sent:
                if word not in word_dict:
                    word_dict[word] = ''
        word_dict[self.bos] = ''
        word_dict[self.eos] = ''
        return word_dict
    def get_w2v(self, word_dict):
        """Load vectors for every word of *word_dict* found in the w2v file."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        word_vec = {}
        with open(self.w2v_path, encoding='utf-8') as f:
            for line in f:
                word, vec = line.split(' ', 1)
                if word in word_dict:
                    # np.fromstring(vec, sep=' ') is deprecated; parse explicitly
                    word_vec[word] = np.array(vec.split(), dtype=np.float64)
        print('Found %s(/%s) words with w2v vectors' % (len(word_vec), len(word_dict)))
        return word_vec
    def get_w2v_k(self, K):
        """Load the first K vectors (K+1 due to the <= condition) plus <s>/</s>."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        k = 0
        word_vec = {}
        with open(self.w2v_path, encoding='utf-8') as f:
            for line in f:
                word, vec = line.split(' ', 1)
                if k <= K:
                    word_vec[word] = np.array(vec.split(), dtype=np.float64)
                    k += 1
                if k > K:
                    # past the quota: only pick up the sentence markers
                    if word in [self.bos, self.eos]:
                        word_vec[word] = np.array(vec.split(), dtype=np.float64)
                    if all(w in word_vec for w in [self.bos, self.eos]):
                        break
        return word_vec
    def build_vocab(self, sentences, tokenize=True):
        """Build self.word_vec for the vocabulary of *sentences*."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        word_dict = self.get_word_dict(sentences, tokenize)
        self.word_vec = self.get_w2v(word_dict)
        print('Vocab size : %s' % (len(self.word_vec)))
    # build w2v vocab with k most frequent words
    def build_vocab_k_words(self, K):
        """Load vectors for the K most frequent words into self.word_vec."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        self.word_vec = self.get_w2v_k(K)
        print('Vocab size : %s' % (K))
    def update_vocab(self, sentences, tokenize=True):
        """Add vectors for previously unseen words of *sentences* to self.word_vec."""
        assert hasattr(self, 'w2v_path'), 'warning : w2v path not set'
        assert hasattr(self, 'word_vec'), 'build_vocab before updating it'
        word_dict = self.get_word_dict(sentences, tokenize)
        # keep only new words
        for word in self.word_vec:
            if word in word_dict:
                del word_dict[word]
        # update vocabulary
        if word_dict:
            new_word_vec = self.get_w2v(word_dict)
            self.word_vec.update(new_word_vec)
        else:
            new_word_vec = []
        print('New vocab size : %s (added %s words)' % (len(self.word_vec), len(new_word_vec)))
    def is_cuda(self):
        """Whether batches should be moved to GPU (hard-coded True here)."""
        # either all weights are on cpu or they are on gpu
        return True
        # return self.enc_lstm.bias_hh_l0.data.is_cuda
    def tokenize(self, s):
        """Tokenize with NLTK; approximate Moses tokenization if self.moses_tok."""
        from nltk.tokenize import word_tokenize
        if self.moses_tok:
            s = ' '.join(word_tokenize(s))
            s = s.replace(" n't ", "n 't ")  # HACK to get ~MOSES tokenization
            return s.split()
        else:
            return word_tokenize(s)
    def prepare_samples(self, sentences, bsize, tokenize, verbose):
        """Wrap with <s>/</s>, drop OOV words, sort by decreasing length.
        Returns (sentences, lengths, idx_sort)."""
        sentences = [[self.bos] + s.split() + [self.eos] if not tokenize else
                     [self.bos] + self.tokenize(s) + [self.eos] for s in sentences]
        n_w = np.sum([len(x) for x in sentences])
        # filters words without w2v vectors
        for i in range(len(sentences)):
            s_f = [word for word in sentences[i] if word in self.word_vec]
            if not s_f:
                import warnings
                warnings.warn('No words in "%s" (idx=%s) have w2v vectors. \
Replacing by "</s>"..' % (sentences[i], i))
                s_f = [self.eos]
            sentences[i] = s_f
        lengths = np.array([len(s) for s in sentences])
        n_wk = np.sum(lengths)
        if verbose:
            print('Nb words kept : %s/%s (%.1f%s)' % (
                n_wk, n_w, 100.0 * n_wk / n_w, '%'))
        # sort by decreasing length
        lengths, idx_sort = np.sort(lengths)[::-1], np.argsort(-lengths)
        sentences = np.array(sentences)[idx_sort]
        return sentences, lengths, idx_sort
    def read_lemmatization(self, lemma_dir):
        """Read 'lemma inflected' pairs; returns {inflected_form: lemma}."""
        dic_lemma = {}
        with open(lemma_dir) as f:
            for line in f:
                line = line.strip().split()
                dic_lemma[line[1]] = line[0]
        return dic_lemma
    def get_def_vec_by_word(self, word):
        """Return the mean 512-d sense tensor of *word* (lemmatized, lowercase),
        or a zero vector when no sense tensor is known."""
        word_lower = word.lower()
        if word_lower in self.dic_lemma:
            word_lower = self.dic_lemma[word_lower]
        if word_lower in self.sense_tensor_dict:
            tensor_list = self.sense_tensor_dict[word_lower]
            base_tensor = np.zeros(512)
            for pos, tensor in tensor_list:
                base_tensor = np.add(base_tensor, tensor)
            base_tensor = base_tensor / float(len(tensor_list))
            return base_tensor
        else:
            return np.zeros(512)
    def get_batch(self, batch, emb_dim=300, ):
        """Build the (max_len, bsize, 300) embedding tensor plus the per-word
        512-d definition tensor; the latter is moved to GPU unconditionally."""
        embed = np.zeros((len(batch[0]), len(batch), 300))
        # np.float was removed in NumPy 1.24 — use the explicit float64 dtype
        def_data = np.zeros((len(batch[0]), len(batch), 512), dtype=np.float64)
        for i in range(len(batch)):
            for j in range(len(batch[i])):
                embed[j, i, :] = self.word_vec[batch[i][j]]
                def_data[j, i] = self.get_def_vec_by_word(batch[i][j])
        return torch.from_numpy(embed).float(), torch.from_numpy(def_data).cuda()
    def encode(self, sentences, bsize=64, tokenize=True, verbose=False,):
        """Encode *sentences* into a (n, dim) numpy array, batching by *bsize*."""
        tic = time.time()
        sentences, lengths, idx_sort = self.prepare_samples(
            sentences, bsize, tokenize, verbose)
        embeddings = []
        for stidx in range(0, len(sentences), bsize):
            batch, batch_s = self.get_batch(sentences[stidx:stidx + bsize], 300, )
            if self.is_cuda():
                batch = batch.cuda()
            with torch.no_grad():
                batch = self.forward(batch, lengths[stidx:stidx + bsize], batch_s).data.cpu().numpy()
            embeddings.append(batch)
        embeddings = np.vstack(embeddings)
        # unsort back to the caller's original order
        idx_unsort = np.argsort(idx_sort)
        embeddings = embeddings[idx_unsort]
        if verbose:
            print('Speed : %.1f sentences/s (%s mode, bsize=%s)' % (
                len(embeddings) / (time.time() - tic),
                'gpu' if self.is_cuda() else 'cpu', bsize))
        return embeddings
class LSTM_cell(nn.Module):
def __init__(self, config, sememe):
super(LSTM_cell, self).__init__()
self.enc_lstm_dim = config['enc_lstm_dim']
self.sememe_dim = config['sememe_dim']
self.sememesumlstm = SememeSumLstm(self.sememe_dim, self.enc_lstm_dim)
self.sememesumGRU = SememeSumGRU(self.sememe_dim, self.enc_lstm_dim)
self.sememe_dim = config['sememe_dim']
self.sememe_size = config['sememe_size']
self.emb_sememe = nn.Embedding(self.sememe_size, self.sememe_dim)
self.in_dim = config['word_emb_dim']
self.mem_dim = config['enc_lstm_dim']
#乘3代表3种矩阵,它后来用split分开了
self.ioux = nn.Linear(self.in_dim, 3 * self.mem_dim)
self.iouh = nn.Linear(self.mem_dim, 3 * self.mem_dim)
#ious是专门处理sememe传过来的c 和 h,c和h都是mem_dim维的
self.ious = nn.Linear(self.mem_dim, 3 * self.mem_dim)
self.fx = nn.Linear(self.in_dim, self.mem_dim)
self.fx_s = nn.Linear(self.in_dim, self.mem_dim)
self.fh = nn.Linear(self.mem_dim, self.mem_dim)
#fs是专门处理sememe传过来的c和h
self.fs = nn.Linear(self.mem_dim, self.mem_dim)
self.reset_parameters()
self.sememe = sememe
self.bos = '<s>'
self.eos = '</s>'
self.max_pad = True
self.moses_tok = False
def reset_parameters(self):
layers = [self.ioux, self.iouh, self.ious, self.fx, self.fh, self.fs, self.fx_s]
for layer in layers:
init.kaiming_normal_(layer.weight)
if layer.bias is not None:
init.constant_(layer.bias, val=0)
def node_forward(self, inputs, sememe_c, sememe_h, hx):
child_c = hx[0]
child_h = hx[1]
iou = self.ioux(inputs) + self.iouh(child_h) + self.ious(sememe_h)
i, o, u = torch.split(iou, iou.size(1) // 3, dim=1)
i, o, u = torch.sigmoid(i), torch.sigmoid(o), torch.tanh(u)
f = torch.sigmoid(
self.fh(child_h) + self.fx(inputs)
)
f_s = torch.sigmoid(
self.fs(sememe_h) + self.fx_s(inputs)
)
fc = torch.mul(f, child_c) #part of memory cell induced by word-child
fc_s = torch.mul(f_s, sememe_c) # part of memory cell induced by sememe-child
c = torch.mul(i, u) + fc + fc_s #sum means sigma
h = torch.mul(o, torch.tanh(c))
return (c, h)
def forward(self, inputs, length, sememe_data):
sememe_c, sememe_h = self.sememe_sum(sememe_data)
max_time, batch_size, _ = inputs.size()
output = []
hx = (inputs[0][0].detach().new(batch_size, sememe_c.size()[2]).fill_(0.).requires_grad_(),
inputs[0][0].detach().new(batch_size, sememe_h.size()[2]).fill_(0.).requires_grad_())
for time in range(max_time):
next_hx = self.node_forward(inputs[time], sememe_c[time], sememe_h[time], hx)
output.append(next_hx[1])
hx = next_hx
return torch.stack([output[length[i]-1][i] for i in range(len(length))], 0)
def sememe_sum(self, input_s):
emb_sememe = self.emb_sememe.weight
input_sememe = []
for i in range(input_s.size()[0]):
input_sememe.append(torch.mm(input_s[i].float(), emb_sememe))
input_sememe = torch.stack(input_sememe, dim = 0)
sememe_c, sememe_h = self.sememesumlstm(input_sememe)
return sememe_c, sememe_h
def set_w2v_path(self, w2v_path):
self.w2v_path = w2v_path
def get_word_dict(self, sentences, tokenize=True):
# create vocab of words
word_dict = {}
sentences = [s.split() if not tokenize else self.tokenize(s) for s in sentences]
for sent in sentences:
for word in sent:
if word not in word_dict:
word_dict[word] = ''
word_dict[self.bos] = ''
word_dict[self.eos] = ''
return word_dict
def get_w2v(self, word_dict):
assert hasattr(self, 'w2v_path'), 'w2v path not set'
# create word_vec with w2v vectors
word_vec = {}
with open(self.w2v_path, encoding='utf-8') as f:
for line in f:
word, vec = line.split(' ', 1)
if word in word_dict:
word_vec[word] = np.fromstring(vec, sep=' ')
print('Found %s(/%s) words with w2v vectors' % (len(word_vec), len(word_dict)))
return word_vec
def get_w2v_k(self, K):
assert hasattr(self, 'w2v_path'), 'w2v path not set'
# create word_vec with k first w2v vectors
k = 0
word_vec = {}
with open(self.w2v_path, encoding='utf-8') as f:
for line in f:
word, vec = line.split(' ', 1)
if k <= K:
word_vec[word] = np.fromstring(vec, sep=' ')
k += 1
if k > K:
if word in [self.bos, self.eos]:
word_vec[word] = np.fromstring(vec, sep=' ')
if k > K and all([w in word_vec for w in [self.bos, self.eos]]):
break
return word_vec
def build_vocab(self, sentences, tokenize=True):
assert hasattr(self, 'w2v_path'), 'w2v path not set'
word_dict = self.get_word_dict(sentences, tokenize)
self.word_vec = self.get_w2v(word_dict)
print('Vocab size : %s' % (len(self.word_vec)))
# build w2v vocab with k most frequent words
def build_vocab_k_words(self, K):
assert hasattr(self, 'w2v_path'), 'w2v path not set'
self.word_vec = self.get_w2v_k(K)
print('Vocab size : %s' % (K))
def update_vocab(self, sentences, tokenize=True):
assert hasattr(self, 'w2v_path'), 'warning : w2v path not set'
assert hasattr(self, 'word_vec'), 'build_vocab before updating it'
word_dict = self.get_word_dict(sentences, tokenize)
# keep only new words
for word in self.word_vec:
if word in word_dict:
del word_dict[word]
# udpate vocabulary
if word_dict:
new_word_vec = self.get_w2v(word_dict)
self.word_vec.update(new_word_vec)
else:
new_word_vec = []
print('New vocab size : %s (added %s words)'% (len(self.word_vec), len(new_word_vec)))
'''
def get_batch(self, batch):
# sent in batch in decreasing order of lengths
# batch: (bsize, max_len, word_dim)
embed = np.zeros((len(batch[0]), len(batch), self.word_emb_dim))
for i in range(len(batch)):
for j in range(len(batch[i])):
embed[j, i, :] = self.word_vec[batch[i][j]]
return torch.FloatTensor(embed)
'''
def is_cuda(self):
# either all weights are on cpu or they are on gpu
return True
#return self.enc_lstm.bias_hh_l0.data.is_cuda
def tokenize(self, s):
from nltk.tokenize import word_tokenize
if self.moses_tok:
s = ' '.join(word_tokenize(s))
s = s.replace(" n't ", "n 't ") # HACK to get ~MOSES tokenization
return s.split()
else:
return word_tokenize(s)
def prepare_samples(self, sentences, bsize, tokenize, verbose):
sentences = [[self.bos] + s.split() + [self.eos] if not tokenize else
[self.bos] + self.tokenize(s) + [self.eos] for s in sentences]
n_w = np.sum([len(x) for x in sentences])
# filters words without w2v vectors
for i in range(len(sentences)):
s_f = [word for word in sentences[i] if word in self.word_vec]
if not s_f:
import warnings
warnings.warn('No words in "%s" (idx=%s) have w2v vectors. \
Replacing by "</s>"..' % (sentences[i], i))
s_f = [self.eos]
sentences[i] = s_f
lengths = np.array([len(s) for s in sentences])
n_wk = np.sum(lengths)
if verbose:
print('Nb words kept : %s/%s (%.1f%s)' % (
n_wk, n_w, 100.0 * n_wk / n_w, '%'))
# sort by decreasing length
lengths, idx_sort = np.sort(lengths)[::-1], np.argsort(-lengths)
sentences = np.array(sentences)[idx_sort]
return sentences, lengths, idx_sort
    def get_batch(self, batch, sememe, emb_dim=300, size=300):
        """Build time-major word embeddings and sememe indicators for a batch.

        batch: list of token lists, already sorted by decreasing length.
        sememe: provider with read_word_sememe(word) -> iterable of sememe ids.
        size: width of the 0/1 sememe indicator vector.
        Returns (embed, sememe_data): (max_len, bsize, 300) float CPU tensor
        and (max_len, bsize, size) uint8 tensor moved to GPU.

        NOTE(review): `emb_dim` is accepted but the embedding width is
        hard-coded to 300 below -- confirm before changing either.
        """
        # time-major layout: embed[j, i] is token j of sentence i
        embed = np.zeros((len(batch[0]), len(batch), 300))
        sememe_data = np.zeros((len(batch[0]), len(batch), size), dtype = np.uint8)
        for i in range(len(batch)):
            for j in range(len(batch[i])):
                embed[j, i, :] = self.word_vec[batch[i][j]]
                # set a 1 for every sememe id attached to this word
                for k in sememe.read_word_sememe(batch[i][j]):
                    sememe_data[j, i, k] = 1
        # sememe indicators go straight to GPU; embeddings are moved by the caller
        return torch.from_numpy(embed).float(), torch.from_numpy(sememe_data).cuda()
    def encode(self, sentences, bsize=64, tokenize=True, verbose=False, size=300):
        """Encode raw sentences into sentence vectors, batch by batch.

        Sentences are length-sorted for batching and the resulting embedding
        matrix is unsorted back to input order. `size` is the sememe-indicator
        width forwarded to get_batch. Returns a (n_sentences, dim) numpy array.
        """
        tic = time.time()
        sentences, lengths, idx_sort = self.prepare_samples(
                        sentences, bsize, tokenize, verbose)
        embeddings = []
        for stidx in range(0, len(sentences), bsize):
            # batch: word embeddings; batch_s: sememe indicators (already on GPU)
            batch, batch_s = self.get_batch(sentences[stidx:stidx + bsize], self.sememe,300, size)
            if self.is_cuda():
                batch = batch.cuda()
            with torch.no_grad():
                batch = self.forward(batch, lengths[stidx:stidx + bsize], batch_s).data.cpu().numpy()
            embeddings.append(batch)
        embeddings = np.vstack(embeddings)
        # unsort back to the caller's sentence order
        idx_unsort = np.argsort(idx_sort)
        embeddings = embeddings[idx_unsort]
        if verbose:
            print('Speed : %.1f sentences/s (%s mode, bsize=%s)' % (
                len(embeddings)/(time.time()-tic),
                'gpu' if self.is_cuda() else 'cpu', bsize))
        return embeddings
class LSTM_extra_void(nn.Module):
    """Unidirectional LSTM encoder; raw sememe indicators only enter the
    attentive pooling over hidden states, not the recurrence itself."""
    def __init__(self, config, sememe=None):
        # FIX: `sememe` used to be read from an undefined global (original
        # signature was (self, config) while the body did `self.sememe =
        # sememe`); it is now an explicit optional argument, matching the
        # (config, sememe) signature of the BILSTM_* encoders.
        super(LSTM_extra_void, self).__init__()
        self.enc_lstm_dim = config['enc_lstm_dim']
        self.sememe_dim = config['sememe_dim']
        self.sememesumlstm = SememeSumLstm(self.sememe_dim, self.enc_lstm_dim)
        self.sememesumGRU = SememeSumGRU(self.sememe_dim, self.enc_lstm_dim)
        self.sememe_dim = config['sememe_dim']
        self.sememe_size = config['sememe_size']
        self.emb_sememe = nn.Embedding(self.sememe_size, self.sememe_dim)
        self.in_dim = config['word_emb_dim']
        self.mem_dim = config['enc_lstm_dim']
        # 3x width: i/o/u gate pre-activations, separated later with split()
        self.ioux = nn.Linear(self.in_dim, 3 * self.mem_dim)
        self.iouh = nn.Linear(self.mem_dim, 3 * self.mem_dim)
        self.fx = nn.Linear(self.in_dim, self.mem_dim)
        self.fh = nn.Linear(self.mem_dim, self.mem_dim)
        # W_s projects the raw sememe indicator vector for attention scoring
        self.W_s = nn.Linear(config['sememe_size'], self.mem_dim)
        self.W = nn.Linear(self.mem_dim, self.mem_dim)
        self.query = nn.Embedding(2*self.mem_dim, 1)
        self.W_p = nn.Linear(self.mem_dim, self.mem_dim)
        self.W_x = nn.Linear(self.mem_dim, self.mem_dim)
        self.reset_parameters()
        self.sememe = sememe
        self.bos = '<s>'
        self.eos = '</s>'
        self.max_pad = True
        self.moses_tok = False
    def reset_parameters(self):
        """Kaiming-initialise the listed projections; zero their biases."""
        layers = [self.ioux, self.iouh, self.fx, self.fh, self.W]
        for layer in layers:
            init.kaiming_normal_(layer.weight)
            if layer.bias is not None:
                init.constant_(layer.bias, val=0)
    def node_forward(self, inputs, hx):
        """Single plain-LSTM step; returns the new (cell, hidden) pair."""
        child_c = hx[0]
        child_h = hx[1]
        iou = self.ioux(inputs) + self.iouh(child_h)
        i, o, u = torch.split(iou, iou.size(1) // 3, dim=1)
        i, o, u = torch.sigmoid(i), torch.sigmoid(o), torch.tanh(u)
        f = torch.sigmoid(
            self.fh(child_h) + self.fx(inputs)
        )
        fc = torch.mul(f, child_c) #part of memory cell induced by word-child
        c = torch.mul(i, u) + fc #sum means sigma
        h = torch.mul(o, torch.tanh(c))
        return (c, h)
    def forward(self, inputs, length, sememe_data):
        """Encode a (T, B, in_dim) batch into (B, mem_dim) sentence vectors.

        Pooling: attention over per-step hidden states, with raw sememe
        indicators mixed into the attention input, plus the last hidden
        state. NOTE(review): assumes CUDA is available (.cuda() below).
        """
        emb_s = sememe_data.float().cuda()
        max_time, batch_size, _ = inputs.size()
        output = []
        hx = (inputs[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_(),
              inputs[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_())
        for time in range(max_time):
            next_hx = self.node_forward(inputs[time], hx)
            output.append(next_hx[1])
            hx = next_hx
        new_output = []
        new_output_2 = []
        for i in range(len(length)):
            # hidden states of sentence i up to its true length
            hidden_old = torch.stack(output[0:length[i]], dim = 0)[:, i, :]
            # last valid hidden state of sentence i
            new_output_2.append(torch.index_select(output[length[i]-1], 0, torch.tensor(i, device = 'cuda')))
            hidden = self.W(hidden_old)
            emb_s_sum = emb_s[0:length[i], i, :]
            emb_s_sum = self.W_s(emb_s_sum)
            hidden = torch.cat([hidden, emb_s_sum], dim = 1)
            att = torch.tanh(torch.mm(hidden, self.query.weight))
            new_output.append(torch.mm(att.transpose(1,0), hidden_old))
        new_output = self.W_p(torch.squeeze(torch.stack(new_output, dim = 0))) + self.W_x(torch.squeeze(torch.stack(new_output_2, dim = 0)))
        new_output = torch.tanh(new_output)
        return new_output
class LSTM_extra_concat(nn.Module):
    """Unidirectional LSTM encoder whose input at each step is the word
    embedding concatenated with the summed sememe embedding."""
    def __init__(self, config, sememe=None):
        # FIX: `sememe` used to be read from an undefined global (original
        # signature was (self, config)); now an explicit optional argument,
        # matching the (config, sememe) signature of the BILSTM_* encoders.
        super(LSTM_extra_concat, self).__init__()
        self.enc_lstm_dim = config['enc_lstm_dim']
        self.sememe_dim = config['sememe_dim']
        self.sememesumlstm = SememeSumLstm(self.sememe_dim, self.enc_lstm_dim)
        self.sememesumGRU = SememeSumGRU(self.sememe_dim, self.enc_lstm_dim)
        self.sememe_dim = config['sememe_dim']
        self.sememe_size = config['sememe_size']
        self.emb_sememe = nn.Embedding(self.sememe_size, self.sememe_dim)
        self.in_dim = config['word_emb_dim']
        self.mem_dim = config['enc_lstm_dim']
        # 2*in_dim: word embedding concatenated with sememe embedding;
        # 3x width: i/o/u gate pre-activations, separated later with split()
        self.ioux = nn.Linear(2 * self.in_dim, 3 * self.mem_dim)
        self.iouh = nn.Linear(self.mem_dim, 3 * self.mem_dim)
        self.fx = nn.Linear(2 * self.in_dim, self.mem_dim)
        self.fh = nn.Linear(self.mem_dim, self.mem_dim)
        # W_s projects the summed sememe embedding for attention scoring
        self.W_s = nn.Linear(self.in_dim, self.mem_dim)
        self.W = nn.Linear(self.mem_dim, self.mem_dim)
        self.query = nn.Embedding(2*self.mem_dim, 1)
        self.W_p = nn.Linear(self.mem_dim, self.mem_dim)
        self.W_x = nn.Linear(self.mem_dim, self.mem_dim)
        self.reset_parameters()
        self.sememe = sememe
        self.bos = '<s>'
        self.eos = '</s>'
        self.max_pad = True
        self.moses_tok = False
    def reset_parameters(self):
        """Kaiming-initialise the listed projections; zero their biases."""
        layers = [self.ioux, self.iouh, self.fx, self.fh, self.W]
        for layer in layers:
            init.kaiming_normal_(layer.weight)
            if layer.bias is not None:
                init.constant_(layer.bias, val=0)
    def node_forward(self, inputs, hx, emb_s_concat):
        """One LSTM step; the sememe embedding is concatenated to the input."""
        child_c = hx[0]
        child_h = hx[1]
        inputs = torch.cat([inputs, emb_s_concat], dim = 1)
        iou = self.ioux(inputs) + self.iouh(child_h)
        i, o, u = torch.split(iou, iou.size(1) // 3, dim=1)
        i, o, u = torch.sigmoid(i), torch.sigmoid(o), torch.tanh(u)
        f = torch.sigmoid(
            self.fh(child_h) + self.fx(inputs)
        )
        fc = torch.mul(f, child_c) #part of memory cell induced by word-child
        c = torch.mul(i, u) + fc #sum means sigma
        h = torch.mul(o, torch.tanh(c))
        return (c, h)
    def forward(self, inputs, length, sememe_data):
        """Encode a (T, B, in_dim) batch into (B, mem_dim) sentence vectors.

        NOTE(review): assumes CUDA is available (.cuda()/device='cuda' below).
        """
        emb_s = self.sememe_sum(sememe_data)
        max_time, batch_size, _ = inputs.size()
        output = []
        hx = (inputs[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_(),
              inputs[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_())
        for time in range(max_time):
            next_hx = self.node_forward(inputs[time], hx, emb_s[time])
            output.append(next_hx[1])
            hx = next_hx
        new_output = []
        new_output_2 = []
        for i in range(len(length)):
            # hidden states of sentence i up to its true length
            hidden_old = torch.stack(output[0:length[i]], dim = 0)[:, i, :]
            # last valid hidden state of sentence i
            new_output_2.append(torch.index_select(output[length[i]-1], 0, torch.tensor(i, device = 'cuda')))
            hidden = self.W(hidden_old)
            emb_s_sum = emb_s[0:length[i], i, :]
            emb_s_sum = self.W_s(emb_s_sum)
            hidden = torch.cat([hidden, emb_s_sum], dim = 1)
            att = torch.tanh(torch.mm(hidden, self.query.weight))
            new_output.append(torch.mm(att.transpose(1,0), hidden_old))
        new_output = self.W_p(torch.squeeze(torch.stack(new_output, dim = 0))) + self.W_x(torch.squeeze(torch.stack(new_output_2, dim = 0)))
        new_output = torch.tanh(new_output)
        return new_output
    def sememe_sum(self, input_s):
        """Project (T, B, sememe_size) indicators to (T, B, sememe_dim) by
        summing sememe embedding rows per token."""
        emb_sememe = self.emb_sememe.weight
        input_sememe = []
        for i in range(input_s.size()[0]):
            input_sememe.append(torch.mm(input_s[i].float(), emb_sememe))
        input_sememe = torch.stack(input_sememe, dim = 0)
        return input_sememe
class LSTM_extra_gate(nn.Module):
    """Unidirectional LSTM encoder with an extra sememe output gate: the
    sememe embedding feeds the gate pre-activations and a dedicated o_c gate
    mixes a sememe term into the hidden state."""
    def __init__(self, config, sememe=None):
        # FIX: `sememe` used to be read from an undefined global (original
        # signature was (self, config)); now an explicit optional argument,
        # matching the (config, sememe) signature of the BILSTM_* encoders.
        super(LSTM_extra_gate, self).__init__()
        self.enc_lstm_dim = config['enc_lstm_dim']
        self.sememe_dim = config['sememe_dim']
        self.sememesumlstm = SememeSumLstm(self.sememe_dim, self.enc_lstm_dim)
        self.sememesumGRU = SememeSumGRU(self.sememe_dim, self.enc_lstm_dim)
        self.sememe_dim = config['sememe_dim']
        self.sememe_size = config['sememe_size']
        self.emb_sememe = nn.Embedding(self.sememe_size, self.sememe_dim)
        self.in_dim = config['word_emb_dim']
        self.mem_dim = config['enc_lstm_dim']
        # 4x width: f/i/o/o_c gate pre-activations, separated with split()
        self.ioux = nn.Linear(self.in_dim, 4 * self.mem_dim)
        self.iouh = nn.Linear(self.mem_dim, 4 * self.mem_dim)
        self.ious = nn.Linear(self.in_dim, 4 * self.mem_dim)
        self.fx = nn.Linear(self.in_dim, self.mem_dim)
        self.fh = nn.Linear(self.mem_dim, self.mem_dim)
        # NOTE(review): fx_s/fh_s/fc_s/fs are initialised (and reset) but not
        # used by node_forward -- presumably kept for checkpoint parity.
        self.fx_s = nn.Linear(self.in_dim, self.mem_dim)
        self.fh_s = nn.Linear(self.mem_dim, self.mem_dim)
        self.fc_s = nn.Linear(self.mem_dim, self.mem_dim)
        self.fs = nn.Linear(self.mem_dim, self.mem_dim)
        self.W_s = nn.Linear(self.in_dim, self.mem_dim)
        self.W_c = nn.Linear(self.in_dim, self.mem_dim)
        self.W = nn.Linear(self.mem_dim, self.mem_dim)
        self.query = nn.Embedding(2*self.mem_dim, 1)
        self.W_p = nn.Linear(self.mem_dim, self.mem_dim)
        self.W_x = nn.Linear(self.mem_dim, self.mem_dim)
        self.reset_parameters()
        self.sememe = sememe
        self.bos = '<s>'
        self.eos = '</s>'
        self.max_pad = True
        self.moses_tok = False
    def reset_parameters(self):
        """Kaiming-initialise the listed projections; zero their biases."""
        layers = [self.ioux, self.iouh, self.ious, self.fx, self.fh, self.W, self.fx_s, self.fh_s, self.fc_s, self.fs]
        for layer in layers:
            init.kaiming_normal_(layer.weight)
            if layer.bias is not None:
                init.constant_(layer.bias, val=0)
    def node_forward(self, inputs, hx, emb_s):
        """One LSTM step with a sememe-driven extra output gate o_c."""
        child_c = hx[0]
        child_h = hx[1]
        iou = self.ioux(inputs) + self.iouh(child_h) + self.ious(emb_s)
        f, i, o, o_c = torch.split(iou, iou.size(1) // 4, dim=1)
        f, i, o, o_c = torch.sigmoid(f), torch.sigmoid(i), torch.sigmoid(o), torch.sigmoid(o_c)
        c_telta = self.fx(inputs) + self.fh(child_h)
        c_telta = torch.tanh(c_telta)
        fc = torch.mul(f, child_c) #part of memory cell induced by word-child
        c = torch.mul(i, c_telta) + fc #sum means sigma
        # hidden state gets an extra sememe contribution through o_c
        h = torch.mul(o, torch.tanh(c)) + torch.mul(o_c, torch.tanh(self.W_c(emb_s)))
        return (c, h)
    def forward(self, inputs, length, sememe_data):
        """Encode a (T, B, in_dim) batch into (B, mem_dim) sentence vectors.

        NOTE(review): assumes CUDA is available (.cuda()/device='cuda' below).
        """
        emb_s = self.sememe_sum(sememe_data)
        max_time, batch_size, _ = inputs.size()
        output = []
        hx = (inputs[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_(),
              inputs[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_())
        for time in range(max_time):
            next_hx = self.node_forward(inputs[time], hx, emb_s[time])
            output.append(next_hx[1])
            hx = next_hx
        new_output = []
        new_output_2 = []
        for i in range(len(length)):
            # hidden states of sentence i up to its true length
            hidden_old = torch.stack(output[0:length[i]], dim = 0)[:, i, :]
            # last valid hidden state of sentence i
            new_output_2.append(torch.index_select(output[length[i]-1], 0, torch.tensor(i, device = 'cuda')))
            hidden = self.W(hidden_old)
            emb_s_sum = emb_s[0:length[i], i, :]
            emb_s_sum = self.W_s(emb_s_sum)
            hidden = torch.cat([hidden, emb_s_sum], dim = 1)
            att = torch.tanh(torch.mm(hidden, self.query.weight))
            new_output.append(torch.mm(att.transpose(1,0), hidden_old))
        new_output = self.W_p(torch.squeeze(torch.stack(new_output, dim = 0))) + self.W_x(torch.squeeze(torch.stack(new_output_2, dim = 0)))
        new_output = torch.tanh(new_output)
        return new_output
    def sememe_sum(self, input_s):
        """Project (T, B, sememe_size) indicators to (T, B, sememe_dim) by
        summing sememe embedding rows per token."""
        emb_sememe = self.emb_sememe.weight
        input_sememe = []
        for i in range(input_s.size()[0]):
            input_sememe.append(torch.mm(input_s[i].float(), emb_sememe))
        input_sememe = torch.stack(input_sememe, dim = 0)
        return input_sememe
class LSTM_extra_cell(nn.Module):
    """Unidirectional LSTM encoder with a sememe 'cell child': a parallel
    sememe LSTM provides (sememe_c, sememe_h) that enter the gates and add a
    separately-forgotten memory term."""
    def __init__(self, config, sememe=None):
        # FIX: `sememe` used to be read from an undefined global (original
        # signature was (self, config)); now an explicit optional argument,
        # matching the (config, sememe) signature of the BILSTM_* encoders.
        super(LSTM_extra_cell, self).__init__()
        self.enc_lstm_dim = config['enc_lstm_dim']
        self.sememe_dim = config['sememe_dim']
        self.sememesumlstm = SememeSumLstm(self.sememe_dim, self.enc_lstm_dim)
        self.sememesumGRU = SememeSumGRU(self.sememe_dim, self.enc_lstm_dim)
        self.sememe_dim = config['sememe_dim']
        self.sememe_size = config['sememe_size']
        self.emb_sememe = nn.Embedding(self.sememe_size, self.sememe_dim)
        self.in_dim = config['word_emb_dim']
        self.mem_dim = config['enc_lstm_dim']
        # 3x width: i/o/u gate pre-activations, separated later with split()
        self.ioux = nn.Linear(self.in_dim, 3 * self.mem_dim)
        self.iouh = nn.Linear(self.mem_dim, 3 * self.mem_dim)
        # ious handles the sememe hidden state inside the i/o/u gates
        self.ious = nn.Linear(self.mem_dim, 3 * self.mem_dim)
        self.fx = nn.Linear(self.in_dim, self.mem_dim)
        self.fx_s = nn.Linear(self.in_dim, self.mem_dim)
        # fs handles the sememe hidden state for the sememe forget gate
        self.fs = nn.Linear(self.mem_dim, self.mem_dim)
        self.fh = nn.Linear(self.mem_dim, self.mem_dim)
        self.W_s = nn.Linear(self.mem_dim, self.mem_dim)
        self.W = nn.Linear(self.mem_dim, self.mem_dim)
        self.query = nn.Embedding(2*self.mem_dim, 1)
        self.W_p = nn.Linear(self.mem_dim, self.mem_dim)
        self.W_x = nn.Linear(self.mem_dim, self.mem_dim)
        self.reset_parameters()
        self.sememe = sememe
        self.bos = '<s>'
        self.eos = '</s>'
        self.max_pad = True
        self.moses_tok = False
    def reset_parameters(self):
        """Kaiming-initialise the listed projections; zero their biases."""
        layers = [self.ioux, self.iouh, self.ious, self.fx, self.fx_s, self.fs, self.fh, self.W]
        for layer in layers:
            init.kaiming_normal_(layer.weight)
            if layer.bias is not None:
                init.constant_(layer.bias, val=0)
    def node_forward(self, inputs, sememe_c, sememe_h, hx):
        """One LSTM step treating the sememe state as an extra child cell."""
        child_c = hx[0]
        child_h = hx[1]
        iou = self.ioux(inputs) + self.iouh(child_h) + self.ious(sememe_h)
        i, o, u = torch.split(iou, iou.size(1) // 3, dim=1)
        i, o, u = torch.sigmoid(i), torch.sigmoid(o), torch.tanh(u)
        f = torch.sigmoid(
            self.fh(child_h) + self.fx(inputs)
        )
        f_s = torch.sigmoid(
            self.fs(sememe_h) + self.fx_s(inputs)
        )
        fc = torch.mul(f, child_c) #part of memory cell induced by word-child
        fc_s = torch.mul(f_s, sememe_c) # part of memory cell induced by sememe-child
        c = torch.mul(i, u) + fc + fc_s #sum means sigma
        h = torch.mul(o, torch.tanh(c))
        return (c, h)
    def forward(self, inputs, length, sememe_data):
        """Encode a (T, B, in_dim) batch into (B, mem_dim) sentence vectors.

        NOTE(review): assumes CUDA is available (.cuda()/device='cuda' below).
        """
        # hx: (child_c, child_h)
        sememe_c, sememe_h = self.sememe_sum(sememe_data)
        max_time, batch_size, _ = inputs.size()
        output = []
        hx = (inputs[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_(),
              inputs[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_())
        for time in range(max_time):
            next_hx = self.node_forward(inputs[time], sememe_c[time], sememe_h[time], hx)
            output.append(next_hx[1])
            hx = next_hx
        new_output = []
        new_output_2 = []
        for i in range(len(length)):
            # hidden states of sentence i up to its true length
            hidden_old = torch.stack(output[0:length[i]], dim = 0)[:, i, :]
            # last valid hidden state of sentence i
            new_output_2.append(torch.index_select(output[length[i]-1], 0, torch.tensor(i, device = 'cuda')))
            hidden = self.W(hidden_old)
            emb_s_sum = sememe_h[0:length[i], i, :]
            emb_s_sum = self.W_s(emb_s_sum)
            hidden = torch.cat([hidden, emb_s_sum], dim = 1)
            att = torch.tanh(torch.mm(hidden, self.query.weight))
            new_output.append(torch.mm(att.transpose(1,0), hidden_old))
        new_output = self.W_p(torch.squeeze(torch.stack(new_output, dim = 0))) + self.W_x(torch.squeeze(torch.stack(new_output_2, dim = 0)))
        new_output = torch.tanh(new_output)
        return new_output
    def sememe_sum(self, input_s):
        """Embed the sememe indicators, then run them through the sememe-sum
        LSTM; returns its (cell, hidden) sequences."""
        emb_sememe = self.emb_sememe.weight
        input_sememe = []
        for i in range(input_s.size()[0]):
            input_sememe.append(torch.mm(input_s[i].float(), emb_sememe))
        input_sememe = torch.stack(input_sememe, dim = 0)
        sememe_c, sememe_h = self.sememesumlstm(input_sememe)
        return sememe_c, sememe_h
class BILSTM_baseline(nn.Module):
    """Bidirectional LSTM sentence encoder (no sememe input in the
    recurrence), bundled with InferSent-style vocabulary/encode utilities.

    Fixes applied: np.int (removed in NumPy 1.24) -> np.int64 in forward();
    ragged np.array() in prepare_samples() -> list reordering; off-by-one in
    get_w2v_k() which loaded K+1 vectors.
    """
    def __init__(self, config, sememe):
        super(BILSTM_baseline, self).__init__()
        self.enc_lstm_dim = config['enc_lstm_dim']
        self.sememe_dim = config['sememe_dim']
        self.sememesumlstm = SememeSumLstm(self.sememe_dim, self.enc_lstm_dim)
        self.sememesumGRU = SememeSumGRU(self.sememe_dim, self.enc_lstm_dim)
        self.in_dim = config['word_emb_dim']
        self.mem_dim = config['enc_lstm_dim']
        #self.pool_type = config['pool_type']
        # 3x width: i/o/u gate pre-activations, separated later with split()
        self.ioux = nn.Linear(self.in_dim, 3 * self.mem_dim)
        self.iouh = nn.Linear(self.mem_dim, 3 * self.mem_dim)
        self.ioux_b = nn.Linear(self.in_dim, 3 * self.mem_dim)
        self.iouh_b = nn.Linear(self.mem_dim, 3 * self.mem_dim)
        # NOTE(review): ious*/fx_s*/fs* below are initialised and reset but
        # never used by node_forward/node_backward -- kept for parity.
        self.ious = nn.Linear(self.mem_dim, 3 * self.mem_dim)
        self.ious_b = nn.Linear(self.mem_dim, 3 * self.mem_dim)
        self.fx = nn.Linear(self.in_dim, self.mem_dim)
        self.fx_b = nn.Linear(self.in_dim, self.mem_dim)
        self.fx_s = nn.Linear(self.in_dim, self.mem_dim)
        self.fx_s_b = nn.Linear(self.in_dim, self.mem_dim)
        self.fh = nn.Linear(self.mem_dim, self.mem_dim)
        self.fh_b = nn.Linear(self.mem_dim, self.mem_dim)
        self.fs = nn.Linear(self.mem_dim, self.mem_dim)
        self.fs_b = nn.Linear(self.mem_dim, self.mem_dim)
        self.max_pad = True
        self.reset_parameters()
        self.sememe = sememe
        self.bos = '<s>'
        self.eos = '</s>'
        self.max_pad = True
        self.moses_tok = False
    def reset_parameters(self):
        """Kaiming-initialise all projections; zero their biases."""
        layers = [self.ioux, self.ioux_b, self.iouh, self.iouh_b, self.ious, self.ious_b, self.fx, self.fx_b, self.fx_s, self.fx_s_b, self.fh, self.fh_b, self.fs, self.fs_b]
        for layer in layers:
            init.kaiming_normal_(layer.weight)
            if layer.bias is not None:
                init.constant_(layer.bias, val=0)
    def node_forward(self, inputs, hx):
        """One forward-direction LSTM step; returns (cell, hidden)."""
        child_c = hx[0]
        child_h = hx[1]
        iou = self.ioux(inputs) + self.iouh(child_h)
        i, o, u = torch.split(iou, iou.size(1) // 3, dim=1)
        i, o, u = torch.sigmoid(i), torch.sigmoid(o), torch.tanh(u)
        f = torch.sigmoid(
            self.fh(child_h) + self.fx(inputs)
        )
        fc = torch.mul(f, child_c) #part of memory cell induced by word-child
        c = torch.mul(i, u) + fc #sum means sigma
        h = torch.mul(o, torch.tanh(c))
        return (c, h)
    def node_backward(self, inputs, hx):
        """One backward-direction LSTM step; returns (cell, hidden)."""
        child_c = hx[0]
        child_h = hx[1]
        iou = self.ioux_b(inputs) + self.iouh_b(child_h)
        i, o, u = torch.split(iou, iou.size(1) // 3, dim=1)
        i, o, u = torch.sigmoid(i), torch.sigmoid(o), torch.tanh(u)
        f = torch.sigmoid(
            self.fh_b(child_h) + self.fx_b(inputs)
        )
        fc = torch.mul(f, child_c) #part of memory cell induced by word-child
        c = torch.mul(i, u) + fc #sum means sigma
        h = torch.mul(o, torch.tanh(c))
        return (c, h)
    def forward(self, sent, sent_len, sememe_data):
        """Bidirectional pass over a (T, B, in_dim) batch; `sememe_data` is
        accepted for interface parity but unused by this baseline.

        Returns (B, 2*mem_dim): last valid forward hidden state concatenated
        with the first backward hidden state. Requires CUDA.
        """
        # sort by decreasing length so the active batch can shrink over time
        sent_len_sorted, idx_sort = np.sort(sent_len)[::-1], np.argsort(-sent_len)
        sent_len_sorted = sent_len_sorted.copy()
        idx_unsort = np.argsort(idx_sort)
        idx_sort = torch.from_numpy(idx_sort).cuda()
        sent = sent.index_select(1, idx_sort)
        max_time, batch_size, _ = sent.size()
        # pack_length[t] = number of sequences still active at step t
        # FIX: np.int was removed in NumPy 1.24; use np.int64
        pack_length = np.zeros([max_time, 1], dtype = np.int64)
        time_point = batch_size-1
        last_point = 0
        while(True):
            pack_length[last_point: sent_len_sorted[time_point]] = time_point+1
            last_point = sent_len_sorted[time_point]
            if(sent_len_sorted[time_point] == max_time):
                break
            time_point = time_point-1
        pack_length = torch.from_numpy(pack_length).cuda()
        output_forward = []
        hx_forward = (sent[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_(),
                      sent[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_())
        for time in range(max_time):
            next_hx = self.node_forward(sent[time, 0:pack_length[time]], hx_forward)
            # zero-pad the inactive tail so every step has batch_size rows
            output_forward.append(torch.cat([next_hx[1], torch.zeros([batch_size-next_hx[1].size()[0], self.mem_dim], device = 'cuda')], dim = 0))
            if(time < max_time-1):
                hx_forward = (next_hx[0][0:pack_length[time+1]], next_hx[1][0:pack_length[time+1]])
        output_backward = [[] for i in range(max_time)]
        hx_backward = (sent[0][0].detach().new(pack_length[max_time-1], self.mem_dim).fill_(0.).requires_grad_(),
                       sent[0][0].detach().new(pack_length[max_time-1], self.mem_dim).fill_(0.).requires_grad_())
        for time in range(max_time):
            next_hx = self.node_backward(sent[max_time-time-1, 0:pack_length[max_time-time-1]], hx_backward)
            output_backward[max_time-time-1] = torch.cat([next_hx[1], torch.zeros([batch_size-next_hx[1].size()[0], self.mem_dim], device = 'cuda')], dim = 0)
            if(time < max_time-1):
                # grow the state back up as longer sequences re-enter
                hx_backward = (torch.cat([next_hx[0], torch.zeros([pack_length[max_time-time-2]-next_hx[0].size()[0], self.mem_dim]).cuda()], dim = 0), \
                               torch.cat([next_hx[1], torch.zeros([pack_length[max_time-time-2]-next_hx[1].size()[0], self.mem_dim]).cuda()], dim = 0))
        a = torch.stack(output_forward, dim = 0)
        b = torch.stack(output_backward, dim = 0)
        idx_unsort = torch.from_numpy(idx_unsort).cuda()
        sent_output_forward = a.index_select(1, idx_unsort)
        sent_output_backward = b.index_select(1, idx_unsort)
        final_output_forward = torch.stack([sent_output_forward[sent_len[i]-1][i] for i in range(batch_size)], dim = 0)
        final_output = torch.cat([final_output_forward, sent_output_backward[0]], dim = 1)
        return final_output
    def set_w2v_path(self, w2v_path):
        """Remember where the pretrained word-vector file lives."""
        self.w2v_path = w2v_path
    def get_word_dict(self, sentences, tokenize=True):
        """Collect the vocabulary of `sentences` (plus <s>, </s>) as a dict."""
        # create vocab of words
        word_dict = {}
        sentences = [s.split() if not tokenize else self.tokenize(s) for s in sentences]
        for sent in sentences:
            for word in sent:
                if word not in word_dict:
                    word_dict[word] = ''
        word_dict[self.bos] = ''
        word_dict[self.eos] = ''
        return word_dict
    def get_w2v(self, word_dict):
        """Load vectors from `self.w2v_path` for every word in `word_dict`."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        # create word_vec with w2v vectors
        word_vec = {}
        with open(self.w2v_path, encoding='utf-8') as f:
            for line in f:
                word, vec = line.split(' ', 1)
                if word in word_dict:
                    word_vec[word] = np.fromstring(vec, sep=' ')
        print('Found %s(/%s) words with w2v vectors' % (len(word_vec), len(word_dict)))
        return word_vec
    def get_w2v_k(self, K):
        """Load the first K vectors of the file, plus <s> and </s>.

        FIX: the original `if k <= K` (with k starting at 0) loaded K+1
        vectors; this now loads exactly K.
        """
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        k = 0
        word_vec = {}
        with open(self.w2v_path, encoding='utf-8') as f:
            for line in f:
                word, vec = line.split(' ', 1)
                if k < K:
                    word_vec[word] = np.fromstring(vec, sep=' ')
                    k += 1
                elif word in [self.bos, self.eos]:
                    # past the budget: only still pick up the sentence markers
                    word_vec[word] = np.fromstring(vec, sep=' ')
                if k >= K and all([w in word_vec for w in [self.bos, self.eos]]):
                    break
        return word_vec
    def build_vocab(self, sentences, tokenize=True):
        """Build `self.word_vec` from the words appearing in `sentences`."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        word_dict = self.get_word_dict(sentences, tokenize)
        self.word_vec = self.get_w2v(word_dict)
        print('Vocab size : %s' % (len(self.word_vec)))
    # build w2v vocab with k most frequent words
    def build_vocab_k_words(self, K):
        """Populate `self.word_vec` with the first K vectors of the file."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        self.word_vec = self.get_w2v_k(K)
        print('Vocab size : %s' % (K))
    def update_vocab(self, sentences, tokenize=True):
        """Extend `self.word_vec` with vectors for words not seen before."""
        assert hasattr(self, 'w2v_path'), 'warning : w2v path not set'
        assert hasattr(self, 'word_vec'), 'build_vocab before updating it'
        word_dict = self.get_word_dict(sentences, tokenize)
        # keep only new words
        for word in self.word_vec:
            if word in word_dict:
                del word_dict[word]
        # update vocabulary
        if word_dict:
            new_word_vec = self.get_w2v(word_dict)
            self.word_vec.update(new_word_vec)
        else:
            new_word_vec = []
        print('New vocab size : %s (added %s words)'% (len(self.word_vec), len(new_word_vec)))
    '''
    def get_batch(self, batch):
        # sent in batch in decreasing order of lengths
        # batch: (bsize, max_len, word_dim)
        embed = np.zeros((len(batch[0]), len(batch), self.word_emb_dim))
        for i in range(len(batch)):
            for j in range(len(batch[i])):
                embed[j, i, :] = self.word_vec[batch[i][j]]
        return torch.FloatTensor(embed)
    '''
    def is_cuda(self):
        """Report whether the encoder runs on GPU; hard-coded to True here."""
        # either all weights are on cpu or they are on gpu
        return True
        #return self.enc_lstm.bias_hh_l0.data.is_cuda
    def tokenize(self, s):
        """NLTK tokenization, approximating Moses when `moses_tok` is set."""
        from nltk.tokenize import word_tokenize
        if self.moses_tok:
            s = ' '.join(word_tokenize(s))
            s = s.replace(" n't ", "n 't ")  # HACK to get ~MOSES tokenization
            return s.split()
        else:
            return word_tokenize(s)
    def prepare_samples(self, sentences, bsize, tokenize, verbose):
        """Tokenize, drop OOV words, and sort sentences by decreasing length."""
        sentences = [[self.bos] + s.split() + [self.eos] if not tokenize else
                     [self.bos] + self.tokenize(s) + [self.eos] for s in sentences]
        n_w = np.sum([len(x) for x in sentences])
        # filters words without w2v vectors
        for i in range(len(sentences)):
            s_f = [word for word in sentences[i] if word in self.word_vec]
            if not s_f:
                import warnings
                warnings.warn('No words in "%s" (idx=%s) have w2v vectors. \
Replacing by "</s>"..' % (sentences[i], i))
                s_f = [self.eos]
            sentences[i] = s_f
        lengths = np.array([len(s) for s in sentences])
        n_wk = np.sum(lengths)
        if verbose:
            print('Nb words kept : %s/%s (%.1f%s)' % (
                n_wk, n_w, 100.0 * n_wk / n_w, '%'))
        # sort by decreasing length
        lengths, idx_sort = np.sort(lengths)[::-1], np.argsort(-lengths)
        # FIX: np.array() on a ragged list of token lists raises on modern
        # NumPy; reorder with plain list indexing instead.
        sentences = [sentences[j] for j in idx_sort]
        return sentences, lengths, idx_sort
    def get_batch(self, batch, sememe, emb_dim=300):
        """Time-major embeddings plus 0/1 sememe indicators (width 2186).

        NOTE(review): 300 and 2186 are hard-coded (GloVe dim / HowNet sememe
        count, presumably) -- confirm before reuse.
        """
        embed = np.zeros((len(batch[0]), len(batch), 300))
        sememe_data = np.zeros((len(batch[0]), len(batch), 2186), dtype = np.uint8)
        for i in range(len(batch)):
            for j in range(len(batch[i])):
                embed[j, i, :] = self.word_vec[batch[i][j]]
                for k in sememe.read_word_sememe(batch[i][j]):
                    sememe_data[j, i, k] = 1
        return torch.from_numpy(embed).float(), torch.from_numpy(sememe_data).cuda()
    def encode(self, sentences, bsize=64, tokenize=True, verbose=False):
        """Encode raw sentences into a (n, dim) numpy embedding matrix."""
        tic = time.time()
        sentences, lengths, idx_sort = self.prepare_samples(
                        sentences, bsize, tokenize, verbose)
        embeddings = []
        for stidx in range(0, len(sentences), bsize):
            batch, batch_s = self.get_batch(sentences[stidx:stidx + bsize], self.sememe)
            if self.is_cuda():
                batch = batch.cuda()
            with torch.no_grad():
                batch = self.forward(batch, lengths[stidx:stidx + bsize], batch_s).data.cpu().numpy()
            embeddings.append(batch)
        embeddings = np.vstack(embeddings)
        # unsort
        idx_unsort = np.argsort(idx_sort)
        embeddings = embeddings[idx_unsort]
        if verbose:
            print('Speed : %.1f sentences/s (%s mode, bsize=%s)' % (
                len(embeddings)/(time.time()-tic),
                'gpu' if self.is_cuda() else 'cpu', bsize))
        return embeddings
class BILSTM_concat(nn.Module):
    def __init__(self, config, sememe):
        """Bidirectional sememe-concat LSTM encoder.

        config: dict with 'enc_lstm_dim', 'sememe_dim', 'sememe_size',
        'word_emb_dim'. sememe: provider object stored for later use by
        get_batch (read_word_sememe).
        """
        super(BILSTM_concat, self).__init__()
        self.enc_lstm_dim = config['enc_lstm_dim']
        self.sememe_dim = config['sememe_dim']
        self.sememesumlstm = SememeSumLstm(self.sememe_dim, self.enc_lstm_dim)
        self.sememesumGRU = SememeSumGRU(self.sememe_dim, self.enc_lstm_dim)
        self.sememe_dim = config['sememe_dim']
        self.sememe_size = config['sememe_size']
        self.emb_sememe = nn.Embedding(self.sememe_size, self.sememe_dim)
        self.in_dim = config['word_emb_dim']
        self.mem_dim = config['enc_lstm_dim']
        #self.pool_type = config['pool_type']
        # input projections take 2*in_dim: word embedding concatenated with
        # the summed sememe embedding (see node_forward); 3x output width is
        # the i/o/u gate pre-activations, separated later with split()
        self.ioux = nn.Linear(2 * self.in_dim, 3 * self.mem_dim)
        self.iouh = nn.Linear(self.mem_dim, 3 * self.mem_dim)
        self.ioux_b = nn.Linear(2 * self.in_dim, 3 * self.mem_dim)
        self.iouh_b = nn.Linear(self.mem_dim, 3 * self.mem_dim)
        self.fx = nn.Linear(2 * self.in_dim, self.mem_dim)
        self.fx_b = nn.Linear(2 * self.in_dim, self.mem_dim)
        self.fh = nn.Linear(self.mem_dim, self.mem_dim)
        self.fh_b = nn.Linear(self.mem_dim, self.mem_dim)
        self.max_pad = True
        self.reset_parameters()
        self.sememe = sememe
        self.bos = '<s>'
        self.eos = '</s>'
        self.max_pad = True
        self.moses_tok = False
def reset_parameters(self):
layers = [self.ioux, self.ioux_b, self.iouh, self.iouh_b, self.fx, self.fx_b, self.fh, self.fh_b]
for layer in layers:
init.kaiming_normal_(layer.weight)
if layer.bias is not None:
init.constant_(layer.bias, val=0)
def node_forward(self, inputs, hx, sememe_h):
child_c = hx[0]
child_h = hx[1]
inputs = torch.cat([inputs, sememe_h], dim = 1)
iou = self.ioux(inputs) + self.iouh(child_h)
i, o, u = torch.split(iou, iou.size(1) // 3, dim=1)
i, o, u = torch.sigmoid(i), torch.sigmoid(o), torch.tanh(u)
f = torch.sigmoid(
self.fh(child_h) + self.fx(inputs)
)
fc = torch.mul(f, child_c) #part of memory cell induced by word-child
c = torch.mul(i, u) + fc
h = torch.mul(o, torch.tanh(c))
return (c, h)
def node_backward(self, inputs, hx, sememe_h):
child_c = hx[0]
child_h = hx[1]
inputs = torch.cat([inputs, sememe_h], dim = 1)
iou = self.ioux_b(inputs) + self.iouh_b(child_h)
i, o, u = torch.split(iou, iou.size(1) // 3, dim=1)
i, o, u = torch.sigmoid(i), torch.sigmoid(o), torch.tanh(u)
f = torch.sigmoid(
self.fh_b(child_h) + self.fx_b(inputs)
)
fc = torch.mul(f, child_c) #part of memory cell induced by word-child
c = torch.mul(i, u) + fc
h = torch.mul(o, torch.tanh(c))
return (c, h)
def forward(self, sent, sent_len, sememe_data):
# hx: (child_c, child_h)
sememe_h = self.sememe_sum(sememe_data)
sent_len_sorted, idx_sort = np.sort(sent_len)[::-1], np.argsort(-sent_len)
sent_len_sorted = sent_len_sorted.copy()
idx_unsort = np.argsort(idx_sort)
idx_sort = torch.from_numpy(idx_sort).cuda()
sent = sent.index_select(1, idx_sort)
sememe_h = sememe_h.index_select(1, idx_sort)
max_time, batch_size, _ = sent.size()
pack_length = np.zeros([max_time, 1], dtype = np.int)
time_point = batch_size-1
last_point = 0
while(True):
pack_length[last_point: sent_len_sorted[time_point]] = time_point+1
last_point = sent_len_sorted[time_point]
if(sent_len_sorted[time_point] == max_time):
break
time_point = time_point-1
pack_length = torch.from_numpy(pack_length).cuda()
output_forward = []
hx_forward = (sent[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_(),
sent[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_())
for time in range(max_time):
next_hx = self.node_forward(sent[time, 0:pack_length[time]], hx_forward, sememe_h[time, 0:pack_length[time]])
output_forward.append(torch.cat([next_hx[1], torch.zeros([batch_size-next_hx[1].size()[0], self.mem_dim], device = 'cuda')], dim = 0))
if(time < max_time-1):
hx_forward = (next_hx[0][0:pack_length[time+1]], next_hx[1][0:pack_length[time+1]])
output_backward = [[] for i in range(max_time)]
hx_backward = (sent[0][0].detach().new(pack_length[max_time-1], self.mem_dim).fill_(0.).requires_grad_(),
sent[0][0].detach().new(pack_length[max_time-1], self.mem_dim).fill_(0.).requires_grad_())
for time in range(max_time):
next_hx = self.node_backward(sent[max_time-time-1, 0:pack_length[max_time-time-1]], hx_backward, sememe_h[max_time-time-1, 0:pack_length[max_time-time-1]])
output_backward[max_time-time-1] = torch.cat([next_hx[1], torch.zeros([batch_size-next_hx[1].size()[0], self.mem_dim], device = 'cuda')], dim = 0)
if(time < max_time-1):
hx_backward = (torch.cat([next_hx[0], torch.zeros([pack_length[max_time-time-2]-next_hx[0].size()[0], self.mem_dim]).cuda()], dim = 0), \
torch.cat([next_hx[1], torch.zeros([pack_length[max_time-time-2]-next_hx[1].size()[0], self.mem_dim]).cuda()], dim = 0))
a = torch.stack(output_forward, dim = 0)
b = torch.stack(output_backward, dim = 0)
idx_unsort = torch.from_numpy(idx_unsort).cuda()
sent_output_forward = a.index_select(1, idx_unsort)
sent_output_backward = b.index_select(1, idx_unsort)
final_output_forward = torch.stack([sent_output_forward[sent_len[i]-1][i] for i in range(batch_size)], dim = 0)
final_output = torch.cat([final_output_forward, sent_output_backward[0]], dim = 1)
return final_output
def sememe_sum(self, input_s):
emb_sememe = self.emb_sememe.weight
input_sememe = []
for i in range(input_s.size()[0]):
input_sememe.append(torch.mm(input_s[i].float(), emb_sememe))
input_sememe = torch.stack(input_sememe, dim = 0)
return input_sememe
def set_w2v_path(self, w2v_path):
self.w2v_path = w2v_path
def get_word_dict(self, sentences, tokenize=True):
# create vocab of words
word_dict = {}
sentences = [s.split() if not tokenize else self.tokenize(s) for s in sentences]
for sent in sentences:
for word in sent:
if word not in word_dict:
word_dict[word] = ''
word_dict[self.bos] = ''
word_dict[self.eos] = ''
return word_dict
def get_w2v(self, word_dict):
assert hasattr(self, 'w2v_path'), 'w2v path not set'
# create word_vec with w2v vectors
word_vec = {}
with open(self.w2v_path, encoding='utf-8') as f:
for line in f:
word, vec = line.split(' ', 1)
if word in word_dict:
word_vec[word] = np.fromstring(vec, sep=' ')
print('Found %s(/%s) words with w2v vectors' % (len(word_vec), len(word_dict)))
return word_vec
def get_w2v_k(self, K):
assert hasattr(self, 'w2v_path'), 'w2v path not set'
# create word_vec with k first w2v vectors
k = 0
word_vec = {}
with open(self.w2v_path, encoding='utf-8') as f:
for line in f:
word, vec = line.split(' ', 1)
if k <= K:
word_vec[word] = np.fromstring(vec, sep=' ')
k += 1
if k > K:
if word in [self.bos, self.eos]:
word_vec[word] = np.fromstring(vec, sep=' ')
if k > K and all([w in word_vec for w in [self.bos, self.eos]]):
break
return word_vec
def build_vocab(self, sentences, tokenize=True):
assert hasattr(self, 'w2v_path'), 'w2v path not set'
word_dict = self.get_word_dict(sentences, tokenize)
self.word_vec = self.get_w2v(word_dict)
print('Vocab size : %s' % (len(self.word_vec)))
# build w2v vocab with k most frequent words
def build_vocab_k_words(self, K):
assert hasattr(self, 'w2v_path'), 'w2v path not set'
self.word_vec = self.get_w2v_k(K)
print('Vocab size : %s' % (K))
def update_vocab(self, sentences, tokenize=True):
assert hasattr(self, 'w2v_path'), 'warning : w2v path not set'
assert hasattr(self, 'word_vec'), 'build_vocab before updating it'
word_dict = self.get_word_dict(sentences, tokenize)
# keep only new words
for word in self.word_vec:
if word in word_dict:
del word_dict[word]
# udpate vocabulary
if word_dict:
new_word_vec = self.get_w2v(word_dict)
self.word_vec.update(new_word_vec)
else:
new_word_vec = []
print('New vocab size : %s (added %s words)'% (len(self.word_vec), len(new_word_vec)))
'''
def get_batch(self, batch):
# sent in batch in decreasing order of lengths
# batch: (bsize, max_len, word_dim)
embed = np.zeros((len(batch[0]), len(batch), self.word_emb_dim))
for i in range(len(batch)):
for j in range(len(batch[i])):
embed[j, i, :] = self.word_vec[batch[i][j]]
return torch.FloatTensor(embed)
'''
def is_cuda(self):
# either all weights are on cpu or they are on gpu
return True
#return self.enc_lstm.bias_hh_l0.data.is_cuda
def tokenize(self, s):
from nltk.tokenize import word_tokenize
if self.moses_tok:
s = ' '.join(word_tokenize(s))
s = s.replace(" n't ", "n 't ") # HACK to get ~MOSES tokenization
return s.split()
else:
return word_tokenize(s)
def prepare_samples(self, sentences, bsize, tokenize, verbose):
sentences = [[self.bos] + s.split() + [self.eos] if not tokenize else
[self.bos] + self.tokenize(s) + [self.eos] for s in sentences]
n_w = np.sum([len(x) for x in sentences])
# filters words without w2v vectors
for i in range(len(sentences)):
s_f = [word for word in sentences[i] if word in self.word_vec]
if not s_f:
import warnings
warnings.warn('No words in "%s" (idx=%s) have w2v vectors. \
Replacing by "</s>"..' % (sentences[i], i))
s_f = [self.eos]
sentences[i] = s_f
lengths = np.array([len(s) for s in sentences])
n_wk = np.sum(lengths)
if verbose:
print('Nb words kept : %s/%s (%.1f%s)' % (
n_wk, n_w, 100.0 * n_wk / n_w, '%'))
# sort by decreasing length
lengths, idx_sort = np.sort(lengths)[::-1], np.argsort(-lengths)
sentences = np.array(sentences)[idx_sort]
return sentences, lengths, idx_sort
def get_batch(self, batch, sememe, emb_dim=300):
embed = np.zeros((len(batch[0]), len(batch), 300))
sememe_data = np.zeros((len(batch[0]), len(batch), 2186), dtype = np.uint8)
for i in range(len(batch)):
for j in range(len(batch[i])):
embed[j, i, :] = self.word_vec[batch[i][j]]
for k in sememe.read_word_sememe(batch[i][j]):
sememe_data[j, i, k] = 1
return torch.from_numpy(embed).float(), torch.from_numpy(sememe_data).cuda()
def encode(self, sentences, bsize=64, tokenize=True, verbose=False):
tic = time.time()
sentences, lengths, idx_sort = self.prepare_samples(
sentences, bsize, tokenize, verbose)
embeddings = []
for stidx in range(0, len(sentences), bsize):
batch, batch_s = self.get_batch(sentences[stidx:stidx + bsize], self.sememe)
if self.is_cuda():
batch = batch.cuda()
with torch.no_grad():
batch = self.forward(batch, lengths[stidx:stidx + bsize], batch_s).data.cpu().numpy()
embeddings.append(batch)
embeddings = np.vstack(embeddings)
# unsort
idx_unsort = np.argsort(idx_sort)
embeddings = embeddings[idx_unsort]
if verbose:
print('Speed : %.1f sentences/s (%s mode, bsize=%s)' % (
len(embeddings)/(time.time()-tic),
'gpu' if self.is_cuda() else 'cpu', bsize))
return embeddings
class BILSTM_gate(nn.Module):
    """Bidirectional sememe-gated LSTM sentence encoder.

    Each direction is unrolled step by step over a length-sorted,
    manually "packed" batch; a sememe-derived hidden state feeds an
    extra gated output path (o_c) in every cell.  Also bundles
    InferSent-style w2v vocabulary helpers used by encode().
    """
    def __init__(self, config, sememe):
        super(BILSTM_gate, self).__init__()
        self.enc_lstm_dim = config['enc_lstm_dim']
        self.sememe_dim = config['sememe_dim']
        self.sememesumlstm = SememeSumLstm(self.sememe_dim, self.enc_lstm_dim)
        self.sememesumGRU = SememeSumGRU(self.sememe_dim, self.enc_lstm_dim)
        self.sememe_size = config['sememe_size']
        self.emb_sememe = nn.Embedding(self.sememe_size, self.sememe_dim)
        self.in_dim = config['word_emb_dim']
        self.mem_dim = config['enc_lstm_dim']
        # x4: the four gates (f, i, o, o_c) are computed in one matmul
        # and separated later with torch.split.
        self.ioux = nn.Linear(self.in_dim, 4 * self.mem_dim)
        self.iouh = nn.Linear(self.mem_dim, 4 * self.mem_dim)
        self.ioux_b = nn.Linear(self.in_dim, 4 * self.mem_dim)
        self.iouh_b = nn.Linear(self.mem_dim, 4 * self.mem_dim)
        # ious handles the state passed in from the sememe side.
        self.ious = nn.Linear(self.in_dim, 4 * self.mem_dim)
        self.ious_b = nn.Linear(self.in_dim, 4 * self.mem_dim)
        self.fx = nn.Linear(self.in_dim, self.mem_dim)
        self.fx_b = nn.Linear(self.in_dim, self.mem_dim)
        # NOTE(review): fx_s/fx_s_b/fs/fs_b are initialised but unused by
        # node_forward/node_backward in this variant; kept so existing
        # checkpoints still load.
        self.fx_s = nn.Linear(self.in_dim, self.mem_dim)
        self.fx_s_b = nn.Linear(self.in_dim, self.mem_dim)
        self.fh = nn.Linear(self.mem_dim, self.mem_dim)
        self.fh_b = nn.Linear(self.mem_dim, self.mem_dim)
        self.fs = nn.Linear(self.in_dim, self.mem_dim)
        self.fs_b = nn.Linear(self.in_dim, self.mem_dim)
        self.W_c = nn.Linear(self.in_dim, self.mem_dim)
        self.W_c_b = nn.Linear(self.in_dim, self.mem_dim)
        self.max_pad = True
        self.reset_parameters()
        self.sememe = sememe
        self.bos = '<s>'
        self.eos = '</s>'
        self.moses_tok = False
    def reset_parameters(self):
        """Kaiming-init every linear layer's weight; zero its bias."""
        layers = [self.ioux, self.ioux_b, self.iouh, self.iouh_b, self.ious, self.ious_b, self.fx, self.fx_b, self.fx_s, self.fx_s_b, self.fh, self.fh_b, self.fs, self.fs_b, self.W_c, self.W_c_b]
        for layer in layers:
            init.kaiming_normal_(layer.weight)
            if layer.bias is not None:
                init.constant_(layer.bias, val=0)
    def node_forward(self, inputs, hx, sememe_h):
        """One forward-direction cell step; returns (c, h)."""
        child_c = hx[0]
        child_h = hx[1]
        iou = self.ioux(inputs) + self.iouh(child_h) + self.ious(sememe_h)
        f, i, o, o_c = torch.split(iou, iou.size(1) // 4, dim=1)
        f, i, o, o_c = torch.sigmoid(f), torch.sigmoid(i), torch.sigmoid(o), torch.sigmoid(o_c)
        c_telta = self.fx(inputs) + self.fh(child_h)
        c_telta = torch.tanh(c_telta)
        fc = torch.mul(f, child_c)  # part of memory cell induced by word-child
        c = torch.mul(i, c_telta) + fc  # sum means sigma
        # Extra output path gated by o_c lets sememe knowledge flow into h.
        h = torch.mul(o, torch.tanh(c)) + torch.mul(o_c, torch.tanh(self.W_c(sememe_h)))
        return (c, h)
    def node_backward(self, inputs, hx, sememe_h):
        """One backward-direction cell step; returns (c, h)."""
        child_c = hx[0]
        child_h = hx[1]
        iou = self.ioux_b(inputs) + self.iouh_b(child_h) + self.ious_b(sememe_h)
        f, i, o, o_c = torch.split(iou, iou.size(1) // 4, dim=1)
        f, i, o, o_c = torch.sigmoid(f), torch.sigmoid(i), torch.sigmoid(o), torch.sigmoid(o_c)
        c_telta = self.fx_b(inputs) + self.fh_b(child_h)
        c_telta = torch.tanh(c_telta)
        fc = torch.mul(f, child_c)  # part of memory cell induced by word-child
        c = torch.mul(i, c_telta) + fc  # sum means sigma
        h = torch.mul(o, torch.tanh(c)) + torch.mul(o_c, torch.tanh(self.W_c_b(sememe_h)))
        return (c, h)
    def forward(self, sent, sent_len, sememe_data):
        """Encode a padded batch.

        sent: (max_time, batch, word_dim); sent_len: per-sentence lengths;
        sememe_data: multi-hot sememe indicators per token.
        Returns (batch, 2 * mem_dim): each sentence's last forward state
        concatenated with its backward state at t=0, in original order.
        """
        # hx: (child_c, child_h)
        sememe_h = self.sememe_sum(sememe_data)
        # Sort by decreasing length so a prefix of the batch is always
        # the set of still-active sentences at a given time step.
        sent_len_sorted, idx_sort = np.sort(sent_len)[::-1], np.argsort(-sent_len)
        sent_len_sorted = sent_len_sorted.copy()
        idx_unsort = np.argsort(idx_sort)
        idx_sort = torch.from_numpy(idx_sort).cuda()
        sent = sent.index_select(1, idx_sort)
        sememe_h = sememe_h.index_select(1, idx_sort)
        max_time, batch_size, _ = sent.size()
        # pack_length[t] = number of sentences still active at step t.
        # np.int was removed in NumPy >= 1.24; use the builtin int.
        pack_length = np.zeros([max_time, 1], dtype=int)
        time_point = batch_size-1
        last_point = 0
        while(True):
            pack_length[last_point: sent_len_sorted[time_point]] = time_point+1
            last_point = sent_len_sorted[time_point]
            if(sent_len_sorted[time_point] == max_time):
                break
            time_point = time_point-1
        pack_length = torch.from_numpy(pack_length).cuda()
        output_forward = []
        hx_forward = (sent[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_(),
                      sent[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_())
        for time in range(max_time):
            next_hx = self.node_forward(sent[time, 0:pack_length[time]], hx_forward, sememe_h[time, 0:pack_length[time]])
            # Pad finished sentences back to full batch size with zeros.
            output_forward.append(torch.cat([next_hx[1], torch.zeros([batch_size-next_hx[1].size()[0], self.mem_dim], device = 'cuda')], dim = 0))
            if(time < max_time-1):
                hx_forward = (next_hx[0][0:pack_length[time+1]], next_hx[1][0:pack_length[time+1]])
        output_backward = [[] for i in range(max_time)]
        hx_backward = (sent[0][0].detach().new(pack_length[max_time-1], self.mem_dim).fill_(0.).requires_grad_(),
                       sent[0][0].detach().new(pack_length[max_time-1], self.mem_dim).fill_(0.).requires_grad_())
        for time in range(max_time):
            next_hx = self.node_backward(sent[max_time-time-1, 0:pack_length[max_time-time-1]], hx_backward, sememe_h[max_time-time-1, 0:pack_length[max_time-time-1]])
            output_backward[max_time-time-1] = torch.cat([next_hx[1], torch.zeros([batch_size-next_hx[1].size()[0], self.mem_dim], device = 'cuda')], dim = 0)
            if(time < max_time-1):
                # Sentences become active going backward: grow the state
                # with zero rows for the newly started ones.
                hx_backward = (torch.cat([next_hx[0], torch.zeros([pack_length[max_time-time-2]-next_hx[0].size()[0], self.mem_dim]).cuda()], dim = 0), \
                               torch.cat([next_hx[1], torch.zeros([pack_length[max_time-time-2]-next_hx[1].size()[0], self.mem_dim]).cuda()], dim = 0))
        a = torch.stack(output_forward, dim = 0)
        b = torch.stack(output_backward, dim = 0)
        idx_unsort = torch.from_numpy(idx_unsort).cuda()
        sent_output_forward = a.index_select(1, idx_unsort)
        sent_output_backward = b.index_select(1, idx_unsort)
        final_output_forward = torch.stack([sent_output_forward[sent_len[i]-1][i] for i in range(batch_size)], dim = 0)
        final_output = torch.cat([final_output_forward, sent_output_backward[0]], dim = 1)
        return final_output
    def sememe_sum(self, input_s):
        """Sum sememe embeddings per token: (time, batch, sememe_dim)."""
        emb_sememe = self.emb_sememe.weight
        input_sememe = []
        for i in range(input_s.size()[0]):
            input_sememe.append(torch.mm(input_s[i].float().cuda(), emb_sememe))
        input_sememe = torch.stack(input_sememe, dim = 0)
        return input_sememe
    def set_w2v_path(self, w2v_path):
        """Remember the path of the pretrained word-vector file."""
        self.w2v_path = w2v_path
    def get_word_dict(self, sentences, tokenize=True):
        """Collect the distinct tokens of *sentences* (plus bos/eos)."""
        word_dict = {}
        sentences = [s.split() if not tokenize else self.tokenize(s) for s in sentences]
        for sent in sentences:
            for word in sent:
                if word not in word_dict:
                    word_dict[word] = ''
        word_dict[self.bos] = ''
        word_dict[self.eos] = ''
        return word_dict
    def get_w2v(self, word_dict):
        """Load word vectors for every word present in *word_dict*."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        # create word_vec with w2v vectors
        word_vec = {}
        with open(self.w2v_path, encoding='utf-8') as f:
            for line in f:
                word, vec = line.split(' ', 1)
                if word in word_dict:
                    # np.fromstring(text, sep=...) is deprecated; parse explicitly.
                    word_vec[word] = np.array(vec.split(), dtype=np.float64)
        print('Found %s(/%s) words with w2v vectors' % (len(word_vec), len(word_dict)))
        return word_vec
    def get_w2v_k(self, K):
        """Load the first K word vectors from the w2v file, plus bos/eos."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        # create word_vec with k first w2v vectors
        k = 0
        word_vec = {}
        with open(self.w2v_path, encoding='utf-8') as f:
            for line in f:
                word, vec = line.split(' ', 1)
                if k <= K:
                    word_vec[word] = np.array(vec.split(), dtype=np.float64)
                    k += 1
                if k > K:
                    if word in [self.bos, self.eos]:
                        word_vec[word] = np.array(vec.split(), dtype=np.float64)
                if k > K and all(w in word_vec for w in [self.bos, self.eos]):
                    break
        return word_vec
    def build_vocab(self, sentences, tokenize=True):
        """Build self.word_vec from the vocabulary of *sentences*."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        word_dict = self.get_word_dict(sentences, tokenize)
        self.word_vec = self.get_w2v(word_dict)
        print('Vocab size : %s' % (len(self.word_vec)))
    # build w2v vocab with k most frequent words
    def build_vocab_k_words(self, K):
        """Build self.word_vec from the K most frequent w2v entries."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        self.word_vec = self.get_w2v_k(K)
        print('Vocab size : %s' % (K))
    def update_vocab(self, sentences, tokenize=True):
        """Extend self.word_vec with vectors for previously unseen words."""
        assert hasattr(self, 'w2v_path'), 'warning : w2v path not set'
        assert hasattr(self, 'word_vec'), 'build_vocab before updating it'
        word_dict = self.get_word_dict(sentences, tokenize)
        # keep only new words
        for word in self.word_vec:
            if word in word_dict:
                del word_dict[word]
        # update vocabulary
        if word_dict:
            new_word_vec = self.get_w2v(word_dict)
            self.word_vec.update(new_word_vec)
        else:
            new_word_vec = []
        print('New vocab size : %s (added %s words)'% (len(self.word_vec), len(new_word_vec)))
    def is_cuda(self):
        """Report GPU placement; hard-coded to True in this version."""
        # return self.enc_lstm.bias_hh_l0.data.is_cuda
        return True
    def tokenize(self, s):
        """Tokenize *s* with NLTK; optionally adjust toward MOSES style."""
        from nltk.tokenize import word_tokenize
        if self.moses_tok:
            s = ' '.join(word_tokenize(s))
            s = s.replace(" n't ", "n 't ")  # HACK to get ~MOSES tokenization
            return s.split()
        else:
            return word_tokenize(s)
    def prepare_samples(self, sentences, bsize, tokenize, verbose):
        """Tokenize, filter to known words, and sort sentences by length."""
        sentences = [[self.bos] + s.split() + [self.eos] if not tokenize else
                     [self.bos] + self.tokenize(s) + [self.eos] for s in sentences]
        n_w = np.sum([len(x) for x in sentences])
        # filters words without w2v vectors
        for i in range(len(sentences)):
            s_f = [word for word in sentences[i] if word in self.word_vec]
            if not s_f:
                import warnings
                warnings.warn('No words in "%s" (idx=%s) have w2v vectors. Replacing by "</s>"..' % (sentences[i], i))
                s_f = [self.eos]
            sentences[i] = s_f
        lengths = np.array([len(s) for s in sentences])
        n_wk = np.sum(lengths)
        if verbose:
            print('Nb words kept : %s/%s (%.1f%s)' % (
                n_wk, n_w, 100.0 * n_wk / n_w, '%'))
        # sort by decreasing length
        lengths, idx_sort = np.sort(lengths)[::-1], np.argsort(-lengths)
        # dtype=object: ragged token lists raise ValueError on NumPy >= 1.24.
        sentences = np.array(sentences, dtype=object)[idx_sort]
        return sentences, lengths, idx_sort
    def get_batch(self, batch, sememe, emb_dim=300):
        """Build a (seq, batch, 300) embedding tensor plus a multi-hot
        sememe indicator tensor (seq, batch, 2186) on the GPU."""
        embed = np.zeros((len(batch[0]), len(batch), 300))
        sememe_data = np.zeros((len(batch[0]), len(batch), 2186), dtype = np.uint8)
        for i in range(len(batch)):
            for j in range(len(batch[i])):
                embed[j, i, :] = self.word_vec[batch[i][j]]
                for k in sememe.read_word_sememe(batch[i][j]):
                    sememe_data[j, i, k] = 1
        return torch.from_numpy(embed).float(), torch.from_numpy(sememe_data).cuda()
    def encode(self, sentences, bsize=64, tokenize=True, verbose=False):
        """Encode *sentences* into embedding vectors, batching by *bsize*."""
        tic = time.time()
        sentences, lengths, idx_sort = self.prepare_samples(
            sentences, bsize, tokenize, verbose)
        embeddings = []
        for stidx in range(0, len(sentences), bsize):
            batch, batch_s = self.get_batch(sentences[stidx:stidx + bsize], self.sememe)
            if self.is_cuda():
                batch = batch.cuda()
            with torch.no_grad():
                batch = self.forward(batch, lengths[stidx:stidx + bsize], batch_s).data.cpu().numpy()
            embeddings.append(batch)
        embeddings = np.vstack(embeddings)
        # unsort
        idx_unsort = np.argsort(idx_sort)
        embeddings = embeddings[idx_unsort]
        if verbose:
            print('Speed : %.1f sentences/s (%s mode, bsize=%s)' % (
                len(embeddings)/(time.time()-tic),
                'gpu' if self.is_cuda() else 'cpu', bsize))
        return embeddings
class BILSTM_cell(nn.Module):
    """Bidirectional sememe-cell LSTM sentence encoder.

    Like BILSTM_gate, but the sememe signal enters through an extra
    forget-gated memory-cell term (sememe_c/sememe_h produced by a
    SememeSumLstm) instead of an extra output gate.
    """
    def __init__(self, config, sememe):
        super(BILSTM_cell, self).__init__()
        self.enc_lstm_dim = config['enc_lstm_dim']
        self.sememe_dim = config['sememe_dim']
        self.sememesumlstm = SememeSumLstm(self.sememe_dim, self.enc_lstm_dim)
        self.sememesumGRU = SememeSumGRU(self.sememe_dim, self.enc_lstm_dim)
        self.sememe_size = config['sememe_size']
        self.emb_sememe = nn.Embedding(self.sememe_size, self.sememe_dim)
        self.in_dim = config['word_emb_dim']
        self.mem_dim = config['enc_lstm_dim']
        # x3: the i/o/u transforms are computed in one matmul and
        # separated later with torch.split.
        self.ioux = nn.Linear(self.in_dim, 3 * self.mem_dim)
        self.iouh = nn.Linear(self.mem_dim, 3 * self.mem_dim)
        self.ioux_b = nn.Linear(self.in_dim, 3 * self.mem_dim)
        self.iouh_b = nn.Linear(self.mem_dim, 3 * self.mem_dim)
        # ious handles the state passed in from the sememe side.
        self.ious = nn.Linear(self.mem_dim, 3 * self.mem_dim)
        self.ious_b = nn.Linear(self.mem_dim, 3 * self.mem_dim)
        self.fx = nn.Linear(self.in_dim, self.mem_dim)
        self.fx_b = nn.Linear(self.in_dim, self.mem_dim)
        self.fx_s = nn.Linear(self.in_dim, self.mem_dim)
        self.fx_s_b = nn.Linear(self.in_dim, self.mem_dim)
        self.fh = nn.Linear(self.mem_dim, self.mem_dim)
        self.fh_b = nn.Linear(self.mem_dim, self.mem_dim)
        # fs handles the sememe-side state in the extra forget gate.
        self.fs = nn.Linear(self.mem_dim, self.mem_dim)
        self.fs_b = nn.Linear(self.mem_dim, self.mem_dim)
        self.max_pad = True
        self.reset_parameters()
        self.sememe = sememe
        self.bos = '<s>'
        self.eos = '</s>'
        self.moses_tok = False
    def reset_parameters(self):
        """Kaiming-init every linear layer's weight; zero its bias."""
        layers = [self.ioux, self.ioux_b, self.iouh, self.iouh_b, self.ious, self.ious_b, self.fx, self.fx_b, self.fx_s, self.fx_s_b, self.fh, self.fh_b, self.fs, self.fs_b]
        for layer in layers:
            init.kaiming_normal_(layer.weight)
            if layer.bias is not None:
                init.constant_(layer.bias, val=0)
    def node_forward(self, inputs, hx, sememe_c, sememe_h):
        """One forward-direction cell step; returns (c, h)."""
        child_c = hx[0]
        child_h = hx[1]
        iou = self.ioux(inputs) + self.iouh(child_h) + self.ious(sememe_h)
        i, o, u = torch.split(iou, iou.size(1) // 3, dim=1)
        i, o, u = torch.sigmoid(i), torch.sigmoid(o), torch.tanh(u)
        f = torch.sigmoid(
            self.fh(child_h) + self.fx(inputs)
        )
        # Separate forget gate for the sememe memory cell.
        f_s = torch.sigmoid(
            self.fs(sememe_h) + self.fx_s(inputs)
        )
        fc = torch.mul(f, child_c)  # part of memory cell induced by word-child
        fc_s = torch.mul(f_s, sememe_c)
        c = torch.mul(i, u) + fc + fc_s  # sum means sigma
        h = torch.mul(o, torch.tanh(c))
        return (c, h)
    def node_backward(self, inputs, hx, sememe_c, sememe_h):
        """One backward-direction cell step; returns (c, h)."""
        child_c = hx[0]
        child_h = hx[1]
        iou = self.ioux_b(inputs) + self.iouh_b(child_h) + self.ious_b(sememe_h)
        i, o, u = torch.split(iou, iou.size(1) // 3, dim=1)
        i, o, u = torch.sigmoid(i), torch.sigmoid(o), torch.tanh(u)
        f = torch.sigmoid(
            self.fh_b(child_h) + self.fx_b(inputs)
        )
        f_s_b = torch.sigmoid(
            self.fs_b(sememe_h) + self.fx_s_b(inputs)
        )
        fc = torch.mul(f, child_c)  # part of memory cell induced by word-child
        fc_s = torch.mul(f_s_b, sememe_c)
        c = torch.mul(i, u) + fc + fc_s  # sum means sigma
        h = torch.mul(o, torch.tanh(c))
        return (c, h)
    def forward(self, sent, sent_len, sememe_data):
        """Encode a padded batch.

        sent: (max_time, batch, word_dim); sent_len: per-sentence lengths;
        sememe_data: multi-hot sememe indicators per token.
        Returns (batch, 2 * mem_dim): each sentence's last forward state
        concatenated with its backward state at t=0, in original order.
        """
        # hx: (child_c, child_h)
        sememe_c, sememe_h = self.sememe_sum(sememe_data)
        # Sort by decreasing length so a prefix of the batch is always
        # the set of still-active sentences at a given time step.
        sent_len_sorted, idx_sort = np.sort(sent_len)[::-1], np.argsort(-sent_len)
        sent_len_sorted = sent_len_sorted.copy()
        idx_unsort = np.argsort(idx_sort)
        idx_sort = torch.from_numpy(idx_sort).cuda()
        sent = sent.index_select(1, idx_sort)
        sememe_h = sememe_h.index_select(1, idx_sort)
        sememe_c = sememe_c.index_select(1, idx_sort)
        max_time, batch_size, _ = sent.size()
        # pack_length[t] = number of sentences still active at step t.
        # np.int was removed in NumPy >= 1.24; use the builtin int.
        pack_length = np.zeros([max_time, 1], dtype=int)
        time_point = batch_size-1
        last_point = 0
        while(True):
            pack_length[last_point: sent_len_sorted[time_point]] = time_point+1
            last_point = sent_len_sorted[time_point]
            if(sent_len_sorted[time_point] == max_time):
                break
            time_point = time_point-1
        pack_length = torch.from_numpy(pack_length).cuda()
        output_forward = []
        hx_forward = (sent[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_(),
                      sent[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_())
        for time in range(max_time):
            next_hx = self.node_forward(sent[time, 0:pack_length[time]], hx_forward, sememe_c[time, 0:pack_length[time]], sememe_h[time, 0:pack_length[time]])
            # Pad finished sentences back to full batch size with zeros.
            output_forward.append(torch.cat([next_hx[1], torch.zeros([batch_size-next_hx[1].size()[0], self.mem_dim], device = 'cuda')], dim = 0))
            if(time < max_time-1):
                hx_forward = (next_hx[0][0:pack_length[time+1]], next_hx[1][0:pack_length[time+1]])
        output_backward = [[] for i in range(max_time)]
        hx_backward = (sent[0][0].detach().new(pack_length[max_time-1], self.mem_dim).fill_(0.).requires_grad_(),
                       sent[0][0].detach().new(pack_length[max_time-1], self.mem_dim).fill_(0.).requires_grad_())
        for time in range(max_time):
            next_hx = self.node_backward(sent[max_time-time-1, 0:pack_length[max_time-time-1]], hx_backward, sememe_c[max_time-time-1, 0:pack_length[max_time-time-1]], sememe_h[max_time-time-1, 0:pack_length[max_time-time-1]])
            output_backward[max_time-time-1] = torch.cat([next_hx[1], torch.zeros([batch_size-next_hx[1].size()[0], self.mem_dim], device = 'cuda')], dim = 0)
            if(time < max_time-1):
                # Sentences become active going backward: grow the state
                # with zero rows for the newly started ones.
                hx_backward = (torch.cat([next_hx[0], torch.zeros([pack_length[max_time-time-2]-next_hx[0].size()[0], self.mem_dim]).cuda()], dim = 0), \
                               torch.cat([next_hx[1], torch.zeros([pack_length[max_time-time-2]-next_hx[1].size()[0], self.mem_dim]).cuda()], dim = 0))
        a = torch.stack(output_forward, dim = 0)
        b = torch.stack(output_backward, dim = 0)
        idx_unsort = torch.from_numpy(idx_unsort).cuda()
        sent_output_forward = a.index_select(1, idx_unsort)
        sent_output_backward = b.index_select(1, idx_unsort)
        final_output_forward = torch.stack([sent_output_forward[sent_len[i]-1][i] for i in range(batch_size)], dim = 0)
        final_output = torch.cat([final_output_forward, sent_output_backward[0]], dim = 1)
        return final_output
    def sememe_sum(self, input_s):
        """Sum sememe embeddings per token, then run the SememeSumLstm.

        Returns (sememe_c, sememe_h).
        """
        emb_sememe = self.emb_sememe.weight
        input_sememe = []
        for i in range(input_s.size()[0]):
            input_sememe.append(torch.mm(input_s[i].float(), emb_sememe))
        input_sememe = torch.stack(input_sememe, dim = 0)
        sememe_c, sememe_h = self.sememesumlstm(input_sememe)
        return sememe_c, sememe_h
    def set_w2v_path(self, w2v_path):
        """Remember the path of the pretrained word-vector file."""
        self.w2v_path = w2v_path
    def get_word_dict(self, sentences, tokenize=True):
        """Collect the distinct tokens of *sentences* (plus bos/eos)."""
        word_dict = {}
        sentences = [s.split() if not tokenize else self.tokenize(s) for s in sentences]
        for sent in sentences:
            for word in sent:
                if word not in word_dict:
                    word_dict[word] = ''
        word_dict[self.bos] = ''
        word_dict[self.eos] = ''
        return word_dict
    def get_w2v(self, word_dict):
        """Load word vectors for every word present in *word_dict*."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        # create word_vec with w2v vectors
        word_vec = {}
        with open(self.w2v_path, encoding='utf-8') as f:
            for line in f:
                word, vec = line.split(' ', 1)
                if word in word_dict:
                    # np.fromstring(text, sep=...) is deprecated; parse explicitly.
                    word_vec[word] = np.array(vec.split(), dtype=np.float64)
        print('Found %s(/%s) words with w2v vectors' % (len(word_vec), len(word_dict)))
        return word_vec
    def get_w2v_k(self, K):
        """Load the first K word vectors from the w2v file, plus bos/eos."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        # create word_vec with k first w2v vectors
        k = 0
        word_vec = {}
        with open(self.w2v_path, encoding='utf-8') as f:
            for line in f:
                word, vec = line.split(' ', 1)
                if k <= K:
                    word_vec[word] = np.array(vec.split(), dtype=np.float64)
                    k += 1
                if k > K:
                    if word in [self.bos, self.eos]:
                        word_vec[word] = np.array(vec.split(), dtype=np.float64)
                if k > K and all(w in word_vec for w in [self.bos, self.eos]):
                    break
        return word_vec
    def build_vocab(self, sentences, tokenize=True):
        """Build self.word_vec from the vocabulary of *sentences*."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        word_dict = self.get_word_dict(sentences, tokenize)
        self.word_vec = self.get_w2v(word_dict)
        print('Vocab size : %s' % (len(self.word_vec)))
    # build w2v vocab with k most frequent words
    def build_vocab_k_words(self, K):
        """Build self.word_vec from the K most frequent w2v entries."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        self.word_vec = self.get_w2v_k(K)
        print('Vocab size : %s' % (K))
    def update_vocab(self, sentences, tokenize=True):
        """Extend self.word_vec with vectors for previously unseen words."""
        assert hasattr(self, 'w2v_path'), 'warning : w2v path not set'
        assert hasattr(self, 'word_vec'), 'build_vocab before updating it'
        word_dict = self.get_word_dict(sentences, tokenize)
        # keep only new words
        for word in self.word_vec:
            if word in word_dict:
                del word_dict[word]
        # update vocabulary
        if word_dict:
            new_word_vec = self.get_w2v(word_dict)
            self.word_vec.update(new_word_vec)
        else:
            new_word_vec = []
        print('New vocab size : %s (added %s words)'% (len(self.word_vec), len(new_word_vec)))
    def is_cuda(self):
        """Report GPU placement; hard-coded to True in this version."""
        # return self.enc_lstm.bias_hh_l0.data.is_cuda
        return True
    def tokenize(self, s):
        """Tokenize *s* with NLTK; optionally adjust toward MOSES style."""
        from nltk.tokenize import word_tokenize
        if self.moses_tok:
            s = ' '.join(word_tokenize(s))
            s = s.replace(" n't ", "n 't ")  # HACK to get ~MOSES tokenization
            return s.split()
        else:
            return word_tokenize(s)
    def prepare_samples(self, sentences, bsize, tokenize, verbose):
        """Tokenize, filter to known words, and sort sentences by length."""
        sentences = [[self.bos] + s.split() + [self.eos] if not tokenize else
                     [self.bos] + self.tokenize(s) + [self.eos] for s in sentences]
        n_w = np.sum([len(x) for x in sentences])
        # filters words without w2v vectors
        for i in range(len(sentences)):
            s_f = [word for word in sentences[i] if word in self.word_vec]
            if not s_f:
                import warnings
                warnings.warn('No words in "%s" (idx=%s) have w2v vectors. Replacing by "</s>"..' % (sentences[i], i))
                s_f = [self.eos]
            sentences[i] = s_f
        lengths = np.array([len(s) for s in sentences])
        n_wk = np.sum(lengths)
        if verbose:
            print('Nb words kept : %s/%s (%.1f%s)' % (
                n_wk, n_w, 100.0 * n_wk / n_w, '%'))
        # sort by decreasing length
        lengths, idx_sort = np.sort(lengths)[::-1], np.argsort(-lengths)
        # dtype=object: ragged token lists raise ValueError on NumPy >= 1.24.
        sentences = np.array(sentences, dtype=object)[idx_sort]
        return sentences, lengths, idx_sort
    def get_batch(self, batch, sememe, emb_dim=300, size=300):
        """Build a (seq, batch, 300) embedding tensor plus a multi-hot
        sememe indicator tensor (seq, batch, *size*) on the GPU.

        (A leftover debug print of *size* was removed here.)
        """
        embed = np.zeros((len(batch[0]), len(batch), 300))
        sememe_data = np.zeros((len(batch[0]), len(batch), size), dtype = np.uint8)
        for i in range(len(batch)):
            for j in range(len(batch[i])):
                embed[j, i, :] = self.word_vec[batch[i][j]]
                for k in sememe.read_word_sememe(batch[i][j]):
                    sememe_data[j, i, k] = 1
        return torch.from_numpy(embed).float(), torch.from_numpy(sememe_data).cuda()
    def encode(self, sentences, bsize=64, tokenize=True, verbose=False, size=3000):
        """Encode *sentences* into embedding vectors, batching by *bsize*."""
        tic = time.time()
        sentences, lengths, idx_sort = self.prepare_samples(
            sentences, bsize, tokenize, verbose)
        embeddings = []
        for stidx in range(0, len(sentences), bsize):
            batch, batch_s = self.get_batch(sentences[stidx:stidx + bsize], self.sememe, 300, size)
            if self.is_cuda():
                batch = batch.cuda()
            with torch.no_grad():
                batch = self.forward(batch, lengths[stidx:stidx + bsize], batch_s).data.cpu().numpy()
            embeddings.append(batch)
        embeddings = np.vstack(embeddings)
        # unsort
        idx_unsort = np.argsort(idx_sort)
        embeddings = embeddings[idx_unsort]
        if verbose:
            print('Speed : %.1f sentences/s (%s mode, bsize=%s)' % (
                len(embeddings)/(time.time()-tic),
                'gpu' if self.is_cuda() else 'cpu', bsize))
        return embeddings
class BILSTM_cell_bert_baseline(nn.Module):
def __init__(self, config, ):
super(BILSTM_cell_bert_baseline, self).__init__()
self.enc_lstm_dim = config['enc_lstm_dim']
# self.sememe_dim = config['sememe_dim']
self.sememesumlstm = SememeSumLstm(512, self.enc_lstm_dim)
self.sememesumGRU = SememeSumGRU(512, self.enc_lstm_dim)
# self.sememe_dim = config['sememe_dim']
# self.sememe_size = config['sememe_size']
# self.emb_sememe = nn.Embedding(self.sememe_size, self.sememe_dim)
self.in_dim = config['word_emb_dim']
self.mem_dim = config['enc_lstm_dim']
# self.pool_type = config['pool_type']
# 乘3代表3种矩阵,它后来用split分开了
self.ioux = nn.Linear(self.in_dim, 3 * self.mem_dim)
self.iouh = nn.Linear(self.mem_dim, 3 * self.mem_dim)
self.ioux_b = nn.Linear(self.in_dim, 3 * self.mem_dim)
self.iouh_b = nn.Linear(self.mem_dim, 3 * self.mem_dim)
# ious是专门处理sememe传过来的c 和 h,c和h都是mem_dim维的
self.ious = nn.Linear(self.mem_dim, 3 * self.mem_dim)
self.ious_b = nn.Linear(self.mem_dim, 3 * self.mem_dim)
self.fx = nn.Linear(self.in_dim, self.mem_dim)
self.fx_b = nn.Linear(self.in_dim, self.mem_dim)
self.fx_s = nn.Linear(self.in_dim, self.mem_dim)
self.fx_s_b = nn.Linear(self.in_dim, self.mem_dim)
self.fh = nn.Linear(self.mem_dim, self.mem_dim)
self.fh_b = nn.Linear(self.mem_dim, self.mem_dim)
# fs是专门处理sememe传过来的c和h
self.fs = nn.Linear(self.mem_dim, self.mem_dim)
self.fs_b = nn.Linear(self.mem_dim, self.mem_dim)
self.max_pad = True
self.reset_parameters()
self.bos = '<s>'
self.eos = '</s>'
self.max_pad = True
self.moses_tok = False
self.dic_lemma = self.read_lemmatization('../../NLI/dataset/lemmatization.txt')
self.sense_tensor_dict = np.load('../../PrepareSememeDict/sense_tensor_dict.npy', allow_pickle=True).item()
def reset_parameters(self):
layers = [self.ioux, self.ioux_b, self.iouh, self.iouh_b, self.ious, self.ious_b, self.fx, self.fx_b, self.fx_s,
self.fx_s_b, self.fh, self.fh_b, self.fs, self.fs_b]
for layer in layers:
init.kaiming_normal_(layer.weight)
if layer.bias is not None:
init.constant_(layer.bias, val=0)
def node_forward(self, inputs, hx, sememe_c, sememe_h):
child_c = hx[0]
child_h = hx[1]
iou = self.ioux(inputs) + self.iouh(child_h) + self.ious(sememe_h)
i, o, u = torch.split(iou, iou.size(1) // 3, dim=1)
i, o, u = torch.sigmoid(i), torch.sigmoid(o), torch.tanh(u)
f = torch.sigmoid(
self.fh(child_h) + self.fx(inputs)
)
f_s = torch.sigmoid(
self.fs(sememe_h) + self.fx_s(inputs)
)
fc = torch.mul(f, child_c) # part of memory cell induced by word-child
fc_s = torch.mul(f_s, sememe_c)
c = torch.mul(i, u) + fc + fc_s # sum means sigma
h = torch.mul(o, torch.tanh(c))
return (c, h)
def node_backward(self, inputs, hx, sememe_c, sememe_h):
child_c = hx[0]
child_h = hx[1]
iou = self.ioux_b(inputs) + self.iouh_b(child_h) + self.ious_b(sememe_h)
i, o, u = torch.split(iou, iou.size(1) // 3, dim=1)
i, o, u = torch.sigmoid(i), torch.sigmoid(o), torch.tanh(u)
f = torch.sigmoid(
self.fh_b(child_h) + self.fx_b(inputs)
)
f_s_b = torch.sigmoid(
self.fs_b(sememe_h) + self.fx_s_b(inputs)
)
fc = torch.mul(f, child_c) # part of memory cell induced by word-child
fc_s = torch.mul(f_s_b, sememe_c)
c = torch.mul(i, u) + fc + fc_s # sum means sigma
h = torch.mul(o, torch.tanh(c))
return (c, h)
def forward(self, sent, sent_len, def_vecs):
# hx: (child_c, child_h)
sememe_c, sememe_h = self.sememe_sum(def_vecs)
sent_len_sorted, idx_sort = np.sort(sent_len)[::-1], np.argsort(-sent_len)
sent_len_sorted = sent_len_sorted.copy()
idx_unsort = np.argsort(idx_sort)
idx_sort = torch.from_numpy(idx_sort).cuda()
sent = sent.index_select(1, idx_sort)
sememe_h = sememe_h.index_select(1, idx_sort)
sememe_c = sememe_c.index_select(1, idx_sort)
max_time, batch_size, _ = sent.size()
pack_length = np.zeros([max_time, 1], dtype=np.int)
time_point = batch_size - 1
last_point = 0
while (True):
pack_length[last_point: sent_len_sorted[time_point]] = time_point + 1
last_point = sent_len_sorted[time_point]
if (sent_len_sorted[time_point] == max_time):
break
time_point = time_point - 1
pack_length = torch.from_numpy(pack_length).cuda()
output_forward = []
hx_forward = (sent[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_(),
sent[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_())
for time in range(max_time):
next_hx = self.node_forward(sent[time, 0:pack_length[time]], hx_forward,
sememe_c[time, 0:pack_length[time]], sememe_h[time, 0:pack_length[time]])
output_forward.append(
torch.cat([next_hx[1], torch.zeros([batch_size - next_hx[1].size()[0], self.mem_dim], device='cuda')],
dim=0))
if (time < max_time - 1):
hx_forward = (next_hx[0][0:pack_length[time + 1]], next_hx[1][0:pack_length[time + 1]])
output_backward = [[] for i in range(max_time)]
hx_backward = (sent[0][0].detach().new(pack_length[max_time - 1], self.mem_dim).fill_(0.).requires_grad_(),
sent[0][0].detach().new(pack_length[max_time - 1], self.mem_dim).fill_(0.).requires_grad_())
for time in range(max_time):
next_hx = self.node_backward(sent[max_time - time - 1, 0:pack_length[max_time - time - 1]], hx_backward,
sememe_c[max_time - time - 1, 0:pack_length[max_time - time - 1]],
sememe_h[max_time - time - 1, 0:pack_length[max_time - time - 1]])
output_backward[max_time - time - 1] = torch.cat(
[next_hx[1], torch.zeros([batch_size - next_hx[1].size()[0], self.mem_dim], device='cuda')], dim=0)
if (time < max_time - 1):
hx_backward = (torch.cat([next_hx[0], torch.zeros(
[pack_length[max_time - time - 2] - next_hx[0].size()[0], self.mem_dim]).cuda()], dim=0), \
torch.cat([next_hx[1], torch.zeros(
[pack_length[max_time - time - 2] - next_hx[1].size()[0], self.mem_dim]).cuda()],
dim=0))
a = torch.stack(output_forward, dim=0)
b = torch.stack(output_backward, dim=0)
idx_unsort = torch.from_numpy(idx_unsort).cuda()
sent_output_forward = a.index_select(1, idx_unsort)
sent_output_backward = b.index_select(1, idx_unsort)
final_output_forward = torch.stack([sent_output_forward[sent_len[i] - 1][i] for i in range(batch_size)], dim=0)
final_output = torch.cat([final_output_forward, sent_output_backward[0]], dim=1)
return final_output
def sememe_sum(self, input_s):
input_sememe = []
for i in range(input_s.size()[0]):
input_sememe.append(input_s[i].float())
input_sememe = torch.stack(input_sememe, dim=0)
sememe_c, sememe_h = self.sememesumlstm(input_sememe)
return sememe_c, sememe_h
def set_w2v_path(self, w2v_path):
self.w2v_path = w2v_path
def get_word_dict(self, sentences, tokenize=True):
# create vocab of words
word_dict = {}
sentences = [s.split() if not tokenize else self.tokenize(s) for s in sentences]
for sent in sentences:
for word in sent:
if word not in word_dict:
word_dict[word] = ''
word_dict[self.bos] = ''
word_dict[self.eos] = ''
return word_dict
def get_w2v(self, word_dict):
assert hasattr(self, 'w2v_path'), 'w2v path not set'
# create word_vec with w2v vectors
word_vec = {}
with open(self.w2v_path, encoding='utf-8') as f:
for line in f:
word, vec = line.split(' ', 1)
if word in word_dict:
word_vec[word] = np.fromstring(vec, sep=' ')
print('Found %s(/%s) words with w2v vectors' % (len(word_vec), len(word_dict)))
return word_vec
def get_w2v_k(self, K):
assert hasattr(self, 'w2v_path'), 'w2v path not set'
# create word_vec with k first w2v vectors
k = 0
word_vec = {}
with open(self.w2v_path, encoding='utf-8') as f:
for line in f:
word, vec = line.split(' ', 1)
if k <= K:
word_vec[word] = np.fromstring(vec, sep=' ')
k += 1
if k > K:
if word in [self.bos, self.eos]:
word_vec[word] = np.fromstring(vec, sep=' ')
if k > K and all([w in word_vec for w in [self.bos, self.eos]]):
break
return word_vec
def build_vocab(self, sentences, tokenize=True):
assert hasattr(self, 'w2v_path'), 'w2v path not set'
word_dict = self.get_word_dict(sentences, tokenize)
self.word_vec = self.get_w2v(word_dict)
print('Vocab size : %s' % (len(self.word_vec)))
# build w2v vocab with k most frequent words
def build_vocab_k_words(self, K):
assert hasattr(self, 'w2v_path'), 'w2v path not set'
self.word_vec = self.get_w2v_k(K)
print('Vocab size : %s' % (K))
def update_vocab(self, sentences, tokenize=True):
assert hasattr(self, 'w2v_path'), 'warning : w2v path not set'
assert hasattr(self, 'word_vec'), 'build_vocab before updating it'
word_dict = self.get_word_dict(sentences, tokenize)
# keep only new words
for word in self.word_vec:
if word in word_dict:
del word_dict[word]
# udpate vocabulary
if word_dict:
new_word_vec = self.get_w2v(word_dict)
self.word_vec.update(new_word_vec)
else:
new_word_vec = []
print('New vocab size : %s (added %s words)' % (len(self.word_vec), len(new_word_vec)))
'''
def get_batch(self, batch):
# sent in batch in decreasing order of lengths
# batch: (bsize, max_len, word_dim)
embed = np.zeros((len(batch[0]), len(batch), self.word_emb_dim))
for i in range(len(batch)):
for j in range(len(batch[i])):
embed[j, i, :] = self.word_vec[batch[i][j]]
return torch.FloatTensor(embed)
'''
def is_cuda(self):
# either all weights are on cpu or they are on gpu
return True
# return self.enc_lstm.bias_hh_l0.data.is_cuda
def tokenize(self, s):
from nltk.tokenize import word_tokenize
if self.moses_tok:
s = ' '.join(word_tokenize(s))
s = s.replace(" n't ", "n 't ") # HACK to get ~MOSES tokenization
return s.split()
else:
return word_tokenize(s)
def prepare_samples(self, sentences, bsize, tokenize, verbose):
sentences = [[self.bos] + s.split() + [self.eos] if not tokenize else
[self.bos] + self.tokenize(s) + [self.eos] for s in sentences]
n_w = np.sum([len(x) for x in sentences])
# filters words without w2v vectors
for i in range(len(sentences)):
s_f = [word for word in sentences[i] if word in self.word_vec]
if not s_f:
import warnings
warnings.warn('No words in "%s" (idx=%s) have w2v vectors. \
Replacing by "</s>"..' % (sentences[i], i))
s_f = [self.eos]
sentences[i] = s_f
lengths = np.array([len(s) for s in sentences])
n_wk = np.sum(lengths)
if verbose:
print('Nb words kept : %s/%s (%.1f%s)' % (
n_wk, n_w, 100.0 * n_wk / n_w, '%'))
# sort by decreasing length
lengths, idx_sort = np.sort(lengths)[::-1], np.argsort(-lengths)
sentences = np.array(sentences)[idx_sort]
return sentences, lengths, idx_sort
def read_lemmatization(self, lemma_dir):
dic_lemma = {}
for line in open(lemma_dir):
line = line.strip().split()
dic_lemma[line[1]] = line[0]
return dic_lemma
def get_def_vec_by_word(self, word):
word_lower = word.lower()
if word_lower in self.dic_lemma.keys():
word_lower = self.dic_lemma[word_lower]
if word_lower in self.sense_tensor_dict.keys():
tensor_list = self.sense_tensor_dict[word_lower]
base_tensor = np.zeros(512)
for pos, tensor in tensor_list:
base_tensor = np.add(base_tensor, tensor)
base_tensor = base_tensor / float(len(tensor_list))
return base_tensor
else:
return np.zeros(512)
def get_batch(self, batch, emb_dim=300, ):
embed = np.zeros((len(batch[0]), len(batch), 300))
# sememe_data = np.zeros((len(batch[0]), len(batch), size), dtype=np.uint8)
def_data = np.zeros((len(batch[0]), len(batch), 512), dtype=np.float)
for i in range(len(batch)):
for j in range(len(batch[i])):
embed[j, i, :] = self.word_vec[batch[i][j]]
def_data[j, i] = self.get_def_vec_by_word(batch[i][j])
return torch.from_numpy(embed).float(), torch.from_numpy(def_data).cuda()
def encode(self, sentences, bsize=64, tokenize=True, verbose=False, size=3000):
tic = time.time()
sentences, lengths, idx_sort = self.prepare_samples(
sentences, bsize, tokenize, verbose)
embeddings = []
for stidx in range(0, len(sentences), bsize):
batch, batch_s = self.get_batch(sentences[stidx:stidx + bsize], 300)
if self.is_cuda():
batch = batch.cuda()
with torch.no_grad():
batch = self.forward(batch, lengths[stidx:stidx + bsize], batch_s).data.cpu().numpy()
embeddings.append(batch)
embeddings = np.vstack(embeddings)
# unsort
idx_unsort = np.argsort(idx_sort)
embeddings = embeddings[idx_unsort]
if verbose:
print('Speed : %.1f sentences/s (%s mode, bsize=%s)' % (
len(embeddings) / (time.time() - tic),
'gpu' if self.is_cuda() else 'cpu', bsize))
return embeddings
class BILSTM_extra_void(nn.Module):
    """Bidirectional LSTM sentence encoder ("void" variant).

    Sememe information does not enter the recurrent cell; it is only used by
    the attention pooling over the hidden states in ``forward``.
    """
    def __init__(self, config):
        super(BILSTM_extra_void, self).__init__()
        self.enc_lstm_dim = config['enc_lstm_dim']
        self.sememe_dim = config['sememe_dim']
        self.sememesumlstm = SememeSumLstm(self.sememe_dim, self.enc_lstm_dim)
        self.sememesumGRU = SememeSumGRU(self.sememe_dim, self.enc_lstm_dim)
        self.sememe_size = config['sememe_size']
        self.emb_sememe = nn.Embedding(self.sememe_size, self.sememe_dim)
        self.in_dim = config['word_emb_dim']
        self.mem_dim = config['enc_lstm_dim']
        #self.pool_type = config['pool_type']
        # "3 *" packs the i/o/u gate matrices into one; torch.split separates them.
        self.ioux = nn.Linear(self.in_dim, 3 * self.mem_dim)
        self.iouh = nn.Linear(self.mem_dim, 3 * self.mem_dim)
        self.ioux_b = nn.Linear(self.in_dim, 3 * self.mem_dim)
        self.iouh_b = nn.Linear(self.mem_dim, 3 * self.mem_dim)
        # ious handles the c and h coming from the sememe LSTM (both mem_dim-d).
        self.ious = nn.Linear(self.mem_dim, 3 * self.mem_dim)
        self.ious_b = nn.Linear(self.mem_dim, 3 * self.mem_dim)
        self.fx = nn.Linear(self.in_dim, self.mem_dim)
        self.fx_b = nn.Linear(self.in_dim, self.mem_dim)
        self.fx_s = nn.Linear(self.in_dim, self.mem_dim)
        self.fx_s_b = nn.Linear(self.in_dim, self.mem_dim)
        self.fh = nn.Linear(self.mem_dim, self.mem_dim)
        self.fh_b = nn.Linear(self.mem_dim, self.mem_dim)
        # fs handles the c and h coming from the sememe LSTM.
        self.fs = nn.Linear(self.mem_dim, self.mem_dim)
        self.fs_b = nn.Linear(self.mem_dim, self.mem_dim)
        self.max_pad = True
        # Attention-pooling parameters (forward / backward directions).
        self.W_s = nn.Linear(config['sememe_size'], self.mem_dim)
        self.W = nn.Linear(self.mem_dim, self.mem_dim)
        self.query = nn.Embedding(2*self.mem_dim, 1)
        self.W_p = nn.Linear(self.mem_dim, self.mem_dim)
        self.W_x = nn.Linear(self.mem_dim, self.mem_dim)
        self.W_s_b = nn.Linear(config['sememe_size'], self.mem_dim)
        self.W_b = nn.Linear(self.mem_dim, self.mem_dim)
        self.query_b = nn.Embedding(2*self.mem_dim, 1)
        self.W_p_b = nn.Linear(self.mem_dim, self.mem_dim)
        self.W_x_b = nn.Linear(self.mem_dim, self.mem_dim)
        self.reset_parameters()
        # NOTE(review): `sememe` must exist at module level — confirm.
        self.sememe = sememe
        self.bos = '<s>'
        self.eos = '</s>'
        self.moses_tok = False

    def reset_parameters(self):
        """Kaiming-initialize every gate projection and zero its bias."""
        layers = [self.ioux, self.ioux_b, self.iouh, self.iouh_b, self.ious, self.ious_b, self.fx, self.fx_b, self.fx_s, self.fx_s_b, self.fh, self.fh_b, self.fs, self.fs_b]
        for layer in layers:
            init.kaiming_normal_(layer.weight)
            if layer.bias is not None:
                init.constant_(layer.bias, val=0)

    def node_forward(self, inputs, hx):
        """One forward-direction LSTM step; returns the new (cell, hidden)."""
        child_c = hx[0]
        child_h = hx[1]
        iou = self.ioux(inputs) + self.iouh(child_h)
        i, o, u = torch.split(iou, iou.size(1) // 3, dim=1)
        i, o, u = torch.sigmoid(i), torch.sigmoid(o), torch.tanh(u)
        f = torch.sigmoid(
            self.fh(child_h) + self.fx(inputs)
        )
        fc = torch.mul(f, child_c)  # part of memory cell induced by word-child
        c = torch.mul(i, u) + fc  # the sum plays the role of sigma
        h = torch.mul(o, torch.tanh(c))
        return (c, h)

    def node_backward(self, inputs, hx):
        """One backward-direction LSTM step; returns the new (cell, hidden)."""
        child_c = hx[0]
        child_h = hx[1]
        iou = self.ioux_b(inputs) + self.iouh_b(child_h)
        i, o, u = torch.split(iou, iou.size(1) // 3, dim=1)
        i, o, u = torch.sigmoid(i), torch.sigmoid(o), torch.tanh(u)
        f = torch.sigmoid(
            self.fh_b(child_h) + self.fx_b(inputs)
        )
        fc = torch.mul(f, child_c)  # part of memory cell induced by word-child
        c = torch.mul(i, u) + fc  # the sum plays the role of sigma
        h = torch.mul(o, torch.tanh(c))
        return (c, h)

    def forward(self, sent, sent_len, sememe_data):
        """Encode a padded batch.

        sent: (max_time, batch, in_dim) word embeddings.
        sent_len: array of true sentence lengths.
        sememe_data: per-token sememe indicators; used only for attention pooling.
        Returns a (batch, 2 * mem_dim) sentence representation.
        """
        # hx: (child_c, child_h)
        emb_s = sememe_data.float().cuda()
        # Sort by decreasing length so each time step runs on a contiguous
        # prefix of the batch (manual packing).
        sent_len_sorted, idx_sort = np.sort(sent_len)[::-1], np.argsort(-sent_len)
        sent_len_sorted = sent_len_sorted.copy()
        idx_unsort = np.argsort(idx_sort)
        idx_sort = torch.from_numpy(idx_sort).cuda()
        sent = sent.index_select(1, idx_sort)
        max_time, batch_size, _ = sent.size()
        # pack_length[t] = number of sentences still active at time step t.
        # np.int was removed in NumPy 1.24; use np.int64 explicitly.
        pack_length = np.zeros([max_time, 1], dtype=np.int64)
        time_point = batch_size - 1
        last_point = 0
        while (True):
            pack_length[last_point: sent_len_sorted[time_point]] = time_point + 1
            last_point = sent_len_sorted[time_point]
            if (sent_len_sorted[time_point] == max_time):
                break
            time_point = time_point - 1
        pack_length = torch.from_numpy(pack_length).cuda()
        output_forward = []
        # BUG FIX: the original referenced an undefined name `inputs`;
        # the embedding tensor is `sent` (cf. the sibling classes).
        hx_forward = (sent[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_(),
                      sent[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_())
        for time in range(max_time):
            next_hx = self.node_forward(sent[time, 0:pack_length[time]], hx_forward)
            output_forward.append(torch.cat([next_hx[1], torch.zeros([batch_size - next_hx[1].size()[0], self.mem_dim], device='cuda')], dim=0))
            if (time < max_time - 1):
                hx_forward = (next_hx[0][0:pack_length[time + 1]], next_hx[1][0:pack_length[time + 1]])
        output_backward = [[] for i in range(max_time)]
        hx_backward = (sent[0][0].detach().new(pack_length[max_time - 1], self.mem_dim).fill_(0.).requires_grad_(),
                       sent[0][0].detach().new(pack_length[max_time - 1], self.mem_dim).fill_(0.).requires_grad_())
        for time in range(max_time):
            next_hx = self.node_backward(sent[max_time - time - 1, 0:pack_length[max_time - time - 1]], hx_backward)
            output_backward[max_time - time - 1] = torch.cat([next_hx[1], torch.zeros([batch_size - next_hx[1].size()[0], self.mem_dim], device='cuda')], dim=0)
            if (time < max_time - 1):
                hx_backward = (torch.cat([next_hx[0], torch.zeros([pack_length[max_time - time - 2] - next_hx[0].size()[0], self.mem_dim]).cuda()], dim=0),
                               torch.cat([next_hx[1], torch.zeros([pack_length[max_time - time - 2] - next_hx[1].size()[0], self.mem_dim]).cuda()], dim=0))
        a = torch.stack(output_forward, dim=0)
        b = torch.stack(output_backward, dim=0)
        idx_unsort = torch.from_numpy(idx_unsort).cuda()
        sent_output_forward = a.index_select(1, idx_unsort)
        sent_output_backward = b.index_select(1, idx_unsort)
        # Attention pooling over the states, guided by the raw sememe flags.
        new_output_forward = []
        new_output_2_forward = []
        new_output_backward = []
        for i in range(len(sent_len)):
            hidden_old_forward = sent_output_forward[0:sent_len[i], i, :]
            new_output_2_forward.append(sent_output_forward[sent_len[i] - 1, i])
            hidden = self.W(hidden_old_forward)
            emb_s_sum = self.W_s(emb_s[0:sent_len[i], i, :])
            hidden = torch.cat([hidden, emb_s_sum], dim=1)
            att = torch.tanh(torch.mm(hidden, self.query.weight))
            new_output_forward.append(torch.mm(att.transpose(1, 0), hidden_old_forward))
        new_output_forward = self.W_p(torch.squeeze(torch.stack(new_output_forward, dim=0))) + self.W_x(torch.squeeze(torch.stack(new_output_2_forward, dim=0)))
        new_output_forward = torch.tanh(new_output_forward)
        for i in range(len(sent_len)):
            hidden_old_backward = sent_output_backward[0:sent_len[i], i, :]
            hidden = self.W_b(hidden_old_backward)
            emb_s_sum = self.W_s_b(emb_s[0:sent_len[i], i, :])
            hidden = torch.cat([hidden, emb_s_sum], dim=1)
            att = torch.tanh(torch.mm(hidden, self.query_b.weight))
            new_output_backward.append(torch.mm(att.transpose(1, 0), hidden_old_backward))
        new_output_backward = self.W_p_b(torch.squeeze(torch.stack(new_output_backward, dim=0))) + self.W_x_b(sent_output_backward[0])
        new_output_backward = torch.tanh(new_output_backward)
        final_output = torch.cat([new_output_forward, new_output_backward], dim=1)
        return final_output
class BILSTM_extra_concat(nn.Module):
    """BiLSTM encoder ("concat" variant).

    The sememe summary vector is concatenated to each word embedding before
    entering the recurrent cell; the pooling attention also sees it.
    """
    def __init__(self, config):
        super(BILSTM_extra_concat, self).__init__()
        self.enc_lstm_dim = config['enc_lstm_dim']
        self.sememe_dim = config['sememe_dim']
        self.sememesumlstm = SememeSumLstm(self.sememe_dim, self.enc_lstm_dim)
        self.sememesumGRU = SememeSumGRU(self.sememe_dim, self.enc_lstm_dim)
        self.sememe_size = config['sememe_size']
        self.emb_sememe = nn.Embedding(self.sememe_size, self.sememe_dim)
        self.in_dim = config['word_emb_dim']
        self.mem_dim = config['enc_lstm_dim']
        #self.pool_type = config['pool_type']
        # Inputs are [word ; sememe] concatenations, hence 2 * in_dim;
        # "3 *" packs the i/o/u gate matrices into one (split later).
        self.ioux = nn.Linear(2 * self.in_dim, 3 * self.mem_dim)
        self.iouh = nn.Linear(self.mem_dim, 3 * self.mem_dim)
        self.ioux_b = nn.Linear(2 * self.in_dim, 3 * self.mem_dim)
        self.iouh_b = nn.Linear(self.mem_dim, 3 * self.mem_dim)
        self.fx = nn.Linear(2 * self.in_dim, self.mem_dim)
        self.fx_b = nn.Linear(2 * self.in_dim, self.mem_dim)
        self.fh = nn.Linear(self.mem_dim, self.mem_dim)
        self.fh_b = nn.Linear(self.mem_dim, self.mem_dim)
        self.max_pad = True
        # Attention-pooling parameters (forward / backward directions).
        self.W_s = nn.Linear(self.in_dim, self.mem_dim)
        self.W = nn.Linear(self.mem_dim, self.mem_dim)
        self.query = nn.Embedding(2*self.mem_dim, 1)
        self.W_p = nn.Linear(self.mem_dim, self.mem_dim)
        self.W_x = nn.Linear(self.mem_dim, self.mem_dim)
        self.W_s_b = nn.Linear(self.in_dim, self.mem_dim)
        self.W_b = nn.Linear(self.mem_dim, self.mem_dim)
        self.query_b = nn.Embedding(2*self.mem_dim, 1)
        self.W_p_b = nn.Linear(self.mem_dim, self.mem_dim)
        self.W_x_b = nn.Linear(self.mem_dim, self.mem_dim)
        self.reset_parameters()

    def reset_parameters(self):
        """Kaiming-initialize every gate projection and zero its bias."""
        layers = [self.ioux, self.ioux_b, self.iouh, self.iouh_b, self.fx, self.fx_b, self.fh, self.fh_b]
        for layer in layers:
            init.kaiming_normal_(layer.weight)
            if layer.bias is not None:
                init.constant_(layer.bias, val=0)

    def node_forward(self, inputs, hx, sememe_h):
        """One forward-direction step over [word ; sememe] inputs."""
        child_c = hx[0]
        child_h = hx[1]
        inputs = torch.cat([inputs, sememe_h], dim=1)
        iou = self.ioux(inputs) + self.iouh(child_h)
        i, o, u = torch.split(iou, iou.size(1) // 3, dim=1)
        i, o, u = torch.sigmoid(i), torch.sigmoid(o), torch.tanh(u)
        f = torch.sigmoid(
            self.fh(child_h) + self.fx(inputs)
        )
        fc = torch.mul(f, child_c)  # part of memory cell induced by word-child
        c = torch.mul(i, u) + fc
        h = torch.mul(o, torch.tanh(c))
        return (c, h)

    def node_backward(self, inputs, hx, sememe_h):
        """One backward-direction step over [word ; sememe] inputs."""
        child_c = hx[0]
        child_h = hx[1]
        inputs = torch.cat([inputs, sememe_h], dim=1)
        iou = self.ioux_b(inputs) + self.iouh_b(child_h)
        i, o, u = torch.split(iou, iou.size(1) // 3, dim=1)
        i, o, u = torch.sigmoid(i), torch.sigmoid(o), torch.tanh(u)
        f = torch.sigmoid(
            self.fh_b(child_h) + self.fx_b(inputs)
        )
        fc = torch.mul(f, child_c)  # part of memory cell induced by word-child
        c = torch.mul(i, u) + fc
        h = torch.mul(o, torch.tanh(c))
        return (c, h)

    def forward(self, sent, sent_len, sememe_data):
        """Encode a padded batch; returns a (batch, 2 * mem_dim) representation."""
        # hx: (child_c, child_h)
        sememe_h = self.sememe_sum(sememe_data)
        sent_len_sorted, idx_sort = np.sort(sent_len)[::-1], np.argsort(-sent_len)
        sent_len_sorted = sent_len_sorted.copy()
        idx_unsort = np.argsort(idx_sort)
        idx_sort = torch.from_numpy(idx_sort).cuda()
        sent = sent.index_select(1, idx_sort)
        sememe_h = sememe_h.index_select(1, idx_sort)
        max_time, batch_size, _ = sent.size()
        # pack_length[t] = number of sentences still active at time step t.
        # np.int was removed in NumPy 1.24; use np.int64 explicitly.
        pack_length = np.zeros([max_time, 1], dtype=np.int64)
        time_point = batch_size - 1
        last_point = 0
        while (True):
            pack_length[last_point: sent_len_sorted[time_point]] = time_point + 1
            last_point = sent_len_sorted[time_point]
            if (sent_len_sorted[time_point] == max_time):
                break
            time_point = time_point - 1
        pack_length = torch.from_numpy(pack_length).cuda()
        output_forward = []
        # BUG FIX: the original referenced an undefined name `inputs`;
        # the embedding tensor is `sent`.
        hx_forward = (sent[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_(),
                      sent[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_())
        for time in range(max_time):
            next_hx = self.node_forward(sent[time, 0:pack_length[time]], hx_forward, sememe_h[time, 0:pack_length[time]])
            output_forward.append(torch.cat([next_hx[1], torch.zeros([batch_size - next_hx[1].size()[0], self.mem_dim], device='cuda')], dim=0))
            if (time < max_time - 1):
                hx_forward = (next_hx[0][0:pack_length[time + 1]], next_hx[1][0:pack_length[time + 1]])
        output_backward = [[] for i in range(max_time)]
        hx_backward = (sent[0][0].detach().new(pack_length[max_time - 1], self.mem_dim).fill_(0.).requires_grad_(),
                       sent[0][0].detach().new(pack_length[max_time - 1], self.mem_dim).fill_(0.).requires_grad_())
        for time in range(max_time):
            next_hx = self.node_backward(sent[max_time - time - 1, 0:pack_length[max_time - time - 1]], hx_backward, sememe_h[max_time - time - 1, 0:pack_length[max_time - time - 1]])
            output_backward[max_time - time - 1] = torch.cat([next_hx[1], torch.zeros([batch_size - next_hx[1].size()[0], self.mem_dim], device='cuda')], dim=0)
            if (time < max_time - 1):
                hx_backward = (torch.cat([next_hx[0], torch.zeros([pack_length[max_time - time - 2] - next_hx[0].size()[0], self.mem_dim]).cuda()], dim=0),
                               torch.cat([next_hx[1], torch.zeros([pack_length[max_time - time - 2] - next_hx[1].size()[0], self.mem_dim]).cuda()], dim=0))
        a = torch.stack(output_forward, dim=0)
        b = torch.stack(output_backward, dim=0)
        idx_unsort = torch.from_numpy(idx_unsort).cuda()
        sent_output_forward = a.index_select(1, idx_unsort)
        sent_output_backward = b.index_select(1, idx_unsort)
        sememe_h = sememe_h.index_select(1, idx_unsort)
        # Attention pooling over the states, guided by the sememe embeddings.
        new_output_forward = []
        new_output_2_forward = []
        new_output_backward = []
        for i in range(len(sent_len)):
            hidden_old_forward = sent_output_forward[0:sent_len[i], i, :]
            new_output_2_forward.append(sent_output_forward[sent_len[i] - 1, i])
            hidden = self.W(hidden_old_forward)
            emb_s_sum = self.W_s(sememe_h[0:sent_len[i], i, :])
            hidden = torch.cat([hidden, emb_s_sum], dim=1)
            att = torch.tanh(torch.mm(hidden, self.query.weight))
            new_output_forward.append(torch.mm(att.transpose(1, 0), hidden_old_forward))
        new_output_forward = self.W_p(torch.squeeze(torch.stack(new_output_forward, dim=0))) + self.W_x(torch.squeeze(torch.stack(new_output_2_forward, dim=0)))
        new_output_forward = torch.tanh(new_output_forward)
        for i in range(len(sent_len)):
            hidden_old_backward = sent_output_backward[0:sent_len[i], i, :]
            hidden = self.W_b(hidden_old_backward)
            emb_s_sum = self.W_s_b(sememe_h[0:sent_len[i], i, :])
            hidden = torch.cat([hidden, emb_s_sum], dim=1)
            att = torch.tanh(torch.mm(hidden, self.query_b.weight))
            new_output_backward.append(torch.mm(att.transpose(1, 0), hidden_old_backward))
        new_output_backward = self.W_p_b(torch.squeeze(torch.stack(new_output_backward, dim=0))) + self.W_x_b(sent_output_backward[0])
        new_output_backward = torch.tanh(new_output_backward)
        final_output = torch.cat([new_output_forward, new_output_backward], dim=1)
        return final_output

    def sememe_sum(self, input_s):
        """Project per-token sememe indicators through the sememe embedding matrix."""
        emb_sememe = self.emb_sememe.weight
        input_sememe = []
        for i in range(input_s.size()[0]):
            input_sememe.append(torch.mm(input_s[i].float(), emb_sememe))
        input_sememe = torch.stack(input_sememe, dim=0)
        return input_sememe
class BILSTM_extra_gate(nn.Module):
    """BiLSTM encoder ("gate" variant).

    The sememe embedding drives an extra output gate ``o_c`` that injects a
    transformed sememe signal into the hidden state at every step.
    """
    def __init__(self, config):
        super(BILSTM_extra_gate, self).__init__()
        self.enc_lstm_dim = config['enc_lstm_dim']
        self.sememe_dim = config['sememe_dim']
        self.sememesumlstm = SememeSumLstm(self.sememe_dim, self.enc_lstm_dim)
        self.sememesumGRU = SememeSumGRU(self.sememe_dim, self.enc_lstm_dim)
        self.sememe_size = config['sememe_size']
        self.emb_sememe = nn.Embedding(self.sememe_size, self.sememe_dim)
        self.in_dim = config['word_emb_dim']
        self.mem_dim = config['enc_lstm_dim']
        #self.pool_type = config['pool_type']
        # "4 *" packs the f/i/o/o_c gate matrices into one; split later.
        self.ioux = nn.Linear(self.in_dim, 4 * self.mem_dim)
        self.iouh = nn.Linear(self.mem_dim, 4 * self.mem_dim)
        self.ioux_b = nn.Linear(self.in_dim, 4 * self.mem_dim)
        self.iouh_b = nn.Linear(self.mem_dim, 4 * self.mem_dim)
        # ious handles the sememe embedding contribution to the gates.
        self.ious = nn.Linear(self.in_dim, 4 * self.mem_dim)
        self.ious_b = nn.Linear(self.in_dim, 4 * self.mem_dim)
        self.fx = nn.Linear(self.in_dim, self.mem_dim)
        self.fx_b = nn.Linear(self.in_dim, self.mem_dim)
        self.fx_s = nn.Linear(self.in_dim, self.mem_dim)
        self.fx_s_b = nn.Linear(self.in_dim, self.mem_dim)
        self.fh = nn.Linear(self.mem_dim, self.mem_dim)
        self.fh_b = nn.Linear(self.mem_dim, self.mem_dim)
        # fs handles the sememe contribution to the forget path.
        self.fs = nn.Linear(self.in_dim, self.mem_dim)
        self.fs_b = nn.Linear(self.in_dim, self.mem_dim)
        self.W_c = nn.Linear(self.in_dim, self.mem_dim)
        self.W_c_b = nn.Linear(self.in_dim, self.mem_dim)
        # Attention-pooling parameters (forward / backward directions).
        self.W_s = nn.Linear(self.in_dim, self.mem_dim)
        self.W = nn.Linear(self.mem_dim, self.mem_dim)
        self.query = nn.Embedding(2*self.mem_dim, 1)
        self.W_p = nn.Linear(self.mem_dim, self.mem_dim)
        self.W_x = nn.Linear(self.mem_dim, self.mem_dim)
        self.W_s_b = nn.Linear(self.in_dim, self.mem_dim)
        self.W_b = nn.Linear(self.mem_dim, self.mem_dim)
        self.query_b = nn.Embedding(2*self.mem_dim, 1)
        self.W_p_b = nn.Linear(self.mem_dim, self.mem_dim)
        self.W_x_b = nn.Linear(self.mem_dim, self.mem_dim)
        self.max_pad = True
        self.reset_parameters()

    def reset_parameters(self):
        """Kaiming-initialize every gate projection and zero its bias."""
        layers = [self.ioux, self.ioux_b, self.iouh, self.iouh_b, self.ious, self.ious_b, self.fx, self.fx_b, self.fx_s, self.fx_s_b, self.fh, self.fh_b, self.fs, self.fs_b, self.W_c, self.W_c_b]
        for layer in layers:
            init.kaiming_normal_(layer.weight)
            if layer.bias is not None:
                init.constant_(layer.bias, val=0)

    def node_forward(self, inputs, hx, sememe_h):
        """One forward-direction step with the extra sememe output gate o_c."""
        child_c = hx[0]
        child_h = hx[1]
        iou = self.ioux(inputs) + self.iouh(child_h) + self.ious(sememe_h)
        f, i, o, o_c = torch.split(iou, iou.size(1) // 4, dim=1)
        f, i, o, o_c = torch.sigmoid(f), torch.sigmoid(i), torch.sigmoid(o), torch.sigmoid(o_c)
        c_telta = self.fx(inputs) + self.fh(child_h)
        c_telta = torch.tanh(c_telta)
        fc = torch.mul(f, child_c)  # part of memory cell induced by word-child
        c = torch.mul(i, c_telta) + fc  # the sum plays the role of sigma
        h = torch.mul(o, torch.tanh(c)) + torch.mul(o_c, torch.tanh(self.W_c(sememe_h)))
        return (c, h)

    def node_backward(self, inputs, hx, sememe_h):
        """One backward-direction step with the extra sememe output gate o_c."""
        child_c = hx[0]
        child_h = hx[1]
        iou = self.ioux_b(inputs) + self.iouh_b(child_h) + self.ious_b(sememe_h)
        f, i, o, o_c = torch.split(iou, iou.size(1) // 4, dim=1)
        f, i, o, o_c = torch.sigmoid(f), torch.sigmoid(i), torch.sigmoid(o), torch.sigmoid(o_c)
        c_telta = self.fx_b(inputs) + self.fh_b(child_h)
        c_telta = torch.tanh(c_telta)
        fc = torch.mul(f, child_c)  # part of memory cell induced by word-child
        c = torch.mul(i, c_telta) + fc  # the sum plays the role of sigma
        h = torch.mul(o, torch.tanh(c)) + torch.mul(o_c, torch.tanh(self.W_c_b(sememe_h)))
        return (c, h)

    def forward(self, sent, sent_len, sememe_data):
        """Encode a padded batch; returns a (batch, 2 * mem_dim) representation."""
        # hx: (child_c, child_h)
        sememe_h = self.sememe_sum(sememe_data)
        sent_len_sorted, idx_sort = np.sort(sent_len)[::-1], np.argsort(-sent_len)
        sent_len_sorted = sent_len_sorted.copy()
        idx_unsort = np.argsort(idx_sort)
        idx_sort = torch.from_numpy(idx_sort).cuda()
        sent = sent.index_select(1, idx_sort)
        sememe_h = sememe_h.index_select(1, idx_sort)
        max_time, batch_size, _ = sent.size()
        # pack_length[t] = number of sentences still active at time step t.
        # np.int was removed in NumPy 1.24; use np.int64 explicitly.
        pack_length = np.zeros([max_time, 1], dtype=np.int64)
        time_point = batch_size - 1
        last_point = 0
        while (True):
            pack_length[last_point: sent_len_sorted[time_point]] = time_point + 1
            last_point = sent_len_sorted[time_point]
            if (sent_len_sorted[time_point] == max_time):
                break
            time_point = time_point - 1
        pack_length = torch.from_numpy(pack_length).cuda()
        output_forward = []
        # BUG FIX: the original referenced an undefined name `inputs`;
        # the embedding tensor is `sent`.
        hx_forward = (sent[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_(),
                      sent[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_())
        for time in range(max_time):
            next_hx = self.node_forward(sent[time, 0:pack_length[time]], hx_forward, sememe_h[time, 0:pack_length[time]])
            output_forward.append(torch.cat([next_hx[1], torch.zeros([batch_size - next_hx[1].size()[0], self.mem_dim], device='cuda')], dim=0))
            if (time < max_time - 1):
                hx_forward = (next_hx[0][0:pack_length[time + 1]], next_hx[1][0:pack_length[time + 1]])
        output_backward = [[] for i in range(max_time)]
        hx_backward = (sent[0][0].detach().new(pack_length[max_time - 1], self.mem_dim).fill_(0.).requires_grad_(),
                       sent[0][0].detach().new(pack_length[max_time - 1], self.mem_dim).fill_(0.).requires_grad_())
        for time in range(max_time):
            next_hx = self.node_backward(sent[max_time - time - 1, 0:pack_length[max_time - time - 1]], hx_backward, sememe_h[max_time - time - 1, 0:pack_length[max_time - time - 1]])
            output_backward[max_time - time - 1] = torch.cat([next_hx[1], torch.zeros([batch_size - next_hx[1].size()[0], self.mem_dim], device='cuda')], dim=0)
            if (time < max_time - 1):
                hx_backward = (torch.cat([next_hx[0], torch.zeros([pack_length[max_time - time - 2] - next_hx[0].size()[0], self.mem_dim]).cuda()], dim=0),
                               torch.cat([next_hx[1], torch.zeros([pack_length[max_time - time - 2] - next_hx[1].size()[0], self.mem_dim]).cuda()], dim=0))
        a = torch.stack(output_forward, dim=0)
        b = torch.stack(output_backward, dim=0)
        idx_unsort = torch.from_numpy(idx_unsort).cuda()
        sent_output_forward = a.index_select(1, idx_unsort)
        sent_output_backward = b.index_select(1, idx_unsort)
        sememe_h = sememe_h.index_select(1, idx_unsort)
        # Attention pooling over the states, guided by the sememe embeddings.
        new_output_forward = []
        new_output_2_forward = []
        new_output_backward = []
        for i in range(len(sent_len)):
            hidden_old_forward = sent_output_forward[0:sent_len[i], i, :]
            new_output_2_forward.append(sent_output_forward[sent_len[i] - 1, i])
            hidden = self.W(hidden_old_forward)
            emb_s_sum = self.W_s(sememe_h[0:sent_len[i], i, :])
            hidden = torch.cat([hidden, emb_s_sum], dim=1)
            att = torch.tanh(torch.mm(hidden, self.query.weight))
            new_output_forward.append(torch.mm(att.transpose(1, 0), hidden_old_forward))
        new_output_forward = self.W_p(torch.squeeze(torch.stack(new_output_forward, dim=0))) + self.W_x(torch.squeeze(torch.stack(new_output_2_forward, dim=0)))
        new_output_forward = torch.tanh(new_output_forward)
        for i in range(len(sent_len)):
            hidden_old_backward = sent_output_backward[0:sent_len[i], i, :]
            hidden = self.W_b(hidden_old_backward)
            emb_s_sum = self.W_s_b(sememe_h[0:sent_len[i], i, :])
            hidden = torch.cat([hidden, emb_s_sum], dim=1)
            att = torch.tanh(torch.mm(hidden, self.query_b.weight))
            new_output_backward.append(torch.mm(att.transpose(1, 0), hidden_old_backward))
        new_output_backward = self.W_p_b(torch.squeeze(torch.stack(new_output_backward, dim=0))) + self.W_x_b(sent_output_backward[0])
        new_output_backward = torch.tanh(new_output_backward)
        final_output = torch.cat([new_output_forward, new_output_backward], dim=1)
        return final_output

    def sememe_sum(self, input_s):
        """Project per-token sememe indicators through the sememe embedding matrix."""
        emb_sememe = self.emb_sememe.weight
        input_sememe = []
        for i in range(input_s.size()[0]):
            input_sememe.append(torch.mm(input_s[i].float().cuda(), emb_sememe))
        input_sememe = torch.stack(input_sememe, dim=0)
        return input_sememe
class BILSTM_extra_cell(nn.Module):
    def __init__(self, config):
        """BiLSTM whose memory cell additionally absorbs the sememe LSTM state.

        Expects ``config`` with keys 'enc_lstm_dim', 'sememe_dim',
        'sememe_size' and 'word_emb_dim'.
        """
        super(BILSTM_extra_cell, self).__init__()
        self.enc_lstm_dim = config['enc_lstm_dim']
        self.sememe_dim = config['sememe_dim']
        self.sememesumlstm = SememeSumLstm(self.sememe_dim, self.enc_lstm_dim)
        self.sememesumGRU = SememeSumGRU(self.sememe_dim, self.enc_lstm_dim)
        self.sememe_dim = config['sememe_dim']  # re-assigned with the same value as above
        self.sememe_size = config['sememe_size']
        self.emb_sememe = nn.Embedding(self.sememe_size, self.sememe_dim)
        self.in_dim = config['word_emb_dim']
        self.mem_dim = config['enc_lstm_dim']
        #self.pool_type = config['pool_type']
        # "3 *" packs the i/o/u gate matrices into one; torch.split separates them later.
        self.ioux = nn.Linear(self.in_dim, 3 * self.mem_dim)
        self.iouh = nn.Linear(self.mem_dim, 3 * self.mem_dim)
        self.ioux_b = nn.Linear(self.in_dim, 3 * self.mem_dim)
        self.iouh_b = nn.Linear(self.mem_dim, 3 * self.mem_dim)
        # ious handles the c and h coming from the sememe LSTM (both mem_dim-d).
        self.ious = nn.Linear(self.mem_dim, 3 * self.mem_dim)
        self.ious_b = nn.Linear(self.mem_dim, 3 * self.mem_dim)
        self.fx = nn.Linear(self.in_dim, self.mem_dim)
        self.fx_b = nn.Linear(self.in_dim, self.mem_dim)
        self.fx_s = nn.Linear(self.in_dim, self.mem_dim)
        self.fx_s_b = nn.Linear(self.in_dim, self.mem_dim)
        self.fh = nn.Linear(self.mem_dim, self.mem_dim)
        self.fh_b = nn.Linear(self.mem_dim, self.mem_dim)
        # fs handles the c and h coming from the sememe LSTM.
        self.fs = nn.Linear(self.mem_dim, self.mem_dim)
        self.fs_b = nn.Linear(self.mem_dim, self.mem_dim)
        self.max_pad = True
        # Attention-pooling parameters (forward / backward directions).
        self.W_s = nn.Linear(self.mem_dim, self.mem_dim)
        self.W = nn.Linear(self.mem_dim, self.mem_dim)
        self.query = nn.Embedding(2*self.mem_dim, 1)
        self.W_p = nn.Linear(self.mem_dim, self.mem_dim)
        self.W_x = nn.Linear(self.mem_dim, self.mem_dim)
        self.W_s_b = nn.Linear(self.mem_dim, self.mem_dim)
        self.W_b = nn.Linear(self.mem_dim, self.mem_dim)
        self.query_b = nn.Embedding(2*self.mem_dim, 1)
        self.W_p_b = nn.Linear(self.mem_dim, self.mem_dim)
        self.W_x_b = nn.Linear(self.mem_dim, self.mem_dim)
        self.reset_parameters()
def reset_parameters(self):
layers = [self.ioux, self.ioux_b, self.iouh, self.iouh_b, self.ious, self.ious_b, self.fx, self.fx_b, self.fx_s, self.fx_s_b, self.fh, self.fh_b, self.fs, self.fs_b]
for layer in layers:
init.kaiming_normal_(layer.weight)
if layer.bias is not None:
init.constant_(layer.bias, val=0)
def node_forward(self, inputs, hx, sememe_c, sememe_h):
child_c = hx[0]
child_h = hx[1]
iou = self.ioux(inputs) + self.iouh(child_h) + self.ious(sememe_h)
i, o, u = torch.split(iou, iou.size(1) // 3, dim=1)
i, o, u = torch.sigmoid(i), torch.sigmoid(o), torch.tanh(u)
f = torch.sigmoid(
self.fh(child_h) + self.fx(inputs)
)
f_s = torch.sigmoid(
self.fs(sememe_h) + self.fx_s(inputs)
)
fc = torch.mul(f, child_c) #part of memory cell induced by word-child
fc_s = torch.mul(f_s, sememe_c)
c = torch.mul(i, u) + fc + fc_s#sum means sigma
h = torch.mul(o, torch.tanh(c))
return (c, h)
def node_backward(self, inputs, hx, sememe_c, sememe_h):
child_c = hx[0]
child_h = hx[1]
iou = self.ioux_b(inputs) + self.iouh_b(child_h) + self.ious_b(sememe_h)
i, o, u = torch.split(iou, iou.size(1) // 3, dim=1)
i, o, u = torch.sigmoid(i), torch.sigmoid(o), torch.tanh(u)
f = torch.sigmoid(
self.fh_b(child_h) + self.fx_b(inputs)
)
f_s_b = torch.sigmoid(
self.fs_b(sememe_h) + self.fx_s_b(inputs)
)
fc = torch.mul(f, child_c) #part of memory cell induced by word-child
fc_s = torch.mul(f_s_b, sememe_c)
c = torch.mul(i, u) + fc + fc_s #sum means sigma
h = torch.mul(o, torch.tanh(c))
return (c, h)
def forward(self, sent, sent_len, sememe_data):
# hx: (child_c, child_h)
sememe_c, sememe_h = self.sememe_sum(sememe_data)
sent_len_sorted, idx_sort = np.sort(sent_len)[::-1], np.argsort(-sent_len)
sent_len_sorted = sent_len_sorted.copy()
idx_unsort = np.argsort(idx_sort)
idx_sort = torch.from_numpy(idx_sort).cuda()
sent = sent.index_select(1, idx_sort)
sememe_h = sememe_h.index_select(1, idx_sort)
sememe_c = sememe_c.index_select(1, idx_sort)
max_time, batch_size, _ = sent.size()
pack_length = np.zeros([max_time, 1], dtype = np.int)
time_point = batch_size-1
last_point = 0
while(True):
pack_length[last_point: sent_len_sorted[time_point]] = time_point+1
last_point = sent_len_sorted[time_point]
if(sent_len_sorted[time_point] == max_time):
break
time_point = time_point-1
pack_length = torch.from_numpy(pack_length).cuda()
output_forward = []
hx_forward = (inputs[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_(),
inputs[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_())
for time in range(max_time):
next_hx = self.node_forward(sent[time, 0:pack_length[time]], hx_forward, sememe_c[time, 0:pack_length[time]], sememe_h[time, 0:pack_length[time]])
output_forward.append(torch.cat([next_hx[1], torch.zeros([batch_size-next_hx[1].size()[0], self.mem_dim], device = 'cuda')], dim = 0))
if(time < max_time-1):
hx_forward = (next_hx[0][0:pack_length[time+1]], next_hx[1][0:pack_length[time+1]])
output_backward = [[] for i in range(max_time)]
hx_backward = (inputs[0][0].detach().new(pack_length[max_time-1], self.mem_dim).fill_(0.).requires_grad_(),
inputs[0][0].detach().new(pack_length[max_time-1], self.mem_dim).fill_(0.).requires_grad_())
for time in range(max_time):
next_hx = self.node_backward(sent[max_time-time-1, 0:pack_length[max_time-time-1]], hx_backward, sememe_c[max_time-time-1, 0:pack_length[max_time-time-1]], sememe_h[max_time-time-1, 0:pack_length[max_time-time-1]])
output_backward[max_time-time-1] = torch.cat([next_hx[1], torch.zeros([batch_size-next_hx[1].size()[0], self.mem_dim], device = 'cuda')], dim = 0)
if(time < max_time-1):
hx_backward = (torch.cat([next_hx[0], torch.zeros([pack_length[max_time-time-2]-next_hx[0].size()[0], self.mem_dim]).cuda()], dim = 0), \
torch.cat([next_hx[1], torch.zeros([pack_length[max_time-time-2]-next_hx[1].size()[0], self.mem_dim]).cuda()], dim = 0))
a = torch.stack(output_forward, dim = 0)
b = torch.stack(output_backward, dim = 0)
idx_unsort = torch.from_numpy(idx_unsort).cuda()
sent_output_forward = a.index_select(1, idx_unsort)
sent_output_backward = b.index_select(1, idx_unsort)
sememe_h = sememe_h.index_select(1, idx_unsort)
new_output_forward = []
new_output_2_forward = []
new_output_backward = []
for i in range(len(sent_len)):
hidden_old_forward = sent_output_forward[0:sent_len[i], i, :]
new_output_2_forward.append(sent_output_forward[sent_len[i]-1, i])
hidden = self.W(hidden_old_forward)
emb_s_sum = sememe_h[0:sent_len[i], i, :]
emb_s_sum = self.W_s(emb_s_sum)
hidden = torch.cat([hidden, emb_s_sum], dim = 1)
att = torch.tanh(torch.mm(hidden, self.query.weight))
new_output_forward.append(torch.mm(att.transpose(1,0), hidden_old_forward))
new_output_forward = self.W_p(torch.squeeze(torch.stack(new_output_forward, dim = 0))) + self.W_x(torch.squeeze(torch.stack(new_output_2_forward, dim = 0)))
new_output_forward = torch.tanh(new_output_forward)
for i in range(len(sent_len)):
hidden_old_backward = sent_output_backward[0:sent_len[i], i, :]
hidden = self.W_b(hidden_old_backward)
emb_s_sum = sememe_h[0:sent_len[i], i, :]
emb_s_sum = self.W_s_b(emb_s_sum)
hidden = torch.cat([hidden, emb_s_sum], dim = 1)
att = torch.tanh(torch.mm(hidden, self.query_b.weight))
new_output_backward.append(torch.mm(att.transpose(1,0), hidden_old_backward))
new_output_backward = self.W_p_b(torch.squeeze(torch.stack(new_output_backward, dim = 0))) + self.W_x_b(sent_output_backward[0])
new_output_backward = torch.tanh(new_output_backward)
final_output = torch.cat([new_output_forward, new_output_backward], dim = 1)
return final_output
def sememe_sum(self, input_s):
emb_sememe = self.emb_sememe.weight
input_sememe = []
for i in range(input_s.size()[0]):
input_sememe.append(torch.mm(input_s[i].float(), emb_sememe))
input_sememe = torch.stack(input_sememe, dim = 0)
sememe_c, sememe_h = self.sememesumlstm(input_sememe)
return sememe_c, sememe_h
class GRU_baseline(nn.Module):
    """Plain GRU sentence encoder (no-sememe baseline).

    The sememe modules are constructed but the recurrence itself ignores
    `sememe_data`. Also bundles InferSent-style vocabulary / batching /
    encoding utilities used at evaluation time.
    """
    def __init__(self, config, sememe):
        super(GRU_baseline, self).__init__()
        self.enc_lstm_dim = config['enc_lstm_dim']
        self.sememe_dim = config['sememe_dim']
        self.sememesumlstm = SememeSumLstm(self.sememe_dim, self.enc_lstm_dim)
        self.sememesumGRU = SememeSumGRU(self.sememe_dim, self.enc_lstm_dim)
        self.in_dim = config['word_emb_dim']
        self.mem_dim = config['enc_lstm_dim']
        # Output width is doubled so one matmul yields both GRU gates (z, r);
        # torch.split separates them in node_forward. (Original comment said
        # "3 matrices", but this GRU variant packs only 2.)
        self.ioux = nn.Linear(self.in_dim, 2 * self.mem_dim)
        self.iouh = nn.Linear(self.mem_dim, 2 * self.mem_dim)
        self.ious = nn.Linear(self.mem_dim, 2 * self.mem_dim)
        self.fx = nn.Linear(self.in_dim, self.mem_dim)
        self.Uh = nn.Linear(self.mem_dim, self.mem_dim)
        self.Uh_s = nn.Linear(self.mem_dim, self.mem_dim)
        self.reset_parameters()
        self.sememe = sememe
        self.bos = '<s>'
        self.eos = '</s>'
        self.max_pad = True
        self.moses_tok = False
    def reset_parameters(self):
        """Kaiming-normal weights and zero biases for every linear layer."""
        layers = [self.ioux, self.iouh, self.ious, self.fx, self.Uh, self.Uh_s]
        for layer in layers:
            init.kaiming_normal_(layer.weight)
            if layer.bias is not None:
                init.constant_(layer.bias, val=0)
    def node_forward(self, inputs, hx):
        """One GRU step; returns the next hidden state."""
        child_h = hx
        iou = self.ioux(inputs) + self.iouh(child_h)
        z, r = torch.split(iou, iou.size(1) // 2, dim=1)
        z, r = torch.sigmoid(z), torch.sigmoid(r)
        h_telta = self.fx(inputs) + self.Uh(torch.mul(r, child_h))
        h_telta = torch.tanh(h_telta)
        h = torch.mul((1-z), child_h) + torch.mul(z, h_telta)
        return h
    def forward(self, inputs, length, sememe_data):
        """Run the GRU over (max_time, batch, in_dim) inputs and return each
        sequence's hidden state at its true last step.

        `sememe_data` is accepted for interface parity but unused (baseline).
        """
        # hx: (child_c, child_h)
        max_time, batch_size, _ = inputs.size()
        output = []
        hx = inputs[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_()
        for time in range(max_time):
            next_hx = self.node_forward(inputs[time], hx)
            output.append(next_hx)
            hx = next_hx
        return torch.stack([output[length[i]-1][i] for i in range(len(length))], 0)
    def set_w2v_path(self, w2v_path):
        """Record the path of the word2vec/GloVe text file."""
        self.w2v_path = w2v_path
    def get_word_dict(self, sentences, tokenize=True):
        """Collect the vocabulary of `sentences` (plus BOS/EOS) as dict keys."""
        # create vocab of words
        word_dict = {}
        sentences = [s.split() if not tokenize else self.tokenize(s) for s in sentences]
        for sent in sentences:
            for word in sent:
                if word not in word_dict:
                    word_dict[word] = ''
        word_dict[self.bos] = ''
        word_dict[self.eos] = ''
        return word_dict
    def get_w2v(self, word_dict):
        """Load vectors from the w2v file for words present in word_dict."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        # create word_vec with w2v vectors
        word_vec = {}
        with open(self.w2v_path, encoding='utf-8') as f:
            for line in f:
                word, vec = line.split(' ', 1)
                if word in word_dict:
                    word_vec[word] = np.fromstring(vec, sep=' ')
        print('Found %s(/%s) words with w2v vectors' % (len(word_vec), len(word_dict)))
        return word_vec
    def get_w2v_k(self, K):
        """Load vectors for the first K words in the w2v file, then keep
        scanning until BOS and EOS vectors have also been found."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        # create word_vec with k first w2v vectors
        k = 0
        word_vec = {}
        with open(self.w2v_path, encoding='utf-8') as f:
            for line in f:
                word, vec = line.split(' ', 1)
                if k <= K:
                    word_vec[word] = np.fromstring(vec, sep=' ')
                    k += 1
                if k > K:
                    if word in [self.bos, self.eos]:
                        word_vec[word] = np.fromstring(vec, sep=' ')
                if k > K and all([w in word_vec for w in [self.bos, self.eos]]):
                    break
        return word_vec
    def build_vocab(self, sentences, tokenize=True):
        """Build self.word_vec restricted to the vocabulary of `sentences`."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        word_dict = self.get_word_dict(sentences, tokenize)
        self.word_vec = self.get_w2v(word_dict)
        print('Vocab size : %s' % (len(self.word_vec)))
    # build w2v vocab with k most frequent words
    def build_vocab_k_words(self, K):
        """Build self.word_vec from the first K entries of the w2v file."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        self.word_vec = self.get_w2v_k(K)
        print('Vocab size : %s' % (K))
    def update_vocab(self, sentences, tokenize=True):
        """Extend self.word_vec with vectors for words not yet covered."""
        assert hasattr(self, 'w2v_path'), 'warning : w2v path not set'
        assert hasattr(self, 'word_vec'), 'build_vocab before updating it'
        word_dict = self.get_word_dict(sentences, tokenize)
        # keep only new words
        for word in self.word_vec:
            if word in word_dict:
                del word_dict[word]
        # udpate vocabulary
        if word_dict:
            new_word_vec = self.get_w2v(word_dict)
            self.word_vec.update(new_word_vec)
        else:
            new_word_vec = []
        print('New vocab size : %s (added %s words)'% (len(self.word_vec), len(new_word_vec)))
    '''
    def get_batch(self, batch):
        # sent in batch in decreasing order of lengths
        # batch: (bsize, max_len, word_dim)
        embed = np.zeros((len(batch[0]), len(batch), self.word_emb_dim))
        for i in range(len(batch)):
            for j in range(len(batch[i])):
                embed[j, i, :] = self.word_vec[batch[i][j]]
        return torch.FloatTensor(embed)
    '''
    def is_cuda(self):
        # either all weights are on cpu or they are on gpu
        # NOTE(review): hard-coded True — assumes the model always runs on GPU.
        return True
        #return self.enc_lstm.bias_hh_l0.data.is_cuda
    def tokenize(self, s):
        """Tokenize with NLTK; optionally re-split to mimic Moses tokenization."""
        from nltk.tokenize import word_tokenize
        if self.moses_tok:
            s = ' '.join(word_tokenize(s))
            s = s.replace(" n't ", "n 't ")  # HACK to get ~MOSES tokenization
            return s.split()
        else:
            return word_tokenize(s)
    def prepare_samples(self, sentences, bsize, tokenize, verbose):
        """Tokenize, add BOS/EOS, drop OOV words, and sort by decreasing length.

        Returns (sentences, lengths, idx_sort) where idx_sort undoes the sort.
        """
        sentences = [[self.bos] + s.split() + [self.eos] if not tokenize else
                     [self.bos] + self.tokenize(s) + [self.eos] for s in sentences]
        n_w = np.sum([len(x) for x in sentences])
        # filters words without w2v vectors
        for i in range(len(sentences)):
            s_f = [word for word in sentences[i] if word in self.word_vec]
            if not s_f:
                import warnings
                warnings.warn('No words in "%s" (idx=%s) have w2v vectors. \
                               Replacing by "</s>"..' % (sentences[i], i))
                s_f = [self.eos]
            sentences[i] = s_f
        lengths = np.array([len(s) for s in sentences])
        n_wk = np.sum(lengths)
        if verbose:
            print('Nb words kept : %s/%s (%.1f%s)' % (
                        n_wk, n_w, 100.0 * n_wk / n_w, '%'))
        # sort by decreasing length
        lengths, idx_sort = np.sort(lengths)[::-1], np.argsort(-lengths)
        sentences = np.array(sentences)[idx_sort]
        return sentences, lengths, idx_sort
    def get_batch(self, batch, sememe, emb_dim=300):
        """Build (seq, batch, 300) embeddings and a (seq, batch, 2186)
        multi-hot sememe tensor for a batch of tokenized sentences.

        NOTE(review): embedding dim is hard-coded to 300 and sememe count to
        2186 despite the `emb_dim` parameter — confirm against training config.
        """
        embed = np.zeros((len(batch[0]), len(batch), 300))
        sememe_data = np.zeros((len(batch[0]), len(batch), 2186), dtype = np.uint8)
        for i in range(len(batch)):
            for j in range(len(batch[i])):
                embed[j, i, :] = self.word_vec[batch[i][j]]
                for k in sememe.read_word_sememe(batch[i][j]):
                    sememe_data[j, i, k] = 1
        return torch.from_numpy(embed).float(), torch.from_numpy(sememe_data).cuda()
    def encode(self, sentences, bsize=64, tokenize=True, verbose=False):
        """Encode raw sentences into embeddings, batch by batch, and restore
        the original sentence order before returning a numpy array."""
        tic = time.time()
        sentences, lengths, idx_sort = self.prepare_samples(
                        sentences, bsize, tokenize, verbose)
        embeddings = []
        for stidx in range(0, len(sentences), bsize):
            batch, batch_s = self.get_batch(sentences[stidx:stidx + bsize], self.sememe)
            if self.is_cuda():
                batch = batch.cuda()
            with torch.no_grad():
                batch = self.forward(batch, lengths[stidx:stidx + bsize], batch_s).data.cpu().numpy()
            embeddings.append(batch)
        embeddings = np.vstack(embeddings)
        # unsort
        idx_unsort = np.argsort(idx_sort)
        embeddings = embeddings[idx_unsort]
        if verbose:
            print('Speed : %.1f sentences/s (%s mode, bsize=%s)' % (
                    len(embeddings)/(time.time()-tic),
                    'gpu' if self.is_cuda() else 'cpu', bsize))
        return embeddings
class GRU_concat(nn.Module):
    """GRU encoder whose per-step input is the word embedding concatenated
    with the summed sememe embedding for that token.

    Also bundles InferSent-style vocabulary / batching / encoding utilities.
    """
    def __init__(self, config, sememe):
        super(GRU_concat, self).__init__()
        self.enc_lstm_dim = config['enc_lstm_dim']
        self.sememe_dim = config['sememe_dim']
        self.sememesumlstm = SememeSumLstm(self.sememe_dim, self.enc_lstm_dim)
        self.sememesumGRU = SememeSumGRU(self.sememe_dim, self.enc_lstm_dim)
        self.sememe_dim = config['sememe_dim']
        self.sememe_size = config['sememe_size']
        self.emb_sememe = nn.Embedding(self.sememe_size, self.sememe_dim)
        self.in_dim = config['word_emb_dim']
        self.mem_dim = config['enc_lstm_dim']
        # Output width is doubled so one matmul yields both GRU gates (z, r);
        # torch.split separates them in node_forward. Input width is
        # 2 * in_dim because the word and sememe embeddings are concatenated
        # (assumes sememe_dim == word_emb_dim — TODO confirm in config).
        self.ioux = nn.Linear(2 * self.in_dim, 2 * self.mem_dim)
        self.iouh = nn.Linear(self.mem_dim, 2 * self.mem_dim)
        self.ious = nn.Linear(self.mem_dim, 2 * self.mem_dim)
        self.fx = nn.Linear(2 * self.in_dim, self.mem_dim)
        self.Uh = nn.Linear(self.mem_dim, self.mem_dim)
        self.Uh_s = nn.Linear(self.mem_dim, self.mem_dim)
        self.reset_parameters()
        self.sememe = sememe
        self.bos = '<s>'
        self.eos = '</s>'
        self.max_pad = True
        self.moses_tok = False
    def reset_parameters(self):
        """Kaiming-normal weights and zero biases for every linear layer."""
        layers = [self.ioux, self.iouh, self.ious, self.fx, self.Uh, self.Uh_s]
        for layer in layers:
            init.kaiming_normal_(layer.weight)
            if layer.bias is not None:
                init.constant_(layer.bias, val=0)
    def node_forward(self, inputs, hx):
        """One GRU step on the concatenated (word || sememe) input vector."""
        child_h = hx
        iou = self.ioux(inputs) + self.iouh(child_h)
        z, r = torch.split(iou, iou.size(1) // 2, dim=1)
        z, r = torch.sigmoid(z), torch.sigmoid(r)
        h_telta = self.fx(inputs) + self.Uh(torch.mul(r, child_h))
        h_telta = torch.tanh(h_telta)
        h = torch.mul((1-z), child_h) + torch.mul(z, h_telta)
        return h
    def forward(self, word_emb, length, sememe_data):
        """Concatenate word embeddings with summed sememe embeddings, run the
        GRU, and return each sequence's hidden state at its last real step."""
        # hx: (child_c, child_h)
        sememe_h = self.sememe_sum(sememe_data)
        inputs = torch.cat([word_emb, sememe_h], dim = 2)
        max_time, batch_size, _ = inputs.size()
        output = []
        hx = inputs[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_()
        for time in range(max_time):
            next_hx = self.node_forward(inputs[time], hx)
            output.append(next_hx)
            hx = next_hx
        return torch.stack([output[length[i]-1][i] for i in range(len(length))], 0)
    def sememe_sum(self, input_s):
        """Project per-token multi-hot sememe indicators into embedding space
        (multi-hot × embedding matrix = sum of sememe embeddings)."""
        emb_sememe = self.emb_sememe.weight
        input_sememe = []
        for i in range(input_s.size()[0]):
            input_sememe.append(torch.mm(input_s[i].float(), emb_sememe))
        input_sememe = torch.stack(input_sememe, dim = 0)
        return input_sememe
    def set_w2v_path(self, w2v_path):
        """Record the path of the word2vec/GloVe text file."""
        self.w2v_path = w2v_path
    def get_word_dict(self, sentences, tokenize=True):
        """Collect the vocabulary of `sentences` (plus BOS/EOS) as dict keys."""
        # create vocab of words
        word_dict = {}
        sentences = [s.split() if not tokenize else self.tokenize(s) for s in sentences]
        for sent in sentences:
            for word in sent:
                if word not in word_dict:
                    word_dict[word] = ''
        word_dict[self.bos] = ''
        word_dict[self.eos] = ''
        return word_dict
    def get_w2v(self, word_dict):
        """Load vectors from the w2v file for words present in word_dict."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        # create word_vec with w2v vectors
        word_vec = {}
        with open(self.w2v_path, encoding='utf-8') as f:
            for line in f:
                word, vec = line.split(' ', 1)
                if word in word_dict:
                    word_vec[word] = np.fromstring(vec, sep=' ')
        print('Found %s(/%s) words with w2v vectors' % (len(word_vec), len(word_dict)))
        return word_vec
    def get_w2v_k(self, K):
        """Load vectors for the first K words in the w2v file, then keep
        scanning until BOS and EOS vectors have also been found."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        # create word_vec with k first w2v vectors
        k = 0
        word_vec = {}
        with open(self.w2v_path, encoding='utf-8') as f:
            for line in f:
                word, vec = line.split(' ', 1)
                if k <= K:
                    word_vec[word] = np.fromstring(vec, sep=' ')
                    k += 1
                if k > K:
                    if word in [self.bos, self.eos]:
                        word_vec[word] = np.fromstring(vec, sep=' ')
                if k > K and all([w in word_vec for w in [self.bos, self.eos]]):
                    break
        return word_vec
    def build_vocab(self, sentences, tokenize=True):
        """Build self.word_vec restricted to the vocabulary of `sentences`."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        word_dict = self.get_word_dict(sentences, tokenize)
        self.word_vec = self.get_w2v(word_dict)
        print('Vocab size : %s' % (len(self.word_vec)))
    # build w2v vocab with k most frequent words
    def build_vocab_k_words(self, K):
        """Build self.word_vec from the first K entries of the w2v file."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        self.word_vec = self.get_w2v_k(K)
        print('Vocab size : %s' % (K))
    def update_vocab(self, sentences, tokenize=True):
        """Extend self.word_vec with vectors for words not yet covered."""
        assert hasattr(self, 'w2v_path'), 'warning : w2v path not set'
        assert hasattr(self, 'word_vec'), 'build_vocab before updating it'
        word_dict = self.get_word_dict(sentences, tokenize)
        # keep only new words
        for word in self.word_vec:
            if word in word_dict:
                del word_dict[word]
        # udpate vocabulary
        if word_dict:
            new_word_vec = self.get_w2v(word_dict)
            self.word_vec.update(new_word_vec)
        else:
            new_word_vec = []
        print('New vocab size : %s (added %s words)'% (len(self.word_vec), len(new_word_vec)))
    '''
    def get_batch(self, batch):
        # sent in batch in decreasing order of lengths
        # batch: (bsize, max_len, word_dim)
        embed = np.zeros((len(batch[0]), len(batch), self.word_emb_dim))
        for i in range(len(batch)):
            for j in range(len(batch[i])):
                embed[j, i, :] = self.word_vec[batch[i][j]]
        return torch.FloatTensor(embed)
    '''
    def is_cuda(self):
        # either all weights are on cpu or they are on gpu
        # NOTE(review): hard-coded True — assumes the model always runs on GPU.
        return True
        #return self.enc_lstm.bias_hh_l0.data.is_cuda
    def tokenize(self, s):
        """Tokenize with NLTK; optionally re-split to mimic Moses tokenization."""
        from nltk.tokenize import word_tokenize
        if self.moses_tok:
            s = ' '.join(word_tokenize(s))
            s = s.replace(" n't ", "n 't ")  # HACK to get ~MOSES tokenization
            return s.split()
        else:
            return word_tokenize(s)
    def prepare_samples(self, sentences, bsize, tokenize, verbose):
        """Tokenize, add BOS/EOS, drop OOV words, and sort by decreasing length.

        Returns (sentences, lengths, idx_sort) where idx_sort undoes the sort.
        """
        sentences = [[self.bos] + s.split() + [self.eos] if not tokenize else
                     [self.bos] + self.tokenize(s) + [self.eos] for s in sentences]
        n_w = np.sum([len(x) for x in sentences])
        # filters words without w2v vectors
        for i in range(len(sentences)):
            s_f = [word for word in sentences[i] if word in self.word_vec]
            if not s_f:
                import warnings
                warnings.warn('No words in "%s" (idx=%s) have w2v vectors. \
                               Replacing by "</s>"..' % (sentences[i], i))
                s_f = [self.eos]
            sentences[i] = s_f
        lengths = np.array([len(s) for s in sentences])
        n_wk = np.sum(lengths)
        if verbose:
            print('Nb words kept : %s/%s (%.1f%s)' % (
                        n_wk, n_w, 100.0 * n_wk / n_w, '%'))
        # sort by decreasing length
        lengths, idx_sort = np.sort(lengths)[::-1], np.argsort(-lengths)
        sentences = np.array(sentences)[idx_sort]
        return sentences, lengths, idx_sort
    def get_batch(self, batch, sememe, emb_dim=300):
        """Build (seq, batch, 300) embeddings and a (seq, batch, 2186)
        multi-hot sememe tensor for a batch of tokenized sentences.

        NOTE(review): embedding dim is hard-coded to 300 and sememe count to
        2186 despite the `emb_dim` parameter — confirm against training config.
        """
        embed = np.zeros((len(batch[0]), len(batch), 300))
        sememe_data = np.zeros((len(batch[0]), len(batch), 2186), dtype = np.uint8)
        for i in range(len(batch)):
            for j in range(len(batch[i])):
                embed[j, i, :] = self.word_vec[batch[i][j]]
                for k in sememe.read_word_sememe(batch[i][j]):
                    sememe_data[j, i, k] = 1
        return torch.from_numpy(embed).float(), torch.from_numpy(sememe_data).cuda()
    def encode(self, sentences, bsize=64, tokenize=True, verbose=False):
        """Encode raw sentences into embeddings, batch by batch, and restore
        the original sentence order before returning a numpy array."""
        tic = time.time()
        sentences, lengths, idx_sort = self.prepare_samples(
                        sentences, bsize, tokenize, verbose)
        embeddings = []
        for stidx in range(0, len(sentences), bsize):
            batch, batch_s = self.get_batch(sentences[stidx:stidx + bsize], self.sememe)
            if self.is_cuda():
                batch = batch.cuda()
            with torch.no_grad():
                batch = self.forward(batch, lengths[stidx:stidx + bsize], batch_s).data.cpu().numpy()
            embeddings.append(batch)
        embeddings = np.vstack(embeddings)
        # unsort
        idx_unsort = np.argsort(idx_sort)
        embeddings = embeddings[idx_unsort]
        if verbose:
            print('Speed : %.1f sentences/s (%s mode, bsize=%s)' % (
                    len(embeddings)/(time.time()-tic),
                    'gpu' if self.is_cuda() else 'cpu', bsize))
        return embeddings
class GRU_gate(nn.Module):
    """GRU encoder with an extra sememe output gate.

    On top of the standard GRU update, a gate o_c (driven by input, hidden
    state and sememe summary) injects tanh(W_c · sememe) into the hidden
    state. Also bundles InferSent-style vocabulary / encoding utilities.
    """
    def __init__(self, config, sememe):
        super(GRU_gate, self).__init__()
        self.enc_lstm_dim = config['enc_lstm_dim']
        self.sememe_dim = config['sememe_dim']
        self.sememesumlstm = SememeSumLstm(self.sememe_dim, self.enc_lstm_dim)
        self.sememesumGRU = SememeSumGRU(self.sememe_dim, self.enc_lstm_dim)
        self.sememe_dim = config['sememe_dim']
        self.sememe_size = config['sememe_size']
        self.emb_sememe = nn.Embedding(self.sememe_size, self.sememe_dim)
        self.in_dim = config['word_emb_dim']
        self.mem_dim = config['enc_lstm_dim']
        # Output width is doubled so one matmul yields both GRU gates (z, r);
        # torch.split separates them in node_forward.
        # NOTE(review): ious/fs/W_c take in_dim-wide input but are applied to
        # the sememe summary (sememe_dim wide) — works only if the two dims
        # match in config; confirm.
        self.ioux = nn.Linear(self.in_dim, 2 * self.mem_dim)
        self.iouh = nn.Linear(self.mem_dim, 2 * self.mem_dim)
        self.ious = nn.Linear(self.in_dim, 2 * self.mem_dim)
        self.fx = nn.Linear(self.in_dim, self.mem_dim)
        self.fx_s = nn.Linear(self.in_dim, self.mem_dim)
        self.fh_s = nn.Linear(self.mem_dim, self.mem_dim)
        self.fs = nn.Linear(self.in_dim, self.mem_dim)
        self.W_c = nn.Linear(self.in_dim, self.mem_dim)
        self.Uh = nn.Linear(self.mem_dim, self.mem_dim)
        self.reset_parameters()
        self.sememe = sememe
        self.bos = '<s>'
        self.eos = '</s>'
        self.max_pad = True
        self.moses_tok = False
    def reset_parameters(self):
        """Kaiming-normal weights and zero biases for every linear layer."""
        layers = [self.ioux, self.iouh, self.ious, self.fx, self.fx_s, self.fh_s, self.fs, self.Uh, self.W_c]
        for layer in layers:
            init.kaiming_normal_(layer.weight)
            if layer.bias is not None:
                init.constant_(layer.bias, val=0)
    def node_forward(self, inputs, sememe_h, hx):
        """One gated-GRU step: standard GRU update plus a sememe output gate
        o_c that adds tanh(W_c(sememe_h)) into the new hidden state."""
        child_h = hx
        iou = self.ioux(inputs) + self.iouh(child_h) + self.ious(sememe_h)
        z, r = torch.split(iou, iou.size(1) // 2, dim=1)
        z, r = torch.sigmoid(z), torch.sigmoid(r)
        # sememe output gate
        o_c = self.fx_s(inputs) + self.fh_s(child_h) + self.fs(sememe_h)
        o_c = torch.sigmoid(o_c)
        h_telta = self.fx(inputs) + self.Uh(torch.mul(r, child_h))
        h_telta = torch.tanh(h_telta)
        h = torch.mul((1-z), child_h) + torch.mul(z, h_telta) + torch.mul(o_c, torch.tanh(self.W_c(sememe_h)))
        return h
    def forward(self, inputs, length, sememe_data):
        """Run the gated GRU over the sequence and return each sequence's
        hidden state at its true last step."""
        # hx: (child_c, child_h)
        sememe_h = self.sememe_sum(sememe_data)
        max_time, batch_size, _ = inputs.size()
        output = []
        hx = inputs[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_()
        for time in range(max_time):
            next_hx = self.node_forward(inputs[time], sememe_h[time], hx)
            output.append(next_hx)
            hx = next_hx
        return torch.stack([output[length[i]-1][i] for i in range(len(length))], 0)
    def sememe_sum(self, input_s):
        """Project per-token multi-hot sememe indicators into embedding space
        (multi-hot × embedding matrix = sum of sememe embeddings)."""
        emb_sememe = self.emb_sememe.weight
        input_sememe = []
        for i in range(input_s.size()[0]):
            input_sememe.append(torch.mm(input_s[i].float(), emb_sememe))
        input_sememe = torch.stack(input_sememe, dim = 0)
        return input_sememe
    def set_w2v_path(self, w2v_path):
        """Record the path of the word2vec/GloVe text file."""
        self.w2v_path = w2v_path
    def get_word_dict(self, sentences, tokenize=True):
        """Collect the vocabulary of `sentences` (plus BOS/EOS) as dict keys."""
        # create vocab of words
        word_dict = {}
        sentences = [s.split() if not tokenize else self.tokenize(s) for s in sentences]
        for sent in sentences:
            for word in sent:
                if word not in word_dict:
                    word_dict[word] = ''
        word_dict[self.bos] = ''
        word_dict[self.eos] = ''
        return word_dict
    def get_w2v(self, word_dict):
        """Load vectors from the w2v file for words present in word_dict."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        # create word_vec with w2v vectors
        word_vec = {}
        with open(self.w2v_path, encoding='utf-8') as f:
            for line in f:
                word, vec = line.split(' ', 1)
                if word in word_dict:
                    word_vec[word] = np.fromstring(vec, sep=' ')
        print('Found %s(/%s) words with w2v vectors' % (len(word_vec), len(word_dict)))
        return word_vec
    def get_w2v_k(self, K):
        """Load vectors for the first K words in the w2v file, then keep
        scanning until BOS and EOS vectors have also been found."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        # create word_vec with k first w2v vectors
        k = 0
        word_vec = {}
        with open(self.w2v_path, encoding='utf-8') as f:
            for line in f:
                word, vec = line.split(' ', 1)
                if k <= K:
                    word_vec[word] = np.fromstring(vec, sep=' ')
                    k += 1
                if k > K:
                    if word in [self.bos, self.eos]:
                        word_vec[word] = np.fromstring(vec, sep=' ')
                if k > K and all([w in word_vec for w in [self.bos, self.eos]]):
                    break
        return word_vec
    def build_vocab(self, sentences, tokenize=True):
        """Build self.word_vec restricted to the vocabulary of `sentences`."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        word_dict = self.get_word_dict(sentences, tokenize)
        self.word_vec = self.get_w2v(word_dict)
        print('Vocab size : %s' % (len(self.word_vec)))
    # build w2v vocab with k most frequent words
    def build_vocab_k_words(self, K):
        """Build self.word_vec from the first K entries of the w2v file."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        self.word_vec = self.get_w2v_k(K)
        print('Vocab size : %s' % (K))
    def update_vocab(self, sentences, tokenize=True):
        """Extend self.word_vec with vectors for words not yet covered."""
        assert hasattr(self, 'w2v_path'), 'warning : w2v path not set'
        assert hasattr(self, 'word_vec'), 'build_vocab before updating it'
        word_dict = self.get_word_dict(sentences, tokenize)
        # keep only new words
        for word in self.word_vec:
            if word in word_dict:
                del word_dict[word]
        # udpate vocabulary
        if word_dict:
            new_word_vec = self.get_w2v(word_dict)
            self.word_vec.update(new_word_vec)
        else:
            new_word_vec = []
        print('New vocab size : %s (added %s words)'% (len(self.word_vec), len(new_word_vec)))
    '''
    def get_batch(self, batch):
        # sent in batch in decreasing order of lengths
        # batch: (bsize, max_len, word_dim)
        embed = np.zeros((len(batch[0]), len(batch), self.word_emb_dim))
        for i in range(len(batch)):
            for j in range(len(batch[i])):
                embed[j, i, :] = self.word_vec[batch[i][j]]
        return torch.FloatTensor(embed)
    '''
    def is_cuda(self):
        # either all weights are on cpu or they are on gpu
        # NOTE(review): hard-coded True — assumes the model always runs on GPU.
        return True
        #return self.enc_lstm.bias_hh_l0.data.is_cuda
    def tokenize(self, s):
        """Tokenize with NLTK; optionally re-split to mimic Moses tokenization."""
        from nltk.tokenize import word_tokenize
        if self.moses_tok:
            s = ' '.join(word_tokenize(s))
            s = s.replace(" n't ", "n 't ")  # HACK to get ~MOSES tokenization
            return s.split()
        else:
            return word_tokenize(s)
    def prepare_samples(self, sentences, bsize, tokenize, verbose):
        """Tokenize, add BOS/EOS, drop OOV words, and sort by decreasing length.

        Returns (sentences, lengths, idx_sort) where idx_sort undoes the sort.
        """
        sentences = [[self.bos] + s.split() + [self.eos] if not tokenize else
                     [self.bos] + self.tokenize(s) + [self.eos] for s in sentences]
        n_w = np.sum([len(x) for x in sentences])
        # filters words without w2v vectors
        for i in range(len(sentences)):
            s_f = [word for word in sentences[i] if word in self.word_vec]
            if not s_f:
                import warnings
                warnings.warn('No words in "%s" (idx=%s) have w2v vectors. \
                               Replacing by "</s>"..' % (sentences[i], i))
                s_f = [self.eos]
            sentences[i] = s_f
        lengths = np.array([len(s) for s in sentences])
        n_wk = np.sum(lengths)
        if verbose:
            print('Nb words kept : %s/%s (%.1f%s)' % (
                        n_wk, n_w, 100.0 * n_wk / n_w, '%'))
        # sort by decreasing length
        lengths, idx_sort = np.sort(lengths)[::-1], np.argsort(-lengths)
        sentences = np.array(sentences)[idx_sort]
        return sentences, lengths, idx_sort
    def get_batch(self, batch, sememe, emb_dim=300):
        """Build (seq, batch, 300) embeddings and a (seq, batch, 2186)
        multi-hot sememe tensor for a batch of tokenized sentences.

        NOTE(review): embedding dim is hard-coded to 300 and sememe count to
        2186 despite the `emb_dim` parameter — confirm against training config.
        """
        embed = np.zeros((len(batch[0]), len(batch), 300))
        sememe_data = np.zeros((len(batch[0]), len(batch), 2186), dtype = np.uint8)
        for i in range(len(batch)):
            for j in range(len(batch[i])):
                embed[j, i, :] = self.word_vec[batch[i][j]]
                for k in sememe.read_word_sememe(batch[i][j]):
                    sememe_data[j, i, k] = 1
        return torch.from_numpy(embed).float(), torch.from_numpy(sememe_data).cuda()
    def encode(self, sentences, bsize=64, tokenize=True, verbose=False):
        """Encode raw sentences into embeddings, batch by batch, and restore
        the original sentence order before returning a numpy array."""
        tic = time.time()
        sentences, lengths, idx_sort = self.prepare_samples(
                        sentences, bsize, tokenize, verbose)
        embeddings = []
        for stidx in range(0, len(sentences), bsize):
            batch, batch_s = self.get_batch(sentences[stidx:stidx + bsize], self.sememe)
            if self.is_cuda():
                batch = batch.cuda()
            with torch.no_grad():
                batch = self.forward(batch, lengths[stidx:stidx + bsize], batch_s).data.cpu().numpy()
            embeddings.append(batch)
        embeddings = np.vstack(embeddings)
        # unsort
        idx_unsort = np.argsort(idx_sort)
        embeddings = embeddings[idx_unsort]
        if verbose:
            print('Speed : %.1f sentences/s (%s mode, bsize=%s)' % (
                    len(embeddings)/(time.time()-tic),
                    'gpu' if self.is_cuda() else 'cpu', bsize))
        return embeddings
class GRU_cell_bert_baseline(nn.Module):
def __init__(self, config):
super(GRU_cell_bert_baseline, self).__init__()
self.enc_lstm_dim = config['enc_lstm_dim']
self.sememesumlstm = SememeSumLstm(512, self.enc_lstm_dim)
self.sememesumGRU = SememeSumGRU(512, self.enc_lstm_dim)
self.in_dim = config['word_emb_dim']
self.mem_dim = config['enc_lstm_dim']
# 乘3代表3种矩阵,它后来用split分开了
self.ioux = nn.Linear(self.in_dim, 2 * self.mem_dim)
self.iouh = nn.Linear(self.mem_dim, 2 * self.mem_dim)
self.ious = nn.Linear(self.mem_dim, 2 * self.mem_dim)
self.fx = nn.Linear(self.in_dim, self.mem_dim)
self.Uh = nn.Linear(self.mem_dim, self.mem_dim)
self.reset_parameters()
self.bos = '<s>'
self.eos = '</s>'
self.max_pad = True
self.moses_tok = False
self.dic_lemma = self.read_lemmatization('../../NLI/dataset/lemmatization.txt')
self.sense_tensor_dict = np.load('../../PrepareSememeDict/sense_tensor_dict.npy', allow_pickle=True).item()
def reset_parameters(self):
layers = [self.ioux, self.iouh, self.ious, self.fx, self.Uh]
for layer in layers:
init.kaiming_normal_(layer.weight)
if layer.bias is not None:
init.constant_(layer.bias, val=0)
def node_forward(self, inputs, sememe_h, hx):
child_h = hx
iou = self.ioux(inputs) + self.iouh(child_h) + self.ious(sememe_h)
z, r = torch.split(iou, iou.size(1) // 2, dim=1)
z, r = torch.sigmoid(z), torch.sigmoid(r)
h_telta = self.fx(inputs) + self.Uh(torch.mul(r, child_h) + torch.mul(r, sememe_h))
h_telta = torch.tanh(h_telta)
h = torch.mul((1 - z), child_h) + torch.mul(z, h_telta)
return h
def forward(self, inputs, length, def_vec):
# hx: (child_c, child_h)
sememe_h = self.sememe_sum(def_vec)
max_time, batch_size, _ = inputs.size()
output = []
hx = inputs[0][0].detach().new(batch_size, sememe_h.size()[2]).fill_(0.).requires_grad_()
for time in range(max_time):
next_hx = self.node_forward(inputs[time], sememe_h[time], hx)
output.append(next_hx)
hx = next_hx
return torch.stack([output[length[i] - 1][i] for i in range(len(length))], 0)
def sememe_sum(self, input_s):
input_sememe = []
for i in range(input_s.size()[0]):
input_sememe.append(input_s[i].float())
input_sememe = torch.stack(input_sememe, dim=0)
sememe_h = self.sememesumGRU(input_sememe)
return sememe_h
def set_w2v_path(self, w2v_path):
self.w2v_path = w2v_path
def get_word_dict(self, sentences, tokenize=True):
# create vocab of words
word_dict = {}
sentences = [s.split() if not tokenize else self.tokenize(s) for s in sentences]
for sent in sentences:
for word in sent:
if word not in word_dict:
word_dict[word] = ''
word_dict[self.bos] = ''
word_dict[self.eos] = ''
return word_dict
def get_w2v(self, word_dict):
assert hasattr(self, 'w2v_path'), 'w2v path not set'
# create word_vec with w2v vectors
word_vec = {}
with open(self.w2v_path, encoding='utf-8') as f:
for line in f:
word, vec = line.split(' ', 1)
if word in word_dict:
word_vec[word] = np.fromstring(vec, sep=' ')
print('Found %s(/%s) words with w2v vectors' % (len(word_vec), len(word_dict)))
return word_vec
def get_w2v_k(self, K):
assert hasattr(self, 'w2v_path'), 'w2v path not set'
# create word_vec with k first w2v vectors
k = 0
word_vec = {}
with open(self.w2v_path, encoding='utf-8') as f:
for line in f:
word, vec = line.split(' ', 1)
if k <= K:
word_vec[word] = np.fromstring(vec, sep=' ')
k += 1
if k > K:
if word in [self.bos, self.eos]:
word_vec[word] = np.fromstring(vec, sep=' ')
if k > K and all([w in word_vec for w in [self.bos, self.eos]]):
break
return word_vec
def build_vocab(self, sentences, tokenize=True):
assert hasattr(self, 'w2v_path'), 'w2v path not set'
word_dict = self.get_word_dict(sentences, tokenize)
self.word_vec = self.get_w2v(word_dict)
print('Vocab size : %s' % (len(self.word_vec)))
# build w2v vocab with k most frequent words
def build_vocab_k_words(self, K):
assert hasattr(self, 'w2v_path'), 'w2v path not set'
self.word_vec = self.get_w2v_k(K)
print('Vocab size : %s' % (K))
def update_vocab(self, sentences, tokenize=True):
assert hasattr(self, 'w2v_path'), 'warning : w2v path not set'
assert hasattr(self, 'word_vec'), 'build_vocab before updating it'
word_dict = self.get_word_dict(sentences, tokenize)
# keep only new words
for word in self.word_vec:
if word in word_dict:
del word_dict[word]
# udpate vocabulary
if word_dict:
new_word_vec = self.get_w2v(word_dict)
self.word_vec.update(new_word_vec)
else:
new_word_vec = []
print('New vocab size : %s (added %s words)' % (len(self.word_vec), len(new_word_vec)))
'''
def get_batch(self, batch):
# sent in batch in decreasing order of lengths
# batch: (bsize, max_len, word_dim)
embed = np.zeros((len(batch[0]), len(batch), self.word_emb_dim))
for i in range(len(batch)):
for j in range(len(batch[i])):
embed[j, i, :] = self.word_vec[batch[i][j]]
return torch.FloatTensor(embed)
'''
def is_cuda(self):
# either all weights are on cpu or they are on gpu
return True
# return self.enc_lstm.bias_hh_l0.data.is_cuda
def tokenize(self, s):
from nltk.tokenize import word_tokenize
if self.moses_tok:
s = ' '.join(word_tokenize(s))
s = s.replace(" n't ", "n 't ") # HACK to get ~MOSES tokenization
return s.split()
else:
return word_tokenize(s)
def prepare_samples(self, sentences, bsize, tokenize, verbose):
sentences = [[self.bos] + s.split() + [self.eos] if not tokenize else
[self.bos] + self.tokenize(s) + [self.eos] for s in sentences]
n_w = np.sum([len(x) for x in sentences])
# filters words without w2v vectors
for i in range(len(sentences)):
s_f = [word for word in sentences[i] if word in self.word_vec]
if not s_f:
import warnings
warnings.warn('No words in "%s" (idx=%s) have w2v vectors. \
Replacing by "</s>"..' % (sentences[i], i))
s_f = [self.eos]
sentences[i] = s_f
lengths = np.array([len(s) for s in sentences])
n_wk = np.sum(lengths)
if verbose:
print('Nb words kept : %s/%s (%.1f%s)' % (
n_wk, n_w, 100.0 * n_wk / n_w, '%'))
# sort by decreasing length
lengths, idx_sort = np.sort(lengths)[::-1], np.argsort(-lengths)
sentences = np.array(sentences)[idx_sort]
return sentences, lengths, idx_sort
def read_lemmatization(self, lemma_dir):
    """Load a lemmatization table mapping inflected form -> lemma.

    Each line of the file is "<lemma> <inflected> ..."; only the first
    two columns are used. The file handle is now closed deterministically
    via ``with`` (the original left closing to the garbage collector).
    """
    dic_lemma = {}
    with open(lemma_dir) as f:
        for line in f:
            parts = line.strip().split()
            dic_lemma[parts[1]] = parts[0]
    return dic_lemma
def get_def_vec_by_word(self, word):
    """Return the 512-d definition vector for *word* (zeros if unknown).

    The word is lower-cased, mapped through the lemma table when an entry
    exists, and all of its sense tensors are averaged.
    """
    word_lower = word.lower()
    # Normalise inflected forms to their lemma when known
    # (dict.get replaces the original `in d.keys()` membership test).
    word_lower = self.dic_lemma.get(word_lower, word_lower)
    if word_lower not in self.sense_tensor_dict:
        return np.zeros(512)
    tensor_list = self.sense_tensor_dict[word_lower]
    base_tensor = np.zeros(512)
    for pos, tensor in tensor_list:
        base_tensor = np.add(base_tensor, tensor)
    # average over all senses of the word
    return base_tensor / float(len(tensor_list))
def get_batch(self, batch, emb_dim=300, ):
    """Build (word-embedding, definition-vector) tensors for a batch.

    Args:
        batch: sequence of token lists, longest sentence first
            (batch[0] fixes the time dimension; shorter rows are
            zero-padded because the inner loop only fills j < len(row)).
        emb_dim: unused; kept for backward compatibility with callers.

    Returns:
        embed: CPU FloatTensor of shape (max_len, bsize, 300).
        def_data: CUDA float64 tensor of shape (max_len, bsize, 512).
    """
    embed = np.zeros((len(batch[0]), len(batch), 300))
    # sememe_data = np.zeros((len(batch[0]), len(batch), size), dtype=np.uint8)
    # np.float was removed in NumPy 1.24; np.float64 is the dtype the
    # old alias resolved to, so behavior is unchanged.
    def_data = np.zeros((len(batch[0]), len(batch), 512), dtype=np.float64)
    for i in range(len(batch)):
        for j in range(len(batch[i])):
            embed[j, i, :] = self.word_vec[batch[i][j]]
            def_data[j, i] = self.get_def_vec_by_word(batch[i][j])
    return torch.from_numpy(embed).float(), torch.from_numpy(def_data).cuda()
def encode(self, sentences, bsize=64, tokenize=True, verbose=False,):
    """Encode raw sentences into fixed-size embedding vectors.

    Sentences are tokenized, sorted by length, encoded in mini-batches of
    ``bsize`` through ``self.forward`` (with definition vectors from
    ``get_batch``), then restored to the caller's original order.
    Returns a numpy array with one row per input sentence.
    """
    tic = time.time()
    sentences, lengths, idx_sort = self.prepare_samples(
        sentences, bsize, tokenize, verbose)
    embeddings = []
    for stidx in range(0, len(sentences), bsize):
        # batch: word embeddings (CPU); batch_s: definition vectors (CUDA)
        batch, batch_s = self.get_batch(sentences[stidx:stidx + bsize], 300,)
        if self.is_cuda():
            batch = batch.cuda()
        with torch.no_grad():
            batch = self.forward(batch, lengths[stidx:stidx + bsize], batch_s).data.cpu().numpy()
        embeddings.append(batch)
    embeddings = np.vstack(embeddings)
    # unsort back to the original sentence order
    idx_unsort = np.argsort(idx_sort)
    embeddings = embeddings[idx_unsort]
    if verbose:
        print('Speed : %.1f sentences/s (%s mode, bsize=%s)' % (
            len(embeddings) / (time.time() - tic),
            'gpu' if self.is_cuda() else 'cpu', bsize))
    return embeddings
class GRU_cell(nn.Module):
    """Unidirectional GRU whose gates are conditioned on sememe features.

    The sememe multi-hot vectors are embedded and run through a sememe
    GRU (``sememe_sum``); the result enters both the z/r gates and the
    candidate state. The class also bundles the InferSent-style
    vocabulary / word2vec / encoding helpers so it can act as a
    stand-alone sentence encoder.
    """
    def __init__(self, config, sememe):
        super(GRU_cell, self).__init__()
        self.enc_lstm_dim = config['enc_lstm_dim']
        self.sememe_dim = config['sememe_dim']
        self.sememesumlstm = SememeSumLstm(self.sememe_dim, self.enc_lstm_dim)
        self.sememesumGRU = SememeSumGRU(self.sememe_dim, self.enc_lstm_dim)
        self.sememe_dim = config['sememe_dim']
        self.sememe_size = config['sememe_size']
        self.emb_sememe = nn.Embedding(self.sememe_size, self.sememe_dim)
        self.in_dim = config['word_emb_dim']
        self.mem_dim = config['enc_lstm_dim']
        # (translated) "the x3 factor stands for the 3 matrices, later
        # separated with split" — NOTE(review): here the factor is 2
        # (the z and r gates), not 3.
        self.ioux = nn.Linear(self.in_dim, 2 * self.mem_dim)
        self.iouh = nn.Linear(self.mem_dim, 2 * self.mem_dim)
        self.ious = nn.Linear(self.mem_dim, 2 * self.mem_dim)
        self.fx = nn.Linear(self.in_dim, self.mem_dim)
        self.Uh = nn.Linear(self.mem_dim, self.mem_dim)
        self.reset_parameters()
        self.sememe = sememe
        self.bos = '<s>'
        self.eos = '</s>'
        self.max_pad = True
        self.moses_tok = False
    def reset_parameters(self):
        """Kaiming-initialise all gate weights; zero the biases."""
        layers = [self.ioux, self.iouh, self.ious, self.fx, self.Uh]
        for layer in layers:
            init.kaiming_normal_(layer.weight)
            if layer.bias is not None:
                init.constant_(layer.bias, val=0)
    def node_forward(self, inputs, sememe_h, hx):
        """One GRU step; gates see the word input, previous h and sememe h."""
        child_h = hx
        iou = self.ioux(inputs) + self.iouh(child_h) + self.ious(sememe_h)
        z, r = torch.split(iou, iou.size(1) // 2, dim=1)
        z, r = torch.sigmoid(z), torch.sigmoid(r)
        # candidate state: reset gate is applied to both the previous
        # hidden state and the sememe hidden state
        h_telta = self.fx(inputs) + self.Uh(torch.mul(r, child_h) + torch.mul(r, sememe_h))
        h_telta = torch.tanh(h_telta)
        h = torch.mul((1-z), child_h) + torch.mul(z, h_telta)
        return h
    def forward(self, inputs, length, sememe_data):
        """Run the GRU over (max_time, batch, dim) inputs.

        Returns, per sentence, the hidden state at its true last time
        step (lengths come from ``prepare_samples``).
        """
        # hx: (child_c, child_h)
        sememe_h = self.sememe_sum(sememe_data)
        max_time, batch_size, _ = inputs.size()
        output = []
        hx = inputs[0][0].detach().new(batch_size, sememe_h.size()[2]).fill_(0.).requires_grad_()
        # NOTE(review): the loop variable shadows the module-level `time`
        # import inside this method body.
        for time in range(max_time):
            next_hx = self.node_forward(inputs[time], sememe_h[time], hx)
            output.append(next_hx)
            hx = next_hx
        return torch.stack([output[length[i]-1][i] for i in range(len(length))], 0)
    def sememe_sum(self, input_s):
        """Project multi-hot sememe vectors and run them through the sememe GRU."""
        emb_sememe = self.emb_sememe.weight
        input_sememe = []
        for i in range(input_s.size()[0]):
            input_sememe.append(torch.mm(input_s[i].float(), emb_sememe))
        input_sememe = torch.stack(input_sememe, dim = 0)
        sememe_h = self.sememesumGRU(input_sememe)
        return sememe_h
    def set_w2v_path(self, w2v_path):
        """Remember where the word2vec/GloVe text file lives."""
        self.w2v_path = w2v_path
    def get_word_dict(self, sentences, tokenize=True):
        """Collect the vocabulary of *sentences* (plus <s> and </s>)."""
        # create vocab of words
        word_dict = {}
        sentences = [s.split() if not tokenize else self.tokenize(s) for s in sentences]
        for sent in sentences:
            for word in sent:
                if word not in word_dict:
                    word_dict[word] = ''
        word_dict[self.bos] = ''
        word_dict[self.eos] = ''
        return word_dict
    def get_w2v(self, word_dict):
        """Load vectors from the w2v file for the words in *word_dict*."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        # create word_vec with w2v vectors
        word_vec = {}
        with open(self.w2v_path, encoding='utf-8') as f:
            for line in f:
                word, vec = line.split(' ', 1)
                if word in word_dict:
                    word_vec[word] = np.fromstring(vec, sep=' ')
        print('Found %s(/%s) words with w2v vectors' % (len(word_vec), len(word_dict)))
        return word_vec
    def get_w2v_k(self, K):
        """Load the first K vectors of the w2v file, plus <s> and </s>.

        NOTE(review): `k <= K` with k starting at 0 admits K+1 vectors.
        """
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        # create word_vec with k first w2v vectors
        k = 0
        word_vec = {}
        with open(self.w2v_path, encoding='utf-8') as f:
            for line in f:
                word, vec = line.split(' ', 1)
                if k <= K:
                    word_vec[word] = np.fromstring(vec, sep=' ')
                    k += 1
                if k > K:
                    # keep scanning only for the special tokens
                    if word in [self.bos, self.eos]:
                        word_vec[word] = np.fromstring(vec, sep=' ')
                if k > K and all([w in word_vec for w in [self.bos, self.eos]]):
                    break
        return word_vec
    def build_vocab(self, sentences, tokenize=True):
        """Build self.word_vec from the vocabulary of *sentences*."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        word_dict = self.get_word_dict(sentences, tokenize)
        self.word_vec = self.get_w2v(word_dict)
        print('Vocab size : %s' % (len(self.word_vec)))
    # build w2v vocab with k most frequent words
    def build_vocab_k_words(self, K):
        """Build self.word_vec from the K first entries of the w2v file."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        self.word_vec = self.get_w2v_k(K)
        print('Vocab size : %s' % (K))
    def update_vocab(self, sentences, tokenize=True):
        """Add vectors for words of *sentences* not yet in self.word_vec."""
        assert hasattr(self, 'w2v_path'), 'warning : w2v path not set'
        assert hasattr(self, 'word_vec'), 'build_vocab before updating it'
        word_dict = self.get_word_dict(sentences, tokenize)
        # keep only new words
        for word in self.word_vec:
            if word in word_dict:
                del word_dict[word]
        # update vocabulary
        if word_dict:
            new_word_vec = self.get_w2v(word_dict)
            self.word_vec.update(new_word_vec)
        else:
            new_word_vec = []
        print('New vocab size : %s (added %s words)'% (len(self.word_vec), len(new_word_vec)))
    '''
    def get_batch(self, batch):
        # sent in batch in decreasing order of lengths
        # batch: (bsize, max_len, word_dim)
        embed = np.zeros((len(batch[0]), len(batch), self.word_emb_dim))
        for i in range(len(batch)):
            for j in range(len(batch[i])):
                embed[j, i, :] = self.word_vec[batch[i][j]]
        return torch.FloatTensor(embed)
    '''
    def is_cuda(self):
        """Report whether the encoder should run on GPU (hard-coded True)."""
        # either all weights are on cpu or they are on gpu
        return True
        #return self.enc_lstm.bias_hh_l0.data.is_cuda
    def tokenize(self, s):
        """Tokenize *s* with NLTK, optionally approximating MOSES output."""
        from nltk.tokenize import word_tokenize
        if self.moses_tok:
            s = ' '.join(word_tokenize(s))
            s = s.replace(" n't ", "n 't ")  # HACK to get ~MOSES tokenization
            return s.split()
        else:
            return word_tokenize(s)
    def prepare_samples(self, sentences, bsize, tokenize, verbose):
        """Tokenize, drop OOV words, and sort sentences longest-first.

        Returns (sentences, lengths, idx_sort); idx_sort lets callers
        restore the original order after encoding.
        """
        sentences = [[self.bos] + s.split() + [self.eos] if not tokenize else
                     [self.bos] + self.tokenize(s) + [self.eos] for s in sentences]
        n_w = np.sum([len(x) for x in sentences])
        # filters words without w2v vectors
        for i in range(len(sentences)):
            s_f = [word for word in sentences[i] if word in self.word_vec]
            if not s_f:
                import warnings
                warnings.warn('No words in "%s" (idx=%s) have w2v vectors. \
                               Replacing by "</s>"..' % (sentences[i], i))
                s_f = [self.eos]
            sentences[i] = s_f
        lengths = np.array([len(s) for s in sentences])
        n_wk = np.sum(lengths)
        if verbose:
            print('Nb words kept : %s/%s (%.1f%s)' % (
                n_wk, n_w, 100.0 * n_wk / n_w, '%'))
        # sort by decreasing length
        lengths, idx_sort = np.sort(lengths)[::-1], np.argsort(-lengths)
        sentences = np.array(sentences)[idx_sort]
        return sentences, lengths, idx_sort
    def get_batch(self, batch, sememe, emb_dim=300, size=300):
        """Build (word-embedding, multi-hot-sememe) tensors for one batch.

        Shorter sentences are implicitly zero-padded to len(batch[0]).
        """
        embed = np.zeros((len(batch[0]), len(batch), 300))
        sememe_data = np.zeros((len(batch[0]), len(batch), size), dtype = np.uint8)
        for i in range(len(batch)):
            for j in range(len(batch[i])):
                embed[j, i, :] = self.word_vec[batch[i][j]]
                for k in sememe.read_word_sememe(batch[i][j]):
                    sememe_data[j, i, k] = 1
        return torch.from_numpy(embed).float(), torch.from_numpy(sememe_data).cuda()
    def encode(self, sentences, bsize=64, tokenize=True, verbose=False, size=300):
        """Encode raw sentences into vectors; rows follow the input order."""
        tic = time.time()
        sentences, lengths, idx_sort = self.prepare_samples(
            sentences, bsize, tokenize, verbose)
        embeddings = []
        for stidx in range(0, len(sentences), bsize):
            batch, batch_s = self.get_batch(sentences[stidx:stidx + bsize], self.sememe, 300,size)
            if self.is_cuda():
                batch = batch.cuda()
            with torch.no_grad():
                batch = self.forward(batch, lengths[stidx:stidx + bsize], batch_s).data.cpu().numpy()
            embeddings.append(batch)
        embeddings = np.vstack(embeddings)
        # unsort back to the caller's sentence order
        idx_unsort = np.argsort(idx_sort)
        embeddings = embeddings[idx_unsort]
        if verbose:
            print('Speed : %.1f sentences/s (%s mode, bsize=%s)' % (
                len(embeddings)/(time.time()-tic),
                'gpu' if self.is_cuda() else 'cpu', bsize))
        return embeddings
class GRU_extra_void(nn.Module):
    """Plain GRU recurrence; sememes are used only in output pooling.

    The multi-hot sememe vectors bypass the recurrence entirely and are
    projected by W_s inside the attentive pooling at the end of
    ``forward``.
    """
    def __init__(self, config):
        super(GRU_extra_void, self).__init__()
        self.enc_lstm_dim = config['enc_lstm_dim']
        self.sememe_dim = config['sememe_dim']
        self.sememesumlstm = SememeSumLstm(self.sememe_dim, self.enc_lstm_dim)
        self.sememesumGRU = SememeSumGRU(self.sememe_dim, self.enc_lstm_dim)
        self.sememe_dim = config['sememe_dim']
        self.sememe_size = config['sememe_size']
        self.emb_sememe = nn.Embedding(self.sememe_size, self.sememe_dim)
        self.in_dim = config['word_emb_dim']
        self.mem_dim = config['enc_lstm_dim']
        # (translated) "the x3 factor stands for the 3 matrices, later
        # separated with split" — NOTE(review): here the factor is 2.
        self.ioux = nn.Linear(self.in_dim, 2 * self.mem_dim)
        self.iouh = nn.Linear(self.mem_dim, 2 * self.mem_dim)
        self.ious = nn.Linear(self.mem_dim, 2 * self.mem_dim)
        # (translated) ious handles the c and h coming from the sememe
        # side; both are mem_dim-dimensional.
        self.fx = nn.Linear(self.in_dim, self.mem_dim)
        self.fx_s = nn.Linear(self.in_dim, self.mem_dim)
        self.fs = nn.Linear(self.mem_dim, self.mem_dim)
        self.fh = nn.Linear(self.mem_dim, self.mem_dim)
        # (translated) fs handles the c and h passed from the sememe side.
        self.Uh = nn.Linear(self.mem_dim, self.mem_dim)
        self.Uh_s = nn.Linear(self.mem_dim, self.mem_dim)
        self.W_s = nn.Linear(config['sememe_size'], self.mem_dim)
        self.W = nn.Linear(self.mem_dim, self.mem_dim)
        # used as a learnable (2*mem_dim, 1) attention-query matrix
        self.query = nn.Embedding(2*self.mem_dim, 1)
        self.W_p = nn.Linear(self.mem_dim, self.mem_dim)
        self.W_x = nn.Linear(self.mem_dim, self.mem_dim)
        self.reset_parameters()
    def reset_parameters(self):
        """Kaiming-initialise the gate weights; zero all biases."""
        layers = [self.ioux, self.iouh, self.ious, self.fx, self.fx_s, self.fs, self.fh, self.W, self.Uh, self.Uh_s]
        for layer in layers:
            init.kaiming_normal_(layer.weight)
            if layer.bias is not None:
                init.constant_(layer.bias, val=0)
    def node_forward(self, inputs, hx):
        """Standard GRU step (no sememe term in the gates)."""
        child_h = hx
        iou = self.ioux(inputs) + self.iouh(child_h)
        z, r = torch.split(iou, iou.size(1) // 2, dim=1)
        z, r = torch.sigmoid(z), torch.sigmoid(r)
        h_telta = self.fx(inputs) + self.Uh(torch.mul(r, child_h))
        h_telta = torch.tanh(h_telta)
        h = torch.mul((1-z), child_h) + torch.mul(z, h_telta)
        return h
    def forward(self, inputs, length, sememe_data):
        """GRU over (max_time, batch, dim), then attention pooling that
        mixes hidden states with W_s-projected raw sememe vectors."""
        # hx: (child_c, child_h)
        sememe_h = sememe_data.float().cuda()
        max_time, batch_size, _ = inputs.size()
        output = []
        hx = inputs[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_()
        for time in range(max_time):
            next_hx = self.node_forward(inputs[time], hx)
            output.append(next_hx)
            hx = next_hx
        new_output = []
        new_output_2 = []
        for i in range(len(length)):
            # hidden states of sentence i up to its true length
            hidden_old = torch.stack(output[0:length[i]], dim = 0)[:, i, :]
            # last hidden state of sentence i (device hard-coded to cuda)
            new_output_2.append(torch.index_select(output[length[i]-1], 0, torch.tensor(i, device = 'cuda')))
            hidden = self.W(hidden_old)
            emb_s_sum = sememe_h[0:length[i], i, :]
            emb_s_sum = self.W_s(emb_s_sum)
            hidden = torch.cat([hidden, emb_s_sum], dim = 1)
            att = torch.tanh(torch.mm(hidden, self.query.weight))
            new_output.append(torch.mm(att.transpose(1,0), hidden_old))
        # combine attention-pooled states with the final hidden states
        new_output = self.W_p(torch.squeeze(torch.stack(new_output, dim = 0))) + self.W_x(torch.squeeze(torch.stack(new_output_2, dim = 0)))
        new_output = torch.tanh(new_output)
        return new_output
class GRU_extra_concat(nn.Module):
    """GRU that concatenates embedded sememe features to the word input.

    ``sememe_sum`` projects the multi-hot sememe vectors through the
    sememe embedding (no recurrence); each step then consumes
    [word ; sememe] as its input, and the same sememe features also feed
    the attentive output pooling.
    """
    def __init__(self, config):
        super(GRU_extra_concat, self).__init__()
        self.enc_lstm_dim = config['enc_lstm_dim']
        self.sememe_dim = config['sememe_dim']
        self.sememesumlstm = SememeSumLstm(self.sememe_dim, self.enc_lstm_dim)
        self.sememesumGRU = SememeSumGRU(self.sememe_dim, self.enc_lstm_dim)
        self.sememe_dim = config['sememe_dim']
        self.sememe_size = config['sememe_size']
        self.emb_sememe = nn.Embedding(self.sememe_size, self.sememe_dim)
        self.in_dim = config['word_emb_dim']
        self.mem_dim = config['enc_lstm_dim']
        # (translated) "the x3 factor stands for the 3 matrices, later
        # separated with split" — NOTE(review): here the factor is 2.
        # Input layers take 2*in_dim because word and sememe embeddings
        # are concatenated.
        self.ioux = nn.Linear(2 * self.in_dim, 2 * self.mem_dim)
        self.iouh = nn.Linear(self.mem_dim, 2 * self.mem_dim)
        self.ious = nn.Linear(self.mem_dim, 2 * self.mem_dim)
        # (translated) ious handles the c and h coming from the sememe
        # side; both are mem_dim-dimensional.
        self.fx = nn.Linear(2 * self.in_dim, self.mem_dim)
        self.fx_s = nn.Linear(self.in_dim, self.mem_dim)
        self.fs = nn.Linear(self.mem_dim, self.mem_dim)
        self.fh = nn.Linear(self.mem_dim, self.mem_dim)
        # (translated) fs handles the c and h passed from the sememe side.
        self.Uh = nn.Linear(self.mem_dim, self.mem_dim)
        self.Uh_s = nn.Linear(self.mem_dim, self.mem_dim)
        self.W_s = nn.Linear(self.in_dim, self.mem_dim)
        self.W = nn.Linear(self.mem_dim, self.mem_dim)
        # used as a learnable (2*mem_dim, 1) attention-query matrix
        self.query = nn.Embedding(2*self.mem_dim, 1)
        self.W_p = nn.Linear(self.mem_dim, self.mem_dim)
        self.W_x = nn.Linear(self.mem_dim, self.mem_dim)
        self.reset_parameters()
    def reset_parameters(self):
        """Kaiming-initialise the gate weights; zero all biases."""
        layers = [self.ioux, self.iouh, self.ious, self.fx, self.fx_s, self.fs, self.fh, self.W, self.Uh, self.Uh_s]
        for layer in layers:
            init.kaiming_normal_(layer.weight)
            if layer.bias is not None:
                init.constant_(layer.bias, val=0)
    def node_forward(self, inputs, hx, sememe_h):
        """GRU step on the concatenation [word input ; sememe features]."""
        child_h = hx
        inputs = torch.cat([inputs, sememe_h], dim = 1)
        iou = self.ioux(inputs) + self.iouh(child_h)
        z, r = torch.split(iou, iou.size(1) // 2, dim=1)
        z, r = torch.sigmoid(z), torch.sigmoid(r)
        h_telta = self.fx(inputs) + self.Uh(torch.mul(r, child_h))
        h_telta = torch.tanh(h_telta)
        h = torch.mul((1-z), child_h) + torch.mul(z, h_telta)
        return h
    def forward(self, inputs, length, sememe_data):
        """GRU over (max_time, batch, dim) inputs, then attention pooling."""
        # hx: (child_c, child_h)
        sememe_h = self.sememe_sum(sememe_data)
        max_time, batch_size, _ = inputs.size()
        output = []
        hx = inputs[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_()
        for time in range(max_time):
            next_hx = self.node_forward(inputs[time], hx, sememe_h[time])
            output.append(next_hx)
            hx = next_hx
        new_output = []
        new_output_2 = []
        for i in range(len(length)):
            # hidden states of sentence i up to its true length
            hidden_old = torch.stack(output[0:length[i]], dim = 0)[:, i, :]
            # last hidden state of sentence i (device hard-coded to cuda)
            new_output_2.append(torch.index_select(output[length[i]-1], 0, torch.tensor(i, device = 'cuda')))
            hidden = self.W(hidden_old)
            emb_s_sum = sememe_h[0:length[i], i, :]
            emb_s_sum = self.W_s(emb_s_sum)
            hidden = torch.cat([hidden, emb_s_sum], dim = 1)
            att = torch.tanh(torch.mm(hidden, self.query.weight))
            new_output.append(torch.mm(att.transpose(1,0), hidden_old))
        # combine attention-pooled states with the final hidden states
        new_output = self.W_p(torch.squeeze(torch.stack(new_output, dim = 0))) + self.W_x(torch.squeeze(torch.stack(new_output_2, dim = 0)))
        new_output = torch.tanh(new_output)
        return new_output
    def sememe_sum(self, input_s):
        """Project multi-hot sememe vectors through the sememe embedding."""
        emb_sememe = self.emb_sememe.weight
        input_sememe = []
        for i in range(input_s.size()[0]):
            input_sememe.append(torch.mm(input_s[i].float(), emb_sememe))
        input_sememe = torch.stack(input_sememe, dim = 0)
        return input_sememe
class GRU_extra_gate(nn.Module):
    """GRU with an additional sememe output gate.

    Embedded sememe features enter the z/r gates (via ``ious``) and an
    extra gate ``o_c`` that injects tanh(W_c * sememe) into the new
    hidden state; attention pooling over sememe features produces the
    final sentence vector.
    """
    def __init__(self, config):
        super(GRU_extra_gate, self).__init__()
        self.enc_lstm_dim = config['enc_lstm_dim']
        self.sememe_dim = config['sememe_dim']
        self.sememesumlstm = SememeSumLstm(self.sememe_dim, self.enc_lstm_dim)
        self.sememesumGRU = SememeSumGRU(self.sememe_dim, self.enc_lstm_dim)
        self.sememe_dim = config['sememe_dim']
        self.sememe_size = config['sememe_size']
        self.emb_sememe = nn.Embedding(self.sememe_size, self.sememe_dim)
        self.in_dim = config['word_emb_dim']
        self.mem_dim = config['enc_lstm_dim']
        # (translated) "the x3 factor stands for the 3 matrices, later
        # separated with split" — NOTE(review): here the factor is 2.
        self.ioux = nn.Linear(self.in_dim, 2 * self.mem_dim)
        self.iouh = nn.Linear(self.mem_dim, 2 * self.mem_dim)
        self.ious = nn.Linear(self.in_dim, 2 * self.mem_dim)
        # (translated) ious handles the c and h coming from the sememe
        # side; both are mem_dim-dimensional.
        self.fx = nn.Linear(self.in_dim, self.mem_dim)
        self.fx_s = nn.Linear(self.in_dim, self.mem_dim)
        self.fs = nn.Linear(self.in_dim, self.mem_dim)
        self.fh = nn.Linear(self.mem_dim, self.mem_dim)
        # (translated) fs handles the c and h passed from the sememe side.
        self.fh_s = nn.Linear(self.mem_dim, self.mem_dim)
        self.Uh = nn.Linear(self.mem_dim, self.mem_dim)
        self.Uh_s = nn.Linear(self.mem_dim, self.mem_dim)
        self.W_s = nn.Linear(self.in_dim, self.mem_dim)
        self.W = nn.Linear(self.mem_dim, self.mem_dim)
        self.W_c = nn.Linear(self.in_dim, self.mem_dim)
        # used as a learnable (2*mem_dim, 1) attention-query matrix
        self.query = nn.Embedding(2*self.mem_dim, 1)
        self.W_p = nn.Linear(self.mem_dim, self.mem_dim)
        self.W_x = nn.Linear(self.mem_dim, self.mem_dim)
        self.reset_parameters()
    def reset_parameters(self):
        """Kaiming-initialise the gate weights; zero all biases."""
        layers = [self.ioux, self.iouh, self.ious, self.fx, self.fx_s, self.fs, self.fh, self.W, self.Uh, self.Uh_s, self.fh_s, self.W_c]
        for layer in layers:
            init.kaiming_normal_(layer.weight)
            if layer.bias is not None:
                init.constant_(layer.bias, val=0)
    def node_forward(self, inputs, sememe_h, hx):
        """GRU step plus a sememe output gate ``o_c``."""
        child_h = hx
        iou = self.ioux(inputs) + self.iouh(child_h) + self.ious(sememe_h)
        z, r = torch.split(iou, iou.size(1) // 2, dim=1)
        z, r = torch.sigmoid(z), torch.sigmoid(r)
        # extra gate controlling how much sememe signal enters h
        o_c = self.fx_s(inputs) + self.fh_s(child_h) + self.fs(sememe_h)
        o_c = torch.sigmoid(o_c)
        h_telta = self.fx(inputs) + self.Uh(torch.mul(r, child_h))
        h_telta = torch.tanh(h_telta)
        h = torch.mul((1-z), child_h) + torch.mul(z, h_telta) + torch.mul(o_c, torch.tanh(self.W_c(sememe_h)))
        return h
    def forward(self, inputs, length, sememe_data):
        """GRU over (max_time, batch, dim) inputs, then attention pooling."""
        # hx: (child_c, child_h)
        sememe_h = self.sememe_sum(sememe_data)
        max_time, batch_size, _ = inputs.size()
        output = []
        hx = inputs[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_()
        for time in range(max_time):
            next_hx = self.node_forward(inputs[time], sememe_h[time], hx)
            output.append(next_hx)
            hx = next_hx
        new_output = []
        new_output_2 = []
        for i in range(len(length)):
            # hidden states of sentence i up to its true length
            hidden_old = torch.stack(output[0:length[i]], dim = 0)[:, i, :]
            # last hidden state of sentence i (device hard-coded to cuda)
            new_output_2.append(torch.index_select(output[length[i]-1], 0, torch.tensor(i, device = 'cuda')))
            hidden = self.W(hidden_old)
            emb_s_sum = sememe_h[0:length[i], i, :]
            emb_s_sum = self.W_s(emb_s_sum)
            hidden = torch.cat([hidden, emb_s_sum], dim = 1)
            att = torch.tanh(torch.mm(hidden, self.query.weight))
            new_output.append(torch.mm(att.transpose(1,0), hidden_old))
        # combine attention-pooled states with the final hidden states
        new_output = self.W_p(torch.squeeze(torch.stack(new_output, dim = 0))) + self.W_x(torch.squeeze(torch.stack(new_output_2, dim = 0)))
        new_output = torch.tanh(new_output)
        return new_output
    def sememe_sum(self, input_s):
        """Project multi-hot sememe vectors through the sememe embedding."""
        emb_sememe = self.emb_sememe.weight
        input_sememe = []
        for i in range(input_s.size()[0]):
            input_sememe.append(torch.mm(input_s[i].float(), emb_sememe))
        input_sememe = torch.stack(input_sememe, dim = 0)
        return input_sememe
class GRU_extra_cell(nn.Module):
    """GRU whose gates and candidate state are conditioned on sememe
    hidden states produced by the sememe GRU, with attentive output
    pooling over those sememe states."""
    def __init__(self, config):
        super(GRU_extra_cell, self).__init__()
        self.enc_lstm_dim = config['enc_lstm_dim']
        self.sememe_dim = config['sememe_dim']
        self.sememesumlstm = SememeSumLstm(self.sememe_dim, self.enc_lstm_dim)
        self.sememesumGRU = SememeSumGRU(self.sememe_dim, self.enc_lstm_dim)
        self.sememe_dim = config['sememe_dim']
        self.sememe_size = config['sememe_size']
        self.emb_sememe = nn.Embedding(self.sememe_size, self.sememe_dim)
        self.in_dim = config['word_emb_dim']
        self.mem_dim = config['enc_lstm_dim']
        # (translated) "the x3 factor stands for the 3 matrices, later
        # separated with split" — NOTE(review): here the factor is 2.
        self.ioux = nn.Linear(self.in_dim, 2 * self.mem_dim)
        self.iouh = nn.Linear(self.mem_dim, 2 * self.mem_dim)
        self.ious = nn.Linear(self.mem_dim, 2 * self.mem_dim)
        # (translated) ious handles the c and h coming from the sememe
        # side; both are mem_dim-dimensional.
        self.fx = nn.Linear(self.in_dim, self.mem_dim)
        self.fx_s = nn.Linear(self.in_dim, self.mem_dim)
        self.fs = nn.Linear(self.mem_dim, self.mem_dim)
        self.fh = nn.Linear(self.mem_dim, self.mem_dim)
        # (translated) fs handles the c and h passed from the sememe side.
        self.Uh = nn.Linear(self.mem_dim, self.mem_dim)
        self.W_s = nn.Linear(self.mem_dim, self.mem_dim)
        self.W = nn.Linear(self.mem_dim, self.mem_dim)
        # used as a learnable (2*mem_dim, 1) attention-query matrix
        self.query = nn.Embedding(2*self.mem_dim, 1)
        self.W_p = nn.Linear(self.mem_dim, self.mem_dim)
        self.W_x = nn.Linear(self.mem_dim, self.mem_dim)
        self.reset_parameters()
    def reset_parameters(self):
        """Kaiming-initialise the gate weights; zero all biases."""
        layers = [self.ioux, self.iouh, self.ious, self.fx, self.fx_s, self.fs, self.fh, self.W, self.Uh]
        for layer in layers:
            init.kaiming_normal_(layer.weight)
            if layer.bias is not None:
                init.constant_(layer.bias, val=0)
    def node_forward(self, inputs, sememe_h, hx):
        """One GRU step; gates and candidate both see the sememe state."""
        child_h = hx
        iou = self.ioux(inputs) + self.iouh(child_h) + self.ious(sememe_h)
        z, r = torch.split(iou, iou.size(1) // 2, dim=1)
        z, r = torch.sigmoid(z), torch.sigmoid(r)
        # reset gate applied to both previous hidden and sememe states
        h_telta = self.fx(inputs) + self.Uh(torch.mul(r, child_h) + torch.mul(r, sememe_h))
        h_telta = torch.tanh(h_telta)
        h = torch.mul((1-z), child_h) + torch.mul(z, h_telta)
        return h
    def forward(self, inputs, length, sememe_data):
        """GRU over (max_time, batch, dim) inputs, then attention pooling."""
        # hx: (child_c, child_h)
        sememe_h = self.sememe_sum(sememe_data)
        max_time, batch_size, _ = inputs.size()
        output = []
        hx = inputs[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_()
        for time in range(max_time):
            next_hx = self.node_forward(inputs[time], sememe_h[time], hx)
            output.append(next_hx)
            hx = next_hx
        new_output = []
        new_output_2 = []
        for i in range(len(length)):
            # hidden states of sentence i up to its true length
            hidden_old = torch.stack(output[0:length[i]], dim = 0)[:, i, :]
            # last hidden state of sentence i (device hard-coded to cuda)
            new_output_2.append(torch.index_select(output[length[i]-1], 0, torch.tensor(i, device = 'cuda')))
            hidden = self.W(hidden_old)
            emb_s_sum = sememe_h[0:length[i], i, :]
            emb_s_sum = self.W_s(emb_s_sum)
            hidden = torch.cat([hidden, emb_s_sum], dim = 1)
            att = torch.tanh(torch.mm(hidden, self.query.weight))
            new_output.append(torch.mm(att.transpose(1,0), hidden_old))
        # combine attention-pooled states with the final hidden states
        new_output = self.W_p(torch.squeeze(torch.stack(new_output, dim = 0))) + self.W_x(torch.squeeze(torch.stack(new_output_2, dim = 0)))
        new_output = torch.tanh(new_output)
        return new_output
    def sememe_sum(self, input_s):
        """Project multi-hot sememe vectors and run them through the sememe GRU."""
        emb_sememe = self.emb_sememe.weight
        input_sememe = []
        for i in range(input_s.size()[0]):
            input_sememe.append(torch.mm(input_s[i].float(), emb_sememe))
        input_sememe = torch.stack(input_sememe, dim = 0)
        sememe_h = self.sememesumGRU(input_sememe)
        return sememe_h
class BIGRU_baseline(nn.Module):
    """Bidirectional GRU baseline (sememe data accepted but unused in the
    recurrence) plus InferSent-style vocabulary / w2v / encode helpers.

    Fixes vs. the original:
      * ``np.int`` / text-mode ``np.fromstring`` (removed / deprecated in
        modern NumPy) replaced with ``np.int64`` / ``np.array(split())``;
      * ragged sentence arrays built with ``dtype=object``;
      * ``get_w2v_k`` off-by-one (loaded K+1 vectors) corrected;
      * hard-coded sememe width 2186 in ``get_batch`` is now a defaulted
        ``size`` parameter (backward compatible).
    """
    def __init__(self, config, sememe):
        super(BIGRU_baseline, self).__init__()
        self.enc_lstm_dim = config['enc_lstm_dim']
        self.sememe_dim = config['sememe_dim']
        self.sememesumlstm = SememeSumLstm(self.sememe_dim, self.enc_lstm_dim)
        self.sememesumGRU = SememeSumGRU(self.sememe_dim, self.enc_lstm_dim)
        self.in_dim = config['word_emb_dim']
        self.mem_dim = config['enc_lstm_dim']
        #self.pool_type = config['pool_type']
        # Forward-direction layers; the *_b twins are the backward pass.
        # The 2x factor packs the z and r gate matrices into one Linear.
        self.ioux = nn.Linear(self.in_dim, 2 * self.mem_dim)
        self.ioux_b = nn.Linear(self.in_dim, 2 * self.mem_dim)
        self.iouh = nn.Linear(self.mem_dim, 2 * self.mem_dim)
        self.iouh_b = nn.Linear(self.mem_dim, 2 * self.mem_dim)
        self.ious = nn.Linear(self.mem_dim, 2 * self.mem_dim)
        self.ious_b = nn.Linear(self.mem_dim, 2 * self.mem_dim)
        self.fx = nn.Linear(self.in_dim, self.mem_dim)
        self.fx_b = nn.Linear(self.in_dim, self.mem_dim)
        self.Uh = nn.Linear(self.mem_dim, self.mem_dim)
        self.Uh_b = nn.Linear(self.mem_dim, self.mem_dim)
        self.Uh_s = nn.Linear(self.mem_dim, self.mem_dim)
        self.Uh_s_b = nn.Linear(self.mem_dim, self.mem_dim)
        self.max_pad = True
        self.reset_parameters()
        self.sememe = sememe
        self.bos = '<s>'
        self.eos = '</s>'
        self.max_pad = True
        self.moses_tok = False
    def reset_parameters(self):
        """Kaiming-initialise forward and backward gate weights; zero biases."""
        layers = [self.ioux, self.iouh, self.ious, self.fx, self.Uh, self.Uh_s, self.ioux_b, self.iouh_b, self.ious_b, self.fx_b, self.Uh_b, self.Uh_s_b]
        for layer in layers:
            init.kaiming_normal_(layer.weight)
            if layer.bias is not None:
                init.constant_(layer.bias, val=0)
    def node_forward(self, inputs, hx):
        """One forward-direction GRU step."""
        child_h = hx
        iou = self.ioux(inputs) + self.iouh(child_h)
        z, r = torch.split(iou, iou.size(1) // 2, dim=1)
        z, r = torch.sigmoid(z), torch.sigmoid(r)
        h_telta = self.fx(inputs) + self.Uh(torch.mul(r, child_h))
        h_telta = torch.tanh(h_telta)
        h = torch.mul((1-z), child_h) + torch.mul(z, h_telta)
        return h
    def node_backward(self, inputs, hx):
        """One backward-direction GRU step (independent parameters)."""
        child_h = hx
        iou = self.ioux_b(inputs) + self.iouh_b(child_h)
        z, r = torch.split(iou, iou.size(1) // 2, dim=1)
        z, r = torch.sigmoid(z), torch.sigmoid(r)
        h_telta = self.fx_b(inputs) + self.Uh_b(torch.mul(r, child_h))
        h_telta = torch.tanh(h_telta)
        h = torch.mul((1-z), child_h) + torch.mul(z, h_telta)
        return h
    def forward(self, sent, sent_len, sememe_data):
        """Encode a padded batch with forward+backward GRU passes.

        Args:
            sent: (max_time, batch, in_dim) CUDA tensor.
            sent_len: numpy array of true sentence lengths.
            sememe_data: accepted for interface parity; unused (baseline).

        Returns:
            (batch, 2*mem_dim): last forward state ++ first backward state,
            in the caller's original sentence order.
        """
        # Sort by decreasing length so each step runs on a packed prefix.
        sent_len_sorted, idx_sort = np.sort(sent_len)[::-1], np.argsort(-sent_len)
        sent_len_sorted = sent_len_sorted.copy()
        idx_unsort = np.argsort(idx_sort)
        idx_sort = torch.from_numpy(idx_sort).cuda()
        sent = sent.index_select(1, idx_sort)
        max_time, batch_size, _ = sent.size()
        # pack_length[t] = number of sentences still alive at time step t.
        # np.int was removed in NumPy 1.24; np.int64 matches the dtype the
        # original produced on 64-bit Linux.
        pack_length = np.zeros([max_time, 1], dtype=np.int64)
        time_point = batch_size-1
        last_point = 0
        while(True):
            pack_length[last_point: sent_len_sorted[time_point]] = time_point+1
            last_point = sent_len_sorted[time_point]
            if(sent_len_sorted[time_point] == max_time):
                break
            time_point = time_point-1
        pack_length = torch.from_numpy(pack_length).cuda()
        output_forward = []
        hx_forward = sent[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_()
        for time in range(max_time):
            next_hx = self.node_forward(sent[time, 0:pack_length[time]], hx_forward)
            # pad finished sentences back to full batch width with zeros
            output_forward.append(torch.cat([next_hx, torch.zeros([batch_size-next_hx.size()[0], self.mem_dim], device = 'cuda')], dim = 0))
            if(time < max_time-1):
                hx_forward = next_hx[0:pack_length[time+1]]
        output_backward = [[] for i in range(max_time)]
        hx_backward = sent[0][0].detach().new(pack_length[max_time-1], self.mem_dim).fill_(0.).requires_grad_()
        for time in range(max_time):
            next_hx = self.node_backward(sent[max_time-time-1, 0:pack_length[max_time-time-1]], hx_backward)
            output_backward[max_time-time-1] = torch.cat([next_hx, torch.zeros([batch_size-next_hx.size()[0], self.mem_dim], device = 'cuda')], dim = 0)
            if(time < max_time-1):
                # grow the state as more sentences become active backwards
                hx_backward = torch.cat([next_hx, torch.zeros([pack_length[max_time-time-2]-next_hx.size()[0], self.mem_dim]).cuda()], dim = 0)
        a = torch.stack(output_forward, dim = 0)
        b = torch.stack(output_backward, dim = 0)
        idx_unsort = torch.from_numpy(idx_unsort).cuda()
        sent_output_forward = a.index_select(1, idx_unsort)
        sent_output_backward = b.index_select(1, idx_unsort)
        final_output_forward = torch.stack([sent_output_forward[sent_len[i]-1][i] for i in range(batch_size)], dim = 0)
        final_output = torch.cat([final_output_forward, sent_output_backward[0]], dim = 1)
        return final_output
    def set_w2v_path(self, w2v_path):
        """Remember where the word2vec/GloVe text file lives."""
        self.w2v_path = w2v_path
    def get_word_dict(self, sentences, tokenize=True):
        """Collect the vocabulary of *sentences* (plus <s> and </s>)."""
        # create vocab of words
        word_dict = {}
        sentences = [s.split() if not tokenize else self.tokenize(s) for s in sentences]
        for sent in sentences:
            for word in sent:
                if word not in word_dict:
                    word_dict[word] = ''
        word_dict[self.bos] = ''
        word_dict[self.eos] = ''
        return word_dict
    def get_w2v(self, word_dict):
        """Load vectors from the w2v file for the words in *word_dict*."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        # create word_vec with w2v vectors
        word_vec = {}
        with open(self.w2v_path, encoding='utf-8') as f:
            for line in f:
                word, vec = line.split(' ', 1)
                if word in word_dict:
                    # text-mode np.fromstring is deprecated; split+array
                    # parses the same space-separated floats
                    word_vec[word] = np.array(vec.split(), dtype=np.float64)
        print('Found %s(/%s) words with w2v vectors' % (len(word_vec), len(word_dict)))
        return word_vec
    def get_w2v_k(self, K):
        """Load vectors for the first K words of the w2v file, plus <s>/</s>.

        The original used ``k <= K`` (k starting at 0), which loaded K+1
        vectors; this loads exactly K before scanning on for the special
        tokens.
        """
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        # create word_vec with k first w2v vectors
        k = 0
        word_vec = {}
        with open(self.w2v_path, encoding='utf-8') as f:
            for line in f:
                word, vec = line.split(' ', 1)
                if k < K:
                    word_vec[word] = np.array(vec.split(), dtype=np.float64)
                    k += 1
                if k >= K:
                    # keep scanning only for the special tokens
                    if word in [self.bos, self.eos]:
                        word_vec[word] = np.array(vec.split(), dtype=np.float64)
                if k >= K and all([w in word_vec for w in [self.bos, self.eos]]):
                    break
        return word_vec
    def build_vocab(self, sentences, tokenize=True):
        """Build self.word_vec from the vocabulary of *sentences*."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        word_dict = self.get_word_dict(sentences, tokenize)
        self.word_vec = self.get_w2v(word_dict)
        print('Vocab size : %s' % (len(self.word_vec)))
    # build w2v vocab with k most frequent words
    def build_vocab_k_words(self, K):
        """Build self.word_vec from the first K entries of the w2v file."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        self.word_vec = self.get_w2v_k(K)
        print('Vocab size : %s' % (K))
    def update_vocab(self, sentences, tokenize=True):
        """Add vectors for words of *sentences* not yet in self.word_vec."""
        assert hasattr(self, 'w2v_path'), 'warning : w2v path not set'
        assert hasattr(self, 'word_vec'), 'build_vocab before updating it'
        word_dict = self.get_word_dict(sentences, tokenize)
        # keep only new words
        for word in self.word_vec:
            if word in word_dict:
                del word_dict[word]
        # update vocabulary
        if word_dict:
            new_word_vec = self.get_w2v(word_dict)
            self.word_vec.update(new_word_vec)
        else:
            new_word_vec = []
        print('New vocab size : %s (added %s words)'% (len(self.word_vec), len(new_word_vec)))
    def is_cuda(self):
        """Report whether the encoder should run on GPU (hard-coded True)."""
        # either all weights are on cpu or they are on gpu
        return True
        #return self.enc_lstm.bias_hh_l0.data.is_cuda
    def tokenize(self, s):
        """Tokenize *s* with NLTK, optionally approximating MOSES output."""
        from nltk.tokenize import word_tokenize
        if self.moses_tok:
            s = ' '.join(word_tokenize(s))
            s = s.replace(" n't ", "n 't ")  # HACK to get ~MOSES tokenization
            return s.split()
        else:
            return word_tokenize(s)
    def prepare_samples(self, sentences, bsize, tokenize, verbose):
        """Tokenize, drop OOV words, and sort sentences longest-first.

        Returns (sentences, lengths, idx_sort); idx_sort lets callers
        restore the original order after encoding.
        """
        import warnings  # hoisted: the original imported inside the loop
        sentences = [[self.bos] + s.split() + [self.eos] if not tokenize else
                     [self.bos] + self.tokenize(s) + [self.eos] for s in sentences]
        n_w = np.sum([len(x) for x in sentences])
        # filters words without w2v vectors
        for i in range(len(sentences)):
            s_f = [word for word in sentences[i] if word in self.word_vec]
            if not s_f:
                warnings.warn('No words in "%s" (idx=%s) have w2v vectors. '
                              'Replacing by "</s>"..' % (sentences[i], i))
                s_f = [self.eos]
            sentences[i] = s_f
        lengths = np.array([len(s) for s in sentences])
        n_wk = np.sum(lengths)
        if verbose:
            print('Nb words kept : %s/%s (%.1f%s)' % (
                n_wk, n_w, 100.0 * n_wk / n_w, '%'))
        # sort by decreasing length
        lengths, idx_sort = np.sort(lengths)[::-1], np.argsort(-lengths)
        # dtype=object: ragged list of lists; NumPy >= 1.24 raises
        # ValueError without an explicit object dtype
        sentences = np.array(sentences, dtype=object)[idx_sort]
        return sentences, lengths, idx_sort
    def get_batch(self, batch, sememe, emb_dim=300, size=2186):
        """Build (word-embedding, multi-hot-sememe) tensors for one batch.

        ``size`` is the sememe vocabulary width (previously hard-coded to
        2186); shorter sentences are implicitly zero-padded.
        """
        embed = np.zeros((len(batch[0]), len(batch), 300))
        sememe_data = np.zeros((len(batch[0]), len(batch), size), dtype=np.uint8)
        for i in range(len(batch)):
            for j in range(len(batch[i])):
                embed[j, i, :] = self.word_vec[batch[i][j]]
                for k in sememe.read_word_sememe(batch[i][j]):
                    sememe_data[j, i, k] = 1
        return torch.from_numpy(embed).float(), torch.from_numpy(sememe_data).cuda()
    def encode(self, sentences, bsize=64, tokenize=True, verbose=False):
        """Encode raw sentences into vectors; rows follow the input order."""
        tic = time.time()
        sentences, lengths, idx_sort = self.prepare_samples(
            sentences, bsize, tokenize, verbose)
        embeddings = []
        for stidx in range(0, len(sentences), bsize):
            batch, batch_s = self.get_batch(sentences[stidx:stidx + bsize], self.sememe)
            if self.is_cuda():
                batch = batch.cuda()
            with torch.no_grad():
                batch = self.forward(batch, lengths[stidx:stidx + bsize], batch_s).data.cpu().numpy()
            embeddings.append(batch)
        embeddings = np.vstack(embeddings)
        # unsort back to the caller's sentence order
        idx_unsort = np.argsort(idx_sort)
        embeddings = embeddings[idx_unsort]
        if verbose:
            print('Speed : %.1f sentences/s (%s mode, bsize=%s)' % (
                len(embeddings)/(time.time()-tic),
                'gpu' if self.is_cuda() else 'cpu', bsize))
        return embeddings
class BIGRU_concat(nn.Module):
    """Bidirectional GRU sentence encoder fusing sememe knowledge by
    concatenation: at every time step the summed sememe embedding of the
    current word is concatenated to its word embedding before entering the
    GRU cell (hence the doubled input width of the gate projections).

    Also carries InferSent-style w2v vocabulary helpers used by ``encode``.
    Fixes vs. the previous revision: removed NumPy aliases ``np.int`` /
    text-mode ``np.fromstring`` (both removed/deprecated in modern NumPy),
    ragged ``np.array`` now uses ``dtype=object``, ``get_batch`` honors its
    ``emb_dim`` parameter, and ``get_w2v_k`` no longer loads K+1 vectors.
    """
    def __init__(self, config, sememe):
        super(BIGRU_concat, self).__init__()
        self.enc_lstm_dim = config['enc_lstm_dim']
        self.sememe_dim = config['sememe_dim']
        self.sememesumlstm = SememeSumLstm(self.sememe_dim, self.enc_lstm_dim)
        self.sememesumGRU = SememeSumGRU(self.sememe_dim, self.enc_lstm_dim)
        self.sememe_size = config['sememe_size']
        self.emb_sememe = nn.Embedding(self.sememe_size, self.sememe_dim)
        self.in_dim = config['word_emb_dim']
        self.mem_dim = config['enc_lstm_dim']
        # "2 *" on the output packs the update (z) and reset (r) gate
        # projections into one matrix; they are separated later with split.
        # "2 *" on the input accounts for the word/sememe concatenation.
        self.ioux = nn.Linear(2 * self.in_dim, 2 * self.mem_dim)
        self.ioux_b = nn.Linear(2 * self.in_dim, 2 * self.mem_dim)
        self.iouh = nn.Linear(self.mem_dim, 2 * self.mem_dim)
        self.iouh_b = nn.Linear(self.mem_dim, 2 * self.mem_dim)
        self.fx = nn.Linear(2 * self.in_dim, self.mem_dim)
        self.fx_b = nn.Linear(2 * self.in_dim, self.mem_dim)
        self.Uh = nn.Linear(self.mem_dim, self.mem_dim)
        self.Uh_b = nn.Linear(self.mem_dim, self.mem_dim)
        self.max_pad = True
        self.reset_parameters()
        self.sememe = sememe
        self.bos = '<s>'
        self.eos = '</s>'
        self.moses_tok = False
    def reset_parameters(self):
        """Kaiming-initialize all gate weights; zero all biases."""
        layers = [self.ioux, self.iouh, self.fx, self.Uh,
                  self.ioux_b, self.iouh_b, self.fx_b, self.Uh_b]
        for layer in layers:
            init.kaiming_normal_(layer.weight)
            if layer.bias is not None:
                init.constant_(layer.bias, val=0)
    def node_forward(self, inputs, hx, sememe_h):
        """One forward-direction GRU step over the active batch prefix.

        inputs / sememe_h: (active, in_dim) word and sememe vectors;
        hx: (active, mem_dim) previous hidden state. Returns the new state.
        """
        child_h = hx
        # concat fusion: append the sememe vector to the word vector
        inputs = torch.cat([inputs, sememe_h], dim=1)
        iou = self.ioux(inputs) + self.iouh(child_h)
        z, r = torch.split(iou, iou.size(1) // 2, dim=1)  # update / reset
        z, r = torch.sigmoid(z), torch.sigmoid(r)
        h_telta = self.fx(inputs) + self.Uh(torch.mul(r, child_h))
        h_telta = torch.tanh(h_telta)
        h = torch.mul((1 - z), child_h) + torch.mul(z, h_telta)
        return h
    def node_backward(self, inputs, hx, sememe_h):
        """One backward-direction GRU step (mirror of node_forward)."""
        child_h = hx
        inputs = torch.cat([inputs, sememe_h], dim=1)
        iou = self.ioux_b(inputs) + self.iouh_b(child_h)
        z, r = torch.split(iou, iou.size(1) // 2, dim=1)
        z, r = torch.sigmoid(z), torch.sigmoid(r)
        h_telta = self.fx_b(inputs) + self.Uh_b(torch.mul(r, child_h))
        h_telta = torch.tanh(h_telta)
        h = torch.mul((1 - z), child_h) + torch.mul(z, h_telta)
        return h
    def forward(self, sent, sent_len, sememe_data):
        """Run the bidirectional GRU over a padded batch.

        sent: (max_time, batch, word_dim) embeddings; sent_len: array of true
        lengths; sememe_data: multi-hot sememe annotations. Returns the last
        valid forward state concatenated with the first backward state,
        shape (batch, 2 * mem_dim).
        """
        sememe_h = self.sememe_sum(sememe_data)
        # sort by decreasing length so each step processes only the still
        # active prefix of the batch (manual sequence "packing")
        sent_len_sorted, idx_sort = np.sort(sent_len)[::-1], np.argsort(-sent_len)
        sent_len_sorted = sent_len_sorted.copy()  # drop the negative stride
        idx_unsort = np.argsort(idx_sort)
        idx_sort = torch.from_numpy(idx_sort).cuda()
        sent = sent.index_select(1, idx_sort)
        sememe_h = sememe_h.index_select(1, idx_sort)
        max_time, batch_size, _ = sent.size()
        # pack_length[t] = number of sequences still active at time step t
        # (np.int was removed from NumPy; int64 preserves the old behavior)
        pack_length = np.zeros([max_time, 1], dtype=np.int64)
        time_point = batch_size - 1
        last_point = 0
        while (True):
            pack_length[last_point: sent_len_sorted[time_point]] = time_point + 1
            last_point = sent_len_sorted[time_point]
            if (sent_len_sorted[time_point] == max_time):
                break
            time_point = time_point - 1
        pack_length = torch.from_numpy(pack_length).cuda()
        # forward sweep; finished sequences are padded back with zero rows
        output_forward = []
        hx_forward = sent[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_()
        for time in range(max_time):
            next_hx = self.node_forward(sent[time, 0:pack_length[time]], hx_forward, sememe_h[time, 0:pack_length[time]])
            output_forward.append(torch.cat([next_hx, torch.zeros([batch_size - next_hx.size()[0], self.mem_dim], device='cuda')], dim=0))
            if (time < max_time - 1):
                hx_forward = next_hx[0:pack_length[time + 1]]
        # backward sweep; the state grows with zero rows as sequences join
        output_backward = [[] for i in range(max_time)]
        hx_backward = sent[0][0].detach().new(pack_length[max_time - 1], self.mem_dim).fill_(0.).requires_grad_()
        for time in range(max_time):
            next_hx = self.node_backward(sent[max_time - time - 1, 0:pack_length[max_time - time - 1]], hx_backward, sememe_h[max_time - time - 1, 0:pack_length[max_time - time - 1]])
            output_backward[max_time - time - 1] = torch.cat([next_hx, torch.zeros([batch_size - next_hx.size()[0], self.mem_dim], device='cuda')], dim=0)
            if (time < max_time - 1):
                hx_backward = torch.cat([next_hx, torch.zeros([pack_length[max_time - time - 2] - next_hx.size()[0], self.mem_dim]).cuda()], dim=0)
        a = torch.stack(output_forward, dim=0)
        b = torch.stack(output_backward, dim=0)
        # undo the length sort
        idx_unsort = torch.from_numpy(idx_unsort).cuda()
        sent_output_forward = a.index_select(1, idx_unsort)
        sent_output_backward = b.index_select(1, idx_unsort)
        final_output_forward = torch.stack([sent_output_forward[sent_len[i] - 1][i] for i in range(batch_size)], dim=0)
        final_output = torch.cat([final_output_forward, sent_output_backward[0]], dim=1)
        return final_output
    def sememe_sum(self, input_s):
        """Map multi-hot sememe annotations to summed sememe embeddings."""
        emb_sememe = self.emb_sememe.weight
        input_sememe = []
        for i in range(input_s.size()[0]):
            input_sememe.append(torch.mm(input_s[i].float(), emb_sememe))
        input_sememe = torch.stack(input_sememe, dim=0)
        return input_sememe
    def set_w2v_path(self, w2v_path):
        """Remember where the word2vec text file lives."""
        self.w2v_path = w2v_path
    def get_word_dict(self, sentences, tokenize=True):
        """Collect the vocabulary of `sentences`, always including bos/eos."""
        word_dict = {}
        sentences = [s.split() if not tokenize else self.tokenize(s) for s in sentences]
        for sent in sentences:
            for word in sent:
                if word not in word_dict:
                    word_dict[word] = ''
        word_dict[self.bos] = ''
        word_dict[self.eos] = ''
        return word_dict
    def get_w2v(self, word_dict):
        """Load w2v vectors for exactly the words present in `word_dict`."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        word_vec = {}
        with open(self.w2v_path, encoding='utf-8') as f:
            for line in f:
                word, vec = line.split(' ', 1)
                if word in word_dict:
                    # text-mode np.fromstring is deprecated; parse explicitly
                    word_vec[word] = np.array(vec.split(), dtype=np.float64)
        print('Found %s(/%s) words with w2v vectors' % (len(word_vec), len(word_dict)))
        return word_vec
    def get_w2v_k(self, K):
        """Load the first K w2v vectors, then keep scanning for bos/eos."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        k = 0
        word_vec = {}
        with open(self.w2v_path, encoding='utf-8') as f:
            for line in f:
                word, vec = line.split(' ', 1)
                if k < K:  # was `k <= K`, which loaded K + 1 vectors
                    word_vec[word] = np.array(vec.split(), dtype=np.float64)
                    k += 1
                if k >= K:
                    if word in [self.bos, self.eos]:
                        word_vec[word] = np.array(vec.split(), dtype=np.float64)
                if k >= K and all([w in word_vec for w in [self.bos, self.eos]]):
                    break
        return word_vec
    def build_vocab(self, sentences, tokenize=True):
        """Build self.word_vec from the words appearing in `sentences`."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        word_dict = self.get_word_dict(sentences, tokenize)
        self.word_vec = self.get_w2v(word_dict)
        print('Vocab size : %s' % (len(self.word_vec)))
    def build_vocab_k_words(self, K):
        """Build self.word_vec from the K most frequent w2v words."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        self.word_vec = self.get_w2v_k(K)
        print('Vocab size : %s' % (K))
    def update_vocab(self, sentences, tokenize=True):
        """Add vectors for words of `sentences` not yet in self.word_vec."""
        assert hasattr(self, 'w2v_path'), 'warning : w2v path not set'
        assert hasattr(self, 'word_vec'), 'build_vocab before updating it'
        word_dict = self.get_word_dict(sentences, tokenize)
        # keep only genuinely new words
        for word in self.word_vec:
            if word in word_dict:
                del word_dict[word]
        if word_dict:
            new_word_vec = self.get_w2v(word_dict)
            self.word_vec.update(new_word_vec)
        else:
            new_word_vec = []
        print('New vocab size : %s (added %s words)' % (len(self.word_vec), len(new_word_vec)))
    def is_cuda(self):
        # forward() hard-codes CUDA tensors, so the encoder is always "gpu"
        return True
    def tokenize(self, s):
        """Tokenize with NLTK, optionally approximating MOSES tokenization."""
        from nltk.tokenize import word_tokenize
        if self.moses_tok:
            s = ' '.join(word_tokenize(s))
            s = s.replace(" n't ", "n 't ")  # HACK to get ~MOSES tokenization
            return s.split()
        else:
            return word_tokenize(s)
    def prepare_samples(self, sentences, bsize, tokenize, verbose):
        """Tokenize, wrap in <s>/</s>, drop OOV words, sort by length desc."""
        sentences = [[self.bos] + s.split() + [self.eos] if not tokenize else
                     [self.bos] + self.tokenize(s) + [self.eos] for s in sentences]
        n_w = np.sum([len(x) for x in sentences])
        # filter out words without w2v vectors
        for i in range(len(sentences)):
            s_f = [word for word in sentences[i] if word in self.word_vec]
            if not s_f:
                import warnings
                warnings.warn('No words in "%s" (idx=%s) have w2v vectors. \
                Replacing by "</s>"..' % (sentences[i], i))
                s_f = [self.eos]
            sentences[i] = s_f
        lengths = np.array([len(s) for s in sentences])
        n_wk = np.sum(lengths)
        if verbose:
            print('Nb words kept : %s/%s (%.1f%s)' % (
                n_wk, n_w, 100.0 * n_wk / n_w, '%'))
        # sort by decreasing length; dtype=object because rows are ragged
        # (plain np.array(ragged) raises on modern NumPy)
        lengths, idx_sort = np.sort(lengths)[::-1], np.argsort(-lengths)
        sentences = np.array(sentences, dtype=object)[idx_sort]
        return sentences, lengths, idx_sort
    def get_batch(self, batch, sememe, emb_dim=300):
        """Build (max_len, bsize, emb_dim) embeddings and multi-hot sememes.

        `batch` must be sorted by decreasing length; missing positions stay
        zero-padded. emb_dim was previously ignored (300 hard-coded).
        """
        embed = np.zeros((len(batch[0]), len(batch), emb_dim))
        # 2186 is presumably the sememe inventory size (should equal
        # config['sememe_size']) — TODO confirm
        sememe_data = np.zeros((len(batch[0]), len(batch), 2186), dtype=np.uint8)
        for i in range(len(batch)):
            for j in range(len(batch[i])):
                embed[j, i, :] = self.word_vec[batch[i][j]]
                for k in sememe.read_word_sememe(batch[i][j]):
                    sememe_data[j, i, k] = 1
        return torch.from_numpy(embed).float(), torch.from_numpy(sememe_data).cuda()
    def encode(self, sentences, bsize=64, tokenize=True, verbose=False):
        """Encode raw sentences to a (n, 2 * mem_dim) numpy array."""
        tic = time.time()
        sentences, lengths, idx_sort = self.prepare_samples(
            sentences, bsize, tokenize, verbose)
        embeddings = []
        for stidx in range(0, len(sentences), bsize):
            batch, batch_s = self.get_batch(sentences[stidx:stidx + bsize], self.sememe)
            if self.is_cuda():
                batch = batch.cuda()
            with torch.no_grad():
                batch = self.forward(batch, lengths[stidx:stidx + bsize], batch_s).data.cpu().numpy()
            embeddings.append(batch)
        embeddings = np.vstack(embeddings)
        # restore the caller's original sentence order
        idx_unsort = np.argsort(idx_sort)
        embeddings = embeddings[idx_unsort]
        if verbose:
            print('Speed : %.1f sentences/s (%s mode, bsize=%s)' % (
                len(embeddings) / (time.time() - tic),
                'gpu' if self.is_cuda() else 'cpu', bsize))
        return embeddings
class BIGRU_gate(nn.Module):
    """Bidirectional GRU sentence encoder fusing sememe knowledge through a
    dedicated output gate: the sememe vector enters the z/r gates directly
    and an extra gate ``o_c`` controls how much transformed sememe knowledge
    is added to the hidden state (the *gate* fusion variant).

    Also carries InferSent-style w2v vocabulary helpers used by ``encode``.
    Fixes vs. the previous revision: removed NumPy aliases ``np.int`` /
    text-mode ``np.fromstring`` (both removed/deprecated in modern NumPy),
    ragged ``np.array`` now uses ``dtype=object``, ``get_batch`` honors its
    ``emb_dim`` parameter, and ``get_w2v_k`` no longer loads K+1 vectors.
    """
    def __init__(self, config, sememe):
        super(BIGRU_gate, self).__init__()
        self.enc_lstm_dim = config['enc_lstm_dim']
        self.sememe_dim = config['sememe_dim']
        self.sememesumlstm = SememeSumLstm(self.sememe_dim, self.enc_lstm_dim)
        self.sememesumGRU = SememeSumGRU(self.sememe_dim, self.enc_lstm_dim)
        self.sememe_size = config['sememe_size']
        self.emb_sememe = nn.Embedding(self.sememe_size, self.sememe_dim)
        self.in_dim = config['word_emb_dim']
        self.mem_dim = config['enc_lstm_dim']
        # "2 *" on the output packs the update (z) and reset (r) gate
        # projections into one matrix; they are separated later with split.
        self.ioux = nn.Linear(self.in_dim, 2 * self.mem_dim)
        self.ioux_b = nn.Linear(self.in_dim, 2 * self.mem_dim)
        self.iouh = nn.Linear(self.mem_dim, 2 * self.mem_dim)
        self.iouh_b = nn.Linear(self.mem_dim, 2 * self.mem_dim)
        self.ious = nn.Linear(self.in_dim, 2 * self.mem_dim)
        self.ious_b = nn.Linear(self.in_dim, 2 * self.mem_dim)
        # projections feeding the sememe output gate o_c (both directions)
        self.fx_s = nn.Linear(self.in_dim, self.mem_dim)
        self.fs = nn.Linear(self.in_dim, self.mem_dim)
        self.fh_s = nn.Linear(self.mem_dim, self.mem_dim)
        self.fx_s_b = nn.Linear(self.in_dim, self.mem_dim)
        self.fs_b = nn.Linear(self.in_dim, self.mem_dim)
        self.fh_s_b = nn.Linear(self.mem_dim, self.mem_dim)
        self.fx = nn.Linear(self.in_dim, self.mem_dim)
        self.fx_b = nn.Linear(self.in_dim, self.mem_dim)
        self.Uh = nn.Linear(self.mem_dim, self.mem_dim)
        self.Uh_b = nn.Linear(self.mem_dim, self.mem_dim)
        self.W_c = nn.Linear(self.in_dim, self.mem_dim)
        self.W_c_b = nn.Linear(self.in_dim, self.mem_dim)
        self.max_pad = True
        self.reset_parameters()
        self.sememe = sememe
        self.bos = '<s>'
        self.eos = '</s>'
        self.moses_tok = False
    def reset_parameters(self):
        """Kaiming-initialize the core gate weights and zero their biases.

        NOTE(review): the o_c / W_c layers (fx_s, fs, fh_s, W_c, ...) keep
        PyTorch's default init — preserved as-is from the original code.
        """
        layers = [self.ioux, self.iouh, self.ious, self.fx, self.Uh,
                  self.ioux_b, self.iouh_b, self.ious_b, self.fx_b, self.Uh_b]
        for layer in layers:
            init.kaiming_normal_(layer.weight)
            if layer.bias is not None:
                init.constant_(layer.bias, val=0)
    def node_forward(self, inputs, hx, sememe_h):
        """One forward-direction step of the sememe-gated GRU cell."""
        child_h = hx
        # joint update/reset gates from word, hidden and sememe projections
        iou = self.ioux(inputs) + self.iouh(child_h) + self.ious(sememe_h)
        z, r = torch.split(iou, iou.size(1) // 2, dim=1)
        z, r = torch.sigmoid(z), torch.sigmoid(r)
        # o_c gates how much transformed sememe knowledge flows into h
        o_c = self.fx_s(inputs) + self.fh_s(child_h) + self.fs(sememe_h)
        o_c = torch.sigmoid(o_c)
        h_telta = self.fx(inputs) + self.Uh(torch.mul(r, child_h))
        h_telta = torch.tanh(h_telta)
        h = torch.mul((1 - z), child_h) + torch.mul(z, h_telta) + torch.mul(o_c, torch.tanh(self.W_c(sememe_h)))
        return h
    def node_backward(self, inputs, hx, sememe_h):
        """One backward-direction step (mirror of node_forward)."""
        child_h = hx
        iou = self.ioux_b(inputs) + self.iouh_b(child_h) + self.ious_b(sememe_h)
        z, r = torch.split(iou, iou.size(1) // 2, dim=1)
        z, r = torch.sigmoid(z), torch.sigmoid(r)
        o_c = self.fx_s_b(inputs) + self.fh_s_b(child_h) + self.fs_b(sememe_h)
        o_c = torch.sigmoid(o_c)
        h_telta = self.fx_b(inputs) + self.Uh_b(torch.mul(r, child_h))
        h_telta = torch.tanh(h_telta)
        h = torch.mul((1 - z), child_h) + torch.mul(z, h_telta) + torch.mul(o_c, torch.tanh(self.W_c_b(sememe_h)))
        return h
    def forward(self, sent, sent_len, sememe_data):
        """Run the bidirectional GRU over a padded batch.

        sent: (max_time, batch, word_dim) embeddings; sent_len: array of true
        lengths; sememe_data: multi-hot sememe annotations. Returns the last
        valid forward state concatenated with the first backward state,
        shape (batch, 2 * mem_dim).
        """
        sememe_h = self.sememe_sum(sememe_data)
        # sort by decreasing length so each step processes only the still
        # active prefix of the batch (manual sequence "packing")
        sent_len_sorted, idx_sort = np.sort(sent_len)[::-1], np.argsort(-sent_len)
        sent_len_sorted = sent_len_sorted.copy()  # drop the negative stride
        idx_unsort = np.argsort(idx_sort)
        idx_sort = torch.from_numpy(idx_sort).cuda()
        sent = sent.index_select(1, idx_sort)
        sememe_h = sememe_h.index_select(1, idx_sort)
        max_time, batch_size, _ = sent.size()
        # pack_length[t] = number of sequences still active at time step t
        # (np.int was removed from NumPy; int64 preserves the old behavior)
        pack_length = np.zeros([max_time, 1], dtype=np.int64)
        time_point = batch_size - 1
        last_point = 0
        while (True):
            pack_length[last_point: sent_len_sorted[time_point]] = time_point + 1
            last_point = sent_len_sorted[time_point]
            if (sent_len_sorted[time_point] == max_time):
                break
            time_point = time_point - 1
        pack_length = torch.from_numpy(pack_length).cuda()
        # forward sweep; finished sequences are padded back with zero rows
        output_forward = []
        hx_forward = sent[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_()
        for time in range(max_time):
            next_hx = self.node_forward(sent[time, 0:pack_length[time]], hx_forward, sememe_h[time, 0:pack_length[time]])
            output_forward.append(torch.cat([next_hx, torch.zeros([batch_size - next_hx.size()[0], self.mem_dim], device='cuda')], dim=0))
            if (time < max_time - 1):
                hx_forward = next_hx[0:pack_length[time + 1]]
        # backward sweep; the state grows with zero rows as sequences join
        output_backward = [[] for i in range(max_time)]
        hx_backward = sent[0][0].detach().new(pack_length[max_time - 1], self.mem_dim).fill_(0.).requires_grad_()
        for time in range(max_time):
            next_hx = self.node_backward(sent[max_time - time - 1, 0:pack_length[max_time - time - 1]], hx_backward, sememe_h[max_time - time - 1, 0:pack_length[max_time - time - 1]])
            output_backward[max_time - time - 1] = torch.cat([next_hx, torch.zeros([batch_size - next_hx.size()[0], self.mem_dim], device='cuda')], dim=0)
            if (time < max_time - 1):
                hx_backward = torch.cat([next_hx, torch.zeros([pack_length[max_time - time - 2] - next_hx.size()[0], self.mem_dim]).cuda()], dim=0)
        a = torch.stack(output_forward, dim=0)
        b = torch.stack(output_backward, dim=0)
        # undo the length sort
        idx_unsort = torch.from_numpy(idx_unsort).cuda()
        sent_output_forward = a.index_select(1, idx_unsort)
        sent_output_backward = b.index_select(1, idx_unsort)
        final_output_forward = torch.stack([sent_output_forward[sent_len[i] - 1][i] for i in range(batch_size)], dim=0)
        final_output = torch.cat([final_output_forward, sent_output_backward[0]], dim=1)
        return final_output
    def sememe_sum(self, input_s):
        """Map multi-hot sememe annotations to summed sememe embeddings."""
        emb_sememe = self.emb_sememe.weight
        input_sememe = []
        for i in range(input_s.size()[0]):
            input_sememe.append(torch.mm(input_s[i].float(), emb_sememe))
        input_sememe = torch.stack(input_sememe, dim=0)
        return input_sememe
    def set_w2v_path(self, w2v_path):
        """Remember where the word2vec text file lives."""
        self.w2v_path = w2v_path
    def get_word_dict(self, sentences, tokenize=True):
        """Collect the vocabulary of `sentences`, always including bos/eos."""
        word_dict = {}
        sentences = [s.split() if not tokenize else self.tokenize(s) for s in sentences]
        for sent in sentences:
            for word in sent:
                if word not in word_dict:
                    word_dict[word] = ''
        word_dict[self.bos] = ''
        word_dict[self.eos] = ''
        return word_dict
    def get_w2v(self, word_dict):
        """Load w2v vectors for exactly the words present in `word_dict`."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        word_vec = {}
        with open(self.w2v_path, encoding='utf-8') as f:
            for line in f:
                word, vec = line.split(' ', 1)
                if word in word_dict:
                    # text-mode np.fromstring is deprecated; parse explicitly
                    word_vec[word] = np.array(vec.split(), dtype=np.float64)
        print('Found %s(/%s) words with w2v vectors' % (len(word_vec), len(word_dict)))
        return word_vec
    def get_w2v_k(self, K):
        """Load the first K w2v vectors, then keep scanning for bos/eos."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        k = 0
        word_vec = {}
        with open(self.w2v_path, encoding='utf-8') as f:
            for line in f:
                word, vec = line.split(' ', 1)
                if k < K:  # was `k <= K`, which loaded K + 1 vectors
                    word_vec[word] = np.array(vec.split(), dtype=np.float64)
                    k += 1
                if k >= K:
                    if word in [self.bos, self.eos]:
                        word_vec[word] = np.array(vec.split(), dtype=np.float64)
                if k >= K and all([w in word_vec for w in [self.bos, self.eos]]):
                    break
        return word_vec
    def build_vocab(self, sentences, tokenize=True):
        """Build self.word_vec from the words appearing in `sentences`."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        word_dict = self.get_word_dict(sentences, tokenize)
        self.word_vec = self.get_w2v(word_dict)
        print('Vocab size : %s' % (len(self.word_vec)))
    def build_vocab_k_words(self, K):
        """Build self.word_vec from the K most frequent w2v words."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        self.word_vec = self.get_w2v_k(K)
        print('Vocab size : %s' % (K))
    def update_vocab(self, sentences, tokenize=True):
        """Add vectors for words of `sentences` not yet in self.word_vec."""
        assert hasattr(self, 'w2v_path'), 'warning : w2v path not set'
        assert hasattr(self, 'word_vec'), 'build_vocab before updating it'
        word_dict = self.get_word_dict(sentences, tokenize)
        # keep only genuinely new words
        for word in self.word_vec:
            if word in word_dict:
                del word_dict[word]
        if word_dict:
            new_word_vec = self.get_w2v(word_dict)
            self.word_vec.update(new_word_vec)
        else:
            new_word_vec = []
        print('New vocab size : %s (added %s words)' % (len(self.word_vec), len(new_word_vec)))
    def is_cuda(self):
        # forward() hard-codes CUDA tensors, so the encoder is always "gpu"
        return True
    def tokenize(self, s):
        """Tokenize with NLTK, optionally approximating MOSES tokenization."""
        from nltk.tokenize import word_tokenize
        if self.moses_tok:
            s = ' '.join(word_tokenize(s))
            s = s.replace(" n't ", "n 't ")  # HACK to get ~MOSES tokenization
            return s.split()
        else:
            return word_tokenize(s)
    def prepare_samples(self, sentences, bsize, tokenize, verbose):
        """Tokenize, wrap in <s>/</s>, drop OOV words, sort by length desc."""
        sentences = [[self.bos] + s.split() + [self.eos] if not tokenize else
                     [self.bos] + self.tokenize(s) + [self.eos] for s in sentences]
        n_w = np.sum([len(x) for x in sentences])
        # filter out words without w2v vectors
        for i in range(len(sentences)):
            s_f = [word for word in sentences[i] if word in self.word_vec]
            if not s_f:
                import warnings
                warnings.warn('No words in "%s" (idx=%s) have w2v vectors. \
                Replacing by "</s>"..' % (sentences[i], i))
                s_f = [self.eos]
            sentences[i] = s_f
        lengths = np.array([len(s) for s in sentences])
        n_wk = np.sum(lengths)
        if verbose:
            print('Nb words kept : %s/%s (%.1f%s)' % (
                n_wk, n_w, 100.0 * n_wk / n_w, '%'))
        # sort by decreasing length; dtype=object because rows are ragged
        # (plain np.array(ragged) raises on modern NumPy)
        lengths, idx_sort = np.sort(lengths)[::-1], np.argsort(-lengths)
        sentences = np.array(sentences, dtype=object)[idx_sort]
        return sentences, lengths, idx_sort
    def get_batch(self, batch, sememe, emb_dim=300):
        """Build (max_len, bsize, emb_dim) embeddings and multi-hot sememes.

        `batch` must be sorted by decreasing length; missing positions stay
        zero-padded. emb_dim was previously ignored (300 hard-coded).
        """
        embed = np.zeros((len(batch[0]), len(batch), emb_dim))
        # 2186 is presumably the sememe inventory size (should equal
        # config['sememe_size']) — TODO confirm
        sememe_data = np.zeros((len(batch[0]), len(batch), 2186), dtype=np.uint8)
        for i in range(len(batch)):
            for j in range(len(batch[i])):
                embed[j, i, :] = self.word_vec[batch[i][j]]
                for k in sememe.read_word_sememe(batch[i][j]):
                    sememe_data[j, i, k] = 1
        return torch.from_numpy(embed).float(), torch.from_numpy(sememe_data).cuda()
    def encode(self, sentences, bsize=64, tokenize=True, verbose=False):
        """Encode raw sentences to a (n, 2 * mem_dim) numpy array."""
        tic = time.time()
        sentences, lengths, idx_sort = self.prepare_samples(
            sentences, bsize, tokenize, verbose)
        embeddings = []
        for stidx in range(0, len(sentences), bsize):
            batch, batch_s = self.get_batch(sentences[stidx:stidx + bsize], self.sememe)
            if self.is_cuda():
                batch = batch.cuda()
            with torch.no_grad():
                batch = self.forward(batch, lengths[stidx:stidx + bsize], batch_s).data.cpu().numpy()
            embeddings.append(batch)
        embeddings = np.vstack(embeddings)
        # restore the caller's original sentence order
        idx_unsort = np.argsort(idx_sort)
        embeddings = embeddings[idx_unsort]
        if verbose:
            print('Speed : %.1f sentences/s (%s mode, bsize=%s)' % (
                len(embeddings) / (time.time() - tic),
                'gpu' if self.is_cuda() else 'cpu', bsize))
        return embeddings
class BIGRU_cell_bert_baseline(nn.Module):
    """Bidirectional GRU sentence encoder whose auxiliary signal is a 512-d
    word-definition ("sense") vector summarized by a GRU, instead of sememe
    embeddings. The definition state enters the z/r gates and the candidate
    state inside the reset gate.

    Fixes vs. the previous revision: removed NumPy aliases ``np.int`` /
    ``np.float`` / text-mode ``np.fromstring`` (removed/deprecated in modern
    NumPy), ragged ``np.array`` now uses ``dtype=object``, ``get_batch``
    honors its ``emb_dim`` parameter, ``read_lemmatization`` no longer leaks
    its file handle, and ``forward`` now reorders the definition states
    together with the sorted batch.
    """
    def __init__(self, config):
        super(BIGRU_cell_bert_baseline, self).__init__()
        self.enc_lstm_dim = config['enc_lstm_dim']
        # 512 = width of the definition ("sense") vectors loaded below
        self.sememesumlstm = SememeSumLstm(512, self.enc_lstm_dim)
        self.sememesumGRU = SememeSumGRU(512, self.enc_lstm_dim)
        self.in_dim = config['word_emb_dim']
        self.mem_dim = config['enc_lstm_dim']
        # "2 *" on the output packs the update (z) and reset (r) gate
        # projections into one matrix; they are separated later with split.
        self.ioux = nn.Linear(self.in_dim, 2 * self.mem_dim)
        self.ioux_b = nn.Linear(self.in_dim, 2 * self.mem_dim)
        self.iouh = nn.Linear(self.mem_dim, 2 * self.mem_dim)
        self.iouh_b = nn.Linear(self.mem_dim, 2 * self.mem_dim)
        self.ious = nn.Linear(self.mem_dim, 2 * self.mem_dim)
        self.ious_b = nn.Linear(self.mem_dim, 2 * self.mem_dim)
        self.fx = nn.Linear(self.in_dim, self.mem_dim)
        self.fx_b = nn.Linear(self.in_dim, self.mem_dim)
        self.Uh = nn.Linear(self.mem_dim, self.mem_dim)
        self.Uh_b = nn.Linear(self.mem_dim, self.mem_dim)
        self.max_pad = True
        self.reset_parameters()
        self.bos = '<s>'
        self.eos = '</s>'
        self.moses_tok = False
        # external resources: inflected-form -> lemma map, and per-lemma
        # lists of (pos, 512-d tensor) sense vectors
        self.dic_lemma = self.read_lemmatization('../../NLI/dataset/lemmatization.txt')
        self.sense_tensor_dict = np.load('../../PrepareSememeDict/sense_tensor_dict.npy', allow_pickle=True).item()
    def reset_parameters(self):
        """Kaiming-initialize all gate weights; zero all biases."""
        layers = [self.ioux, self.iouh, self.ious, self.fx, self.Uh,
                  self.ioux_b, self.iouh_b, self.ious_b, self.fx_b, self.Uh_b]
        for layer in layers:
            init.kaiming_normal_(layer.weight)
            if layer.bias is not None:
                init.constant_(layer.bias, val=0)
    def node_forward(self, inputs, hx, sememe_h):
        """One forward-direction GRU step; the definition state joins the
        gates and the reset-gated candidate computation."""
        child_h = hx
        iou = self.ioux(inputs) + self.iouh(child_h) + self.ious(sememe_h)
        z, r = torch.split(iou, iou.size(1) // 2, dim=1)  # update / reset
        z, r = torch.sigmoid(z), torch.sigmoid(r)
        h_telta = self.fx(inputs) + self.Uh(torch.mul(r, child_h) + torch.mul(r, sememe_h))
        h_telta = torch.tanh(h_telta)
        h = torch.mul((1 - z), child_h) + torch.mul(z, h_telta)
        return h
    def node_backward(self, inputs, hx, sememe_h):
        """One backward-direction GRU step (mirror of node_forward)."""
        child_h = hx
        iou = self.ioux_b(inputs) + self.iouh_b(child_h) + self.ious_b(sememe_h)
        z, r = torch.split(iou, iou.size(1) // 2, dim=1)
        z, r = torch.sigmoid(z), torch.sigmoid(r)
        h_telta = self.fx_b(inputs) + self.Uh_b(torch.mul(r, child_h) + torch.mul(r, sememe_h))
        h_telta = torch.tanh(h_telta)
        h = torch.mul((1 - z), child_h) + torch.mul(z, h_telta)
        return h
    def forward(self, sent, sent_len, def_vec):
        """Run the bidirectional GRU over a padded batch.

        sent: (max_time, batch, word_dim) embeddings; sent_len: array of true
        lengths; def_vec: per-word 512-d definition vectors. Returns the last
        valid forward state concatenated with the first backward state,
        shape (batch, 2 * mem_dim).
        """
        sememe_h = self.sememe_sum(def_vec)
        # sort by decreasing length so each step processes only the still
        # active prefix of the batch (manual sequence "packing")
        sent_len_sorted, idx_sort = np.sort(sent_len)[::-1], np.argsort(-sent_len)
        sent_len_sorted = sent_len_sorted.copy()  # drop the negative stride
        idx_unsort = np.argsort(idx_sort)
        idx_sort = torch.from_numpy(idx_sort).cuda()
        sent = sent.index_select(1, idx_sort)
        # keep the definition states aligned with the re-ordered batch — the
        # sibling encoders do this; it was missing here, misaligning sent and
        # sememe_h whenever the batch was not already length-sorted
        sememe_h = sememe_h.index_select(1, idx_sort)
        max_time, batch_size, _ = sent.size()
        # pack_length[t] = number of sequences still active at time step t
        # (np.int was removed from NumPy; int64 preserves the old behavior)
        pack_length = np.zeros([max_time, 1], dtype=np.int64)
        time_point = batch_size - 1
        last_point = 0
        while (True):
            pack_length[last_point: sent_len_sorted[time_point]] = time_point + 1
            last_point = sent_len_sorted[time_point]
            if (sent_len_sorted[time_point] == max_time):
                break
            time_point = time_point - 1
        pack_length = torch.from_numpy(pack_length).cuda()
        # forward sweep; finished sequences are padded back with zero rows
        output_forward = []
        hx_forward = sent[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_()
        for time in range(max_time):
            next_hx = self.node_forward(sent[time, 0:pack_length[time]], hx_forward,
                                        sememe_h[time, 0:pack_length[time]])
            output_forward.append(
                torch.cat([next_hx, torch.zeros([batch_size - next_hx.size()[0], self.mem_dim], device='cuda')], dim=0))
            if (time < max_time - 1):
                hx_forward = next_hx[0:pack_length[time + 1]]
        # backward sweep; the state grows with zero rows as sequences join
        output_backward = [[] for i in range(max_time)]
        hx_backward = sent[0][0].detach().new(pack_length[max_time - 1], self.mem_dim).fill_(0.).requires_grad_()
        for time in range(max_time):
            next_hx = self.node_backward(sent[max_time - time - 1, 0:pack_length[max_time - time - 1]], hx_backward,
                                         sememe_h[max_time - time - 1, 0:pack_length[max_time - time - 1]])
            output_backward[max_time - time - 1] = torch.cat(
                [next_hx, torch.zeros([batch_size - next_hx.size()[0], self.mem_dim], device='cuda')], dim=0)
            if (time < max_time - 1):
                hx_backward = torch.cat(
                    [next_hx, torch.zeros([pack_length[max_time - time - 2] - next_hx.size()[0], self.mem_dim]).cuda()],
                    dim=0)
        a = torch.stack(output_forward, dim=0)
        b = torch.stack(output_backward, dim=0)
        # undo the length sort
        idx_unsort = torch.from_numpy(idx_unsort).cuda()
        sent_output_forward = a.index_select(1, idx_unsort)
        sent_output_backward = b.index_select(1, idx_unsort)
        final_output_forward = torch.stack([sent_output_forward[sent_len[i] - 1][i] for i in range(batch_size)], dim=0)
        final_output = torch.cat([final_output_forward, sent_output_backward[0]], dim=1)
        return final_output
    def sememe_sum(self, input_s):
        """Summarize the raw definition vectors with the auxiliary GRU."""
        input_sememe = []
        for i in range(input_s.size()[0]):
            input_sememe.append(input_s[i].float())
        input_sememe = torch.stack(input_sememe, dim=0)
        sememe_h = self.sememesumGRU(input_sememe)
        return sememe_h
    def set_w2v_path(self, w2v_path):
        """Remember where the word2vec text file lives."""
        self.w2v_path = w2v_path
    def get_word_dict(self, sentences, tokenize=True):
        """Collect the vocabulary of `sentences`, always including bos/eos."""
        word_dict = {}
        sentences = [s.split() if not tokenize else self.tokenize(s) for s in sentences]
        for sent in sentences:
            for word in sent:
                if word not in word_dict:
                    word_dict[word] = ''
        word_dict[self.bos] = ''
        word_dict[self.eos] = ''
        return word_dict
    def get_w2v(self, word_dict):
        """Load w2v vectors for exactly the words present in `word_dict`."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        word_vec = {}
        with open(self.w2v_path, encoding='utf-8') as f:
            for line in f:
                word, vec = line.split(' ', 1)
                if word in word_dict:
                    # text-mode np.fromstring is deprecated; parse explicitly
                    word_vec[word] = np.array(vec.split(), dtype=np.float64)
        print('Found %s(/%s) words with w2v vectors' % (len(word_vec), len(word_dict)))
        return word_vec
    def get_w2v_k(self, K):
        """Load the first K w2v vectors, then keep scanning for bos/eos."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        k = 0
        word_vec = {}
        with open(self.w2v_path, encoding='utf-8') as f:
            for line in f:
                word, vec = line.split(' ', 1)
                if k < K:  # was `k <= K`, which loaded K + 1 vectors
                    word_vec[word] = np.array(vec.split(), dtype=np.float64)
                    k += 1
                if k >= K:
                    if word in [self.bos, self.eos]:
                        word_vec[word] = np.array(vec.split(), dtype=np.float64)
                if k >= K and all([w in word_vec for w in [self.bos, self.eos]]):
                    break
        return word_vec
    def build_vocab(self, sentences, tokenize=True):
        """Build self.word_vec from the words appearing in `sentences`."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        word_dict = self.get_word_dict(sentences, tokenize)
        self.word_vec = self.get_w2v(word_dict)
        print('Vocab size : %s' % (len(self.word_vec)))
    def build_vocab_k_words(self, K):
        """Build self.word_vec from the K most frequent w2v words."""
        assert hasattr(self, 'w2v_path'), 'w2v path not set'
        self.word_vec = self.get_w2v_k(K)
        print('Vocab size : %s' % (K))
    def update_vocab(self, sentences, tokenize=True):
        """Add vectors for words of `sentences` not yet in self.word_vec."""
        assert hasattr(self, 'w2v_path'), 'warning : w2v path not set'
        assert hasattr(self, 'word_vec'), 'build_vocab before updating it'
        word_dict = self.get_word_dict(sentences, tokenize)
        # keep only genuinely new words
        for word in self.word_vec:
            if word in word_dict:
                del word_dict[word]
        if word_dict:
            new_word_vec = self.get_w2v(word_dict)
            self.word_vec.update(new_word_vec)
        else:
            new_word_vec = []
        print('New vocab size : %s (added %s words)' % (len(self.word_vec), len(new_word_vec)))
    def is_cuda(self):
        # forward() hard-codes CUDA tensors, so the encoder is always "gpu"
        return True
    def tokenize(self, s):
        """Tokenize with NLTK, optionally approximating MOSES tokenization."""
        from nltk.tokenize import word_tokenize
        if self.moses_tok:
            s = ' '.join(word_tokenize(s))
            s = s.replace(" n't ", "n 't ")  # HACK to get ~MOSES tokenization
            return s.split()
        else:
            return word_tokenize(s)
    def prepare_samples(self, sentences, bsize, tokenize, verbose):
        """Tokenize, wrap in <s>/</s>, drop OOV words, sort by length desc."""
        sentences = [[self.bos] + s.split() + [self.eos] if not tokenize else
                     [self.bos] + self.tokenize(s) + [self.eos] for s in sentences]
        n_w = np.sum([len(x) for x in sentences])
        # filter out words without w2v vectors
        for i in range(len(sentences)):
            s_f = [word for word in sentences[i] if word in self.word_vec]
            if not s_f:
                import warnings
                warnings.warn('No words in "%s" (idx=%s) have w2v vectors. \
                Replacing by "</s>"..' % (sentences[i], i))
                s_f = [self.eos]
            sentences[i] = s_f
        lengths = np.array([len(s) for s in sentences])
        n_wk = np.sum(lengths)
        if verbose:
            print('Nb words kept : %s/%s (%.1f%s)' % (
                n_wk, n_w, 100.0 * n_wk / n_w, '%'))
        # sort by decreasing length; dtype=object because rows are ragged
        # (plain np.array(ragged) raises on modern NumPy)
        lengths, idx_sort = np.sort(lengths)[::-1], np.argsort(-lengths)
        sentences = np.array(sentences, dtype=object)[idx_sort]
        return sentences, lengths, idx_sort
    def read_lemmatization(self, lemma_dir):
        """Load a whitespace-separated 'lemma inflected-form' file into an
        inflected-form -> lemma dict (file handle properly closed)."""
        dic_lemma = {}
        with open(lemma_dir) as f:
            for line in f:
                line = line.strip().split()
                dic_lemma[line[1]] = line[0]
        return dic_lemma
    def get_def_vec_by_word(self, word):
        """Return the averaged 512-d sense vector of `word` (lemmatized,
        lower-cased), or a zero vector when the word is unknown."""
        word_lower = word.lower()
        if word_lower in self.dic_lemma:
            word_lower = self.dic_lemma[word_lower]
        if word_lower in self.sense_tensor_dict:
            tensor_list = self.sense_tensor_dict[word_lower]
            base_tensor = np.zeros(512)
            for pos, tensor in tensor_list:
                base_tensor = np.add(base_tensor, tensor)
            base_tensor = base_tensor / float(len(tensor_list))
            return base_tensor
        else:
            return np.zeros(512)
    def get_batch(self, batch, emb_dim=300):
        """Build (max_len, bsize, emb_dim) embeddings plus the matching
        (max_len, bsize, 512) definition-vector tensor (on CUDA)."""
        embed = np.zeros((len(batch[0]), len(batch), emb_dim))
        # np.float was removed from NumPy; float64 preserves the old behavior
        def_data = np.zeros((len(batch[0]), len(batch), 512), dtype=np.float64)
        for i in range(len(batch)):
            for j in range(len(batch[i])):
                embed[j, i, :] = self.word_vec[batch[i][j]]
                def_data[j, i] = self.get_def_vec_by_word(batch[i][j])
        return torch.from_numpy(embed).float(), torch.from_numpy(def_data).cuda()
    def encode(self, sentences, bsize=64, tokenize=True, verbose=False):
        """Encode raw sentences to a (n, 2 * mem_dim) numpy array."""
        tic = time.time()
        sentences, lengths, idx_sort = self.prepare_samples(
            sentences, bsize, tokenize, verbose)
        embeddings = []
        for stidx in range(0, len(sentences), bsize):
            batch, batch_s = self.get_batch(sentences[stidx:stidx + bsize], 300)
            if self.is_cuda():
                batch = batch.cuda()
            with torch.no_grad():
                batch = self.forward(batch, lengths[stidx:stidx + bsize], batch_s).data.cpu().numpy()
            embeddings.append(batch)
        embeddings = np.vstack(embeddings)
        # restore the caller's original sentence order
        idx_unsort = np.argsort(idx_sort)
        embeddings = embeddings[idx_unsort]
        if verbose:
            print('Speed : %.1f sentences/s (%s mode, bsize=%s)' % (
                len(embeddings) / (time.time() - tic),
                'gpu' if self.is_cuda() else 'cpu', bsize))
        return embeddings
class BIGRU_cell(nn.Module):
    def __init__(self, config, sememe):
        """Build the sememe-aware bidirectional GRU "cell" encoder.

        config: dict providing 'enc_lstm_dim', 'sememe_dim', 'sememe_size'
        and 'word_emb_dim'; sememe: provider of word -> sememe annotations
        (stored for use by get_batch/encode).
        """
        super(BIGRU_cell, self).__init__()
        self.enc_lstm_dim = config['enc_lstm_dim']
        self.sememe_dim = config['sememe_dim']
        self.sememesumlstm = SememeSumLstm(self.sememe_dim, self.enc_lstm_dim)
        self.sememesumGRU = SememeSumGRU(self.sememe_dim, self.enc_lstm_dim)
        self.sememe_dim = config['sememe_dim']
        self.sememe_size = config['sememe_size']
        self.emb_sememe = nn.Embedding(self.sememe_size, self.sememe_dim)
        self.in_dim = config['word_emb_dim']
        self.mem_dim = config['enc_lstm_dim']
        #self.pool_type = config['pool_type']
        # (original note said "x3 for the 3 gate matrices, split apart later";
        # here the factor is 2: update and reset gates, separated with split)
        self.ioux = nn.Linear(self.in_dim, 2 * self.mem_dim)
        self.ioux_b = nn.Linear(self.in_dim, 2 * self.mem_dim)
        self.iouh = nn.Linear(self.mem_dim, 2 * self.mem_dim)
        self.iouh_b = nn.Linear(self.mem_dim, 2 * self.mem_dim)
        # sememe-state projections feeding the joint z/r gates
        self.ious = nn.Linear(self.mem_dim, 2 * self.mem_dim)
        self.ious_b = nn.Linear(self.mem_dim, 2 * self.mem_dim)
        self.fx = nn.Linear(self.in_dim, self.mem_dim)
        self.fx_b = nn.Linear(self.in_dim, self.mem_dim)
        self.Uh = nn.Linear(self.mem_dim, self.mem_dim)
        self.Uh_b = nn.Linear(self.mem_dim, self.mem_dim)
        self.max_pad = True
        self.reset_parameters()
        self.sememe = sememe
        # sentence boundary markers used by the vocabulary helpers
        self.bos = '<s>'
        self.eos = '</s>'
        self.max_pad = True
        self.moses_tok = False
    def reset_parameters(self):
        """Kaiming-initialize all gate weights and zero their biases."""
        layers = [self.ioux, self.iouh, self.ious, self.fx, self.Uh, self.ioux_b, self.iouh_b, self.ious_b, self.fx_b, self.Uh_b]
        for layer in layers:
            init.kaiming_normal_(layer.weight)
            if layer.bias is not None:
                init.constant_(layer.bias, val=0)
    def node_forward(self, inputs, hx, sememe_h):
        """One forward-direction GRU step over the active batch prefix.

        inputs: (active, in_dim) word embeddings; hx: (active, mem_dim)
        previous hidden state; sememe_h: (active, mem_dim) sememe state.
        Returns the new hidden state.
        """
        child_h = hx
        # joint update/reset gates from word, hidden and sememe projections
        iou = self.ioux(inputs) + self.iouh(child_h) + self.ious(sememe_h)
        z, r = torch.split(iou, iou.size(1) // 2, dim=1)
        z, r = torch.sigmoid(z), torch.sigmoid(r)
        # candidate state: the reset gate also modulates the sememe state
        h_telta = self.fx(inputs) + self.Uh(torch.mul(r, child_h)+ torch.mul(r, sememe_h))
        h_telta = torch.tanh(h_telta)
        h = torch.mul((1-z), child_h) + torch.mul(z, h_telta)
        return h
def node_backward(self, inputs, hx, sememe_h):
child_h = hx
iou = self.ioux_b(inputs) + self.iouh_b(child_h) + self.ious_b(sememe_h)
z, r = torch.split(iou, iou.size(1) // 2, dim=1)
z, r = torch.sigmoid(z), torch.sigmoid(r)
h_telta = self.fx_b(inputs) + self.Uh_b(torch.mul(r, child_h) + torch.mul(r, sememe_h))
h_telta = torch.tanh(h_telta)
h = torch.mul((1-z), child_h) + torch.mul(z, h_telta)
return h
def forward(self, sent, sent_len, sememe_data):
sememe_h = self.sememe_sum(sememe_data)
sent_len_sorted, idx_sort = np.sort(sent_len)[::-1], np.argsort(-sent_len)
sent_len_sorted = sent_len_sorted.copy()
idx_unsort = np.argsort(idx_sort)
idx_sort = torch.from_numpy(idx_sort).cuda()
sent = sent.index_select(1, idx_sort)
max_time, batch_size, _ = sent.size()
pack_length = np.zeros([max_time, 1], dtype = np.int)
time_point = batch_size-1
last_point = 0
while(True):
pack_length[last_point: sent_len_sorted[time_point]] = time_point+1
last_point = sent_len_sorted[time_point]
if(sent_len_sorted[time_point] == max_time):
break
time_point = time_point-1
pack_length = torch.from_numpy(pack_length).cuda()
output_forward = []
hx_forward = sent[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_()
for time in range(max_time):
next_hx = self.node_forward(sent[time, 0:pack_length[time]], hx_forward, sememe_h[time, 0:pack_length[time]])
output_forward.append(torch.cat([next_hx, torch.zeros([batch_size-next_hx.size()[0], self.mem_dim], device = 'cuda')], dim = 0))
if(time < max_time-1):
hx_forward = next_hx[0:pack_length[time+1]]
output_backward = [[] for i in range(max_time)]
hx_backward = sent[0][0].detach().new(pack_length[max_time-1], self.mem_dim).fill_(0.).requires_grad_()
for time in range(max_time):
next_hx = self.node_backward(sent[max_time-time-1, 0:pack_length[max_time-time-1]], hx_backward, sememe_h[max_time-time-1, 0:pack_length[max_time-time-1]])
output_backward[max_time-time-1] = torch.cat([next_hx, torch.zeros([batch_size-next_hx.size()[0], self.mem_dim], device = 'cuda')], dim = 0)
if(time < max_time-1):
hx_backward = torch.cat([next_hx, torch.zeros([pack_length[max_time-time-2]-next_hx.size()[0], self.mem_dim]).cuda()], dim = 0)
a = torch.stack(output_forward, dim = 0)
b = torch.stack(output_backward, dim = 0)
idx_unsort = torch.from_numpy(idx_unsort).cuda()
sent_output_forward = a.index_select(1, idx_unsort)
sent_output_backward = b.index_select(1, idx_unsort)
final_output_forward = torch.stack([sent_output_forward[sent_len[i]-1][i] for i in range(batch_size)], dim = 0)
final_output = torch.cat([final_output_forward, sent_output_backward[0]], dim = 1)
return final_output
def sememe_sum(self, input_s):
emb_sememe = self.emb_sememe.weight
input_sememe = []
for i in range(input_s.size()[0]):
input_sememe.append(torch.mm(input_s[i].float(), emb_sememe))
input_sememe = torch.stack(input_sememe, dim = 0)
sememe_h = self.sememesumGRU(input_sememe)
return sememe_h
def set_w2v_path(self, w2v_path):
self.w2v_path = w2v_path
def get_word_dict(self, sentences, tokenize=True):
# create vocab of words
word_dict = {}
sentences = [s.split() if not tokenize else self.tokenize(s) for s in sentences]
for sent in sentences:
for word in sent:
if word not in word_dict:
word_dict[word] = ''
word_dict[self.bos] = ''
word_dict[self.eos] = ''
return word_dict
def get_w2v(self, word_dict):
assert hasattr(self, 'w2v_path'), 'w2v path not set'
# create word_vec with w2v vectors
word_vec = {}
with open(self.w2v_path, encoding='utf-8') as f:
for line in f:
word, vec = line.split(' ', 1)
if word in word_dict:
word_vec[word] = np.fromstring(vec, sep=' ')
print('Found %s(/%s) words with w2v vectors' % (len(word_vec), len(word_dict)))
return word_vec
def get_w2v_k(self, K):
assert hasattr(self, 'w2v_path'), 'w2v path not set'
# create word_vec with k first w2v vectors
k = 0
word_vec = {}
with open(self.w2v_path, encoding='utf-8') as f:
for line in f:
word, vec = line.split(' ', 1)
if k <= K:
word_vec[word] = np.fromstring(vec, sep=' ')
k += 1
if k > K:
if word in [self.bos, self.eos]:
word_vec[word] = np.fromstring(vec, sep=' ')
if k > K and all([w in word_vec for w in [self.bos, self.eos]]):
break
return word_vec
def build_vocab(self, sentences, tokenize=True):
assert hasattr(self, 'w2v_path'), 'w2v path not set'
word_dict = self.get_word_dict(sentences, tokenize)
self.word_vec = self.get_w2v(word_dict)
print('Vocab size : %s' % (len(self.word_vec)))
# build w2v vocab with k most frequent words
def build_vocab_k_words(self, K):
assert hasattr(self, 'w2v_path'), 'w2v path not set'
self.word_vec = self.get_w2v_k(K)
print('Vocab size : %s' % (K))
def update_vocab(self, sentences, tokenize=True):
assert hasattr(self, 'w2v_path'), 'warning : w2v path not set'
assert hasattr(self, 'word_vec'), 'build_vocab before updating it'
word_dict = self.get_word_dict(sentences, tokenize)
# keep only new words
for word in self.word_vec:
if word in word_dict:
del word_dict[word]
# udpate vocabulary
if word_dict:
new_word_vec = self.get_w2v(word_dict)
self.word_vec.update(new_word_vec)
else:
new_word_vec = []
print('New vocab size : %s (added %s words)'% (len(self.word_vec), len(new_word_vec)))
'''
def get_batch(self, batch):
# sent in batch in decreasing order of lengths
# batch: (bsize, max_len, word_dim)
embed = np.zeros((len(batch[0]), len(batch), self.word_emb_dim))
for i in range(len(batch)):
for j in range(len(batch[i])):
embed[j, i, :] = self.word_vec[batch[i][j]]
return torch.FloatTensor(embed)
'''
def is_cuda(self):
# either all weights are on cpu or they are on gpu
return True
#return self.enc_lstm.bias_hh_l0.data.is_cuda
def tokenize(self, s):
from nltk.tokenize import word_tokenize
if self.moses_tok:
s = ' '.join(word_tokenize(s))
s = s.replace(" n't ", "n 't ") # HACK to get ~MOSES tokenization
return s.split()
else:
return word_tokenize(s)
def prepare_samples(self, sentences, bsize, tokenize, verbose):
sentences = [[self.bos] + s.split() + [self.eos] if not tokenize else
[self.bos] + self.tokenize(s) + [self.eos] for s in sentences]
n_w = np.sum([len(x) for x in sentences])
# filters words without w2v vectors
for i in range(len(sentences)):
s_f = [word for word in sentences[i] if word in self.word_vec]
if not s_f:
import warnings
warnings.warn('No words in "%s" (idx=%s) have w2v vectors. \
Replacing by "</s>"..' % (sentences[i], i))
s_f = [self.eos]
sentences[i] = s_f
lengths = np.array([len(s) for s in sentences])
n_wk = np.sum(lengths)
if verbose:
print('Nb words kept : %s/%s (%.1f%s)' % (
n_wk, n_w, 100.0 * n_wk / n_w, '%'))
# sort by decreasing length
lengths, idx_sort = np.sort(lengths)[::-1], np.argsort(-lengths)
sentences = np.array(sentences)[idx_sort]
return sentences, lengths, idx_sort
def get_batch(self, batch, sememe, emb_dim=300, size=300):
embed = np.zeros((len(batch[0]), len(batch), 300))
sememe_data = np.zeros((len(batch[0]), len(batch), size), dtype = np.uint8)
for i in range(len(batch)):
for j in range(len(batch[i])):
embed[j, i, :] = self.word_vec[batch[i][j]]
for k in sememe.read_word_sememe(batch[i][j]):
sememe_data[j, i, k] = 1
return torch.from_numpy(embed).float(), torch.from_numpy(sememe_data).cuda()
def encode(self, sentences, bsize=64, tokenize=True, verbose=False, size=300):
tic = time.time()
sentences, lengths, idx_sort = self.prepare_samples(
sentences, bsize, tokenize, verbose)
embeddings = []
for stidx in range(0, len(sentences), bsize):
batch, batch_s = self.get_batch(sentences[stidx:stidx + bsize], self.sememe, 300, size=size)
if self.is_cuda():
batch = batch.cuda()
with torch.no_grad():
batch = self.forward(batch, lengths[stidx:stidx + bsize], batch_s).data.cpu().numpy()
embeddings.append(batch)
embeddings = np.vstack(embeddings)
# unsort
idx_unsort = np.argsort(idx_sort)
embeddings = embeddings[idx_unsort]
if verbose:
print('Speed : %.1f sentences/s (%s mode, bsize=%s)' % (
len(embeddings)/(time.time()-tic),
'gpu' if self.is_cuda() else 'cpu', bsize))
return embeddings
class BIGRU_extra_void(nn.Module):
    """Bidirectional GRU whose recurrence ignores sememes; the raw multi-hot
    sememe vectors are used only afterwards, in an attention pooling step over
    each direction's hidden states.
    """

    def __init__(self, config):
        super(BIGRU_extra_void, self).__init__()
        self.enc_lstm_dim = config['enc_lstm_dim']
        self.sememe_dim = config['sememe_dim']
        self.sememesumlstm = SememeSumLstm(self.sememe_dim, self.enc_lstm_dim)
        self.sememesumGRU = SememeSumGRU(self.sememe_dim, self.enc_lstm_dim)
        self.sememe_size = config['sememe_size']
        self.emb_sememe = nn.Embedding(self.sememe_size, self.sememe_dim)
        self.in_dim = config['word_emb_dim']
        self.mem_dim = config['enc_lstm_dim']
        # The 2x output width packs the update (z) and reset (r) gates into a
        # single linear map; torch.split separates them later.
        self.ioux = nn.Linear(self.in_dim, 2 * self.mem_dim)
        self.ioux_b = nn.Linear(self.in_dim, 2 * self.mem_dim)
        self.iouh = nn.Linear(self.mem_dim, 2 * self.mem_dim)
        self.iouh_b = nn.Linear(self.mem_dim, 2 * self.mem_dim)
        self.ious = nn.Linear(self.mem_dim, 2 * self.mem_dim)
        self.ious_b = nn.Linear(self.mem_dim, 2 * self.mem_dim)
        self.fx = nn.Linear(self.in_dim, self.mem_dim)
        self.fx_b = nn.Linear(self.in_dim, self.mem_dim)
        self.Uh = nn.Linear(self.mem_dim, self.mem_dim)
        self.Uh_b = nn.Linear(self.mem_dim, self.mem_dim)
        self.Uh_s = nn.Linear(self.mem_dim, self.mem_dim)
        self.Uh_s_b = nn.Linear(self.mem_dim, self.mem_dim)
        self.max_pad = True
        # Attention-pooling parameters (forward direction).
        self.W_s = nn.Linear(config['sememe_size'], self.mem_dim)
        self.W = nn.Linear(self.mem_dim, self.mem_dim)
        self.query = nn.Embedding(2 * self.mem_dim, 1)
        self.W_p = nn.Linear(self.mem_dim, self.mem_dim)
        self.W_x = nn.Linear(self.mem_dim, self.mem_dim)
        # Attention-pooling parameters (backward direction).
        self.W_s_b = nn.Linear(config['sememe_size'], self.mem_dim)
        self.W_b = nn.Linear(self.mem_dim, self.mem_dim)
        self.query_b = nn.Embedding(2 * self.mem_dim, 1)
        self.W_p_b = nn.Linear(self.mem_dim, self.mem_dim)
        self.W_x_b = nn.Linear(self.mem_dim, self.mem_dim)
        self.reset_parameters()

    def reset_parameters(self):
        """Kaiming-initialise the recurrent linear layers, zero their biases."""
        layers = [self.ioux, self.iouh, self.ious, self.fx, self.Uh, self.Uh_s,
                  self.ioux_b, self.iouh_b, self.ious_b, self.fx_b, self.Uh_b, self.Uh_s_b]
        for layer in layers:
            init.kaiming_normal_(layer.weight)
            if layer.bias is not None:
                init.constant_(layer.bias, val=0)

    def node_forward(self, inputs, hx):
        """One plain (sememe-free) forward GRU step."""
        child_h = hx
        iou = self.ioux(inputs) + self.iouh(child_h)
        z, r = torch.split(iou, iou.size(1) // 2, dim=1)
        z, r = torch.sigmoid(z), torch.sigmoid(r)
        h_telta = self.fx(inputs) + self.Uh(torch.mul(r, child_h))
        h_telta = torch.tanh(h_telta)
        h = torch.mul((1 - z), child_h) + torch.mul(z, h_telta)
        return h

    def node_backward(self, inputs, hx):
        """One plain (sememe-free) backward GRU step."""
        child_h = hx
        iou = self.ioux_b(inputs) + self.iouh_b(child_h)
        z, r = torch.split(iou, iou.size(1) // 2, dim=1)
        z, r = torch.sigmoid(z), torch.sigmoid(r)
        h_telta = self.fx_b(inputs) + self.Uh_b(torch.mul(r, child_h))
        h_telta = torch.tanh(h_telta)
        h = torch.mul((1 - z), child_h) + torch.mul(z, h_telta)
        return h

    def forward(self, sent, sent_len, sememe_data):
        """Encode a padded batch; sememes only steer the attention pooling.

        sent:        (max_time, batch, word_dim) padded word embeddings
        sent_len:    numpy array of true sentence lengths
        sememe_data: (max_time, batch, sememe_size) multi-hot indicators
        Returns a (batch, 2*mem_dim) pooled sentence representation.
        """
        emb_s = sememe_data.float().cuda()
        # Sort by decreasing length so active sequences form a batch prefix.
        sent_len_sorted, idx_sort = np.sort(sent_len)[::-1], np.argsort(-sent_len)
        sent_len_sorted = sent_len_sorted.copy()
        idx_unsort = np.argsort(idx_sort)
        idx_sort = torch.from_numpy(idx_sort).cuda()
        sent = sent.index_select(1, idx_sort)
        max_time, batch_size, _ = sent.size()
        # pack_length[t] = number of sequences still active at time step t.
        # np.int64 replaces the np.int alias removed in NumPy 1.24.
        pack_length = np.zeros([max_time, 1], dtype=np.int64)
        time_point = batch_size - 1
        last_point = 0
        while True:
            pack_length[last_point: sent_len_sorted[time_point]] = time_point + 1
            last_point = sent_len_sorted[time_point]
            if sent_len_sorted[time_point] == max_time:
                break
            time_point = time_point - 1
        pack_length = torch.from_numpy(pack_length).cuda()
        output_forward = []
        # BUGFIX: the original referenced an undefined name `inputs` here;
        # the sorted batch tensor is `sent`.
        hx_forward = sent[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_()
        for time in range(max_time):
            next_hx = self.node_forward(sent[time, 0:pack_length[time]], hx_forward)
            output_forward.append(torch.cat([next_hx, torch.zeros([batch_size - next_hx.size()[0], self.mem_dim], device='cuda')], dim=0))
            if time < max_time - 1:
                hx_forward = next_hx[0:pack_length[time + 1]]
        output_backward = [[] for i in range(max_time)]
        hx_backward = sent[0][0].detach().new(pack_length[max_time - 1], self.mem_dim).fill_(0.).requires_grad_()
        for time in range(max_time):
            next_hx = self.node_backward(sent[max_time - time - 1, 0:pack_length[max_time - time - 1]], hx_backward)
            output_backward[max_time - time - 1] = torch.cat([next_hx, torch.zeros([batch_size - next_hx.size()[0], self.mem_dim], device='cuda')], dim=0)
            if time < max_time - 1:
                hx_backward = torch.cat([next_hx, torch.zeros([pack_length[max_time - time - 2] - next_hx.size()[0], self.mem_dim]).cuda()], dim=0)
        a = torch.stack(output_forward, dim=0)
        b = torch.stack(output_backward, dim=0)
        idx_unsort = torch.from_numpy(idx_unsort).cuda()
        sent_output_forward = a.index_select(1, idx_unsort)
        sent_output_backward = b.index_select(1, idx_unsort)
        new_output_forward = []
        new_output_2_forward = []
        new_output_backward = []
        # Attention pooling (forward): score each hidden state against its
        # projected sememe vector, then combine with the last hidden state.
        for i in range(len(sent_len)):
            hidden_old_forward = sent_output_forward[0:sent_len[i], i, :]
            new_output_2_forward.append(sent_output_forward[sent_len[i] - 1, i])
            hidden = self.W(hidden_old_forward)
            emb_s_sum = emb_s[0:sent_len[i], i, :]
            emb_s_sum = self.W_s(emb_s_sum)
            hidden = torch.cat([hidden, emb_s_sum], dim=1)
            att = torch.tanh(torch.mm(hidden, self.query.weight))
            new_output_forward.append(torch.mm(att.transpose(1, 0), hidden_old_forward))
        new_output_forward = self.W_p(torch.squeeze(torch.stack(new_output_forward, dim=0))) + self.W_x(torch.squeeze(torch.stack(new_output_2_forward, dim=0)))
        new_output_forward = torch.tanh(new_output_forward)
        # Attention pooling (backward): the state at t=0 saw the whole sentence.
        for i in range(len(sent_len)):
            hidden_old_backward = sent_output_backward[0:sent_len[i], i, :]
            hidden = self.W_b(hidden_old_backward)
            emb_s_sum = emb_s[0:sent_len[i], i, :]
            emb_s_sum = self.W_s_b(emb_s_sum)
            hidden = torch.cat([hidden, emb_s_sum], dim=1)
            att = torch.tanh(torch.mm(hidden, self.query_b.weight))
            new_output_backward.append(torch.mm(att.transpose(1, 0), hidden_old_backward))
        new_output_backward = self.W_p_b(torch.squeeze(torch.stack(new_output_backward, dim=0))) + self.W_x_b(sent_output_backward[0])
        new_output_backward = torch.tanh(new_output_backward)
        final_output = torch.cat([new_output_forward, new_output_backward], dim=1)
        return final_output
class BIGRU_extra_concat(nn.Module):
    """Bidirectional GRU that concatenates a sememe embedding to the word
    embedding at every step (hence the 2*in_dim input layers), followed by
    sememe-aware attention pooling of each direction's hidden states.
    """

    def __init__(self, config):
        super(BIGRU_extra_concat, self).__init__()
        self.enc_lstm_dim = config['enc_lstm_dim']
        self.sememe_dim = config['sememe_dim']
        self.sememesumlstm = SememeSumLstm(self.sememe_dim, self.enc_lstm_dim)
        self.sememesumGRU = SememeSumGRU(self.sememe_dim, self.enc_lstm_dim)
        self.sememe_size = config['sememe_size']
        self.emb_sememe = nn.Embedding(self.sememe_size, self.sememe_dim)
        self.in_dim = config['word_emb_dim']
        self.mem_dim = config['enc_lstm_dim']
        # Input layers take [word ; sememe] concatenations, hence 2*in_dim.
        # The 2x output width packs the z and r gates into one linear map.
        self.ioux = nn.Linear(2 * self.in_dim, 2 * self.mem_dim)
        self.ioux_b = nn.Linear(2 * self.in_dim, 2 * self.mem_dim)
        self.iouh = nn.Linear(self.mem_dim, 2 * self.mem_dim)
        self.iouh_b = nn.Linear(self.mem_dim, 2 * self.mem_dim)
        self.fx = nn.Linear(2 * self.in_dim, self.mem_dim)
        self.fx_b = nn.Linear(2 * self.in_dim, self.mem_dim)
        self.Uh = nn.Linear(self.mem_dim, self.mem_dim)
        self.Uh_b = nn.Linear(self.mem_dim, self.mem_dim)
        self.Uh_s = nn.Linear(self.mem_dim, self.mem_dim)
        self.Uh_s_b = nn.Linear(self.mem_dim, self.mem_dim)
        self.max_pad = True
        # Attention-pooling parameters (forward direction).
        self.W_s = nn.Linear(self.in_dim, self.mem_dim)
        self.W = nn.Linear(self.mem_dim, self.mem_dim)
        self.query = nn.Embedding(2 * self.mem_dim, 1)
        self.W_p = nn.Linear(self.mem_dim, self.mem_dim)
        self.W_x = nn.Linear(self.mem_dim, self.mem_dim)
        # Attention-pooling parameters (backward direction).
        self.W_s_b = nn.Linear(self.in_dim, self.mem_dim)
        self.W_b = nn.Linear(self.mem_dim, self.mem_dim)
        self.query_b = nn.Embedding(2 * self.mem_dim, 1)
        self.W_p_b = nn.Linear(self.mem_dim, self.mem_dim)
        self.W_x_b = nn.Linear(self.mem_dim, self.mem_dim)
        self.reset_parameters()

    def reset_parameters(self):
        """Kaiming-initialise the recurrent linear layers, zero their biases."""
        layers = [self.ioux, self.iouh, self.fx, self.Uh, self.Uh_s,
                  self.ioux_b, self.iouh_b, self.fx_b, self.Uh_b, self.Uh_s_b]
        for layer in layers:
            init.kaiming_normal_(layer.weight)
            if layer.bias is not None:
                init.constant_(layer.bias, val=0)

    def node_forward(self, inputs, hx, emb_s):
        """One forward GRU step on the [word ; sememe] concatenation."""
        child_h = hx
        inputs = torch.cat([inputs, emb_s], dim=1)
        iou = self.ioux(inputs) + self.iouh(child_h)
        z, r = torch.split(iou, iou.size(1) // 2, dim=1)
        z, r = torch.sigmoid(z), torch.sigmoid(r)
        h_telta = self.fx(inputs) + self.Uh(torch.mul(r, child_h))
        h_telta = torch.tanh(h_telta)
        h = torch.mul((1 - z), child_h) + torch.mul(z, h_telta)
        return h

    def node_backward(self, inputs, hx, emb_s):
        """One backward GRU step on the [word ; sememe] concatenation."""
        child_h = hx
        inputs = torch.cat([inputs, emb_s], dim=1)
        iou = self.ioux_b(inputs) + self.iouh_b(child_h)
        z, r = torch.split(iou, iou.size(1) // 2, dim=1)
        z, r = torch.sigmoid(z), torch.sigmoid(r)
        h_telta = self.fx_b(inputs) + self.Uh_b(torch.mul(r, child_h))
        h_telta = torch.tanh(h_telta)
        h = torch.mul((1 - z), child_h) + torch.mul(z, h_telta)
        return h

    def forward(self, sent, sent_len, sememe_data):
        """Encode a padded batch with per-step sememe concatenation.

        sent:        (max_time, batch, word_dim) padded word embeddings
        sent_len:    numpy array of true sentence lengths
        sememe_data: (max_time, batch, sememe_size) multi-hot indicators
        Returns a (batch, 2*mem_dim) pooled sentence representation.
        """
        # BUGFIX: the original bound the sememe sum to `sememe_h` but then
        # used an undefined name `emb_s`; bind it to `emb_s` directly.
        emb_s = self.sememe_sum(sememe_data)
        # Sort by decreasing length so active sequences form a batch prefix.
        sent_len_sorted, idx_sort = np.sort(sent_len)[::-1], np.argsort(-sent_len)
        sent_len_sorted = sent_len_sorted.copy()
        idx_unsort = np.argsort(idx_sort)
        idx_sort = torch.from_numpy(idx_sort).cuda()
        sent = sent.index_select(1, idx_sort)
        emb_s = emb_s.index_select(1, idx_sort)
        max_time, batch_size, _ = sent.size()
        # pack_length[t] = number of sequences still active at time step t.
        # np.int64 replaces the np.int alias removed in NumPy 1.24.
        pack_length = np.zeros([max_time, 1], dtype=np.int64)
        time_point = batch_size - 1
        last_point = 0
        while True:
            pack_length[last_point: sent_len_sorted[time_point]] = time_point + 1
            last_point = sent_len_sorted[time_point]
            if sent_len_sorted[time_point] == max_time:
                break
            time_point = time_point - 1
        pack_length = torch.from_numpy(pack_length).cuda()
        output_forward = []
        # BUGFIX: the original referenced an undefined name `inputs` here;
        # the sorted batch tensor is `sent`.
        hx_forward = sent[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_()
        for time in range(max_time):
            next_hx = self.node_forward(sent[time, 0:pack_length[time]], hx_forward, emb_s[time, 0:pack_length[time]])
            output_forward.append(torch.cat([next_hx, torch.zeros([batch_size - next_hx.size()[0], self.mem_dim], device='cuda')], dim=0))
            if time < max_time - 1:
                hx_forward = next_hx[0:pack_length[time + 1]]
        output_backward = [[] for i in range(max_time)]
        hx_backward = sent[0][0].detach().new(pack_length[max_time - 1], self.mem_dim).fill_(0.).requires_grad_()
        for time in range(max_time):
            next_hx = self.node_backward(sent[max_time - time - 1, 0:pack_length[max_time - time - 1]], hx_backward, emb_s[max_time - time - 1, 0:pack_length[max_time - time - 1]])
            output_backward[max_time - time - 1] = torch.cat([next_hx, torch.zeros([batch_size - next_hx.size()[0], self.mem_dim], device='cuda')], dim=0)
            if time < max_time - 1:
                hx_backward = torch.cat([next_hx, torch.zeros([pack_length[max_time - time - 2] - next_hx.size()[0], self.mem_dim]).cuda()], dim=0)
        a = torch.stack(output_forward, dim=0)
        b = torch.stack(output_backward, dim=0)
        idx_unsort = torch.from_numpy(idx_unsort).cuda()
        sent_output_forward = a.index_select(1, idx_unsort)
        sent_output_backward = b.index_select(1, idx_unsort)
        emb_s = emb_s.index_select(1, idx_unsort)
        new_output_forward = []
        new_output_2_forward = []
        new_output_backward = []
        # Attention pooling (forward): score each hidden state against its
        # projected sememe vector, then combine with the last hidden state.
        for i in range(len(sent_len)):
            hidden_old_forward = sent_output_forward[0:sent_len[i], i, :]
            new_output_2_forward.append(sent_output_forward[sent_len[i] - 1, i])
            hidden = self.W(hidden_old_forward)
            emb_s_sum = emb_s[0:sent_len[i], i, :]
            emb_s_sum = self.W_s(emb_s_sum)
            hidden = torch.cat([hidden, emb_s_sum], dim=1)
            att = torch.tanh(torch.mm(hidden, self.query.weight))
            new_output_forward.append(torch.mm(att.transpose(1, 0), hidden_old_forward))
        new_output_forward = self.W_p(torch.squeeze(torch.stack(new_output_forward, dim=0))) + self.W_x(torch.squeeze(torch.stack(new_output_2_forward, dim=0)))
        new_output_forward = torch.tanh(new_output_forward)
        # Attention pooling (backward): the state at t=0 saw the whole sentence.
        for i in range(len(sent_len)):
            hidden_old_backward = sent_output_backward[0:sent_len[i], i, :]
            hidden = self.W_b(hidden_old_backward)
            emb_s_sum = emb_s[0:sent_len[i], i, :]
            emb_s_sum = self.W_s_b(emb_s_sum)
            hidden = torch.cat([hidden, emb_s_sum], dim=1)
            att = torch.tanh(torch.mm(hidden, self.query_b.weight))
            new_output_backward.append(torch.mm(att.transpose(1, 0), hidden_old_backward))
        new_output_backward = self.W_p_b(torch.squeeze(torch.stack(new_output_backward, dim=0))) + self.W_x_b(sent_output_backward[0])
        new_output_backward = torch.tanh(new_output_backward)
        final_output = torch.cat([new_output_forward, new_output_backward], dim=1)
        return final_output

    def sememe_sum(self, input_s):
        """Project multi-hot sememe indicators through the embedding table,
        yielding one dense sememe vector per time step and sentence."""
        emb_sememe = self.emb_sememe.weight
        input_sememe = []
        for i in range(input_s.size()[0]):
            input_sememe.append(torch.mm(input_s[i].float(), emb_sememe))
        input_sememe = torch.stack(input_sememe, dim=0)
        return input_sememe
class BIGRU_extra_gate(nn.Module):
    """Bidirectional GRU with an extra sememe output gate: the sememe vector
    enters the z/r gates, and an additional gate o_c injects tanh(W_c * sememe)
    into the hidden state. Attention pooling over the states follows.
    """

    def __init__(self, config):
        super(BIGRU_extra_gate, self).__init__()
        self.enc_lstm_dim = config['enc_lstm_dim']
        self.sememe_dim = config['sememe_dim']
        self.sememesumlstm = SememeSumLstm(self.sememe_dim, self.enc_lstm_dim)
        self.sememesumGRU = SememeSumGRU(self.sememe_dim, self.enc_lstm_dim)
        self.sememe_size = config['sememe_size']
        self.emb_sememe = nn.Embedding(self.sememe_size, self.sememe_dim)
        self.in_dim = config['word_emb_dim']
        self.mem_dim = config['enc_lstm_dim']
        # The 2x output width packs the update (z) and reset (r) gates into a
        # single linear map; torch.split separates them later.
        self.ioux = nn.Linear(self.in_dim, 2 * self.mem_dim)
        self.ioux_b = nn.Linear(self.in_dim, 2 * self.mem_dim)
        self.iouh = nn.Linear(self.mem_dim, 2 * self.mem_dim)
        self.iouh_b = nn.Linear(self.mem_dim, 2 * self.mem_dim)
        self.ious = nn.Linear(self.in_dim, 2 * self.mem_dim)
        self.ious_b = nn.Linear(self.in_dim, 2 * self.mem_dim)
        # Sememe output gate o_c parameters (forward / backward).
        self.fx_s = nn.Linear(self.in_dim, self.mem_dim)
        self.fs = nn.Linear(self.in_dim, self.mem_dim)
        self.fh_s = nn.Linear(self.mem_dim, self.mem_dim)
        self.fx_s_b = nn.Linear(self.in_dim, self.mem_dim)
        self.fs_b = nn.Linear(self.in_dim, self.mem_dim)
        self.fh_s_b = nn.Linear(self.mem_dim, self.mem_dim)
        self.fx = nn.Linear(self.in_dim, self.mem_dim)
        self.fx_b = nn.Linear(self.in_dim, self.mem_dim)
        self.Uh = nn.Linear(self.mem_dim, self.mem_dim)
        self.Uh_b = nn.Linear(self.mem_dim, self.mem_dim)
        self.W_c = nn.Linear(self.in_dim, self.mem_dim)
        self.W_c_b = nn.Linear(self.in_dim, self.mem_dim)
        self.max_pad = True
        # Attention-pooling parameters (forward direction).
        self.W_s = nn.Linear(self.in_dim, self.mem_dim)
        self.W = nn.Linear(self.mem_dim, self.mem_dim)
        self.query = nn.Embedding(2 * self.mem_dim, 1)
        self.W_p = nn.Linear(self.mem_dim, self.mem_dim)
        self.W_x = nn.Linear(self.mem_dim, self.mem_dim)
        # Attention-pooling parameters (backward direction).
        self.W_s_b = nn.Linear(self.in_dim, self.mem_dim)
        self.W_b = nn.Linear(self.mem_dim, self.mem_dim)
        self.query_b = nn.Embedding(2 * self.mem_dim, 1)
        self.W_p_b = nn.Linear(self.mem_dim, self.mem_dim)
        self.W_x_b = nn.Linear(self.mem_dim, self.mem_dim)
        self.reset_parameters()

    def reset_parameters(self):
        """Kaiming-initialise the recurrent linear layers, zero their biases."""
        layers = [self.ioux, self.iouh, self.ious, self.fx, self.Uh,
                  self.ioux_b, self.iouh_b, self.ious_b, self.fx_b, self.Uh_b]
        for layer in layers:
            init.kaiming_normal_(layer.weight)
            if layer.bias is not None:
                init.constant_(layer.bias, val=0)

    def node_forward(self, inputs, hx, sememe_h):
        """One forward GRU step with the extra sememe output gate o_c."""
        child_h = hx
        iou = self.ioux(inputs) + self.iouh(child_h) + self.ious(sememe_h)
        z, r = torch.split(iou, iou.size(1) // 2, dim=1)
        z, r = torch.sigmoid(z), torch.sigmoid(r)
        # o_c gates how much transformed sememe information leaks into h.
        o_c = self.fx_s(inputs) + self.fh_s(child_h) + self.fs(sememe_h)
        o_c = torch.sigmoid(o_c)
        h_telta = self.fx(inputs) + self.Uh(torch.mul(r, child_h))
        h_telta = torch.tanh(h_telta)
        h = torch.mul((1 - z), child_h) + torch.mul(z, h_telta) + torch.mul(o_c, torch.tanh(self.W_c(sememe_h)))
        return h

    def node_backward(self, inputs, hx, sememe_h):
        """One backward GRU step with the extra sememe output gate o_c."""
        child_h = hx
        iou = self.ioux_b(inputs) + self.iouh_b(child_h) + self.ious_b(sememe_h)
        z, r = torch.split(iou, iou.size(1) // 2, dim=1)
        z, r = torch.sigmoid(z), torch.sigmoid(r)
        o_c = self.fx_s_b(inputs) + self.fh_s_b(child_h) + self.fs_b(sememe_h)
        o_c = torch.sigmoid(o_c)
        h_telta = self.fx_b(inputs) + self.Uh_b(torch.mul(r, child_h))
        h_telta = torch.tanh(h_telta)
        h = torch.mul((1 - z), child_h) + torch.mul(z, h_telta) + torch.mul(o_c, torch.tanh(self.W_c_b(sememe_h)))
        return h

    def forward(self, sent, sent_len, sememe_data):
        """Encode a padded batch with sememe-gated GRU steps.

        sent:        (max_time, batch, word_dim) padded word embeddings
        sent_len:    numpy array of true sentence lengths
        sememe_data: (max_time, batch, sememe_size) multi-hot indicators
        Returns a (batch, 2*mem_dim) pooled sentence representation.
        """
        sememe_h = self.sememe_sum(sememe_data)
        # Sort by decreasing length so active sequences form a batch prefix.
        sent_len_sorted, idx_sort = np.sort(sent_len)[::-1], np.argsort(-sent_len)
        sent_len_sorted = sent_len_sorted.copy()
        idx_unsort = np.argsort(idx_sort)
        idx_sort = torch.from_numpy(idx_sort).cuda()
        sent = sent.index_select(1, idx_sort)
        sememe_h = sememe_h.index_select(1, idx_sort)
        max_time, batch_size, _ = sent.size()
        # pack_length[t] = number of sequences still active at time step t.
        # np.int64 replaces the np.int alias removed in NumPy 1.24.
        pack_length = np.zeros([max_time, 1], dtype=np.int64)
        time_point = batch_size - 1
        last_point = 0
        while True:
            pack_length[last_point: sent_len_sorted[time_point]] = time_point + 1
            last_point = sent_len_sorted[time_point]
            if sent_len_sorted[time_point] == max_time:
                break
            time_point = time_point - 1
        pack_length = torch.from_numpy(pack_length).cuda()
        output_forward = []
        # BUGFIX: the original referenced an undefined name `inputs` here;
        # the sorted batch tensor is `sent`.
        hx_forward = sent[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_()
        for time in range(max_time):
            next_hx = self.node_forward(sent[time, 0:pack_length[time]], hx_forward, sememe_h[time, 0:pack_length[time]])
            output_forward.append(torch.cat([next_hx, torch.zeros([batch_size - next_hx.size()[0], self.mem_dim], device='cuda')], dim=0))
            if time < max_time - 1:
                hx_forward = next_hx[0:pack_length[time + 1]]
        output_backward = [[] for i in range(max_time)]
        hx_backward = sent[0][0].detach().new(pack_length[max_time - 1], self.mem_dim).fill_(0.).requires_grad_()
        for time in range(max_time):
            next_hx = self.node_backward(sent[max_time - time - 1, 0:pack_length[max_time - time - 1]], hx_backward, sememe_h[max_time - time - 1, 0:pack_length[max_time - time - 1]])
            output_backward[max_time - time - 1] = torch.cat([next_hx, torch.zeros([batch_size - next_hx.size()[0], self.mem_dim], device='cuda')], dim=0)
            if time < max_time - 1:
                hx_backward = torch.cat([next_hx, torch.zeros([pack_length[max_time - time - 2] - next_hx.size()[0], self.mem_dim]).cuda()], dim=0)
        a = torch.stack(output_forward, dim=0)
        b = torch.stack(output_backward, dim=0)
        idx_unsort = torch.from_numpy(idx_unsort).cuda()
        sent_output_forward = a.index_select(1, idx_unsort)
        sent_output_backward = b.index_select(1, idx_unsort)
        sememe_h = sememe_h.index_select(1, idx_unsort)
        new_output_forward = []
        new_output_2_forward = []
        new_output_backward = []
        # Attention pooling (forward): score each hidden state against its
        # projected sememe vector, then combine with the last hidden state.
        for i in range(len(sent_len)):
            hidden_old_forward = sent_output_forward[0:sent_len[i], i, :]
            new_output_2_forward.append(sent_output_forward[sent_len[i] - 1, i])
            hidden = self.W(hidden_old_forward)
            emb_s_sum = sememe_h[0:sent_len[i], i, :]
            emb_s_sum = self.W_s(emb_s_sum)
            hidden = torch.cat([hidden, emb_s_sum], dim=1)
            att = torch.tanh(torch.mm(hidden, self.query.weight))
            new_output_forward.append(torch.mm(att.transpose(1, 0), hidden_old_forward))
        new_output_forward = self.W_p(torch.squeeze(torch.stack(new_output_forward, dim=0))) + self.W_x(torch.squeeze(torch.stack(new_output_2_forward, dim=0)))
        new_output_forward = torch.tanh(new_output_forward)
        # Attention pooling (backward): the state at t=0 saw the whole sentence.
        for i in range(len(sent_len)):
            hidden_old_backward = sent_output_backward[0:sent_len[i], i, :]
            hidden = self.W_b(hidden_old_backward)
            emb_s_sum = sememe_h[0:sent_len[i], i, :]
            emb_s_sum = self.W_s_b(emb_s_sum)
            hidden = torch.cat([hidden, emb_s_sum], dim=1)
            att = torch.tanh(torch.mm(hidden, self.query_b.weight))
            new_output_backward.append(torch.mm(att.transpose(1, 0), hidden_old_backward))
        new_output_backward = self.W_p_b(torch.squeeze(torch.stack(new_output_backward, dim=0))) + self.W_x_b(sent_output_backward[0])
        new_output_backward = torch.tanh(new_output_backward)
        final_output = torch.cat([new_output_forward, new_output_backward], dim=1)
        return final_output

    def sememe_sum(self, input_s):
        """Project multi-hot sememe indicators through the embedding table,
        yielding one dense sememe vector per time step and sentence."""
        emb_sememe = self.emb_sememe.weight
        input_sememe = []
        for i in range(input_s.size()[0]):
            input_sememe.append(torch.mm(input_s[i].float(), emb_sememe))
        input_sememe = torch.stack(input_sememe, dim=0)
        return input_sememe
class BIGRU_extra_cell(nn.Module):
def __init__(self, config):
super(BIGRU_extra_cell, self).__init__()
self.enc_lstm_dim = config['enc_lstm_dim']
self.sememe_dim = config['sememe_dim']
self.sememesumlstm = SememeSumLstm(self.sememe_dim, self.enc_lstm_dim)
self.sememesumGRU = SememeSumGRU(self.sememe_dim, self.enc_lstm_dim)
self.sememe_dim = config['sememe_dim']
self.sememe_size = config['sememe_size']
self.emb_sememe = nn.Embedding(self.sememe_size, self.sememe_dim)
self.in_dim = config['word_emb_dim']
self.mem_dim = config['enc_lstm_dim']
#self.pool_type = config['pool_type']
#乘3代表3种矩阵,它后来用split分开了
self.ioux = nn.Linear(self.in_dim, 2 * self.mem_dim)
self.ioux_b = nn.Linear(self.in_dim, 2 * self.mem_dim)
self.iouh = nn.Linear(self.mem_dim, 2 * self.mem_dim)
self.iouh_b = nn.Linear(self.mem_dim, 2 * self.mem_dim)
self.ious = nn.Linear(self.mem_dim, 2 * self.mem_dim)
self.ious_b = nn.Linear(self.mem_dim, 2 * self.mem_dim)
self.fx = nn.Linear(self.in_dim, self.mem_dim)
self.fx_b = nn.Linear(self.in_dim, self.mem_dim)
self.Uh = nn.Linear(self.mem_dim, self.mem_dim)
self.Uh_b = nn.Linear(self.mem_dim, self.mem_dim)
self.max_pad = True
self.W_s = nn.Linear(self.mem_dim, self.mem_dim)
self.W = nn.Linear(self.mem_dim, self.mem_dim)
self.query = nn.Embedding(2*self.mem_dim, 1)
self.W_p = nn.Linear(self.mem_dim, self.mem_dim)
self.W_x = nn.Linear(self.mem_dim, self.mem_dim)
self.W_s_b = nn.Linear(self.mem_dim, self.mem_dim)
self.W_b = nn.Linear(self.mem_dim, self.mem_dim)
self.query_b = nn.Embedding(2*self.mem_dim, 1)
self.W_p_b = nn.Linear(self.mem_dim, self.mem_dim)
self.W_x_b = nn.Linear(self.mem_dim, self.mem_dim)
self.reset_parameters()
def reset_parameters(self):
layers = [self.ioux, self.iouh, self.ious, self.fx, self.Uh, self.ioux_b, self.iouh_b, self.ious_b, self.fx_b, self.Uh_b]
for layer in layers:
init.kaiming_normal_(layer.weight)
if layer.bias is not None:
init.constant_(layer.bias, val=0)
def node_forward(self, inputs, hx, sememe_h):
child_h = hx
iou = self.ioux(inputs) + self.iouh(child_h) + self.ious(sememe_h)
z, r = torch.split(iou, iou.size(1) // 2, dim=1)
z, r = torch.sigmoid(z), torch.sigmoid(r)
h_telta = self.fx(inputs) + self.Uh(torch.mul(r, child_h) + torch.mul(r, sememe_h))
h_telta = torch.tanh(h_telta)
h = torch.mul((1-z), child_h) + torch.mul(z, h_telta)
return h
def node_backward(self, inputs, hx, sememe_h):
child_h = hx
iou = self.ioux_b(inputs) + self.iouh_b(child_h) + self.ious_b(sememe_h)
z, r = torch.split(iou, iou.size(1) // 2, dim=1)
z, r = torch.sigmoid(z), torch.sigmoid(r)
h_telta = self.fx_b(inputs) + self.Uh_b(torch.mul(r, child_h) + torch.mul(r, sememe_h))
h_telta = torch.tanh(h_telta)
h = torch.mul((1-z), child_h) + torch.mul(z, h_telta)
return h
def forward(self, sent, sent_len, sememe_data):
    """Encode a padded batch with the bidirectional sememe-conditioned GRU.

    Args:
        sent: (max_time, batch, in_dim) time-major padded embeddings.
        sent_len: per-sentence lengths (numpy integer array).
        sememe_data: multi-hot sememe indicators fed to sememe_sum.

    Returns:
        (batch, 2 * mem_dim) tensor: attention-pooled forward state
        concatenated with the attention-pooled backward state.

    NOTE(review): CUDA is hard-coded throughout ('.cuda()', device='cuda'),
    matching the rest of this class.
    """
    sememe_h = self.sememe_sum(sememe_data)
    # Sort sentences by decreasing length so the active batch can shrink over
    # time (manual "packing"); idx_unsort restores the original order later.
    sent_len_sorted, idx_sort = np.sort(sent_len)[::-1], np.argsort(-sent_len)
    sent_len_sorted = sent_len_sorted.copy()
    idx_unsort = np.argsort(idx_sort)
    idx_sort = torch.from_numpy(idx_sort).cuda()
    sent = sent.index_select(1, idx_sort)
    sememe_h = sememe_h.index_select(1, idx_sort)
    max_time, batch_size, _ = sent.size()
    # pack_length[t] = number of sequences still active at step t.
    # FIX: np.int was removed in NumPy 1.24 -> use the explicit np.int64.
    pack_length = np.zeros([max_time, 1], dtype=np.int64)
    time_point = batch_size - 1
    last_point = 0
    while True:
        pack_length[last_point: sent_len_sorted[time_point]] = time_point + 1
        last_point = sent_len_sorted[time_point]
        if sent_len_sorted[time_point] == max_time:
            break
        time_point = time_point - 1
    pack_length = torch.from_numpy(pack_length).cuda()
    output_forward = []
    # FIX: the original referenced the undefined name `inputs` (NameError at
    # runtime); the parameter is called `sent`.
    hx_forward = sent[0][0].detach().new(batch_size, self.mem_dim).fill_(0.).requires_grad_()
    for time in range(max_time):
        next_hx = self.node_forward(sent[time, 0:pack_length[time]], hx_forward,
                                    sememe_h[time, 0:pack_length[time]])
        # Re-pad each step to the full batch size so the steps can be stacked.
        output_forward.append(torch.cat(
            [next_hx, torch.zeros([batch_size - next_hx.size()[0], self.mem_dim], device='cuda')], dim=0))
        if time < max_time - 1:
            hx_forward = next_hx[0:pack_length[time + 1]]
    output_backward = [[] for i in range(max_time)]
    # FIX: `inputs` -> `sent`, and pass a Python int (not a 1-element tensor)
    # as the size argument of Tensor.new().
    hx_backward = sent[0][0].detach().new(int(pack_length[max_time - 1]), self.mem_dim).fill_(0.).requires_grad_()
    for time in range(max_time):
        step = max_time - time - 1
        next_hx = self.node_backward(sent[step, 0:pack_length[step]], hx_backward,
                                     sememe_h[step, 0:pack_length[step]])
        output_backward[step] = torch.cat(
            [next_hx, torch.zeros([batch_size - next_hx.size()[0], self.mem_dim], device='cuda')], dim=0)
        if time < max_time - 1:
            # Grow the backward hidden state as more sequences become active.
            hx_backward = torch.cat(
                [next_hx, torch.zeros([int(pack_length[step - 1]) - next_hx.size()[0], self.mem_dim]).cuda()], dim=0)
    a = torch.stack(output_forward, dim=0)
    b = torch.stack(output_backward, dim=0)
    idx_unsort = torch.from_numpy(idx_unsort).cuda()
    sent_output_forward = a.index_select(1, idx_unsort)
    sent_output_backward = b.index_select(1, idx_unsort)
    sememe_h = sememe_h.index_select(1, idx_unsort)
    new_output_forward = []
    new_output_2_forward = []
    new_output_backward = []
    # Sememe-aware attention pooling over the forward states; also keep the
    # last valid forward state of each sentence for the W_x residual path.
    for i in range(len(sent_len)):
        hidden_old_forward = sent_output_forward[0:sent_len[i], i, :]
        new_output_2_forward.append(sent_output_forward[sent_len[i] - 1, i])
        hidden = self.W(hidden_old_forward)
        emb_s_sum = self.W_s(sememe_h[0:sent_len[i], i, :])
        hidden = torch.cat([hidden, emb_s_sum], dim=1)
        att = torch.tanh(torch.mm(hidden, self.query.weight))
        new_output_forward.append(torch.mm(att.transpose(1, 0), hidden_old_forward))
    new_output_forward = self.W_p(torch.squeeze(torch.stack(new_output_forward, dim=0))) \
        + self.W_x(torch.squeeze(torch.stack(new_output_2_forward, dim=0)))
    new_output_forward = torch.tanh(new_output_forward)
    # Same attention pooling over the backward states; the backward "last"
    # state of every sentence lives at step 0.
    for i in range(len(sent_len)):
        hidden_old_backward = sent_output_backward[0:sent_len[i], i, :]
        hidden = self.W_b(hidden_old_backward)
        emb_s_sum = self.W_s_b(sememe_h[0:sent_len[i], i, :])
        hidden = torch.cat([hidden, emb_s_sum], dim=1)
        att = torch.tanh(torch.mm(hidden, self.query_b.weight))
        new_output_backward.append(torch.mm(att.transpose(1, 0), hidden_old_backward))
    new_output_backward = self.W_p_b(torch.squeeze(torch.stack(new_output_backward, dim=0))) \
        + self.W_x_b(sent_output_backward[0])
    new_output_backward = torch.tanh(new_output_backward)
    return torch.cat([new_output_forward, new_output_backward], dim=1)
def sememe_sum(self, input_s):
    """Project multi-hot sememe indicators into embedding space, then encode them.

    Each time step's (batch, sememe_size) indicator matrix is multiplied with
    the sememe embedding table and the stacked result is passed through
    self.sememesumGRU.
    """
    weight = self.emb_sememe.weight
    projected = torch.stack(
        [torch.mm(input_s[i].float(), weight) for i in range(input_s.size(0))],
        dim=0,
    )
    return self.sememesumGRU(projected)
class NLINet(nn.Module):
    """Sentence-pair NLI classifier: a shared encoder plus an MLP head.

    Both sentences are encoded with the same encoder; the head classifies the
    feature vector [u, v, |u - v|, u * v].
    """

    def __init__(self, config):
        super(NLINet, self).__init__()
        # classifier hyper-parameters
        self.nonlinear_fc = config['nonlinear_fc']
        self.fc_dim = config['fc_dim']
        self.n_classes = config['n_classes']
        self.enc_lstm_dim = config['enc_lstm_dim']
        self.encoder_type = config['encoder_type']
        self.dpout_fc = config['dpout_fc']
        self.sememe_dim = config['sememe_dim']
        self.sememe_size = config['sememe_size']
        # NOTE(review): the encoder class is resolved by name via eval(); this
        # is only safe when encoder_type comes from trusted configuration.
        self.encoder = eval(self.encoder_type)(config)
        self.inputdim = int(4 * self.enc_lstm_dim)
        # Bidirectional encoders ('BI' in the name) produce twice the features.
        in_features = self.inputdim * 2 if 'BI' in self.encoder_type else self.inputdim
        if self.nonlinear_fc:
            self.classifier = nn.Sequential(
                nn.Dropout(p=self.dpout_fc),
                nn.Linear(in_features, self.fc_dim),
                nn.Tanh(),
                nn.Dropout(p=self.dpout_fc),
                nn.Linear(self.fc_dim, self.fc_dim),
                nn.Tanh(),
                nn.Dropout(p=self.dpout_fc),
                nn.Linear(self.fc_dim, self.n_classes),
            )
        else:
            self.classifier = nn.Sequential(
                nn.Linear(in_features, self.fc_dim),
                nn.Linear(self.fc_dim, self.fc_dim),
                nn.Linear(self.fc_dim, self.n_classes),
            )

    def forward(self, s1, s2):
        """Encode premise s1 and hypothesis s2 (each an (ids, lengths, sememes) triple)."""
        u = self.encoder(s1[0], s1[1], s1[2])
        v = self.encoder(s2[0], s2[1], s2[2])
        features = torch.cat((u, v, torch.abs(u - v), u * v), 1)
        return self.classifier(features)
| 43.640261
| 226
| 0.592677
| 48,252
| 327,782
| 3.804651
| 0.006238
| 0.039617
| 0.049515
| 0.044765
| 0.996405
| 0.995985
| 0.995909
| 0.99494
| 0.994231
| 0.986093
| 0
| 0.013767
| 0.2778
| 327,782
| 7,510
| 227
| 43.646072
| 0.761744
| 0.038388
| 0
| 0.967417
| 0
| 0
| 0.03482
| 0.001043
| 0
| 0
| 0
| 0
| 0.020155
| 1
| 0.075244
| false
| 0
| 0.007558
| 0.003359
| 0.142257
| 0.020322
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ee668a7d6f316a01acedff6ebcb526fcde1f48de
| 403
|
py
|
Python
|
ex017.py
|
msmeireles/100-Python-Exercises
|
7a7bca9cc01f794029d4d83ebdca8f0d55d39cf5
|
[
"MIT"
] | null | null | null |
ex017.py
|
msmeireles/100-Python-Exercises
|
7a7bca9cc01f794029d4d83ebdca8f0d55d39cf5
|
[
"MIT"
] | null | null | null |
ex017.py
|
msmeireles/100-Python-Exercises
|
7a7bca9cc01f794029d4d83ebdca8f0d55d39cf5
|
[
"MIT"
] | null | null | null |
# ex017: compute the hypotenuse of a right triangle from its two legs.
import math

opo = float(input('Cateto oposto: '))
adj = float(input('Cateto adjacente: '))
# math.hypot is the robust stdlib equivalent of sqrt(opo**2 + adj**2)
# (avoids intermediate overflow); the hand-rolled commented-out version
# that duplicated this logic has been removed.
hip = math.hypot(opo, adj)
print('A hipotenusa do respectivo triângulo retângulo vale {}.'.format(hip))
| 31
| 78
| 0.657568
| 54
| 403
| 4.907407
| 0.407407
| 0.150943
| 0.241509
| 0.143396
| 0.867925
| 0.867925
| 0.867925
| 0.867925
| 0.867925
| 0.867925
| 0
| 0.011662
| 0.148883
| 403
| 12
| 79
| 33.583333
| 0.760933
| 0.459057
| 0
| 0
| 0
| 0
| 0.44
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 0.2
| 0.2
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ee912485fe30cf6fd6906978f23e345c6918feae
| 3,286
|
py
|
Python
|
operations/fleet/migrations/0002_auto_20170605_1143.py
|
kaizer88/emps
|
2669b32c46befcf1a19390fb25013817e6b00980
|
[
"MIT"
] | null | null | null |
operations/fleet/migrations/0002_auto_20170605_1143.py
|
kaizer88/emps
|
2669b32c46befcf1a19390fb25013817e6b00980
|
[
"MIT"
] | null | null | null |
operations/fleet/migrations/0002_auto_20170605_1143.py
|
kaizer88/emps
|
2669b32c46befcf1a19390fb25013817e6b00980
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Auto-generated Django migration for the `fleet` app: renames the vehicle
# planning/rental date fields (and their historical-tracking twins), adds
# lease/account fields, and widens the ownership_type choices.
from __future__ import unicode_literals
from django.db import models, migrations


class Migration(migrations.Migration):
    # Applies on top of the initial fleet schema.
    dependencies = [
        ('fleet', '0001_initial'),
    ]
    operations = [
        # Renames: *_date -> *_starting/*_ending, mirrored on the
        # historicalvehicle audit model.
        migrations.RenameField(
            model_name='historicalvehicle',
            old_name='end_date',
            new_name='plan_ending',
        ),
        migrations.RenameField(
            model_name='historicalvehicle',
            old_name='start_date',
            new_name='plan_starting',
        ),
        migrations.RenameField(
            model_name='historicalvehicle',
            old_name='rental_end_date',
            new_name='rental_ending',
        ),
        migrations.RenameField(
            model_name='historicalvehicle',
            old_name='rental_start_date',
            new_name='rental_starting',
        ),
        migrations.RenameField(
            model_name='vehicle',
            old_name='end_date',
            new_name='plan_ending',
        ),
        migrations.RenameField(
            model_name='vehicle',
            old_name='start_date',
            new_name='plan_starting',
        ),
        migrations.RenameField(
            model_name='vehicle',
            old_name='rental_end_date',
            new_name='rental_ending',
        ),
        migrations.RenameField(
            model_name='vehicle',
            old_name='rental_start_date',
            new_name='rental_starting',
        ),
        # New nullable fields on both models.
        migrations.AddField(
            model_name='historicalvehicle',
            name='account_number',
            field=models.CharField(default=None, max_length=120, null=True, blank=True),
        ),
        migrations.AddField(
            model_name='historicalvehicle',
            name='lease_ending',
            field=models.DateField(null=True, blank=True),
        ),
        migrations.AddField(
            model_name='historicalvehicle',
            name='lease_starting',
            field=models.DateField(null=True, blank=True),
        ),
        migrations.AddField(
            model_name='vehicle',
            name='account_number',
            field=models.CharField(default=None, max_length=120, null=True, blank=True),
        ),
        migrations.AddField(
            model_name='vehicle',
            name='lease_ending',
            field=models.DateField(null=True, blank=True),
        ),
        migrations.AddField(
            model_name='vehicle',
            name='lease_starting',
            field=models.DateField(null=True, blank=True),
        ),
        # ownership_type gains the fixed "EL *" choice set.
        migrations.AlterField(
            model_name='historicalvehicle',
            name='ownership_type',
            field=models.CharField(blank=True, max_length=20, null=True, choices=[('EL Fleet', 'Emerald Life Fleet'), ('EL Leased', 'Emerald Life Leased'), ('EL Rental', 'Emerald Life Rental'), ('EL Staff', 'Emerald Life Staff')]),
        ),
        migrations.AlterField(
            model_name='vehicle',
            name='ownership_type',
            field=models.CharField(blank=True, max_length=20, null=True, choices=[('EL Fleet', 'Emerald Life Fleet'), ('EL Leased', 'Emerald Life Leased'), ('EL Rental', 'Emerald Life Rental'), ('EL Staff', 'Emerald Life Staff')]),
        ),
    ]
| 34.589474
| 231
| 0.567864
| 309
| 3,286
| 5.812298
| 0.187702
| 0.080178
| 0.115813
| 0.13363
| 0.878619
| 0.878619
| 0.859688
| 0.833519
| 0.778396
| 0.778396
| 0
| 0.006623
| 0.310712
| 3,286
| 94
| 232
| 34.957447
| 0.786313
| 0.006391
| 0
| 0.909091
| 0
| 0
| 0.225866
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.022727
| 0
| 0.056818
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c9a5568a7b6769b4b324a72070edfc82cdba06d7
| 6,375
|
py
|
Python
|
tests/test_file_apiv11.py
|
voxolab/voxo-dashboard
|
0593248328f0f47a4c1f00d1a10080ecc559a389
|
[
"MIT"
] | null | null | null |
tests/test_file_apiv11.py
|
voxolab/voxo-dashboard
|
0593248328f0f47a4c1f00d1a10080ecc559a389
|
[
"MIT"
] | null | null | null |
tests/test_file_apiv11.py
|
voxolab/voxo-dashboard
|
0593248328f0f47a4c1f00d1a10080ecc559a389
|
[
"MIT"
] | null | null | null |
import io
import json
import os
from flask import url_for
def test_file_upload_asr_model_name_api_v11(
        app, users, client, asr_models):
    """Uploading with an explicit asr_model_name (api v1.1) starts a custom-model transcription."""
    token = users['user'].get_auth_token()
    model_name = asr_models['asr_model_french'].name
    payload = {
        'content': '{{"asr_model_name": "{model}"}}'.format(model=model_name),
        'file': (io.BytesIO(b"hello there"), 'hello.wav'),
    }
    rv = client.post(
        url_for('api.upload_file', api_version='v1.1'),
        content_type='multipart/form-data',
        data=payload,
        headers=[('Authentication-Token', token)])
    assert rv.status_code == 200
    result = json.loads(rv.data.decode("utf-8"))
    assert result['status'] == 1
    assert len(result['processes']) == 1
    process = result['processes'][0]
    assert process['type'] == 'Transcription with custom model'
    assert process['asr_model_name'] == model_name
def test_file_upload_bad_asr_model_name_api_v11(
        app, users, client, asr_models):
    """An unknown asr_model_name (api v1.1) is rejected with HTTP 400."""
    token = users['user'].get_auth_token()
    payload = {
        'content': '{"asr_model_name": "INVALID"}',
        'file': (io.BytesIO(b"hello there"), 'hello.wav'),
    }
    rv = client.post(
        url_for('api.upload_file', api_version='v1.1'),
        content_type='multipart/form-data',
        data=payload,
        headers=[('Authentication-Token', token)])
    assert rv.status_code == 400
def test_file_upload_api_v11(app, user_token, client, asr_models):
    """api v1.1 uploads pick the ASR model from lang/quality; start=false starts nothing.

    Refactored: the original repeated the same request + assertion stanza four
    times; the common steps now live in two local helpers.
    """
    def upload(content):
        # POST one dummy wav with the given JSON `content` field (api v1.1).
        return client.post(
            url_for('api.upload_file', api_version='v1.1'),
            content_type='multipart/form-data',
            data={
                'content': content,
                'file': (io.BytesIO(b"hello there"), 'hello.wav')
            },
            headers=[('Authentication-Token', user_token)])

    def assert_uploaded(rv, expected_model):
        # Shared assertions: HTTP 200, expected process list, stored file exists.
        assert rv.status_code == 200
        result = json.loads(rv.data.decode("utf-8"))
        assert result['status'] == 1
        if expected_model is None:
            assert len(result['processes']) == 0
        else:
            assert len(result['processes']) == 1
            assert result['processes'][0]['type'] == 'Transcription with custom model'
            assert result['processes'][0]['asr_model_name'] == expected_model
        file_basename, file_extension = \
            os.path.splitext(result['generated_filename'])
        path = "{}/{}/{}/{}".format(
            app.config['UPLOAD_FOLDER'], result['user_id'],
            file_basename, result['generated_filename'])
        assert os.path.exists(path)

    assert_uploaded(
        upload('{"start": true, "lang": "en", "quality":"phone"}'),
        'english.studio')
    assert_uploaded(
        upload('{"start": true, "lang": "fr", "quality":"phone"}'),
        'french.studio.fr_FR')
    assert_uploaded(
        upload('{"start": true, "lang": "fr", "quality":"studio"}'),
        'french.studio.fr_FR')
    # start=false: the file is stored but no process is started.
    assert_uploaded(
        upload('{"start": false, "lang": "fr", "quality":"studio"}'),
        None)
# This should not start any process
def test_file_upload_api_v11_fail(app, client, user_token):
    """An empty `content` field (api v1.1) yields a 500 with a JSON-parse error."""
    response = client.post(
        url_for('api.upload_file', api_version='v1.1'),
        content_type='multipart/form-data',
        data={
            'content': '',
            'file': (io.BytesIO(b"hello there"), 'hello.wav'),
        },
        headers=[('Authentication-Token', user_token)])
    assert response.status_code == 500
    body = json.loads(response.data.decode("utf-8"))
    assert 'parse valid json' in body['error']
# This should not start any process
def test_file_upload_api_v1(app, client, user_token):
    """Plain v1 upload (no content field): the file is stored, no process starts."""
    response = client.post(
        url_for('api.upload_file'),
        content_type='multipart/form-data',
        data={
            'file': (io.BytesIO(b"hello there"), 'hello.wav'),
        },
        headers=[('Authentication-Token', user_token)])
    assert response.status_code == 200
    body = json.loads(response.data.decode("utf-8"))
    assert body['status'] == 1
    assert len(body['processes']) == 0
    stem, _ext = os.path.splitext(body['generated_filename'])
    stored_path = "{}/{}/{}/{}".format(
        app.config['UPLOAD_FOLDER'], body['user_id'],
        stem, body['generated_filename'])
    assert os.path.exists(stored_path)
| 30.502392
| 78
| 0.604078
| 780
| 6,375
| 4.758974
| 0.119231
| 0.045259
| 0.043103
| 0.032328
| 0.945313
| 0.945313
| 0.91514
| 0.91514
| 0.91514
| 0.905172
| 0
| 0.016093
| 0.220235
| 6,375
| 208
| 79
| 30.649038
| 0.730638
| 0.017098
| 0
| 0.763889
| 0
| 0
| 0.287061
| 0
| 0
| 0
| 0
| 0
| 0.236111
| 1
| 0.034722
| false
| 0
| 0.027778
| 0
| 0.0625
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c9ae1fb9a238ffdfab58d6cb872ceeb892e7abfe
| 25,627
|
py
|
Python
|
tests/test_video_dimensions.py
|
ephes/django-cast
|
34b6aab98f7e9a750116ec2949e9cda4f2dcb127
|
[
"BSD-3-Clause"
] | 11
|
2018-12-23T15:58:35.000Z
|
2021-10-04T12:14:46.000Z
|
tests/test_video_dimensions.py
|
ephes/django-cast
|
34b6aab98f7e9a750116ec2949e9cda4f2dcb127
|
[
"BSD-3-Clause"
] | 9
|
2018-11-18T12:12:29.000Z
|
2022-02-27T09:51:36.000Z
|
tests/test_video_dimensions.py
|
ephes/django-cast
|
34b6aab98f7e9a750116ec2949e9cda4f2dcb127
|
[
"BSD-3-Clause"
] | 12
|
2018-11-17T15:13:09.000Z
|
2020-05-02T00:10:07.000Z
|
# flake8: noqa:E501
from cast.models import get_video_dimensions
class TestVideoDimensions:
def test_video_from_ios_device_portrait(self):
    """A 480x360 iPhone clip with `rotate: 90` metadata reports swapped (portrait) dimensions."""
    # Verbatim `ffmpeg -i` stderr fixture; the displaymatrix rotation means the
    # displayed dimensions are 360x480 despite the 480x360 stream size.
    ffmpeg_output = """
ffmpeg version 4.1.1 Copyright (c) 2000-2019 the FFmpeg developers
built with Apple LLVM version 10.0.0 (clang-1000.11.45.5)
libavutil 56. 22.100 / 56. 22.100
libavcodec 58. 35.100 / 58. 35.100
libavformat 58. 20.100 / 58. 20.100
libavdevice 58. 5.100 / 58. 5.100
libavfilter 7. 40.101 / 7. 40.101
libavresample 4. 0. 0 / 4. 0. 0
libswscale 5. 3.100 / 5. 3.100
libswresample 3. 3.100 / 3. 3.100
libpostproc 55. 3.100 / 55. 3.100
Input #0, mov,mp4,m4a,3gp,3g2,mj2, from '/Users/jochen/projects/homepage/homepage/media/cast_videos/57304811027__F8E67B21-7B33-4C70-842D-D7E708DCA683.MOV':
Metadata:
major_brand : qt
minor_version : 0
compatible_brands: qt
creation_time : 2019-02-28T12:01:50.000000Z
com.apple.quicktime.make: Apple
com.apple.quicktime.model: iPhone XS
com.apple.quicktime.software: 12.1.4
com.apple.quicktime.creationdate: 2019-02-28T13:01:50+0100
Duration: 00:00:04.90, start: 0.000000, bitrate: 908 kb/s
Stream #0:0(und): Video: h264 (Baseline) (avc1 / 0x31637661), yuv420p(tv, smpte170m/bt709/bt709), 480x360, 778 kb/s, 30.01 fps, 30 tbr, 600 tbn, 1200 tbc (default)
Metadata:
rotate : 90
creation_time : 2019-02-28T12:01:50.000000Z
handler_name : Core Media Video
encoder : H.264
Side data:
displaymatrix: rotation of -90.00 degrees
Stream #0:1(und): Audio: aac (LC) (mp4a / 0x6134706D), 44100 Hz, mono, fltp, 91 kb/s (default)
Metadata:
creation_time : 2019-02-28T12:01:50.000000Z
handler_name : Core Media Audio
Stream #0:2(und): Data: none (mebx / 0x7862656D), 23 kb/s (default)
Metadata:
creation_time : 2019-02-28T12:01:50.000000Z
handler_name : Core Media Metadata
Stream #0:3(und): Data: none (mebx / 0x7862656D), 0 kb/s (default)
Metadata:
creation_time : 2019-02-28T12:01:50.000000Z
handler_name : Core Media Metadata
"""
    width, height = get_video_dimensions(ffmpeg_output.split("\n"))
    print("width x height: ", width, height)
    assert width == 360
    assert height == 480
def test_video_from_ios_device_landscape(self):
    """A 480x360 iPhone clip without rotation metadata keeps its landscape dimensions."""
    # Verbatim ffmpeg stderr fixture (includes the poster-frame Output section).
    ffmpeg_output = """
ffmpeg version 4.1.1 Copyright (c) 2000-2019 the FFmpeg developers
built with Apple LLVM version 10.0.0 (clang-1000.11.45.5)
libavutil 56. 22.100 / 56. 22.100
libavcodec 58. 35.100 / 58. 35.100
libavformat 58. 20.100 / 58. 20.100
libavdevice 58. 5.100 / 58. 5.100
libavfilter 7. 40.101 / 7. 40.101
libavresample 4. 0. 0 / 4. 0. 0
libswscale 5. 3.100 / 5. 3.100
libswresample 3. 3.100 / 3. 3.100
libpostproc 55. 3.100 / 55. 3.100
Input #0, mov,mp4,m4a,3gp,3g2,mj2, from '/Users/jochen/projects/homepage/homepage/media/cast_videos/57305469432__3391A666-61B4-4EEA-9373-119115C8DF9F.MOV':
Metadata:
major_brand : qt
minor_version : 0
compatible_brands: qt
creation_time : 2019-02-28T13:51:35.000000Z
com.apple.quicktime.make: Apple
com.apple.quicktime.model: iPhone XS
com.apple.quicktime.software: 12.1.4
com.apple.quicktime.creationdate: 2019-02-28T14:51:34+0100
Duration: 00:00:02.73, start: 0.000000, bitrate: 1087 kb/s
Stream #0:0(und): Video: h264 (Baseline) (avc1 / 0x31637661), yuv420p(tv, smpte170m/bt709/bt709), 480x360, 977 kb/s, 30 fps, 30 tbr, 600 tbn, 1200 tbc (default)
Metadata:
creation_time : 2019-02-28T13:51:35.000000Z
handler_name : Core Media Video
encoder : H.264
Stream #0:1(und): Audio: aac (LC) (mp4a / 0x6134706D), 44100 Hz, mono, fltp, 88 kb/s (default)
Metadata:
creation_time : 2019-02-28T13:51:35.000000Z
handler_name : Core Media Audio
Stream #0:2(und): Data: none (mebx / 0x7862656D), 0 kb/s (default)
Metadata:
creation_time : 2019-02-28T13:51:35.000000Z
handler_name : Core Media Metadata
Stream #0:3(und): Data: none (mebx / 0x7862656D), 0 kb/s (default)
Metadata:
creation_time : 2019-02-28T13:51:35.000000Z
handler_name : Core Media Metadata
Stream mapping:
Stream #0:0 -> #0:0 (h264 (native) -> mjpeg (native))
Press [q] to stop, [?] for help
[swscaler @ 0x7fe8dd0c7600] deprecated pixel format used, make sure you did set range correctly
Output #0, image2, to '/var/folders/yq/lq6vnk9s693bp4xr5wktm1vh0000gn/T/poster_p5k4i2f3.jpg':
Metadata:
major_brand : qt
minor_version : 0
compatible_brands: qt
com.apple.quicktime.creationdate: 2019-02-28T14:51:34+0100
com.apple.quicktime.make: Apple
com.apple.quicktime.model: iPhone XS
com.apple.quicktime.software: 12.1.4
encoder : Lavf58.20.100
Stream #0:0(und): Video: mjpeg, yuvj420p(pc), 480x360, q=2-31, 200 kb/s, 30 fps, 30 tbn, 30 tbc (default)
Metadata:
creation_time : 2019-02-28T13:51:35.000000Z
handler_name : Core Media Video
encoder : Lavc58.35.100 mjpeg
Side data:
cpb: bitrate max/min/avg: 0/0/200000 buffer size: 0 vbv_delay: -1
frame= 1 fps=0.0 q=6.2 Lsize=N/A time=00:00:00.03 bitrate=N/A speed=2.47x
video:22kB audio:0kB subtitle:0kB other streams:0kB global headers:0kB muxing overhead: unknown
"""
    width, height = get_video_dimensions(ffmpeg_output.split("\n"))
    print("width x height: ", width, height)
    assert width == 480
    assert height == 360
def test_video_from_ios_portrait(self):
    """A 3840x2160 HEVC iPhone clip with -90° displaymatrix reports 2160x3840 (portrait)."""
    # Verbatim ffmpeg stderr fixture.
    ffmpeg_output = """
ffmpeg version 4.1.1 Copyright (c) 2000-2019 the FFmpeg developers
built with Apple LLVM version 10.0.0 (clang-1000.11.45.5)
libavutil 56. 22.100 / 56. 22.100
libavcodec 58. 35.100 / 58. 35.100
libavformat 58. 20.100 / 58. 20.100
libavdevice 58. 5.100 / 58. 5.100
libavfilter 7. 40.101 / 7. 40.101
libavresample 4. 0. 0 / 4. 0. 0
libswscale 5. 3.100 / 5. 3.100
libswresample 3. 3.100 / 3. 3.100
libpostproc 55. 3.100 / 55. 3.100
Input #0, mov,mp4,m4a,3gp,3g2,mj2, from '/Users/jochen/projects/homepage/homepage/media/cast_videos/E6B2E327-98BB-4348-8606-1E531D121BB3.MOV':
Metadata:
major_brand : qt
minor_version : 0
compatible_brands: qt
creation_time : 2019-02-28T13:55:07.000000Z
com.apple.quicktime.location.ISO6709: +51.2382+006.7899+036.689/
com.apple.quicktime.make: Apple
com.apple.quicktime.model: iPhone XS
com.apple.quicktime.software: 12.1.4
com.apple.quicktime.creationdate: 2019-02-28T14:55:06+0100
Duration: 00:00:03.57, start: 0.000000, bitrate: 28918 kb/s
Stream #0:0(und): Video: hevc (Main) (hvc1 / 0x31637668), yuv420p(tv, bt709), 3840x2160, 28726 kb/s, 29.97 fps, 29.97 tbr, 600 tbn, 600 tbc (default)
Metadata:
rotate : 90
creation_time : 2019-02-28T13:55:07.000000Z
handler_name : Core Media Video
encoder : HEVC
Side data:
displaymatrix: rotation of -90.00 degrees
Stream #0:1(und): Audio: aac (LC) (mp4a / 0x6134706D), 44100 Hz, stereo, fltp, 168 kb/s (default)
Metadata:
creation_time : 2019-02-28T13:55:07.000000Z
handler_name : Core Media Audio
Stream #0:2(und): Data: none (mebx / 0x7862656D), 0 kb/s (default)
Metadata:
creation_time : 2019-02-28T13:55:07.000000Z
handler_name : Core Media Metadata
Stream #0:3(und): Data: none (mebx / 0x7862656D), 0 kb/s (default)
Metadata:
creation_time : 2019-02-28T13:55:07.000000Z
handler_name : Core Media Metadata
Stream mapping:
Stream #0:0 -> #0:0 (hevc (native) -> mjpeg (native))
Press [q] to stop, [?] for help
[swscaler @ 0x7fdc29801000] deprecated pixel format used, make sure you did set range correctly
Output #0, image2, to '/var/folders/yq/lq6vnk9s693bp4xr5wktm1vh0000gn/T/poster_3w_mupml.jpg':
Metadata:
major_brand : qt
minor_version : 0
compatible_brands: qt
com.apple.quicktime.creationdate: 2019-02-28T14:55:06+0100
com.apple.quicktime.location.ISO6709: +51.2382+006.7899+036.689/
com.apple.quicktime.make: Apple
com.apple.quicktime.model: iPhone XS
com.apple.quicktime.software: 12.1.4
encoder : Lavf58.20.100
Stream #0:0(und): Video: mjpeg, yuvj420p(pc), 2160x3840, q=2-31, 200 kb/s, 29.97 fps, 29.97 tbn, 29.97 tbc (default)
Metadata:
encoder : Lavc58.35.100 mjpeg
creation_time : 2019-02-28T13:55:07.000000Z
handler_name : Core Media Video
Side data:
cpb: bitrate max/min/avg: 0/0/200000 buffer size: 0 vbv_delay: -1
displaymatrix: rotation of -0.00 degrees
frame= 1 fps=0.0 q=8.8 Lsize=N/A time=00:00:00.03 bitrate=N/A speed=0.0517x
video:211kB audio:0kB subtitle:0kB other streams:0kB global headers:0kB muxing overhead: unknown
"""
    width, height = get_video_dimensions(ffmpeg_output.split("\n"))
    print("width x height: ", width, height)
    assert width == 2160
    assert height == 3840
def test_video_from_android_portrait(self):
    """A 1920x1080 Android clip with `rotate: 270` reports 1080x1920 (portrait)."""
    # Verbatim ffmpeg stderr fixture.
    ffmpeg_output = """
ffmpeg version 4.1.1 Copyright (c) 2000-2019 the FFmpeg developers
built with Apple LLVM version 10.0.0 (clang-1000.11.45.5)
libavutil 56. 22.100 / 56. 22.100
libavcodec 58. 35.100 / 58. 35.100
libavformat 58. 20.100 / 58. 20.100
libavdevice 58. 5.100 / 58. 5.100
libavfilter 7. 40.101 / 7. 40.101
libavresample 4. 0. 0 / 4. 0. 0
libswscale 5. 3.100 / 5. 3.100
libswresample 3. 3.100 / 3. 3.100
libpostproc 55. 3.100 / 55. 3.100
Input #0, mov,mp4,m4a,3gp,3g2,mj2, from '/Users/jochen/projects/homepage/homepage/media/cast_videos/VID_20190228_144156.mp4':
Metadata:
major_brand : mp42
minor_version : 0
compatible_brands: isommp42
creation_time : 2019-02-28T13:41:56.000000Z
location : +51.2380+006.7902/
location-eng : +51.2380+006.7902/
com.android.version: 9
Duration: 00:00:04.06, start: 0.000000, bitrate: 21343 kb/s
Stream #0:0(eng): Audio: aac (LC) (mp4a / 0x6134706D), 44100 Hz, stereo, fltp, 190 kb/s (default)
Metadata:
creation_time : 2019-02-28T13:41:56.000000Z
handler_name : SoundHandle
Stream #0:1(eng): Video: h264 (High) (avc1 / 0x31637661), yuvj420p(pc, bt470bg/bt470bg/smpte170m), 1920x1080, 21917 kb/s, SAR 1:1 DAR 16:9, 29.88 fps, 30.13 tbr, 90k tbn, 180k tbc (default)
Metadata:
rotate : 270
creation_time : 2019-02-28T13:41:56.000000Z
handler_name : VideoHandle
Side data:
displaymatrix: rotation of 90.00 degrees
Stream mapping:
Stream #0:1 -> #0:0 (h264 (native) -> mjpeg (native))
Press [q] to stop, [?] for help
Output #0, image2, to '/var/folders/yq/lq6vnk9s693bp4xr5wktm1vh0000gn/T/poster_22_kvc0v.jpg':
Metadata:
major_brand : mp42
minor_version : 0
compatible_brands: isommp42
com.android.version: 9
location : +51.2380+006.7902/
location-eng : +51.2380+006.7902/
encoder : Lavf58.20.100
Stream #0:0(eng): Video: mjpeg, yuvj420p(pc), 1080x1920 [SAR 1:1 DAR 9:16], q=2-31, 200 kb/s, 30.13 fps, 30.13 tbn, 30.13 tbc (default)
Metadata:
encoder : Lavc58.35.100 mjpeg
creation_time : 2019-02-28T13:41:56.000000Z
handler_name : VideoHandle
Side data:
cpb: bitrate max/min/avg: 0/0/200000 buffer size: 0 vbv_delay: -1
displaymatrix: rotation of -0.00 degrees
frame= 1 fps=0.0 q=6.4 Lsize=N/A time=00:00:00.03 bitrate=N/A speed=0.263x
video:97kB audio:0kB subtitle:0kB other streams:0kB global headers:0kB muxing overhead: unknown
"""
    width, height = get_video_dimensions(ffmpeg_output.split("\n"))
    assert width == 1080
    assert height == 1920
def test_video_from_android_landscape(self):
    """A 1920x1080 Android clip without rotation metadata keeps its landscape dimensions."""
    # Verbatim ffmpeg stderr fixture.
    ffmpeg_output = """
ffmpeg version 4.1.1 Copyright (c) 2000-2019 the FFmpeg developers
built with Apple LLVM version 10.0.0 (clang-1000.11.45.5)
libavutil 56. 22.100 / 56. 22.100
libavcodec 58. 35.100 / 58. 35.100
libavformat 58. 20.100 / 58. 20.100
libavdevice 58. 5.100 / 58. 5.100
libavfilter 7. 40.101 / 7. 40.101
libavresample 4. 0. 0 / 4. 0. 0
libswscale 5. 3.100 / 5. 3.100
libswresample 3. 3.100 / 3. 3.100
libpostproc 55. 3.100 / 55. 3.100
Input #0, mov,mp4,m4a,3gp,3g2,mj2, from '/Users/jochen/projects/homepage/homepage/media/cast_videos/VID_20190228_150527.mp4':
Metadata:
major_brand : mp42
minor_version : 0
compatible_brands: isommp42
creation_time : 2019-02-28T14:05:27.000000Z
location : +51.2380+006.7902/
location-eng : +51.2380+006.7902/
com.android.version: 9
Duration: 00:00:02.23, start: 0.000000, bitrate: 22175 kb/s
Stream #0:0(eng): Audio: aac (LC) (mp4a / 0x6134706D), 44100 Hz, stereo, fltp, 193 kb/s (default)
Metadata:
creation_time : 2019-02-28T14:05:27.000000Z
handler_name : SoundHandle
Stream #0:1(eng): Video: h264 (High) (avc1 / 0x31637661), yuvj420p(pc, bt470bg/bt470bg/smpte170m), 1920x1080, 21968 kb/s, SAR 1:1 DAR 16:9, 30.01 fps, 30 tbr, 90k tbn, 180k tbc (default)
Metadata:
creation_time : 2019-02-28T14:05:27.000000Z
handler_name : VideoHandle
Stream mapping:
Stream #0:1 -> #0:0 (h264 (native) -> mjpeg (native))
Press [q] to stop, [?] for help
Output #0, image2, to '/var/folders/yq/lq6vnk9s693bp4xr5wktm1vh0000gn/T/poster_qcptirnt.jpg':
Metadata:
major_brand : mp42
minor_version : 0
compatible_brands: isommp42
com.android.version: 9
location : +51.2380+006.7902/
location-eng : +51.2380+006.7902/
encoder : Lavf58.20.100
Stream #0:0(eng): Video: mjpeg, yuvj420p(pc), 1920x1080 [SAR 1:1 DAR 16:9], q=2-31, 200 kb/s, 30 fps, 30 tbn, 30 tbc (default)
Metadata:
creation_time : 2019-02-28T14:05:27.000000Z
handler_name : VideoHandle
encoder : Lavc58.35.100 mjpeg
Side data:
cpb: bitrate max/min/avg: 0/0/200000 buffer size: 0 vbv_delay: -1
frame= 1 fps=0.0 q=6.4 Lsize=N/A time=00:00:00.03 bitrate=N/A speed=0.332x
video:75kB audio:0kB subtitle:0kB other streams:0kB global headers:0kB muxing overhead: unknown
"""
    width, height = get_video_dimensions(ffmpeg_output.split("\n"))
    assert width == 1920
    assert height == 1080
def test_video_from_handbrake_landscape(self):
    """A HandBrake-encoded 1920x1080 file (no rotation) reports landscape dimensions."""
    # Verbatim ffmpeg stderr fixture.
    ffmpeg_output = """
ffmpeg version 4.1.1 Copyright (c) 2000-2019 the FFmpeg developers
built with Apple LLVM version 10.0.0 (clang-1000.11.45.5)
libavutil 56. 22.100 / 56. 22.100
libavcodec 58. 35.100 / 58. 35.100
libavformat 58. 20.100 / 58. 20.100
libavdevice 58. 5.100 / 58. 5.100
libavfilter 7. 40.101 / 7. 40.101
libavresample 4. 0. 0 / 4. 0. 0
libswscale 5. 3.100 / 5. 3.100
libswresample 3. 3.100 / 3. 3.100
libpostproc 55. 3.100 / 55. 3.100
Input #0, mov,mp4,m4a,3gp,3g2,mj2, from '/Users/jochen/projects/homepage/homepage/media/cast_videos/IMG_0563_kqTcRSQ.mp4':
Metadata:
major_brand : mp42
minor_version : 512
compatible_brands: isomiso2avc1mp41
creation_time : 2019-02-28T11:33:03.000000Z
title : Ein langes Brillenvideo
date : 2018-08-21T12:28:17+0200
encoder : HandBrake 1.2.1 2019021700
Duration: 00:04:32.19, start: 0.000000, bitrate: 3104 kb/s
Stream #0:0(und): Video: h264 (Main) (avc1 / 0x31637661), yuv420p(tv, bt709), 1920x1080 [SAR 1:1 DAR 16:9], 2931 kb/s, 30 fps, 30 tbr, 90k tbn, 180k tbc (default)
Metadata:
creation_time : 2019-02-28T11:33:03.000000Z
handler_name : VideoHandler
Stream #0:1(und): Audio: aac (LC) (mp4a / 0x6134706D), 44100 Hz, mono, fltp, 164 kb/s (default)
Metadata:
creation_time : 2019-02-28T11:33:03.000000Z
handler_name : SoundHandler
Stream mapping:
Stream #0:0 -> #0:0 (h264 (native) -> mjpeg (native))
Press [q] to stop, [?] for help
[swscaler @ 0x7f8a4513ac00] deprecated pixel format used, make sure you did set range correctly
Output #0, image2, to '/var/folders/yq/lq6vnk9s693bp4xr5wktm1vh0000gn/T/poster_abfyafdr.jpg':
Metadata:
major_brand : mp42
minor_version : 512
compatible_brands: isomiso2avc1mp41
date : 2018-08-21T12:28:17+0200
title : Ein langes Brillenvideo
encoder : Lavf58.20.100
Stream #0:0(und): Video: mjpeg, yuvj420p(pc), 1920x1080 [SAR 1:1 DAR 16:9], q=2-31, 200 kb/s, 30 fps, 30 tbn, 30 tbc (default)
Metadata:
creation_time : 2019-02-28T11:33:03.000000Z
handler_name : VideoHandler
encoder : Lavc58.35.100 mjpeg
Side data:
cpb: bitrate max/min/avg: 0/0/200000 buffer size: 0 vbv_delay: -1
frame= 1 fps=0.0 q=8.6 Lsize=N/A time=00:00:00.03 bitrate=N/A speed=0.194x
video:84kB audio:0kB subtitle:0kB other streams:0kB global headers:0kB muxing overhead: unknown
"""
    width, height = get_video_dimensions(ffmpeg_output.split("\n"))
    assert width == 1920
    assert height == 1080
def test_video_from_ios_fotos_landscape(self):
ffmpeg_output = """
ffmpeg version 4.1.1 Copyright (c) 2000-2019 the FFmpeg developers
built with Apple LLVM version 10.0.0 (clang-1000.11.45.5)
libavutil 56. 22.100 / 56. 22.100
libavcodec 58. 35.100 / 58. 35.100
libavformat 58. 20.100 / 58. 20.100
libavdevice 58. 5.100 / 58. 5.100
libavfilter 7. 40.101 / 7. 40.101
libavresample 4. 0. 0 / 4. 0. 0
libswscale 5. 3.100 / 5. 3.100
libswresample 3. 3.100 / 3. 3.100
libpostproc 55. 3.100 / 55. 3.100
Input #0, mov,mp4,m4a,3gp,3g2,mj2, from '/Users/jochen/projects/homepage/homepage/media/cast_videos/IMG_0563.m4v':
Metadata:
major_brand : M4V
minor_version : 1
compatible_brands: M4V M4A mp42isom
creation_time : 2019-02-28T11:29:47.000000Z
date : 2018-08-21T12:28:17+0200
title : Ein langes Brillenvideo
make : Apple
Duration: 00:04:32.11, start: 0.000000, bitrate: 10591 kb/s
Stream #0:0(und): Audio: aac (LC) (mp4a / 0x6134706D), 44100 Hz, mono, fltp, 88 kb/s (default)
Metadata:
creation_time : 2019-02-28T11:29:47.000000Z
handler_name : Core Media Audio
Stream #0:1(und): Video: h264 (High) (avc1 / 0x31637661), yuv420p(tv, bt709), 1920x1080 [SAR 1:1 DAR 16:9], 10497 kb/s, 30.01 fps, 30 tbr, 600 tbn, 1200 tbc (default)
Metadata:
creation_time : 2019-02-28T11:29:47.000000Z
handler_name : Core Media Video
Stream mapping:
Stream #0:1 -> #0:0 (h264 (native) -> mjpeg (native))
Press [q] to stop, [?] for help
[swscaler @ 0x7f8115882e00] deprecated pixel format used, make sure you did set range correctly
Output #0, image2, to '/var/folders/yq/lq6vnk9s693bp4xr5wktm1vh0000gn/T/poster_5rsf6f9y.jpg':
Metadata:
major_brand : M4V
minor_version : 1
compatible_brands: M4V M4A mp42isom
make : Apple
date : 2018-08-21T12:28:17+0200
title : Ein langes Brillenvideo
encoder : Lavf58.20.100
Stream #0:0(und): Video: mjpeg, yuvj420p(pc), 1920x1080 [SAR 1:1 DAR 16:9], q=2-31, 200 kb/s, 30 fps, 30 tbn, 30 tbc (default)
Metadata:
creation_time : 2019-02-28T11:29:47.000000Z
handler_name : Core Media Video
encoder : Lavc58.35.100 mjpeg
Side data:
cpb: bitrate max/min/avg: 0/0/200000 buffer size: 0 vbv_delay: -1
frame= 1 fps=0.0 q=8.6 Lsize=N/A time=00:00:00.03 bitrate=N/A speed=0.269x
video:91kB audio:0kB subtitle:0kB other streams:0kB global headers:0kB muxing overhead: unknown
"""
width, height = get_video_dimensions(ffmpeg_output.split("\n"))
assert width == 1920
assert height == 1080
def test_video_from_handbrake_portrait(self):
ffmpeg_output = """
ffprobe version 4.1.3 Copyright (c) 2007-2019 the FFmpeg developers
built with Apple LLVM version 10.0.1 (clang-1001.0.46.4)
configuration: --prefix=/usr/local/Cellar/ffmpeg/4.1.3_1 --enable-shared --enable-pthreads --enable-version3 --enable-hardcoded-tables --enable-avresample --cc=clang --host-cflags='-I/Library/Java/JavaVirtualMachines/adoptopenjdk-11.0.2.jdk/Contents/Home/include -I/Library/Java/JavaVirtualMachines/adoptopenjdk-11.0.2.jdk/Contents/Home/include/darwin' --host-ldflags= --enable-ffplay --enable-gnutls --enable-gpl --enable-libaom --enable-libbluray --enable-libmp3lame --enable-libopus --enable-librubberband --enable-libsnappy --enable-libtesseract --enable-libtheora --enable-libvorbis --enable-libvpx --enable-libx264 --enable-libx265 --enable-libxvid --enable-lzma --enable-libfontconfig --enable-libfreetype --enable-frei0r --enable-libass --enable-libopencore-amrnb --enable-libopencore-amrwb --enable-libopenjpeg --enable-librtmp --enable-libspeex --enable-videotoolbox --disable-libjack --disable-indev=jack --enable-libaom --enable-libsoxr
libavutil 56. 22.100 / 56. 22.100
libavcodec 58. 35.100 / 58. 35.100
libavformat 58. 20.100 / 58. 20.100
libavdevice 58. 5.100 / 58. 5.100
libavfilter 7. 40.101 / 7. 40.101
libavresample 4. 0. 0 / 4. 0. 0
libswscale 5. 3.100 / 5. 3.100
libswresample 3. 3.100 / 3. 3.100
libpostproc 55. 3.100 / 55. 3.100
Input #0, mov,mp4,m4a,3gp,3g2,mj2, from 'Michaelsbergschaukel.mp4':
Metadata:
major_brand : mp42
minor_version : 512
compatible_brands: isomiso2avc1mp41
creation_time : 2019-04-27T00:04:26.000000Z
title : Michaelsbergschaukel
date : 2018-09-22T14:58:10+0200
encoder : HandBrake 1.2.1 2019021700
Duration: 00:00:13.79, start: 0.000000, bitrate: 10086 kb/s
Stream #0:0(und): Video: h264 (Main) (avc1 / 0x31637661), yuv420p(tv, bt709), 1920x1080 [SAR 81:256 DAR 9:16], 9970 kb/s, 29.97 fps, 29.97 tbr, 90k tbn, 180k tbc (default)
Metadata:
creation_time : 2019-04-27T00:04:26.000000Z
handler_name : VideoHandler
Stream #0:1(und): Audio: aac (LC) (mp4a / 0x6134706D), 44100 Hz, stereo, fltp, 162 kb/s (default)
Metadata:
creation_time : 2019-04-27T00:04:26.000000Z
handler_name : SoundHandler
"""
width, height = get_video_dimensions(ffmpeg_output.split("\n"))
print("width x height: ", width, height)
assert width == 1080
assert height == 1920
def test_video_from_handbrake_landscape(self):
ffmpeg_output = """
ffprobe version 4.1.3 Copyright (c) 2007-2019 the FFmpeg developers
built with Apple LLVM version 10.0.1 (clang-1001.0.46.4)
configuration: --prefix=/usr/local/Cellar/ffmpeg/4.1.3_1 --enable-shared --enable-pthreads --enable-version3 --enable-hardcoded-tables --enable-avresample --cc=clang --host-cflags='-I/Library/Java/JavaVirtualMachines/adoptopenjdk-11.0.2.jdk/Contents/Home/include -I/Library/Java/JavaVirtualMachines/adoptopenjdk-11.0.2.jdk/Contents/Home/include/darwin' --host-ldflags= --enable-ffplay --enable-gnutls --enable-gpl --enable-libaom --enable-libbluray --enable-libmp3lame --enable-libopus --enable-librubberband --enable-libsnappy --enable-libtesseract --enable-libtheora --enable-libvorbis --enable-libvpx --enable-libx264 --enable-libx265 --enable-libxvid --enable-lzma --enable-libfontconfig --enable-libfreetype --enable-frei0r --enable-libass --enable-libopencore-amrnb --enable-libopencore-amrwb --enable-libopenjpeg --enable-librtmp --enable-libspeex --enable-videotoolbox --disable-libjack --disable-indev=jack --enable-libaom --enable-libsoxr
libavutil 56. 22.100 / 56. 22.100
libavcodec 58. 35.100 / 58. 35.100
libavformat 58. 20.100 / 58. 20.100
libavdevice 58. 5.100 / 58. 5.100
libavfilter 7. 40.101 / 7. 40.101
libavresample 4. 0. 0 / 4. 0. 0
libswscale 5. 3.100 / 5. 3.100
libswresample 3. 3.100 / 3. 3.100
libpostproc 55. 3.100 / 55. 3.100
Input #0, mov,mp4,m4a,3gp,3g2,mj2, from 'Tripptrappkuckuck.mp4':
Metadata:
major_brand : mp42
minor_version : 512
compatible_brands: isomiso2avc1mp41
creation_time : 2019-04-27T00:12:09.000000Z
title : Tripptrappkuckuck
date : 2018-09-25T21:25:49+0200
encoder : HandBrake 1.2.1 2019021700
Duration: 00:00:41.82, start: 0.000000, bitrate: 2030 kb/s
Stream #0:0(und): Video: h264 (Main) (avc1 / 0x31637661), yuv420p(tv, bt709), 1920x1080 [SAR 1:1 DAR 16:9], 1860 kb/s, 29.97 fps, 29.97 tbr, 90k tbn, 180k tbc (default)
Metadata:
creation_time : 2019-04-27T00:12:09.000000Z
handler_name : VideoHandler
Stream #0:1(und): Audio: aac (LC) (mp4a / 0x6134706D), 44100 Hz, stereo, fltp, 162 kb/s (default)
Metadata:
creation_time : 2019-04-27T00:12:09.000000Z
handler_name : SoundHandler
"""
width, height = get_video_dimensions(ffmpeg_output.split("\n"))
print("width x height: ", width, height)
assert width == 1920
assert height == 1080
def test_video_from_empty(self):
ffmpeg_output = """
foo bar baz
"""
width, height = get_video_dimensions(ffmpeg_output.split("\n"))
print("width x height: ", width, height)
assert width == None
assert height == None
| 49.282692
| 950
| 0.658329
| 3,846
| 25,627
| 4.326313
| 0.116745
| 0.007693
| 0.037502
| 0.035699
| 0.926678
| 0.922531
| 0.917844
| 0.911473
| 0.900895
| 0.890679
| 0
| 0.21302
| 0.220197
| 25,627
| 519
| 951
| 49.377649
| 0.619596
| 0.000663
| 0
| 0.824111
| 0
| 0.167984
| 0.907724
| 0.165456
| 0
| 0
| 0.011559
| 0
| 0.039526
| 1
| 0.019763
| false
| 0
| 0.001976
| 0
| 0.023715
| 0.011858
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
a0150a1577e22cdfd50e490bb4a0c6b735bfcac8
| 46
|
py
|
Python
|
opentuner/resultsdb/__init__.py
|
jrk/opentuner
|
0e99e07665e50650bc382c83af3583903f868a6d
|
[
"MIT"
] | 39
|
2021-03-26T02:03:18.000Z
|
2022-03-23T17:39:17.000Z
|
opentuner/resultsdb/__init__.py
|
jrk/opentuner
|
0e99e07665e50650bc382c83af3583903f868a6d
|
[
"MIT"
] | 7
|
2020-05-12T22:38:04.000Z
|
2021-12-29T05:07:25.000Z
|
opentuner/resultsdb/__init__.py
|
jrk/opentuner
|
0e99e07665e50650bc382c83af3583903f868a6d
|
[
"MIT"
] | 6
|
2021-08-30T13:41:32.000Z
|
2022-03-23T20:32:30.000Z
|
from connect import connect
import models
| 6.571429
| 27
| 0.782609
| 6
| 46
| 6
| 0.666667
| 0.722222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.217391
| 46
| 6
| 28
| 7.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
4e52b13d05f4b0caefe667878659b25b80ec1398
| 397
|
py
|
Python
|
chirun/plastex/overrides/__init__.py
|
sthagen/chirun-ncl-chirun
|
45897319d5203b9867b5d6e00b2db1aa90a6580c
|
[
"Apache-2.0"
] | 5
|
2021-12-06T15:57:24.000Z
|
2022-01-24T20:34:00.000Z
|
chirun/plastex/overrides/__init__.py
|
sthagen/chirun-ncl-chirun
|
45897319d5203b9867b5d6e00b2db1aa90a6580c
|
[
"Apache-2.0"
] | 38
|
2021-12-09T13:16:46.000Z
|
2022-03-30T11:42:13.000Z
|
chirun/plastex/overrides/__init__.py
|
sthagen/chirun-ncl-chirun
|
45897319d5203b9867b5d6e00b2db1aa90a6580c
|
[
"Apache-2.0"
] | 1
|
2022-01-17T17:41:35.000Z
|
2022-01-17T17:41:35.000Z
|
from chirun.plastex.overrides.definitions import * # noqa: F401, F403
from chirun.plastex.overrides.labels import * # noqa: F401, F403
from chirun.plastex.overrides.lists import * # noqa: F401, F403
from chirun.plastex.overrides.macros import * # noqa: F401, F403
from chirun.plastex.overrides.math import * # noqa: F401, F403
from chirun.plastex.overrides.other import * # noqa: F401, F403
| 56.714286
| 70
| 0.758186
| 54
| 397
| 5.574074
| 0.259259
| 0.199336
| 0.33887
| 0.518272
| 0.730897
| 0.730897
| 0.730897
| 0.730897
| 0
| 0
| 0
| 0.104956
| 0.13602
| 397
| 6
| 71
| 66.166667
| 0.772595
| 0.254408
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
4e9a9c8c488eaa8a04b0c2913de3e5f02f3536a6
| 68,596
|
py
|
Python
|
benchmarks/SimResults/micro_pinned_train_combos/cmpD_bwavesgccmcfhmmer/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/micro_pinned_train_combos/cmpD_bwavesgccmcfhmmer/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/micro_pinned_train_combos/cmpD_bwavesgccmcfhmmer/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
power = {'BUSES': {'Area': 1.33155,
'Bus/Area': 1.33155,
'Bus/Gate Leakage': 0.00662954,
'Bus/Peak Dynamic': 0.0,
'Bus/Runtime Dynamic': 0.0,
'Bus/Subthreshold Leakage': 0.0691322,
'Bus/Subthreshold Leakage with power gating': 0.0259246,
'Gate Leakage': 0.00662954,
'Peak Dynamic': 0.0,
'Runtime Dynamic': 0.0,
'Subthreshold Leakage': 0.0691322,
'Subthreshold Leakage with power gating': 0.0259246},
'Core': [{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.108043,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.28755,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.578713,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.289529,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.501359,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.287544,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 1.07843,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.197461,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 6.296,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.109331,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0104956,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.116539,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0776217,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.225871,
'Execution Unit/Register Files/Runtime Dynamic': 0.0881174,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.311207,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.769877,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 2.62935,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000601215,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000601215,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00051972,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000199039,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00111504,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00283719,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00590507,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0746198,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 4.74646,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.172882,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.253442,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 7.19885,
'Instruction Fetch Unit/Runtime Dynamic': 0.509687,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.100402,
'L2/Runtime Dynamic': 0.0085628,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 4.17947,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.42353,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0951922,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0951922,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 4.63082,
'Load Store Unit/Runtime Dynamic': 1.98818,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.234728,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.469456,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0833056,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0848095,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.295117,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0283537,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.597635,
'Memory Management Unit/Runtime Dynamic': 0.113163,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 23.3854,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.381432,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0193948,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.144547,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 0.545374,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 5.79432,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.202689,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.0,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.0728353,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.117481,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.0593003,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.249616,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.0833028,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 3.98573,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00305504,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0220919,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0225939,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0220919,
'Execution Unit/Register Files/Runtime Dynamic': 0.0256489,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.0465414,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.135673,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.019,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000383741,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000383741,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000337343,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000132289,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000324563,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00142939,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00356833,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0217201,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 1.38158,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.0424862,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.0737712,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 3.66715,
'Instruction Fetch Unit/Runtime Dynamic': 0.142975,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0269326,
'L2/Runtime Dynamic': 0.0063493,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.08485,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.416622,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.027426,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0274261,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.21436,
'Load Store Unit/Runtime Dynamic': 0.579305,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.0676279,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.135256,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0240014,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0244055,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.0859017,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0069662,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.283241,
'Memory Management Unit/Runtime Dynamic': 0.0313717,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 13.7669,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00328613,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0383656,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.0416518,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 1.82066,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 2.83407e-06,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.202691,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 1.51802e-05,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.519301,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.837614,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.422799,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 1.77971,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.593928,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.98559,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 2.86787e-06,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0217818,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.157511,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.16109,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.157514,
'Execution Unit/Register Files/Runtime Dynamic': 0.182872,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.331832,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 1.13844,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 3.7091,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00107708,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00107708,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000934357,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000359639,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00231407,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00540258,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0104618,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.15486,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.43323,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.337083,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.525974,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.96396,
'Instruction Fetch Unit/Runtime Dynamic': 1.03378,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0433965,
'L2/Runtime Dynamic': 0.029639,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 8.22925,
'Load Store Unit/Data Cache/Runtime Dynamic': 3.39972,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.226212,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.226212,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 9.29747,
'Load Store Unit/Runtime Dynamic': 4.74153,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.5578,
'Load Store Unit/StoreQ/Runtime Dynamic': 1.1156,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.197965,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.198614,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.399995,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0552692,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.896173,
'Memory Management Unit/Runtime Dynamic': 0.253883,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 27.7761,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 8.11441e-06,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0234295,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.277456,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.300894,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 10.0688,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 1.88938e-06,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.20269,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 7.59011e-06,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.131433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.211996,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.107008,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.450437,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.150319,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.11697,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 1.43393e-06,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00551287,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0398656,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0407711,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.039867,
'Execution Unit/Register Files/Runtime Dynamic': 0.0462839,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.0839861,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.2157,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.32049,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.0018948,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.0018948,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.0017051,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.00069,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000585679,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00608038,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.016212,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0391943,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 2.49309,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.148792,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.133121,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 4.8326,
'Instruction Fetch Unit/Runtime Dynamic': 0.3434,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0309888,
'L2/Runtime Dynamic': 0.00725973,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.4281,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.582997,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.038531,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0385309,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.61005,
'Load Store Unit/Runtime Dynamic': 0.811549,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.0950109,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.190021,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0337197,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0340593,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.155011,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0247648,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.369044,
'Memory Management Unit/Runtime Dynamic': 0.058824,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 15.5491,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 4.18536e-06,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00592992,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0666757,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.0726098,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 2.61413,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328}],
'DRAM': {'Area': 0,
'Gate Leakage': 0,
'Peak Dynamic': 7.693560777365681,
'Runtime Dynamic': 7.693560777365681,
'Subthreshold Leakage': 4.252,
'Subthreshold Leakage with power gating': 4.252},
'L3': [{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.326122,
'Runtime Dynamic': 0.1017,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364}],
'Processor': {'Area': 191.908,
'Gate Leakage': 1.53485,
'Peak Dynamic': 80.8036,
'Peak Power': 113.916,
'Runtime Dynamic': 20.3996,
'Subthreshold Leakage': 31.5774,
'Subthreshold Leakage with power gating': 13.9484,
'Total Cores/Area': 128.669,
'Total Cores/Gate Leakage': 1.4798,
'Total Cores/Peak Dynamic': 80.4775,
'Total Cores/Runtime Dynamic': 20.2979,
'Total Cores/Subthreshold Leakage': 24.7074,
'Total Cores/Subthreshold Leakage with power gating': 10.2429,
'Total L3s/Area': 61.9075,
'Total L3s/Gate Leakage': 0.0484137,
'Total L3s/Peak Dynamic': 0.326122,
'Total L3s/Runtime Dynamic': 0.1017,
'Total L3s/Subthreshold Leakage': 6.80085,
'Total L3s/Subthreshold Leakage with power gating': 3.32364,
'Total Leakage': 33.1122,
'Total NoCs/Area': 1.33155,
'Total NoCs/Gate Leakage': 0.00662954,
'Total NoCs/Peak Dynamic': 0.0,
'Total NoCs/Runtime Dynamic': 0.0,
'Total NoCs/Subthreshold Leakage': 0.0691322,
'Total NoCs/Subthreshold Leakage with power gating': 0.0259246}}
| 75.050328
| 124
| 0.681906
| 8,090
| 68,596
| 5.77602
| 0.067244
| 0.123609
| 0.112994
| 0.093477
| 0.939758
| 0.930577
| 0.918764
| 0.888118
| 0.863059
| 0.842429
| 0
| 0.131435
| 0.224372
| 68,596
| 914
| 125
| 75.050328
| 0.746828
| 0
| 0
| 0.642232
| 0
| 0
| 0.657536
| 0.048107
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
14d759f6eacf8be6ba0aef8643db10189cebf573
| 2,015
|
py
|
Python
|
tests/core/test_provider_payment_execution.py
|
lassejaco/pretix-eth-payment-plugin
|
be514a7387de8399cb11c9dd8971f286ccc9a72c
|
[
"Apache-2.0"
] | null | null | null |
tests/core/test_provider_payment_execution.py
|
lassejaco/pretix-eth-payment-plugin
|
be514a7387de8399cb11c9dd8971f286ccc9a72c
|
[
"Apache-2.0"
] | 1
|
2020-05-01T22:16:35.000Z
|
2020-05-04T20:32:38.000Z
|
tests/core/test_provider_payment_execution.py
|
lassejaco/pretix-eth-payment-plugin
|
be514a7387de8399cb11c9dd8971f286ccc9a72c
|
[
"Apache-2.0"
] | 1
|
2020-04-30T22:15:10.000Z
|
2020-04-30T22:15:10.000Z
|
import time
from django.contrib.sessions.backends.db import SessionStore
from django.test import RequestFactory
import pytest
@pytest.mark.django_db
def test_provider_execute_successful_payment_in_ETH(provider, get_order_and_payment):
    """Executing an ETH payment leaves both order and payment in the
    pending state (confirmation happens out-of-band, after execution)."""
    order, payment = get_order_and_payment()
    # Sanity-check the fixtures' initial state before touching anything.
    assert order.status == order.STATUS_PENDING
    assert payment.state == payment.PAYMENT_STATE_PENDING
    provider.settings.set('ETH_RATE', '0.004')
    # Build a session carrying the data the provider requires to treat
    # the payment as valid.
    session = SessionStore()
    session.create()
    session['payment_ethereum_currency_type'] = 'ETH'
    session['payment_ethereum_time'] = int(time.time()) - 10
    session['payment_ethereum_amount'] = 100
    # Attach event and session to a bare checkout request.
    request = RequestFactory().get('/checkout')
    request.event = provider.event
    request.session = session
    provider.execute_payment(request, payment)
    # Reload from the database and confirm nothing transitioned yet.
    order.refresh_from_db()
    payment.refresh_from_db()
    assert order.status == order.STATUS_PENDING
    assert payment.state == payment.PAYMENT_STATE_PENDING
@pytest.mark.django_db
def test_provider_execute_successful_payment_in_DAI(provider, get_order_and_payment):
    """Executing a DAI payment leaves both order and payment in the
    pending state (confirmation happens out-of-band, after execution)."""
    order, payment = get_order_and_payment()
    # Sanity-check the fixtures' initial state before touching anything.
    assert order.status == order.STATUS_PENDING
    assert payment.state == payment.PAYMENT_STATE_PENDING
    provider.settings.set('DAI_RATE', '0.004')
    # Build a session carrying the data the provider requires to treat
    # the payment as valid.
    session = SessionStore()
    session.create()
    session['payment_ethereum_currency_type'] = 'DAI'
    session['payment_ethereum_time'] = int(time.time()) - 10
    session['payment_ethereum_amount'] = 100
    # Attach event and session to a bare checkout request.
    request = RequestFactory().get('/checkout')
    request.event = provider.event
    request.session = session
    provider.execute_payment(request, payment)
    # Reload from the database and confirm nothing transitioned yet.
    order.refresh_from_db()
    payment.refresh_from_db()
    assert order.status == order.STATUS_PENDING
    assert payment.state == payment.PAYMENT_STATE_PENDING
| 29.632353
| 85
| 0.744913
| 251
| 2,015
| 5.7251
| 0.223108
| 0.061239
| 0.091858
| 0.050104
| 0.911621
| 0.911621
| 0.911621
| 0.911621
| 0.911621
| 0.911621
| 0
| 0.010651
| 0.16129
| 2,015
| 67
| 86
| 30.074627
| 0.839645
| 0.06402
| 0
| 0.772727
| 0
| 0
| 0.105151
| 0.078598
| 0
| 0
| 0
| 0
| 0.181818
| 1
| 0.045455
| false
| 0
| 0.090909
| 0
| 0.136364
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
090f27702693d70e44d98fd8ae251051575b30f1
| 129
|
py
|
Python
|
examples/zh-cn/api/captcha/__init__.py
|
yulix/restful-api-contract
|
fe1a9364f295b79dd668d3a82f9ff2c7ff1b6618
|
[
"MIT"
] | null | null | null |
examples/zh-cn/api/captcha/__init__.py
|
yulix/restful-api-contract
|
fe1a9364f295b79dd668d3a82f9ff2c7ff1b6618
|
[
"MIT"
] | null | null | null |
examples/zh-cn/api/captcha/__init__.py
|
yulix/restful-api-contract
|
fe1a9364f295b79dd668d3a82f9ff2c7ff1b6618
|
[
"MIT"
] | null | null | null |
from flask import Blueprint
# Blueprint grouping all captcha endpoints under the /api/captcha prefix.
captcha_api = Blueprint('captcha_api', __name__, url_prefix='/api/captcha')
# Imported only after `captcha_api` exists so the view module can register
# its routes on it — presumably avoids a circular import; TODO confirm
# against the sibling captcha module.
from .captcha import *
| 21.5
| 75
| 0.775194
| 17
| 129
| 5.470588
| 0.529412
| 0.344086
| 0.408602
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.116279
| 129
| 5
| 76
| 25.8
| 0.815789
| 0
| 0
| 0
| 0
| 0
| 0.178295
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0.666667
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
|
0
| 7
|
094d66e21d7c6a1554de2d2f8da0b7b29ecb440c
| 183
|
py
|
Python
|
uoffice/apis/__init__.py
|
FLHCoLtd/uoffice
|
305a9d709321a54bf3827dd8c7779058e0c63e06
|
[
"MIT"
] | null | null | null |
uoffice/apis/__init__.py
|
FLHCoLtd/uoffice
|
305a9d709321a54bf3827dd8c7779058e0c63e06
|
[
"MIT"
] | null | null | null |
uoffice/apis/__init__.py
|
FLHCoLtd/uoffice
|
305a9d709321a54bf3827dd8c7779058e0c63e06
|
[
"MIT"
] | 1
|
2022-03-31T03:39:15.000Z
|
2022-03-31T03:39:15.000Z
|
from .cron_handlers import NotifyUofCleanDutyHandler
from .cron_handlers import ListEnvVarsHandler
from .cron_handlers import QueryOnDutyHandler
from .echo_handler import EchoHandler
| 36.6
| 52
| 0.89071
| 20
| 183
| 7.95
| 0.5
| 0.150943
| 0.301887
| 0.415094
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.087432
| 183
| 4
| 53
| 45.75
| 0.952096
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
1179c4f3cff4aeb9ed2b432b811c36ee67ef8da6
| 3,298
|
py
|
Python
|
Processing/sort_excluded_files.py
|
jpazdera/PazdKaha22
|
9b3157cbcc68aafc829dbd38f3271f884caf541d
|
[
"CC-BY-4.0"
] | null | null | null |
Processing/sort_excluded_files.py
|
jpazdera/PazdKaha22
|
9b3157cbcc68aafc829dbd38f3271f884caf541d
|
[
"CC-BY-4.0"
] | null | null | null |
Processing/sort_excluded_files.py
|
jpazdera/PazdKaha22
|
9b3157cbcc68aafc829dbd38f3271f884caf541d
|
[
"CC-BY-4.0"
] | null | null | null |
import os
def sort_excluded_files(base_dir='/data/eeg/scalp/ltp/ltpFR3_MTurk'):
    """Move result files of flagged subjects into per-reason subfolders.

    Reads three listing files (``EXCLUDED.txt``, ``BAD_SESS.txt``,
    ``REJECTED.txt``) in *base_dir*, each holding one subject ID per line,
    then relocates each subject's events/data/stats JSON files and report
    PDF into the matching ``excluded``/``bad_sess``/``rejected``
    subdirectory of the folder that holds them.

    :param base_dir: study root directory; defaults to the production
        ltpFR3_MTurk location, so existing callers are unaffected.
    """
    # (listing file, destination subfolder) for each exclusion reason,
    # processed in the same order as the original implementation.
    reasons = [('EXCLUDED.txt', 'excluded'),
               ('BAD_SESS.txt', 'bad_sess'),
               ('REJECTED.txt', 'rejected')]
    # (folder, extension) of every per-subject artifact to relocate.
    artifacts = [('events', 'json'), ('data', 'json'),
                 ('stats', 'json'), ('reports', 'pdf')]

    # Read all listings up front so a missing listing file fails before
    # any file has been moved (preserves the original read-then-move order).
    subject_lists = []
    for listing, subfolder in reasons:
        with open(os.path.join(base_dir, listing), 'r') as f:
            subject_lists.append((subfolder, [s.strip() for s in f.readlines()]))

    for subfolder, subjects in subject_lists:
        for subj in subjects:
            for folder, ext in artifacts:
                fname = '%s.%s' % (subj, ext)
                src = os.path.join(base_dir, folder, fname)
                dst = os.path.join(base_dir, folder, subfolder, fname)
                if os.path.exists(src):
                    os.rename(src, dst)
if __name__ == "__main__":
sort_excluded_files()
| 70.170213
| 146
| 0.661916
| 538
| 3,298
| 3.949814
| 0.074349
| 0.128471
| 0.220235
| 0.275294
| 0.922353
| 0.922353
| 0.906353
| 0.896
| 0.88
| 0.88
| 0
| 0.01383
| 0.144936
| 3,298
| 46
| 147
| 71.695652
| 0.739716
| 0
| 0
| 0.324324
| 0
| 0
| 0.581868
| 0.578532
| 0
| 0
| 0
| 0
| 0
| 1
| 0.027027
| false
| 0
| 0.027027
| 0
| 0.054054
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
117b17b65cb2f62cbae7e4e8dfc8886c933aa72c
| 5,053
|
py
|
Python
|
tests/parser/21-Complex-Optimization-of-Answer-Sets.asp.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/21-Complex-Optimization-of-Answer-Sets.asp.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/21-Complex-Optimization-of-Answer-Sets.asp.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
# ASP (answer-set programming) encoding used as a parser test fixture.
# NOTE(review): `input` shadows the builtin of the same name — acceptable
# in a generated test file, but do not reuse this pattern in library code.
input = """
eleb(P) :- rule(_,pos(P)).
nhold(conjunction(S)) :- eleb(conjunction(S)), not hold(P), set(S,pos(P)).
nhold(conjunction(S)) :- eleb(conjunction(S)), hold(P), set(S,neg(P)).
hold(conjunction(S)) :- eleb(conjunction(S)), not nhold(conjunction(S)).
hold(atom(A)) :- rule(pos(atom(A)), pos(B)), hold(B).
:- rule(pos(false), pos(B)), hold(B).
{ hold(P) : wlist(S,_,pos(P),_) } :- rule(pos(sum(_,S,_)),pos(B)), hold(B).
elem(E) :- eleb(E).
elem(E) :- rule(pos(E),_).
elem(P) :- rule(pos(sum(_,S,_)),_), wlist(S,_,pos(P),_).
elem(P) :- minimize(J,S), wlist(S,_,pos(P),W).
supp(atom(A),B) :- rule(pos(atom(A)), pos(B)).
supp(atom(A),B) :- rule(pos(sum(_,S,_)),pos(B)), wlist(S,_,pos(atom(A)),_).
supp(atom(A)) :- supp(atom(A),B).
set(S) :- set(S,E).
fact(atom(A)) :- rule(pos(atom(A)),pos(conjunction(S))), not set(S).
true(atom(A)) :- fact(atom(A)).
true(atom(A)) | fail(atom(A)) :- supp(atom(A)), not fact(atom(A)).
fail(atom(A)) :- elem(atom(A)), not supp(atom(A)).
fail(false).
sett(S,0,P) :- set(S,P).
sett(S,N+1,P2) :- sett(S,N,P1), sett(S,N,P2), P1 < P2.
setn(S,N,P) :- sett(S,N,P), not sett(S,N+1,P).
setn(S,N) :- setn(S,N,_).
true(conjunction(S),N+1) :- elem(conjunction(S)), setn(S,N), not setn(S,N+1).
true(conjunction(S),N) :- elem(conjunction(S)), true(P), setn(S,N,pos(P)), true(conjunction(S),N+1).
true(conjunction(S),N) :- elem(conjunction(S)), fail(P), setn(S,N,neg(P)), true(conjunction(S),N+1).
true(conjunction(S)) :- true(conjunction(S),0).
fail(conjunction(S)) :- elem(conjunction(S)), set(S,pos(P)), fail(P).
fail(conjunction(S)) :- elem(conjunction(S)), set(S,neg(N)), true(N).
suppt(S,0,P) :- supp(S,P).
suppt(S,N+1,P2) :- suppt(S,N,P1), suppt(S,N,P2), P1 < P2.
suppn(S,N,P) :- suppt(S,N,P), not suppt(S,N+1,P).
suppn(S,N) :- suppn(S,N,_).
suppf(S,N+1) :- suppn(S,N), not suppn(S,N+1).
suppf(S,N) :- fail(P), suppn(S,N,P), suppf(S,N+1).
bot :- true(atom(A)), suppf(atom(A),0).
bot :- rule(pos(H),pos(B)), true(B), fail(H).
true(atom(A)) :- supp(atom(A)), not fact(atom(A)), bot.
fail(atom(A)) :- supp(atom(A)), not fact(atom(A)), bot.
target(P,N) :- minimize(_,S), wlist(S,N,pos(P),_).
target(N) :- target(P,N).
equal(N+1) :- target(N), not target(N+1).
equal(N) :- target(P,N), true(P), hold(P), equal(N+1).
equal(N) :- target(P,N), fail(P), not hold(P), equal(N+1).
bot :- equal(0).
bot :- target(P,_), true(P), not hold(P).
:- not bot.
"""
# Expected parser output: the program is expected to round-trip unchanged
# (identical to `input` above).
output = """
eleb(P) :- rule(_,pos(P)).
nhold(conjunction(S)) :- eleb(conjunction(S)), not hold(P), set(S,pos(P)).
nhold(conjunction(S)) :- eleb(conjunction(S)), hold(P), set(S,neg(P)).
hold(conjunction(S)) :- eleb(conjunction(S)), not nhold(conjunction(S)).
hold(atom(A)) :- rule(pos(atom(A)), pos(B)), hold(B).
:- rule(pos(false), pos(B)), hold(B).
{ hold(P) : wlist(S,_,pos(P),_) } :- rule(pos(sum(_,S,_)),pos(B)), hold(B).
elem(E) :- eleb(E).
elem(E) :- rule(pos(E),_).
elem(P) :- rule(pos(sum(_,S,_)),_), wlist(S,_,pos(P),_).
elem(P) :- minimize(J,S), wlist(S,_,pos(P),W).
supp(atom(A),B) :- rule(pos(atom(A)), pos(B)).
supp(atom(A),B) :- rule(pos(sum(_,S,_)),pos(B)), wlist(S,_,pos(atom(A)),_).
supp(atom(A)) :- supp(atom(A),B).
set(S) :- set(S,E).
fact(atom(A)) :- rule(pos(atom(A)),pos(conjunction(S))), not set(S).
true(atom(A)) :- fact(atom(A)).
true(atom(A)) | fail(atom(A)) :- supp(atom(A)), not fact(atom(A)).
fail(atom(A)) :- elem(atom(A)), not supp(atom(A)).
fail(false).
sett(S,0,P) :- set(S,P).
sett(S,N+1,P2) :- sett(S,N,P1), sett(S,N,P2), P1 < P2.
setn(S,N,P) :- sett(S,N,P), not sett(S,N+1,P).
setn(S,N) :- setn(S,N,_).
true(conjunction(S),N+1) :- elem(conjunction(S)), setn(S,N), not setn(S,N+1).
true(conjunction(S),N) :- elem(conjunction(S)), true(P), setn(S,N,pos(P)), true(conjunction(S),N+1).
true(conjunction(S),N) :- elem(conjunction(S)), fail(P), setn(S,N,neg(P)), true(conjunction(S),N+1).
true(conjunction(S)) :- true(conjunction(S),0).
fail(conjunction(S)) :- elem(conjunction(S)), set(S,pos(P)), fail(P).
fail(conjunction(S)) :- elem(conjunction(S)), set(S,neg(N)), true(N).
suppt(S,0,P) :- supp(S,P).
suppt(S,N+1,P2) :- suppt(S,N,P1), suppt(S,N,P2), P1 < P2.
suppn(S,N,P) :- suppt(S,N,P), not suppt(S,N+1,P).
suppn(S,N) :- suppn(S,N,_).
suppf(S,N+1) :- suppn(S,N), not suppn(S,N+1).
suppf(S,N) :- fail(P), suppn(S,N,P), suppf(S,N+1).
bot :- true(atom(A)), suppf(atom(A),0).
bot :- rule(pos(H),pos(B)), true(B), fail(H).
true(atom(A)) :- supp(atom(A)), not fact(atom(A)), bot.
fail(atom(A)) :- supp(atom(A)), not fact(atom(A)), bot.
target(P,N) :- minimize(_,S), wlist(S,N,pos(P),_).
target(N) :- target(P,N).
equal(N+1) :- target(N), not target(N+1).
equal(N) :- target(P,N), true(P), hold(P), equal(N+1).
equal(N) :- target(P,N), fail(P), not hold(P), equal(N+1).
bot :- equal(0).
bot :- target(P,_), true(P), not hold(P).
:- not bot.
"""
| 34.848276
| 102
| 0.54997
| 958
| 5,053
| 2.856994
| 0.043841
| 0.046767
| 0.024114
| 0.047497
| 0.995981
| 0.995981
| 0.995981
| 0.995981
| 0.995981
| 0.995981
| 0
| 0.013957
| 0.149218
| 5,053
| 144
| 103
| 35.090278
| 0.622703
| 0
| 0
| 0.98
| 0
| 0.52
| 0.993865
| 0.205818
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
f5022f392a96c77a43bc1b3615ae97c9c2c1d1ee
| 18,798
|
py
|
Python
|
adrian_code/publication_jif_plots.py
|
senthil10/dc_reporting_scripts
|
9d7d75a71a7f3cd28bec2e0dc3ea1dd92b0018b0
|
[
"MIT"
] | null | null | null |
adrian_code/publication_jif_plots.py
|
senthil10/dc_reporting_scripts
|
9d7d75a71a7f3cd28bec2e0dc3ea1dd92b0018b0
|
[
"MIT"
] | null | null | null |
adrian_code/publication_jif_plots.py
|
senthil10/dc_reporting_scripts
|
9d7d75a71a7f3cd28bec2e0dc3ea1dd92b0018b0
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Author: Adrian Lärkeryd <adrian.larkeryd@scilifelab.uu.se>
# Plotting libs
import plotly
import plotly.graph_objs as go
# My own files
from colour_science import SCILIFE_COLOURS
from issn_files import ISSN_IMPACT_2019, ISSN_IMPACT_2017, ISSN_IMPACT_2016, ISSN_IMPACT_2015, ISSN_TO_ISSNL, ISSNL_TO_ISSN, issn_to_impact
from publications_api import Publications_api
# Ignore publications if they have only these labels
def check_valid_label(plabels):
    """Return True when at least one of *plabels* is not on the ignore list.

    An empty label collection yields False (nothing valid present).
    """
    ignored = frozenset([
        'Centre for Cellular Imaging', 'Intravital Microscopy Facility',
        'Biochemical Imaging Centre Umeå', 'Advanced FISH Technologies',
        'National Resource for Mass Spectrometry Imaging',
        'Gothenburg Imaging Mass Spectrometry', 'Glycoproteomics',
        'Targeted and Structural Proteomics', 'AIDA Data Hub',
        'Clinical Genomics Linköping', 'Clinical Genomics Umeå',
        'Clinical Genomics Örebro',
    ])
    # A single non-ignored label is enough to keep the publication.
    for label in plabels:
        if label not in ignored:
            return True
    return False
print "PUBLICATION PLOTS..."
#allyrs = ["2010", "2011", "2012", "2013", "2014", "2015", "2016", "2017", "2018", "2019"]
#years_1015 = ["2010", "2011", "2012", "2013", "2014", "2015", "2016"]
allyrs = ["2013", "2014", "2015", "2016", "2017", "2018", "2019"]
years_1015 = ["2013", "2014", "2015", "2016"]
years_161718 = ["2017", "2018", "2019"]
pub_getter_1015 = Publications_api(years=years_1015)
pub_allyrs = pub_getter_1015.get_publications()
aff_allyrs = pub_getter_1015.get_publications_affiliated()
print "Sources:", pub_getter_1015.source_links
pub_getter_161718 = Publications_api(years=years_161718)
pub_161718 = pub_getter_161718.get_publications()
aff_161718 = pub_getter_161718.get_publications_affiliated()
#[pub['labels'].keys() for pub in pub_161718 if pub['published'].split('-')[0] == '2019' and check_valid_label(pub['labels'].keys()) and check_date < datetime.datetime.strptime(pub['created'].split('T')[0], '%Y-%M-%d')]
pub_allyrs += pub_161718
aff_allyrs += aff_161718
print "Sources:", pub_getter_1015.source_links, pub_getter_161718.source_links
pub_aff_161718 = pub_161718 + aff_161718
pub_aff_allyrs = pub_allyrs + aff_allyrs
# pub_getter_2019 = Publications_api(years=["2019"])
# pub_2019 = pub_getter_2019.get_publications()
# new_label_pub = []
# p19_read = []
# p19_uniq = []
# for p19 in pub_2019:
# if p19["doi"] in p19_read:
# continue
# if not check_valid_label(p19["labels"]):
# new_label_pub.append(p19["doi"])
# p19_read.append(p19["doi"])
# p19_uniq.append(p19["doi"])
# import pdb; pdb.set_trace()
if False:
publication_dois = list()
publication_issns = list()
publication_impacts = {"2013": [], "2014": [], "2015": [], "2016": [], "2017": [], "2018": [], "2019": []}
for pub in pub_aff_allyrs:
if pub["doi"] in publication_dois or not check_valid_label(pub["labels"]):
continue
year = pub["published"].split("-")[0]
if pub["journal"]["issn"]:
issn = pub["journal"]["issn"]
publication_issns.append(issn)
impact = issn_to_impact(issn)
if impact is None:
print "NO IMPACT FACTOR FOUND FOR:", issn, pub["journal"]
# At the end, add the impact to the list
publication_impacts[year].append(impact)
jifflag = True
else:
# NO ISSN
publication_impacts[year].append(None)
jifflag = True
print "NO ISSN FOUND FOR:", issn, pub["journal"]
jif_data = {"2013": [0,0,0,0,0], "2014": [0,0,0,0,0], "2015": [0,0,0,0,0], "2016": [0,0,0,0,0], "2017": [0,0,0,0,0], "2018": [0,0,0,0,0], "2019": [0,0,0,0,0]}
for year in publication_impacts.keys():
for impact in publication_impacts[year]:
if impact is not None:
real_impact = float(impact)/1000
#print real_impact
if real_impact>25.0:
jif_data[year][3] += 1
continue
if real_impact>9.0:
jif_data[year][2] += 1
continue
if real_impact>6.0:
jif_data[year][1] += 1
continue
jif_data[year][0] += 1
else:
jif_data[year][4] += 1
jif_unknown = go.Bar(
x=allyrs,
y=[jif_data["2013"][4], jif_data["2014"][4], jif_data["2015"][4], jif_data["2016"][4], jif_data["2017"][4], jif_data["2018"][4], jif_data["2019"][4]],
name="JIF unknown",
textfont=dict(
family='Arial',
size=28,
color='#000000'
),
marker=dict(
color=SCILIFE_COLOURS[5],
line=dict(
color='#000000',
width=1.5)
)
)
jif_low = go.Bar(
x=allyrs,
y=[jif_data["2013"][0], jif_data["2014"][0], jif_data["2015"][0], jif_data["2016"][0], jif_data["2017"][0], jif_data["2018"][0], jif_data["2019"][0]],
name="JIF < 6",
textfont=dict(
family='Arial',
size=28,
color='#000000'
),
marker=dict(
color=SCILIFE_COLOURS[0],
line=dict(
color='#000000',
width=1.5)
)
)
jif_mediocre = go.Bar(
x=allyrs,
y=[jif_data["2013"][1], jif_data["2014"][1], jif_data["2015"][1], jif_data["2016"][1], jif_data["2017"][1], jif_data["2018"][1], jif_data["2019"][1]],
name="JIF = 6 - 9",
textfont=dict(
family='Arial',
size=28,
color='#000000'
),
marker=dict(
color=SCILIFE_COLOURS[7],
line=dict(
color='#000000',
width=1.5)
)
)
jif_good = go.Bar(
x=allyrs,
y=[jif_data["2013"][2], jif_data["2014"][2], jif_data["2015"][2], jif_data["2016"][2], jif_data["2017"][2], jif_data["2018"][2], jif_data["2019"][2]],
name="JIF = 9 - 25",
textfont=dict(
family='Arial',
size=28,
color='#000000'
),
marker=dict(
color=SCILIFE_COLOURS[9],
line=dict(
color='#000000',
width=1.5)
)
)
jif_high = go.Bar(
x=allyrs,
y=[jif_data["2013"][3], jif_data["2014"][3], jif_data["2015"][3], jif_data["2016"][3], jif_data["2017"][3], jif_data["2018"][3], jif_data["2019"][3]],
name="JIF > 25",
textfont=dict(
family='Arial',
size=28,
color='#000000'
),
marker=dict(
color=SCILIFE_COLOURS[1],
line=dict(
color='#000000',
width=1.5)
)
)
layout = go.Layout(
barmode="stack",
plot_bgcolor='rgba(0,0,0,0)',
width=1200,
height=600,
margin=go.layout.Margin(
l=100,
r=100,
b=100,
t=30,
pad=4
),
xaxis=dict(
showticklabels=True,
dtick=1,
zeroline=True,
tickfont=dict(
family='Arial',
size=28,
color='#000000'
)
),
yaxis=dict(
showticklabels=True,
gridcolor="#E2E5E0",
tickfont=dict(
family='Arial',
size=28,
color='#000000'
)
),
legend=dict(
traceorder='normal',
font=dict(
family='Arial',
size=20,
color='#000'
)
)
)
data = [jif_unknown, jif_low, jif_mediocre, jif_good, jif_high]
fig = go.Figure(data=data, layout=layout)
# plotly.io.write_image(fig, 'facility_onepagers_figures/{}_jif.png'.format(label["value"].lower().replace(" ", "_")))
# plotly.io.write_image(fig, 'facility_onepagers_figures/{}_jif.pdf'.format(label["value"].lower().replace(" ", "_")))
plotly.io.write_image(fig, 'jif_fac_and_aff_allyrs.png', scale=5)
publication_dois = list()
publication_issns = list()
publication_impacts = {"2013": [], "2014": [], "2015": [], "2016": [], "2017": [], "2018": [], "2019": []}
for pub in pub_allyrs:
if pub["doi"] in publication_dois or not check_valid_label(pub["labels"]):
continue
publication_dois.append(pub["doi"])
year = pub["published"].split("-")[0]
if pub["journal"]["issn"]:
issn = pub["journal"]["issn"]
publication_issns.append(issn)
impact = issn_to_impact(issn)
if impact is None:
print "NO IMPACT FACTOR FOUND FOR:", issn, pub["journal"]
# At the end, add the impact to the list
publication_impacts[year].append(impact)
jifflag = True
else:
# NO ISSN
publication_impacts[year].append(None)
jifflag = True
print "NO ISSN FOUND FOR:", issn, pub["journal"]
jif_data = {"2013": [0,0,0,0,0], "2014": [0,0,0,0,0], "2015": [0,0,0,0,0], "2016": [0,0,0,0,0], "2017": [0,0,0,0,0], "2018": [0,0,0,0,0], "2019": [0,0,0,0,0]}
for year in publication_impacts.keys():
for impact in publication_impacts[year]:
if impact is not None:
real_impact = float(impact)/1000
#print real_impact
if real_impact>25.0:
jif_data[year][3] += 1
continue
if real_impact>9.0:
jif_data[year][2] += 1
continue
if real_impact>6.0:
jif_data[year][1] += 1
continue
jif_data[year][0] += 1
else:
jif_data[year][4] += 1
jif_unknown = go.Bar(
x=allyrs,
y=[jif_data["2013"][4], jif_data["2014"][4], jif_data["2015"][4], jif_data["2016"][4], jif_data["2017"][4], jif_data["2018"][4], jif_data["2019"][4]],
name="JIF unknown",
textfont=dict(
family='Arial',
size=28,
color='#000000'
),
marker=dict(
color=SCILIFE_COLOURS[5],
line=dict(
color='#000000',
width=1.5)
)
)
jif_low = go.Bar(
x=allyrs,
y=[jif_data["2013"][0], jif_data["2014"][0], jif_data["2015"][0], jif_data["2016"][0], jif_data["2017"][0], jif_data["2018"][0], jif_data["2019"][0]],
name="JIF < 6",
textfont=dict(
family='Arial',
size=28,
color='#000000'
),
marker=dict(
color=SCILIFE_COLOURS[0],
line=dict(
color='#000000',
width=1.5)
)
)
jif_mediocre = go.Bar(
x=allyrs,
y=[jif_data["2013"][1], jif_data["2014"][1], jif_data["2015"][1], jif_data["2016"][1], jif_data["2017"][1], jif_data["2018"][1], jif_data["2019"][1]],
name="JIF = 6 - 9",
textfont=dict(
family='Arial',
size=28,
color='#000000'
),
marker=dict(
color=SCILIFE_COLOURS[7],
line=dict(
color='#000000',
width=1.5)
)
)
jif_good = go.Bar(
x=allyrs,
y=[jif_data["2013"][2], jif_data["2014"][2], jif_data["2015"][2], jif_data["2016"][2], jif_data["2017"][2], jif_data["2018"][2], jif_data["2019"][2]],
name="JIF = 9 - 25",
textfont=dict(
family='Arial',
size=28,
color='#000000'
),
marker=dict(
color=SCILIFE_COLOURS[9],
line=dict(
color='#000000',
width=1.5)
)
)
jif_high = go.Bar(
x=allyrs,
y=[jif_data["2013"][3], jif_data["2014"][3], jif_data["2015"][3], jif_data["2016"][3], jif_data["2017"][3], jif_data["2018"][3], jif_data["2019"][3]],
name="JIF > 25",
textfont=dict(
family='Arial',
size=28,
color='#000000'
),
marker=dict(
color=SCILIFE_COLOURS[1],
line=dict(
color='#000000',
width=1.5)
)
)
layout = go.Layout(
barmode="stack",
plot_bgcolor='rgba(0,0,0,0)',
width=1200,
height=600,
margin=go.layout.Margin(
l=100,
r=100,
b=100,
t=30,
pad=4
),
xaxis=dict(
showticklabels=True,
dtick=1,
zeroline=True,
tickfont=dict(
family='Arial',
size=28,
color='#000000'
)
),
yaxis=dict(
showticklabels=True,
gridcolor="#E2E5E0",
tickfont=dict(
family='Arial',
size=28,
color='#000000'
)
),
legend=dict(
traceorder='normal',
font=dict(
family='Arial',
size=20,
color='#000'
)
)
)
data = [jif_unknown, jif_low, jif_mediocre, jif_good, jif_high]
fig = go.Figure(data=data, layout=layout)
# plotly.io.write_image(fig, 'facility_onepagers_figures/{}_jif.png'.format(label["value"].lower().replace(" ", "_")))
# plotly.io.write_image(fig, 'facility_onepagers_figures/{}_jif.pdf'.format(label["value"].lower().replace(" ", "_")))
plotly.io.write_image(fig, 'Figure 12. Infrastructure publications with JIF distribution.png', scale=5)
if False:
publication_dois = list()
publication_issns = list()
publication_impacts = {"2013": [], "2014": [], "2015": [], "2016": [], "2017": [], "2018": [], "2019": []}
for pub in aff_allyrs:
if pub["doi"] in publication_dois or not check_valid_label(pub["labels"]):
continue
publication_dois.append(pub["doi"])
year = pub["published"].split("-")[0]
if pub["journal"]["issn"]:
issn = pub["journal"]["issn"]
publication_issns.append(issn)
impact = issn_to_impact(issn)
if impact is None:
print "NO IMPACT FACTOR FOUND FOR:", issn, pub["journal"]
# At the end, add the impact to the list
publication_impacts[year].append(impact)
jifflag = True
else:
# NO ISSN
publication_impacts[year].append(None)
jifflag = True
print "NO ISSN FOUND FOR:", issn, pub["journal"]
jif_data = {"2013": [0,0,0,0,0], "2014": [0,0,0,0,0], "2015": [0,0,0,0,0], "2016": [0,0,0,0,0], "2017": [0,0,0,0,0], "2018": [0,0,0,0,0], "2019": [0,0,0,0,0]}
for year in publication_impacts.keys():
for impact in publication_impacts[year]:
if impact is not None:
real_impact = float(impact)/1000
#print real_impact
if real_impact>25.0:
jif_data[year][3] += 1
continue
if real_impact>9.0:
jif_data[year][2] += 1
continue
if real_impact>6.0:
jif_data[year][1] += 1
continue
jif_data[year][0] += 1
else:
jif_data[year][4] += 1
jif_unknown = go.Bar(
x=allyrs,
y=[jif_data["2013"][4], jif_data["2014"][4], jif_data["2015"][4], jif_data["2016"][4], jif_data["2017"][4], jif_data["2018"][4], jif_data["2019"][4]],
name="JIF unknown",
textfont=dict(
family='Arial',
size=28,
color='#000000'
),
marker=dict(
color=SCILIFE_COLOURS[5],
line=dict(
color='#000000',
width=1.5)
)
)
jif_low = go.Bar(
x=allyrs,
y=[jif_data["2013"][0], jif_data["2014"][0], jif_data["2015"][0], jif_data["2016"][0], jif_data["2017"][0], jif_data["2018"][0], jif_data["2019"][0]],
name="JIF < 6",
textfont=dict(
family='Arial',
size=28,
color='#000000'
),
marker=dict(
color=SCILIFE_COLOURS[0],
line=dict(
color='#000000',
width=1.5)
)
)
jif_mediocre = go.Bar(
x=allyrs,
y=[jif_data["2013"][1], jif_data["2014"][1], jif_data["2015"][1], jif_data["2016"][1], jif_data["2017"][1], jif_data["2018"][1], jif_data["2019"][1]],
name="JIF = 6 - 9",
textfont=dict(
family='Arial',
size=28,
color='#000000'
),
marker=dict(
color=SCILIFE_COLOURS[7],
line=dict(
color='#000000',
width=1.5)
)
)
jif_good = go.Bar(
x=allyrs,
y=[jif_data["2013"][2], jif_data["2014"][2], jif_data["2015"][2], jif_data["2016"][2], jif_data["2017"][2], jif_data["2018"][2], jif_data["2019"][2]],
name="JIF = 9 - 25",
textfont=dict(
family='Arial',
size=28,
color='#000000'
),
marker=dict(
color=SCILIFE_COLOURS[9],
line=dict(
color='#000000',
width=1.5)
)
)
jif_high = go.Bar(
x=allyrs,
y=[jif_data["2013"][3], jif_data["2014"][3], jif_data["2015"][3], jif_data["2016"][3], jif_data["2017"][3], jif_data["2018"][3], jif_data["2019"][3]],
name="JIF > 25",
textfont=dict(
family='Arial',
size=28,
color='#000000'
),
marker=dict(
color=SCILIFE_COLOURS[1],
line=dict(
color='#000000',
width=1.5)
)
)
layout = go.Layout(
barmode="stack",
plot_bgcolor='rgba(0,0,0,0)',
width=1200,
height=600,
margin=go.layout.Margin(
l=100,
r=100,
b=100,
t=30,
pad=4
),
xaxis=dict(
showticklabels=True,
dtick=1,
zeroline=True,
tickfont=dict(
family='Arial',
size=28,
color='#000000'
)
),
yaxis=dict(
showticklabels=True,
gridcolor="#E2E5E0",
tickfont=dict(
family='Arial',
size=28,
color='#000000'
)
),
legend=dict(
traceorder='normal',
font=dict(
family='Arial',
size=20,
color='#000'
)
)
)
data = [jif_unknown, jif_low, jif_mediocre, jif_good, jif_high]
fig = go.Figure(data=data, layout=layout)
# plotly.io.write_image(fig, 'facility_onepagers_figures/{}_jif.png'.format(label["value"].lower().replace(" ", "_")))
# plotly.io.write_image(fig, 'facility_onepagers_figures/{}_jif.pdf'.format(label["value"].lower().replace(" ", "_")))
plotly.io.write_image(fig, 'jif_aff_allyrs.png', scale=5)
| 31.753378
| 219
| 0.513938
| 2,310
| 18,798
| 4.022944
| 0.099567
| 0.09265
| 0.022275
| 0.019369
| 0.843646
| 0.837189
| 0.822124
| 0.81782
| 0.81782
| 0.809857
| 0
| 0.124834
| 0.319449
| 18,798
| 591
| 220
| 31.807107
| 0.601579
| 0.097883
| 0
| 0.846457
| 0
| 0
| 0.121151
| 0.001537
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.009843
| null | null | 0.017717
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.