hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | 
qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | 
qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4955475f7cf934ee5f41474ce96923b06224f38c | 157 | py | Python | tasks/__init__.py | yangapku/OFA | 6bf21b0f2483d53b2750db1ea3fd103ec7d331d1 | [
"Apache-2.0"
] | 367 | 2022-02-07T10:46:36.000Z | 2022-03-31T14:20:57.000Z | tasks/__init__.py | yangapku/OFA | 6bf21b0f2483d53b2750db1ea3fd103ec7d331d1 | [
"Apache-2.0"
] | 29 | 2022-02-16T03:43:33.000Z | 2022-03-31T03:23:35.000Z | tasks/__init__.py | yangapku/OFA | 6bf21b0f2483d53b2750db1ea3fd103ec7d331d1 | [
"Apache-2.0"
] | 44 | 2022-02-11T05:14:59.000Z | 2022-03-30T19:54:33.000Z | from .cv_tasks import *
from .mm_tasks import *
from .nlg_tasks import *
from .nlu_tasks import *
from .pretrain_tasks import *
from .ofa_task import OFATask | 26.166667 | 29 | 0.783439 | 25 | 157 | 4.68 | 0.44 | 0.470085 | 0.641026 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.146497 | 157 | 6 | 30 | 26.166667 | 0.873134 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
49848195993920c662c8b6d53fbd63436abf4d92 | 80 | py | Python | opac/queries/holding/__init__.py | rimphyd/Django-OPAC | d86f2e28fee7f2ec551aeeb98ec67caefc06a3fb | [
"MIT"
] | 1 | 2020-11-26T05:25:46.000Z | 2020-11-26T05:25:46.000Z | opac/queries/holding/__init__.py | rimphyd/Django-OPAC | d86f2e28fee7f2ec551aeeb98ec67caefc06a3fb | [
"MIT"
] | null | null | null | opac/queries/holding/__init__.py | rimphyd/Django-OPAC | d86f2e28fee7f2ec551aeeb98ec67caefc06a3fb | [
"MIT"
] | null | null | null | from .lend import * # noqa: F401 F403
from .cancel import * # noqa: F401 F403
| 26.666667 | 40 | 0.675 | 12 | 80 | 4.5 | 0.583333 | 0.37037 | 0.518519 | 0.666667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.193548 | 0.225 | 80 | 2 | 41 | 40 | 0.677419 | 0.3875 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 7 |
772d047e44dad1ccf75c59807a45dfcede8325de | 117 | py | Python | torchero/utils/__init__.py | juancruzsosa/torchero | d1440b7a9c3ab2c1d3abbb282abb9ee1ea240797 | [
"MIT"
] | 10 | 2020-07-06T13:35:26.000Z | 2021-08-10T09:46:53.000Z | torchero/utils/__init__.py | juancruzsosa/torchero | d1440b7a9c3ab2c1d3abbb282abb9ee1ea240797 | [
"MIT"
] | 6 | 2020-07-07T20:52:16.000Z | 2020-07-14T04:05:02.000Z | torchero/utils/__init__.py | juancruzsosa/torchero | d1440b7a9c3ab2c1d3abbb282abb9ee1ea240797 | [
"MIT"
] | 1 | 2021-06-28T17:56:11.000Z | 2021-06-28T17:56:11.000Z | from torchero.utils.io import download_from_url
from torchero.utils.vision import show_image, show_imagegrid_dataset
| 39 | 68 | 0.880342 | 18 | 117 | 5.444444 | 0.666667 | 0.244898 | 0.346939 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.076923 | 117 | 2 | 69 | 58.5 | 0.907407 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
774dd7242af0159f86cb3c076bdc2bd7dc214265 | 5,197 | py | Python | userbot/plugins/animazioni.py | sethgld/userbotseth | 2994aa84be57aed8ab036ed6f59342d9ca2f3cb2 | [
"MIT"
] | null | null | null | userbot/plugins/animazioni.py | sethgld/userbotseth | 2994aa84be57aed8ab036ed6f59342d9ca2f3cb2 | [
"MIT"
] | null | null | null | userbot/plugins/animazioni.py | sethgld/userbotseth | 2994aa84be57aed8ab036ed6f59342d9ca2f3cb2 | [
"MIT"
] | null | null | null | """
Available Commands:
.bombs
.fuck
.kiss
.love
.pornhub
.sex
.sexy
"""
from telethon import events
from userbot.utils import admin_cmd
import asyncio
@borg.on(admin_cmd(pattern=f"bombs", outgoing=True))
async def _(event):
if event.fwd_from:
return
await event.edit("▪️▪️▪️▪️ \n▪️▪️▪️▪️ \n▪️▪️▪️▪️ \n▪️▪️▪️▪️ \n▪️▪️▪️▪️ \n")
await asyncio.sleep(0.5)
await event.edit("ð゚メᆪð゚メᆪð゚メᆪð゚メᆪ \n▪️▪️▪️▪️ \n▪️▪️▪️▪️ \n▪️▪️▪️▪️ \n▪️▪️▪️▪️ \n")
await asyncio.sleep(0.5)
await event.edit("▪️▪️▪️▪️ \nð゚メᆪð゚メᆪð゚メᆪð゚メᆪ \n▪️▪️▪️▪️ \n▪️▪️▪️▪️ \n▪️▪️▪️▪️ \n")
await asyncio.sleep(0.5)
await event.edit("▪️▪️▪️▪️ \n▪️▪️▪️▪️ \nð゚メᆪð゚メᆪð゚メᆪð゚メᆪ \n▪️▪️▪️▪️ \n▪️▪️▪️▪️ \n")
await asyncio.sleep(0.5)
await event.edit("▪️▪️▪️▪️ \n▪️▪️▪️▪️ \n▪️▪️▪️▪️ \nð゚メᆪð゚メᆪð゚メᆪð゚メᆪ \n▪️▪️▪️▪️ \n")
await asyncio.sleep(0.5)
await event.edit("▪️▪️▪️▪️ \n▪️▪️▪️▪️ \n▪️▪️▪️▪️ \n▪️▪️▪️▪️ \nð゚メᆪð゚メᆪð゚メᆪð゚メᆪ \n")
await asyncio.sleep(1)
await event.edit("▪️▪️▪️▪️ \n▪️▪️▪️▪️ \n▪️▪️▪️▪️ \n▪️▪️▪️▪️ \nð゚メᆬð゚メᆬð゚メᆬð゚メᆬ \n")
await asyncio.sleep(0.5)
await event.edit("▪️▪️▪️▪️ \n▪️▪️▪️▪️ \n▪️▪️▪️▪️ \nð゚メᆬð゚メᆬð゚メᆬð゚メᆬ \nð゚メᆬð゚メᆬð゚メᆬð゚メᆬ \n")
await asyncio.sleep(0.5)
await event.edit("▪️▪️▪️▪️ \n▪️▪️▪️▪️ \n▪️▪️▪️▪️ \n▪️▪️▪️▪️ \nð゚リᄉð゚リᄉð゚リᄉð゚リᄉ \n")
await asyncio.sleep(0.5)
await event.edit("`RIP ð゚リᄉð゚リᄉð゚リᄉ...`")
await asyncio.sleep(2)
@borg.on(admin_cmd(pattern=f"fuck", outgoing=True))
async def _(event):
if event.fwd_from:
return
animation_interval = 0.2
animation_ttl = range(0, 101)
#input_str = event.pattern_match.group(1)
#if input_str == "fuck":
await event.edit("fuck")
animation_chars = [
"ð゚ムノ ✊️",
"ð゚ムノ ✊️",
"ð゚ムノ ✊️",
"ð゚ムノ✊️ð゚メᆭ"
]
for i in animation_ttl:
await asyncio.sleep(animation_interval)
await event.edit(animation_chars[i % 4])
@borg.on(admin_cmd(pattern="love", outgoing=True))
async def _(event):
if event.fwd_from:
return
animation_interval = 1.0
animation_ttl = range(0, 101)
#input_str = event.pattern_match.group(1)
#if input_str == "love":
await event.edit("love")
animation_chars = [
"L_",
"LO_",
"LOV_",
"LOVE_",
"LOVE❤",
]
for i in animation_ttl:
await asyncio.sleep(animation_interval)
await event.edit(animation_chars[i % 10])
@borg.on(admin_cmd(pattern=f"kiss", outgoing=True))
async def _(event):
if event.fwd_from:
return
animation_interval = 0.2
animation_ttl = range(0, 101)
#input_str = event.pattern_match.group(1)
#if input_str == "kiss":
await event.edit("kiss")
animation_chars = [
"ð゚ᄂᄉ ð゚ムᄚ",
"ð゚ᄂᄉ ð゚ムᄚ",
"ð゚ᄂᄉ ð゚ムᄚ",
"ð゚ᄂᄉð゚メヒð゚ムᄚ"
]
for i in animation_ttl:
await asyncio.sleep(animation_interval)
await event.edit(animation_chars[i % 4])
@borg.on(admin_cmd(pattern="pornhub", outgoing=True))
async def _(event):
if event.fwd_from:
return
animation_interval = 1.0
animation_ttl = range(0, 101)
#input_str = event.pattern_match.group(1)
#if input_str == "pornhub":
await event.edit("pornhub")
animation_chars = [
"P_",
"PO_",
"POR_",
"PORN_",
"PORNH_",
"PORNHU_",
"PORNHUB_",
"PORNHUB",
]
for i in animation_ttl:
await asyncio.sleep(animation_interval)
await event.edit(animation_chars[i % 10])
@borg.on(admin_cmd(pattern=f"sex", outgoing=True))
async def _(event):
if event.fwd_from:
return
animation_interval = 0.2
animation_ttl = range(0, 101)
#input_str = event.pattern_match.group(1)
#if input_str == "sex":
await event.edit("sex")
animation_chars = [
"ð゚ᄂᄉ ð゚ムᄚ",
"ð゚ᄂᄉ ð゚ムᄚ",
"ð゚ᄂᄉ ð゚ムᄚ",
"ð゚ᄂᄉð゚ムᄐð゚ムᄚ"
]
for i in animation_ttl:
await asyncio.sleep(animation_interval)
await event.edit(animation_chars[i % 4])
@borg.on(admin_cmd(pattern="sexy", outgoing=True))
async def _(event):
if event.fwd_from:
return
animation_interval = 1.0
animation_ttl = range(0, 101)
#input_str = event.pattern_match.group(1)
#if input_str == "sexy":
await event.edit("sexy")
animation_chars = [
"S_",
"SE_",
"SEX_",
"SEXY_",
"SEXYð゚ムト_",
"SEXYð゚ムト",
]
for i in animation_ttl:
await asyncio.sleep(animation_interval)
await event.edit(animation_chars[i % 10])
| 23.83945 | 96 | 0.501058 | 729 | 5,197 | 3.854595 | 0.119342 | 0.092883 | 0.145196 | 0.115302 | 0.834875 | 0.833808 | 0.81032 | 0.81032 | 0.81032 | 0.798221 | 0 | 0.019868 | 0.33173 | 5,197 | 217 | 97 | 23.949309 | 0.703426 | 0.073119 | 0 | 0.569231 | 0 | 0 | 0.194684 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.023077 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
91fc49211c2a39b8f9d248eb4f3acc664bbb0f4b | 6,683 | py | Python | loldib/getratings/models/NA/na_nidalee/na_nidalee_jng.py | koliupy/loldib | c9ab94deb07213cdc42b5a7c26467cdafaf81b7f | [
"Apache-2.0"
] | null | null | null | loldib/getratings/models/NA/na_nidalee/na_nidalee_jng.py | koliupy/loldib | c9ab94deb07213cdc42b5a7c26467cdafaf81b7f | [
"Apache-2.0"
] | null | null | null | loldib/getratings/models/NA/na_nidalee/na_nidalee_jng.py | koliupy/loldib | c9ab94deb07213cdc42b5a7c26467cdafaf81b7f | [
"Apache-2.0"
] | null | null | null | from getratings.models.ratings import Ratings
class NA_Nidalee_Jng_Aatrox(Ratings):
pass
class NA_Nidalee_Jng_Ahri(Ratings):
pass
class NA_Nidalee_Jng_Akali(Ratings):
pass
class NA_Nidalee_Jng_Alistar(Ratings):
pass
class NA_Nidalee_Jng_Amumu(Ratings):
pass
class NA_Nidalee_Jng_Anivia(Ratings):
pass
class NA_Nidalee_Jng_Annie(Ratings):
pass
class NA_Nidalee_Jng_Ashe(Ratings):
pass
class NA_Nidalee_Jng_AurelionSol(Ratings):
pass
class NA_Nidalee_Jng_Azir(Ratings):
pass
class NA_Nidalee_Jng_Bard(Ratings):
pass
class NA_Nidalee_Jng_Blitzcrank(Ratings):
pass
class NA_Nidalee_Jng_Brand(Ratings):
pass
class NA_Nidalee_Jng_Braum(Ratings):
pass
class NA_Nidalee_Jng_Caitlyn(Ratings):
pass
class NA_Nidalee_Jng_Camille(Ratings):
pass
class NA_Nidalee_Jng_Cassiopeia(Ratings):
pass
class NA_Nidalee_Jng_Chogath(Ratings):
pass
class NA_Nidalee_Jng_Corki(Ratings):
pass
class NA_Nidalee_Jng_Darius(Ratings):
pass
class NA_Nidalee_Jng_Diana(Ratings):
pass
class NA_Nidalee_Jng_Draven(Ratings):
pass
class NA_Nidalee_Jng_DrMundo(Ratings):
pass
class NA_Nidalee_Jng_Ekko(Ratings):
pass
class NA_Nidalee_Jng_Elise(Ratings):
pass
class NA_Nidalee_Jng_Evelynn(Ratings):
pass
class NA_Nidalee_Jng_Ezreal(Ratings):
pass
class NA_Nidalee_Jng_Fiddlesticks(Ratings):
pass
class NA_Nidalee_Jng_Fiora(Ratings):
pass
class NA_Nidalee_Jng_Fizz(Ratings):
pass
class NA_Nidalee_Jng_Galio(Ratings):
pass
class NA_Nidalee_Jng_Gangplank(Ratings):
pass
class NA_Nidalee_Jng_Garen(Ratings):
pass
class NA_Nidalee_Jng_Gnar(Ratings):
pass
class NA_Nidalee_Jng_Gragas(Ratings):
pass
class NA_Nidalee_Jng_Graves(Ratings):
pass
class NA_Nidalee_Jng_Hecarim(Ratings):
pass
class NA_Nidalee_Jng_Heimerdinger(Ratings):
pass
class NA_Nidalee_Jng_Illaoi(Ratings):
pass
class NA_Nidalee_Jng_Irelia(Ratings):
pass
class NA_Nidalee_Jng_Ivern(Ratings):
pass
class NA_Nidalee_Jng_Janna(Ratings):
pass
class NA_Nidalee_Jng_JarvanIV(Ratings):
pass
class NA_Nidalee_Jng_Jax(Ratings):
pass
class NA_Nidalee_Jng_Jayce(Ratings):
pass
class NA_Nidalee_Jng_Jhin(Ratings):
pass
class NA_Nidalee_Jng_Jinx(Ratings):
pass
class NA_Nidalee_Jng_Kalista(Ratings):
pass
class NA_Nidalee_Jng_Karma(Ratings):
pass
class NA_Nidalee_Jng_Karthus(Ratings):
pass
class NA_Nidalee_Jng_Kassadin(Ratings):
pass
class NA_Nidalee_Jng_Katarina(Ratings):
pass
class NA_Nidalee_Jng_Kayle(Ratings):
pass
class NA_Nidalee_Jng_Kayn(Ratings):
pass
class NA_Nidalee_Jng_Kennen(Ratings):
pass
class NA_Nidalee_Jng_Khazix(Ratings):
pass
class NA_Nidalee_Jng_Kindred(Ratings):
pass
class NA_Nidalee_Jng_Kled(Ratings):
pass
class NA_Nidalee_Jng_KogMaw(Ratings):
pass
class NA_Nidalee_Jng_Leblanc(Ratings):
pass
class NA_Nidalee_Jng_LeeSin(Ratings):
pass
class NA_Nidalee_Jng_Leona(Ratings):
pass
class NA_Nidalee_Jng_Lissandra(Ratings):
pass
class NA_Nidalee_Jng_Lucian(Ratings):
pass
class NA_Nidalee_Jng_Lulu(Ratings):
pass
class NA_Nidalee_Jng_Lux(Ratings):
pass
class NA_Nidalee_Jng_Malphite(Ratings):
pass
class NA_Nidalee_Jng_Malzahar(Ratings):
pass
class NA_Nidalee_Jng_Maokai(Ratings):
pass
class NA_Nidalee_Jng_MasterYi(Ratings):
pass
class NA_Nidalee_Jng_MissFortune(Ratings):
pass
class NA_Nidalee_Jng_MonkeyKing(Ratings):
pass
class NA_Nidalee_Jng_Mordekaiser(Ratings):
pass
class NA_Nidalee_Jng_Morgana(Ratings):
pass
class NA_Nidalee_Jng_Nami(Ratings):
pass
class NA_Nidalee_Jng_Nasus(Ratings):
pass
class NA_Nidalee_Jng_Nautilus(Ratings):
pass
class NA_Nidalee_Jng_Nidalee(Ratings):
pass
class NA_Nidalee_Jng_Nocturne(Ratings):
pass
class NA_Nidalee_Jng_Nunu(Ratings):
pass
class NA_Nidalee_Jng_Olaf(Ratings):
pass
class NA_Nidalee_Jng_Orianna(Ratings):
pass
class NA_Nidalee_Jng_Ornn(Ratings):
pass
class NA_Nidalee_Jng_Pantheon(Ratings):
pass
class NA_Nidalee_Jng_Poppy(Ratings):
pass
class NA_Nidalee_Jng_Quinn(Ratings):
pass
class NA_Nidalee_Jng_Rakan(Ratings):
pass
class NA_Nidalee_Jng_Rammus(Ratings):
pass
class NA_Nidalee_Jng_RekSai(Ratings):
pass
class NA_Nidalee_Jng_Renekton(Ratings):
pass
class NA_Nidalee_Jng_Rengar(Ratings):
pass
class NA_Nidalee_Jng_Riven(Ratings):
pass
class NA_Nidalee_Jng_Rumble(Ratings):
pass
class NA_Nidalee_Jng_Ryze(Ratings):
pass
class NA_Nidalee_Jng_Sejuani(Ratings):
pass
class NA_Nidalee_Jng_Shaco(Ratings):
pass
class NA_Nidalee_Jng_Shen(Ratings):
pass
class NA_Nidalee_Jng_Shyvana(Ratings):
pass
class NA_Nidalee_Jng_Singed(Ratings):
pass
class NA_Nidalee_Jng_Sion(Ratings):
pass
class NA_Nidalee_Jng_Sivir(Ratings):
pass
class NA_Nidalee_Jng_Skarner(Ratings):
pass
class NA_Nidalee_Jng_Sona(Ratings):
pass
class NA_Nidalee_Jng_Soraka(Ratings):
pass
class NA_Nidalee_Jng_Swain(Ratings):
pass
class NA_Nidalee_Jng_Syndra(Ratings):
pass
class NA_Nidalee_Jng_TahmKench(Ratings):
pass
class NA_Nidalee_Jng_Taliyah(Ratings):
pass
class NA_Nidalee_Jng_Talon(Ratings):
pass
class NA_Nidalee_Jng_Taric(Ratings):
pass
class NA_Nidalee_Jng_Teemo(Ratings):
pass
class NA_Nidalee_Jng_Thresh(Ratings):
pass
class NA_Nidalee_Jng_Tristana(Ratings):
pass
class NA_Nidalee_Jng_Trundle(Ratings):
pass
class NA_Nidalee_Jng_Tryndamere(Ratings):
pass
class NA_Nidalee_Jng_TwistedFate(Ratings):
pass
class NA_Nidalee_Jng_Twitch(Ratings):
pass
class NA_Nidalee_Jng_Udyr(Ratings):
pass
class NA_Nidalee_Jng_Urgot(Ratings):
pass
class NA_Nidalee_Jng_Varus(Ratings):
pass
class NA_Nidalee_Jng_Vayne(Ratings):
pass
class NA_Nidalee_Jng_Veigar(Ratings):
pass
class NA_Nidalee_Jng_Velkoz(Ratings):
pass
class NA_Nidalee_Jng_Vi(Ratings):
pass
class NA_Nidalee_Jng_Viktor(Ratings):
pass
class NA_Nidalee_Jng_Vladimir(Ratings):
pass
class NA_Nidalee_Jng_Volibear(Ratings):
pass
class NA_Nidalee_Jng_Warwick(Ratings):
pass
class NA_Nidalee_Jng_Xayah(Ratings):
pass
class NA_Nidalee_Jng_Xerath(Ratings):
pass
class NA_Nidalee_Jng_XinZhao(Ratings):
pass
class NA_Nidalee_Jng_Yasuo(Ratings):
pass
class NA_Nidalee_Jng_Yorick(Ratings):
pass
class NA_Nidalee_Jng_Zac(Ratings):
pass
class NA_Nidalee_Jng_Zed(Ratings):
pass
class NA_Nidalee_Jng_Ziggs(Ratings):
pass
class NA_Nidalee_Jng_Zilean(Ratings):
pass
class NA_Nidalee_Jng_Zyra(Ratings):
pass
| 16.026379 | 46 | 0.77151 | 972 | 6,683 | 4.878601 | 0.151235 | 0.203712 | 0.407423 | 0.494728 | 0.808941 | 0.808941 | 0 | 0 | 0 | 0 | 0 | 0 | 0.166243 | 6,683 | 416 | 47 | 16.064904 | 0.851041 | 0 | 0 | 0.498195 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.498195 | 0.00361 | 0 | 0.501805 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 8 |
6205dcfc9a830cfb2576dda4b86bf676b06ca0a9 | 920 | py | Python | py_pdf_term/mappers.py | kumachan-mis/py-pdf-term | 282505826ce8c626003e753068d15738d772ce46 | [
"MIT"
] | null | null | null | py_pdf_term/mappers.py | kumachan-mis/py-pdf-term | 282505826ce8c626003e753068d15738d772ce46 | [
"MIT"
] | 1 | 2021-08-02T13:02:12.000Z | 2021-08-02T13:02:12.000Z | py_pdf_term/mappers.py | kumachan-mis/py-pdf-term | 282505826ce8c626003e753068d15738d772ce46 | [
"MIT"
] | null | null | null | from .endtoend._endtoend.mappers import (
AugmenterMapper,
BinaryOpenerMapper,
CandidateLayerCacheMapper,
CandidateTermFilterMapper,
CandidateTokenFilterMapper,
LanguageTokenizerMapper,
MethodLayerDataCacheMapper,
MethodLayerRankingCacheMapper,
MultiDomainRankingMethodMapper,
SingleDomainRankingMethodMapper,
SplitterMapper,
StylingLayerCacheMapper,
StylingScoreMapper,
XMLLayerCacheMapper,
)
# isort: unique-list
__all__ = [
"AugmenterMapper",
"BinaryOpenerMapper",
"CandidateLayerCacheMapper",
"CandidateTermFilterMapper",
"CandidateTokenFilterMapper",
"LanguageTokenizerMapper",
"MethodLayerDataCacheMapper",
"MethodLayerRankingCacheMapper",
"MultiDomainRankingMethodMapper",
"SingleDomainRankingMethodMapper",
"SplitterMapper",
"StylingLayerCacheMapper",
"StylingScoreMapper",
"XMLLayerCacheMapper",
]
| 26.285714 | 41 | 0.76087 | 37 | 920 | 18.783784 | 0.594595 | 0.094964 | 0.166906 | 0.238849 | 0.926619 | 0.926619 | 0.926619 | 0.926619 | 0.926619 | 0.926619 | 0 | 0 | 0.166304 | 920 | 34 | 42 | 27.058824 | 0.906128 | 0.019565 | 0 | 0 | 0 | 0 | 0.357778 | 0.264444 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.03125 | 0 | 0.03125 | 0 | 0 | 0 | 1 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
62402d43da34f87095e3a510b3097cc34846fe68 | 20,131 | py | Python | tests/query/v1/test_find_path.py | critical27/nebula-graph | 04d00e779e860ed3ddb226c416c335a22acc1147 | [
"Apache-2.0"
] | null | null | null | tests/query/v1/test_find_path.py | critical27/nebula-graph | 04d00e779e860ed3ddb226c416c335a22acc1147 | [
"Apache-2.0"
] | null | null | null | tests/query/v1/test_find_path.py | critical27/nebula-graph | 04d00e779e860ed3ddb226c416c335a22acc1147 | [
"Apache-2.0"
] | null | null | null | # --coding:utf-8--
#
# Copyright (c) 2020 vesoft inc. All rights reserved.
#
# This source code is licensed under Apache 2.0 License,
# attached with Common Clause Condition 1.0, found in the LICENSES directory.
from tests.common.nebula_test_suite import NebulaTestSuite
class TestFindPath(NebulaTestSuite):
@classmethod
def prepare(self):
self.use_nba()
def test_single_pair_constant_input(self):
stmt = 'FIND SHORTEST PATH FROM "Tim Duncan" TO "Tony Parker" OVER like'
resp = self.execute_query(stmt)
self.check_resp_succeeded(resp)
expected_data = {
"column_names": ["_path"],
"rows": [
[b"Tim Duncan", (b"like", 0, b"Tony Parker")]
]
}
self.check_column_names(resp, expected_data["column_names"])
self.check_path_result_without_prop(resp.data.rows, expected_data["rows"])
stmt = 'FIND SHORTEST PATH FROM "Tim Duncan" TO "LaMarcus Aldridge" OVER like'
resp = self.execute_query(stmt)
self.check_resp_succeeded(resp)
expected_data = {
"column_names": ["_path"],
"rows": [
[b"Tim Duncan", (b"like", 0, b"Tony Parker"), (b"like", 0, b"LaMarcus Aldridge")]
]
}
self.check_column_names(resp, expected_data["column_names"])
self.check_path_result_without_prop(resp.data.rows, expected_data["rows"])
stmt = 'FIND SHORTEST PATH FROM "Tiago Splitter" TO "LaMarcus Aldridge" OVER like'
resp = self.execute_query(stmt)
self.check_resp_succeeded(resp)
expected_data = {
"column_names": ["_path"],
"rows": [
[b"Tiago Splitter", (b"like", 0, b"Tim Duncan"), (b"like", 0, b"Tony Parker"), (b"like", 0, b"LaMarcus Aldridge")]
]
}
self.check_column_names(resp, expected_data["column_names"])
self.check_path_result_without_prop(resp.data.rows, expected_data["rows"])
stmt = 'FIND SHORTEST PATH FROM "Tiago Splitter" TO "LaMarcus Aldridge" OVER like, teammate'
resp = self.execute_query(stmt)
self.check_resp_succeeded(resp)
expected_data = {
"column_names": ["_path"],
"rows": [
[b"Tiago Splitter", (b"like", 0, b"Tim Duncan"), (b"teammate", 0, b"LaMarcus Aldridge")]
]
}
self.check_column_names(resp, expected_data["column_names"])
self.check_path_result_without_prop(resp.data.rows, expected_data["rows"])
stmt = 'FIND SHORTEST PATH FROM "Tiago Splitter" TO "LaMarcus Aldridge" OVER *'
resp = self.execute_query(stmt)
self.check_resp_succeeded(resp)
expected_data = {
"column_names": ["_path"],
"rows": [
[b"Tiago Splitter", (b"like", 0, b"Tim Duncan"), (b"teammate", 0, b"LaMarcus Aldridge")]
]
}
self.check_column_names(resp, expected_data["column_names"])
self.check_path_result_without_prop(resp.data.rows, expected_data["rows"])
def test_all_pairs_all_paths_constant_input(self):
stmt = 'FIND ALL PATH FROM "Tim Duncan" TO "Tony Parker" OVER like UPTO 3 STEPS'
resp = self.execute_query(stmt)
self.check_resp_succeeded(resp)
expected_data = {
"column_names": ["_path"],
"rows": [
[b"Tim Duncan", (b"like", 0, b"Tony Parker")],
[b"Tim Duncan", (b"like", 0, b"Tony Parker"), (b"like", 0, b"Tim Duncan"), (b"like", 0, b"Tony Parker")],
[b"Tim Duncan", (b"like", 0, b"Manu Ginobili"), (b"like", 0, b"Tim Duncan"), (b"like", 0, b"Tony Parker")],
[b"Tim Duncan", (b"like", 0, b"Tony Parker"), (b"like", 0, b"LaMarcus Aldridge"), (b"like", 0, b"Tony Parker")]
]
}
self.check_column_names(resp, expected_data["column_names"])
self.check_path_result_without_prop(resp.data.rows, expected_data["rows"])
stmt = 'FIND ALL PATH FROM "Tim Duncan" TO "Tony Parker","Manu Ginobili" OVER like UPTO 3 STEPS'
resp = self.execute_query(stmt)
self.check_resp_succeeded(resp)
expected_data = {
"column_names": ["_path"],
"rows": [
[b"Tim Duncan", (b"like", 0, b"Tony Parker")],
[b"Tim Duncan", (b"like", 0, b"Manu Ginobili")],
[b"Tim Duncan", (b"like", 0, b"Tony Parker"), (b"like", 0, b"Manu Ginobili")],
[b"Tim Duncan", (b"like", 0, b"Tony Parker"), (b"like", 0, b"Tim Duncan"), (b"like", 0, b"Tony Parker")],
[b"Tim Duncan", (b"like", 0, b"Manu Ginobili"), (b"like", 0, b"Tim Duncan"), (b"like", 0, b"Tony Parker")],
[b"Tim Duncan", (b"like", 0, b"Tony Parker"), (b"like", 0, b"LaMarcus Aldridge"), (b"like", 0, b"Tony Parker")],
[b"Tim Duncan", (b"like", 0, b"Tony Parker"), (b"like", 0, b"Tim Duncan"), (b"like", 0, b"Manu Ginobili")],
[b"Tim Duncan", (b"like", 0, b"Manu Ginobili"), (b"like", 0, b"Tim Duncan"), (b"like", 0, b"Manu Ginobili")]
]
}
self.check_column_names(resp, expected_data["column_names"])
self.check_path_result_without_prop(resp.data.rows, expected_data["rows"])
stmt = 'FIND ALL PATH FROM "Tim Duncan" TO "Tony Parker","LaMarcus Aldridge" OVER like UPTO 3 STEPS'
resp = self.execute_query(stmt)
self.check_resp_succeeded(resp)
expected_data = {
"column_names": ["_path"],
"rows": [
[b"Tim Duncan", (b"like", 0, b"Tony Parker")],
[b"Tim Duncan", (b"like", 0, b"Tony Parker"), (b"like", 0, b"LaMarcus Aldridge")],
[b"Tim Duncan", (b"like", 0, b"Tony Parker"), (b"like", 0, b"Tim Duncan"), (b"like", 0, b"Tony Parker")],
[b"Tim Duncan", (b"like", 0, b"Manu Ginobili"), (b"like", 0, b"Tim Duncan"), (b"like", 0, b"Tony Parker")],
[b"Tim Duncan", (b"like", 0, b"Tony Parker"), (b"like", 0, b"LaMarcus Aldridge"), (b"like", 0, b"Tony Parker")]
]
}
self.check_column_names(resp, expected_data["column_names"])
self.check_path_result_without_prop(resp.data.rows, expected_data["rows"])
stmt = 'FIND ALL PATH FROM "Tim Duncan" TO "Tony Parker","Spurs" OVER like,serve UPTO 3 STEPS'
resp = self.execute_query(stmt)
self.check_resp_succeeded(resp)
expected_data = {
"column_names": ["_path"],
"rows": [
[b"Tim Duncan", (b"like", 0, b"Tony Parker")],
[b"Tim Duncan", (b"serve", 0, b"Spurs")],
[b"Tim Duncan", (b"like", 0, b"Manu Ginobili"), (b"serve", 0, b"Spurs")],
[b"Tim Duncan", (b"like", 0, b"Tony Parker"), (b"serve", 0, b"Spurs")],
[b"Tim Duncan", (b"like", 0, b"Tony Parker"), (b"like", 0, b"Tim Duncan"), (b"like", 0, b"Tony Parker")],
[b"Tim Duncan", (b"like", 0, b"Manu Ginobili"), (b"like", 0, b"Tim Duncan"), (b"like", 0, b"Tony Parker")],
[b"Tim Duncan", (b"like", 0, b"Tony Parker"), (b"like", 0, b"Tim Duncan"), (b"serve", 0, b"Spurs")],
[b"Tim Duncan", (b"like", 0, b"Manu Ginobili"), (b"like", 0, b"Tim Duncan"), (b"serve", 0, b"Spurs")],
[b"Tim Duncan", (b"like", 0, b"Tony Parker"), (b"like", 0, b"LaMarcus Aldridge"), (b"like", 0, b"Tony Parker")],
[b"Tim Duncan", (b"like", 0, b"Tony Parker"), (b"like", 0, b"LaMarcus Aldridge"), (b"serve", 0, b"Spurs")],
[b"Tim Duncan", (b"like", 0, b"Tony Parker"), (b"like", 0, b"Manu Ginobili"), (b"serve", 0, b"Spurs")]
]
}
self.check_column_names(resp, expected_data["column_names"])
self.check_path_result_without_prop(resp.data.rows, expected_data["rows"])
def test_multi_source_shortest_path(self):
    """Exercise FIND SHORTEST PATH with multiple sources and/or destinations.

    Each case pairs an nGQL statement with the expected set of paths.
    A path row is ``[src_vid, (edge_name, ranking, dst_vid), ...]``; properties
    are not compared (``check_path_result_without_prop``).
    """
    cases = [
        (
            'FIND SHORTEST PATH FROM "Tim Duncan" TO "Tony Parker","Spurs" OVER like,serve UPTO 3 STEPS',
            [
                [b"Tim Duncan", (b"like", 0, b"Tony Parker")],
                [b"Tim Duncan", (b"serve", 0, b"Spurs")],
            ],
        ),
        (
            'FIND SHORTEST PATH FROM "Tim Duncan" TO "Tony Parker","Spurs" OVER * UPTO 5 STEPS',
            [
                [b"Tim Duncan", (b"like", 0, b"Tony Parker")],
                [b"Tim Duncan", (b"teammate", 0, b"Tony Parker")],
                [b"Tim Duncan", (b"serve", 0, b"Spurs")],
            ],
        ),
        (
            'FIND SHORTEST PATH FROM "Tony Parker", "Yao Ming" TO "Manu Ginobili", "Spurs", "Lakers" OVER * UPTO 5 STEPS',
            [
                [b"Yao Ming", (b"like", 0, b"Shaquile O'Neal"), (b"like", 0, b"Tim Duncan"), (b"like", 0, b"Manu Ginobili")],
                [b"Yao Ming", (b"like", 0, b"Shaquile O'Neal"), (b"like", 0, b"Tim Duncan"), (b"teammate", 0, b"Manu Ginobili")],
                [b"Yao Ming", (b"like", 0, b"Tracy McGrady"), (b"serve", 0, b"Spurs")],
                [b"Yao Ming", (b"like", 0, b"Shaquile O'Neal"), (b"serve", 0, b"Lakers")],
                [b"Tony Parker", (b"like", 0, b"Tim Duncan"), (b"teammate", 0, b"Danny Green"), (b"like", 0, b"LeBron James"), (b"serve", 0, b"Lakers")],
                [b"Tony Parker", (b"teammate", 0, b"Tim Duncan"), (b"teammate", 0, b"Danny Green"), (b"like", 0, b"LeBron James"), (b"serve", 0, b"Lakers")],
                [b"Tony Parker", (b"like", 0, b"Manu Ginobili")],
                [b"Tony Parker", (b"teammate", 0, b"Manu Ginobili")],
                [b"Tony Parker", (b"serve", 0, b"Spurs")],
            ],
        ),
        # Same endpoints as above, but the tighter step limit prunes the
        # 4-step paths through Danny Green / LeBron James.
        (
            'FIND SHORTEST PATH FROM "Tony Parker", "Yao Ming" TO "Manu Ginobili", "Spurs", "Lakers" OVER * UPTO 3 STEPS',
            [
                [b"Yao Ming", (b"like", 0, b"Shaquile O'Neal"), (b"like", 0, b"Tim Duncan"), (b"like", 0, b"Manu Ginobili")],
                [b"Yao Ming", (b"like", 0, b"Shaquile O'Neal"), (b"like", 0, b"Tim Duncan"), (b"teammate", 0, b"Manu Ginobili")],
                [b"Yao Ming", (b"like", 0, b"Tracy McGrady"), (b"serve", 0, b"Spurs")],
                [b"Yao Ming", (b"like", 0, b"Shaquile O'Neal"), (b"serve", 0, b"Lakers")],
                [b"Tony Parker", (b"like", 0, b"Manu Ginobili")],
                [b"Tony Parker", (b"teammate", 0, b"Manu Ginobili")],
                [b"Tony Parker", (b"serve", 0, b"Spurs")],
            ],
        ),
        (
            'FIND SHORTEST PATH FROM "Marco Belinelli", "Yao Ming" TO "Spurs", "Lakers" OVER * UPTO 3 STEPS',
            [
                [b"Yao Ming", (b"like", 0, b"Tracy McGrady"), (b"serve", 0, b"Spurs")],
                [b"Yao Ming", (b"like", 0, b"Shaquile O'Neal"), (b"serve", 0, b"Lakers")],
                [b"Marco Belinelli", (b"like", 0, b"Danny Green"), (b"like", 0, b"LeBron James"), (b"serve", 0, b"Lakers")],
                # Marco Belinelli served Spurs twice: both edge rankings appear.
                [b"Marco Belinelli", (b"serve", 0, b"Spurs")],
                [b"Marco Belinelli", (b"serve", 1, b"Spurs")],
            ],
        ),
        (
            'FIND SHORTEST PATH FROM "Tim Duncan" TO "Tony Parker","LaMarcus Aldridge" OVER like UPTO 3 STEPS',
            [
                [b"Tim Duncan", (b"like", 0, b"Tony Parker")],
                [b"Tim Duncan", (b"like", 0, b"Tony Parker"), (b"like", 0, b"LaMarcus Aldridge")],
            ],
        ),
        (
            'FIND SHORTEST PATH FROM "Tim Duncan", "Tiago Splitter" TO "Tony Parker","Spurs" OVER like,serve UPTO 5 STEPS',
            [
                [b"Tiago Splitter", (b"like", 0, b"Tim Duncan"), (b"like", 0, b"Tony Parker")],
                [b"Tiago Splitter", (b"serve", 0, b"Spurs")],
                [b"Tim Duncan", (b"serve", 0, b"Spurs")],
                [b"Tim Duncan", (b"like", 0, b"Tony Parker")],
            ],
        ),
        (
            'FIND SHORTEST PATH FROM "Yao Ming" TO "Tony Parker","Tracy McGrady" OVER like,serve UPTO 5 STEPS',
            [
                [b"Yao Ming", (b"like", 0, b"Shaquile O'Neal"), (b"like", 0, b"Tim Duncan"), (b"like", 0, b"Tony Parker")],
                [b"Yao Ming", (b"like", 0, b"Tracy McGrady")],
            ],
        ),
        (
            'FIND SHORTEST PATH FROM "Shaquile O\'Neal" TO "Manu Ginobili", "Spurs", "Lakers" OVER * UPTO 5 STEPS',
            [
                [b"Shaquile O\'Neal", (b"like", 0, b"Tim Duncan"), (b"serve", 0, b"Spurs")],
                [b"Shaquile O\'Neal", (b"serve", 0, b"Lakers")],
                [b"Shaquile O\'Neal", (b"like", 0, b"Tim Duncan"), (b"like", 0, b"Manu Ginobili")],
                [b"Shaquile O\'Neal", (b"like", 0, b"Tim Duncan"), (b"teammate", 0, b"Manu Ginobili")],
            ],
        ),
        # A nonexistent source vertex ("Nobody") must not change the result.
        (
            'FIND SHORTEST PATH FROM "Shaquile O\'Neal", "Nobody" TO "Manu Ginobili", "Spurs", "Lakers" OVER * UPTO 5 STEPS',
            [
                [b"Shaquile O\'Neal", (b"like", 0, b"Tim Duncan"), (b"serve", 0, b"Spurs")],
                [b"Shaquile O\'Neal", (b"serve", 0, b"Lakers")],
                [b"Shaquile O\'Neal", (b"like", 0, b"Tim Duncan"), (b"like", 0, b"Manu Ginobili")],
                [b"Shaquile O\'Neal", (b"like", 0, b"Tim Duncan"), (b"teammate", 0, b"Manu Ginobili")],
            ],
        ),
        (
            'FIND SHORTEST PATH FROM "Shaquile O\'Neal" TO "Manu Ginobili", "Spurs", "Lakers" OVER like UPTO 5 STEPS',
            [
                [b"Shaquile O\'Neal", (b"like", 0, b"Tim Duncan"), (b"like", 0, b"Manu Ginobili")],
            ],
        ),
        # The three statements above are repeated to confirm the results are
        # stable across consecutive executions (as in the original test).
        (
            'FIND SHORTEST PATH FROM "Shaquile O\'Neal" TO "Manu Ginobili", "Spurs", "Lakers" OVER * UPTO 5 STEPS',
            [
                [b"Shaquile O\'Neal", (b"like", 0, b"Tim Duncan"), (b"serve", 0, b"Spurs")],
                [b"Shaquile O\'Neal", (b"serve", 0, b"Lakers")],
                [b"Shaquile O\'Neal", (b"like", 0, b"Tim Duncan"), (b"like", 0, b"Manu Ginobili")],
                [b"Shaquile O\'Neal", (b"like", 0, b"Tim Duncan"), (b"teammate", 0, b"Manu Ginobili")],
            ],
        ),
        (
            'FIND SHORTEST PATH FROM "Shaquile O\'Neal", "Nobody" TO "Manu Ginobili", "Spurs", "Lakers" OVER * UPTO 5 STEPS',
            [
                [b"Shaquile O\'Neal", (b"like", 0, b"Tim Duncan"), (b"serve", 0, b"Spurs")],
                [b"Shaquile O\'Neal", (b"serve", 0, b"Lakers")],
                [b"Shaquile O\'Neal", (b"like", 0, b"Tim Duncan"), (b"like", 0, b"Manu Ginobili")],
                [b"Shaquile O\'Neal", (b"like", 0, b"Tim Duncan"), (b"teammate", 0, b"Manu Ginobili")],
            ],
        ),
        (
            'FIND SHORTEST PATH FROM "Shaquile O\'Neal" TO "Manu Ginobili", "Spurs", "Lakers" OVER like UPTO 5 STEPS',
            [
                [b"Shaquile O\'Neal", (b"like", 0, b"Tim Duncan"), (b"like", 0, b"Manu Ginobili")],
            ],
        ),
        (
            'FIND SHORTEST PATH FROM "Marco Belinelli" TO "Spurs", "Lakers" OVER * UPTO 5 STEPS',
            [
                [b"Marco Belinelli", (b"serve", 0, b"Spurs")],
                [b"Marco Belinelli", (b"serve", 1, b"Spurs")],
                [b"Marco Belinelli", (b"like", 0, b"Danny Green"), (b"like", 0, b"LeBron James"), (b"serve", 0, b"Lakers")],
            ],
        ),
    ]
    for stmt, expected_rows in cases:
        resp = self.execute_query(stmt)
        self.check_resp_succeeded(resp)
        self.check_column_names(resp, ["_path"])
        self.check_path_result_without_prop(resp.data.rows, expected_rows)
def test_multi_source_no_path(self):
    """SHORTEST PATH toward vertices that do not exist must return no rows.

    Both destinations ("Nobody", "Spur") are absent from the dataset, so the
    query succeeds but produces an empty result set.
    """
    query = 'FIND SHORTEST PATH FROM "Tim Duncan" TO "Nobody","Spur" OVER like,serve UPTO 3 STEPS'
    resp = self.execute_query(query)
    self.check_resp_succeeded(resp)
    self.check_column_names(resp, ["_path"])
    self.check_empty_result(resp)
| 52.698953 | 157 | 0.557747 | 2,704 | 20,131 | 4.007027 | 0.04068 | 0.031934 | 0.069774 | 0.081403 | 0.960683 | 0.957545 | 0.957545 | 0.956068 | 0.954499 | 0.9509 | 0 | 0.013933 | 0.272714 | 20,131 | 381 | 158 | 52.83727 | 0.726112 | 0.009885 | 0 | 0.713873 | 0 | 0.046243 | 0.326173 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.014451 | false | 0 | 0.00289 | 0 | 0.020231 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
6255feb1e86a04e27229a97e1a3307f8acdbf4ed | 29,297 | py | Python | yandex/cloud/datatransfer/v1/endpoint/postgres_pb2.py | korsar182/python-sdk | 873bf2a9b136a8f2faae72e86fae1f5b5c3d896a | [
"MIT"
] | 36 | 2018-12-23T13:51:50.000Z | 2022-03-25T07:48:24.000Z | yandex/cloud/datatransfer/v1/endpoint/postgres_pb2.py | korsar182/python-sdk | 873bf2a9b136a8f2faae72e86fae1f5b5c3d896a | [
"MIT"
] | 15 | 2019-02-28T04:55:09.000Z | 2022-03-06T23:17:24.000Z | yandex/cloud/datatransfer/v1/endpoint/postgres_pb2.py | korsar182/python-sdk | 873bf2a9b136a8f2faae72e86fae1f5b5c3d896a | [
"MIT"
] | 18 | 2019-02-23T07:10:57.000Z | 2022-03-28T14:41:08.000Z | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: yandex/cloud/datatransfer/v1/endpoint/postgres.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from yandex.cloud.datatransfer.v1.endpoint import common_pb2 as yandex_dot_cloud_dot_datatransfer_dot_v1_dot_endpoint_dot_common__pb2
# File descriptor for postgres.proto, registered with the protobuf runtime.
# serialized_pb holds the FileDescriptorProto bytes emitted by protoc and must
# never be edited by hand; it depends on common.proto (ObjectTransferStage,
# TLSMode, Secret).  This file is generated code -- regenerate, don't edit.
DESCRIPTOR = _descriptor.FileDescriptor(
  name='yandex/cloud/datatransfer/v1/endpoint/postgres.proto',
  package='yandex.cloud.datatransfer.v1.endpoint',
  syntax='proto3',
  serialized_options=b'\n)yandex.cloud.api.datatransfer.v1.endpointZRgithub.com/yandex-cloud/go-genproto/yandex/cloud/datatransfer/v1/endpoint;endpoint\252\002%Yandex.Cloud.Datatransfer.V1.EndPoint',
  create_key=_descriptor._internal_create_key,
  serialized_pb=b'\n4yandex/cloud/datatransfer/v1/endpoint/postgres.proto\x12%yandex.cloud.datatransfer.v1.endpoint\x1a\x32yandex/cloud/datatransfer/v1/endpoint/common.proto\"\x81\n\n\x1ePostgresObjectTransferSettings\x12L\n\x08sequence\x18\x01 \x01(\x0e\x32:.yandex.cloud.datatransfer.v1.endpoint.ObjectTransferStage\x12U\n\x11sequence_owned_by\x18\x02 \x01(\x0e\x32:.yandex.cloud.datatransfer.v1.endpoint.ObjectTransferStage\x12I\n\x05table\x18\x03 \x01(\x0e\x32:.yandex.cloud.datatransfer.v1.endpoint.ObjectTransferStage\x12O\n\x0bprimary_key\x18\x04 \x01(\x0e\x32:.yandex.cloud.datatransfer.v1.endpoint.ObjectTransferStage\x12Q\n\rfk_constraint\x18\x05 \x01(\x0e\x32:.yandex.cloud.datatransfer.v1.endpoint.ObjectTransferStage\x12R\n\x0e\x64\x65\x66\x61ult_values\x18\x06 \x01(\x0e\x32:.yandex.cloud.datatransfer.v1.endpoint.ObjectTransferStage\x12N\n\nconstraint\x18\x07 \x01(\x0e\x32:.yandex.cloud.datatransfer.v1.endpoint.ObjectTransferStage\x12I\n\x05index\x18\x08 \x01(\x0e\x32:.yandex.cloud.datatransfer.v1.endpoint.ObjectTransferStage\x12H\n\x04view\x18\t \x01(\x0e\x32:.yandex.cloud.datatransfer.v1.endpoint.ObjectTransferStage\x12L\n\x08\x66unction\x18\n \x01(\x0e\x32:.yandex.cloud.datatransfer.v1.endpoint.ObjectTransferStage\x12K\n\x07trigger\x18\x0b \x01(\x0e\x32:.yandex.cloud.datatransfer.v1.endpoint.ObjectTransferStage\x12H\n\x04type\x18\x0c \x01(\x0e\x32:.yandex.cloud.datatransfer.v1.endpoint.ObjectTransferStage\x12H\n\x04rule\x18\r \x01(\x0e\x32:.yandex.cloud.datatransfer.v1.endpoint.ObjectTransferStage\x12M\n\tcollation\x18\x0e \x01(\x0e\x32:.yandex.cloud.datatransfer.v1.endpoint.ObjectTransferStage\x12J\n\x06policy\x18\x0f \x01(\x0e\x32:.yandex.cloud.datatransfer.v1.endpoint.ObjectTransferStage\x12H\n\x04\x63\x61st\x18\x10 \x01(\x0e\x32:.yandex.cloud.datatransfer.v1.endpoint.ObjectTransferStage\"\x85\x01\n\x11OnPremisePostgres\x12\r\n\x05hosts\x18\x05 \x03(\t\x12\x0c\n\x04port\x18\x02 \x01(\x03\x12@\n\x08tls_mode\x18\x06 \x01(\x0b\x32..yandex.cloud.datatransfer.v1.endpoint.TLSMode\x12\x11\n\tsubnet_id\x18\x04 \x01(\t\"\x8c\x01\n\x12PostgresConnection\x12\x18\n\x0emdb_cluster_id\x18\x01 \x01(\tH\x00\x12N\n\non_premise\x18\x02 \x01(\x0b\x32\x38.yandex.cloud.datatransfer.v1.endpoint.OnPremisePostgresH\x00\x42\x0c\n\nconnection\"\x8e\x03\n\x0ePostgresSource\x12M\n\nconnection\x18\x01 \x01(\x0b\x32\x39.yandex.cloud.datatransfer.v1.endpoint.PostgresConnection\x12\x10\n\x08\x64\x61tabase\x18\x02 \x01(\t\x12\x0c\n\x04user\x18\x03 \x01(\t\x12?\n\x08password\x18\x04 \x01(\x0b\x32-.yandex.cloud.datatransfer.v1.endpoint.Secret\x12\x16\n\x0einclude_tables\x18\x05 \x03(\t\x12\x16\n\x0e\x65xclude_tables\x18\x06 \x03(\t\x12\x1b\n\x13slot_byte_lag_limit\x18\x08 \x01(\x03\x12\x16\n\x0eservice_schema\x18\t \x01(\t\x12g\n\x18object_transfer_settings\x18\r \x01(\x0b\x32\x45.yandex.cloud.datatransfer.v1.endpoint.PostgresObjectTransferSettings\"\xc0\x01\n\x0ePostgresTarget\x12M\n\nconnection\x18\x01 \x01(\x0b\x32\x39.yandex.cloud.datatransfer.v1.endpoint.PostgresConnection\x12\x10\n\x08\x64\x61tabase\x18\x02 \x01(\t\x12\x0c\n\x04user\x18\x03 \x01(\t\x12?\n\x08password\x18\x04 \x01(\x0b\x32-.yandex.cloud.datatransfer.v1.endpoint.SecretB\xa7\x01\n)yandex.cloud.api.datatransfer.v1.endpointZRgithub.com/yandex-cloud/go-genproto/yandex/cloud/datatransfer/v1/endpoint;endpoint\xaa\x02%Yandex.Cloud.Datatransfer.V1.EndPointb\x06proto3'
  ,
  dependencies=[yandex_dot_cloud_dot_datatransfer_dot_v1_dot_endpoint_dot_common__pb2.DESCRIPTOR,])
# Generated descriptor for message PostgresObjectTransferSettings.
# All 16 fields are enum-typed (type=14) and reference
# common.proto's ObjectTransferStage (see the serialized proto above); the
# enum_type bindings are wired up after all descriptors are defined.
_POSTGRESOBJECTTRANSFERSETTINGS = _descriptor.Descriptor(
  name='PostgresObjectTransferSettings',
  full_name='yandex.cloud.datatransfer.v1.endpoint.PostgresObjectTransferSettings',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='sequence', full_name='yandex.cloud.datatransfer.v1.endpoint.PostgresObjectTransferSettings.sequence', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='sequence_owned_by', full_name='yandex.cloud.datatransfer.v1.endpoint.PostgresObjectTransferSettings.sequence_owned_by', index=1,
      number=2, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='table', full_name='yandex.cloud.datatransfer.v1.endpoint.PostgresObjectTransferSettings.table', index=2,
      number=3, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='primary_key', full_name='yandex.cloud.datatransfer.v1.endpoint.PostgresObjectTransferSettings.primary_key', index=3,
      number=4, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='fk_constraint', full_name='yandex.cloud.datatransfer.v1.endpoint.PostgresObjectTransferSettings.fk_constraint', index=4,
      number=5, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='default_values', full_name='yandex.cloud.datatransfer.v1.endpoint.PostgresObjectTransferSettings.default_values', index=5,
      number=6, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='constraint', full_name='yandex.cloud.datatransfer.v1.endpoint.PostgresObjectTransferSettings.constraint', index=6,
      number=7, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='index', full_name='yandex.cloud.datatransfer.v1.endpoint.PostgresObjectTransferSettings.index', index=7,
      number=8, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='view', full_name='yandex.cloud.datatransfer.v1.endpoint.PostgresObjectTransferSettings.view', index=8,
      number=9, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='function', full_name='yandex.cloud.datatransfer.v1.endpoint.PostgresObjectTransferSettings.function', index=9,
      number=10, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='trigger', full_name='yandex.cloud.datatransfer.v1.endpoint.PostgresObjectTransferSettings.trigger', index=10,
      number=11, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='type', full_name='yandex.cloud.datatransfer.v1.endpoint.PostgresObjectTransferSettings.type', index=11,
      number=12, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='rule', full_name='yandex.cloud.datatransfer.v1.endpoint.PostgresObjectTransferSettings.rule', index=12,
      number=13, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='collation', full_name='yandex.cloud.datatransfer.v1.endpoint.PostgresObjectTransferSettings.collation', index=13,
      number=14, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='policy', full_name='yandex.cloud.datatransfer.v1.endpoint.PostgresObjectTransferSettings.policy', index=14,
      number=15, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='cast', full_name='yandex.cloud.datatransfer.v1.endpoint.PostgresObjectTransferSettings.cast', index=15,
      number=16, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets of this message within DESCRIPTOR.serialized_pb.
  serialized_start=148,
  serialized_end=1429,
)
# Generated descriptor for message OnPremisePostgres: connection parameters
# for a self-hosted PostgreSQL endpoint (repeated hosts, int64 port, TLSMode
# message, subnet_id string -- see the serialized proto above).
_ONPREMISEPOSTGRES = _descriptor.Descriptor(
  name='OnPremisePostgres',
  full_name='yandex.cloud.datatransfer.v1.endpoint.OnPremisePostgres',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    # label=3 marks this string field as repeated.
    _descriptor.FieldDescriptor(
      name='hosts', full_name='yandex.cloud.datatransfer.v1.endpoint.OnPremisePostgres.hosts', index=0,
      number=5, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='port', full_name='yandex.cloud.datatransfer.v1.endpoint.OnPremisePostgres.port', index=1,
      number=2, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='tls_mode', full_name='yandex.cloud.datatransfer.v1.endpoint.OnPremisePostgres.tls_mode', index=2,
      number=6, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='subnet_id', full_name='yandex.cloud.datatransfer.v1.endpoint.OnPremisePostgres.subnet_id', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets of this message within DESCRIPTOR.serialized_pb.
  serialized_start=1432,
  serialized_end=1565,
)
# Generated descriptor for message PostgresConnection: a oneof 'connection'
# selecting either a managed-cluster id (mdb_cluster_id) or an on-premise
# specification (on_premise -> OnPremisePostgres).
_POSTGRESCONNECTION = _descriptor.Descriptor(
  name='PostgresConnection',
  full_name='yandex.cloud.datatransfer.v1.endpoint.PostgresConnection',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='mdb_cluster_id', full_name='yandex.cloud.datatransfer.v1.endpoint.PostgresConnection.mdb_cluster_id', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='on_premise', full_name='yandex.cloud.datatransfer.v1.endpoint.PostgresConnection.on_premise', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
    # Both fields above belong to this oneof; membership is wired up later
    # by the generated registration code.
    _descriptor.OneofDescriptor(
      name='connection', full_name='yandex.cloud.datatransfer.v1.endpoint.PostgresConnection.connection',
      index=0, containing_type=None,
      create_key=_descriptor._internal_create_key,
      fields=[]),
  ],
  # Byte offsets of this message within DESCRIPTOR.serialized_pb.
  serialized_start=1568,
  serialized_end=1708,
)
# Generated descriptor for message PostgresSource: a PostgreSQL source
# endpoint -- connection (PostgresConnection), credentials (password is a
# Secret message), table include/exclude lists, replication-slot lag limit,
# service schema, and per-object transfer settings.
_POSTGRESSOURCE = _descriptor.Descriptor(
  name='PostgresSource',
  full_name='yandex.cloud.datatransfer.v1.endpoint.PostgresSource',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='connection', full_name='yandex.cloud.datatransfer.v1.endpoint.PostgresSource.connection', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='database', full_name='yandex.cloud.datatransfer.v1.endpoint.PostgresSource.database', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='user', full_name='yandex.cloud.datatransfer.v1.endpoint.PostgresSource.user', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='password', full_name='yandex.cloud.datatransfer.v1.endpoint.PostgresSource.password', index=3,
      number=4, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='include_tables', full_name='yandex.cloud.datatransfer.v1.endpoint.PostgresSource.include_tables', index=4,
      number=5, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='exclude_tables', full_name='yandex.cloud.datatransfer.v1.endpoint.PostgresSource.exclude_tables', index=5,
      number=6, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='slot_byte_lag_limit', full_name='yandex.cloud.datatransfer.v1.endpoint.PostgresSource.slot_byte_lag_limit', index=6,
      number=8, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='service_schema', full_name='yandex.cloud.datatransfer.v1.endpoint.PostgresSource.service_schema', index=7,
      number=9, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='object_transfer_settings', full_name='yandex.cloud.datatransfer.v1.endpoint.PostgresSource.object_transfer_settings', index=8,
      number=13, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets of this message within DESCRIPTOR.serialized_pb.
  serialized_start=1711,
  serialized_end=2109,
)
# Generated descriptor for message PostgresTarget: a PostgreSQL target
# endpoint -- the same connection/database/user/password quartet as
# PostgresSource, without the source-only replication settings.
_POSTGRESTARGET = _descriptor.Descriptor(
  name='PostgresTarget',
  full_name='yandex.cloud.datatransfer.v1.endpoint.PostgresTarget',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='connection', full_name='yandex.cloud.datatransfer.v1.endpoint.PostgresTarget.connection', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='database', full_name='yandex.cloud.datatransfer.v1.endpoint.PostgresTarget.database', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='user', full_name='yandex.cloud.datatransfer.v1.endpoint.PostgresTarget.user', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='password', full_name='yandex.cloud.datatransfer.v1.endpoint.PostgresTarget.password', index=3,
      number=4, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets of this message within DESCRIPTOR.serialized_pb.
  serialized_start=2112,
  serialized_end=2304,
)
# ---------------------------------------------------------------------------
# Machine-generated protobuf wiring (protoc output) -- do not edit by hand.
# Resolve cross references that could not be set while the descriptors above
# were being constructed.  Every PostgresObjectTransferSettings field is an
# ObjectTransferStage enum declared in the shared common proto module.
# ---------------------------------------------------------------------------
_POSTGRESOBJECTTRANSFERSETTINGS.fields_by_name['sequence'].enum_type = yandex_dot_cloud_dot_datatransfer_dot_v1_dot_endpoint_dot_common__pb2._OBJECTTRANSFERSTAGE
_POSTGRESOBJECTTRANSFERSETTINGS.fields_by_name['sequence_owned_by'].enum_type = yandex_dot_cloud_dot_datatransfer_dot_v1_dot_endpoint_dot_common__pb2._OBJECTTRANSFERSTAGE
_POSTGRESOBJECTTRANSFERSETTINGS.fields_by_name['table'].enum_type = yandex_dot_cloud_dot_datatransfer_dot_v1_dot_endpoint_dot_common__pb2._OBJECTTRANSFERSTAGE
_POSTGRESOBJECTTRANSFERSETTINGS.fields_by_name['primary_key'].enum_type = yandex_dot_cloud_dot_datatransfer_dot_v1_dot_endpoint_dot_common__pb2._OBJECTTRANSFERSTAGE
_POSTGRESOBJECTTRANSFERSETTINGS.fields_by_name['fk_constraint'].enum_type = yandex_dot_cloud_dot_datatransfer_dot_v1_dot_endpoint_dot_common__pb2._OBJECTTRANSFERSTAGE
_POSTGRESOBJECTTRANSFERSETTINGS.fields_by_name['default_values'].enum_type = yandex_dot_cloud_dot_datatransfer_dot_v1_dot_endpoint_dot_common__pb2._OBJECTTRANSFERSTAGE
_POSTGRESOBJECTTRANSFERSETTINGS.fields_by_name['constraint'].enum_type = yandex_dot_cloud_dot_datatransfer_dot_v1_dot_endpoint_dot_common__pb2._OBJECTTRANSFERSTAGE
_POSTGRESOBJECTTRANSFERSETTINGS.fields_by_name['index'].enum_type = yandex_dot_cloud_dot_datatransfer_dot_v1_dot_endpoint_dot_common__pb2._OBJECTTRANSFERSTAGE
_POSTGRESOBJECTTRANSFERSETTINGS.fields_by_name['view'].enum_type = yandex_dot_cloud_dot_datatransfer_dot_v1_dot_endpoint_dot_common__pb2._OBJECTTRANSFERSTAGE
_POSTGRESOBJECTTRANSFERSETTINGS.fields_by_name['function'].enum_type = yandex_dot_cloud_dot_datatransfer_dot_v1_dot_endpoint_dot_common__pb2._OBJECTTRANSFERSTAGE
_POSTGRESOBJECTTRANSFERSETTINGS.fields_by_name['trigger'].enum_type = yandex_dot_cloud_dot_datatransfer_dot_v1_dot_endpoint_dot_common__pb2._OBJECTTRANSFERSTAGE
_POSTGRESOBJECTTRANSFERSETTINGS.fields_by_name['type'].enum_type = yandex_dot_cloud_dot_datatransfer_dot_v1_dot_endpoint_dot_common__pb2._OBJECTTRANSFERSTAGE
_POSTGRESOBJECTTRANSFERSETTINGS.fields_by_name['rule'].enum_type = yandex_dot_cloud_dot_datatransfer_dot_v1_dot_endpoint_dot_common__pb2._OBJECTTRANSFERSTAGE
_POSTGRESOBJECTTRANSFERSETTINGS.fields_by_name['collation'].enum_type = yandex_dot_cloud_dot_datatransfer_dot_v1_dot_endpoint_dot_common__pb2._OBJECTTRANSFERSTAGE
_POSTGRESOBJECTTRANSFERSETTINGS.fields_by_name['policy'].enum_type = yandex_dot_cloud_dot_datatransfer_dot_v1_dot_endpoint_dot_common__pb2._OBJECTTRANSFERSTAGE
_POSTGRESOBJECTTRANSFERSETTINGS.fields_by_name['cast'].enum_type = yandex_dot_cloud_dot_datatransfer_dot_v1_dot_endpoint_dot_common__pb2._OBJECTTRANSFERSTAGE
# Resolve message-type references.
_ONPREMISEPOSTGRES.fields_by_name['tls_mode'].message_type = yandex_dot_cloud_dot_datatransfer_dot_v1_dot_endpoint_dot_common__pb2._TLSMODE
_POSTGRESCONNECTION.fields_by_name['on_premise'].message_type = _ONPREMISEPOSTGRES
# Attach the two alternative connection fields to the 'connection' oneof.
_POSTGRESCONNECTION.oneofs_by_name['connection'].fields.append(
  _POSTGRESCONNECTION.fields_by_name['mdb_cluster_id'])
_POSTGRESCONNECTION.fields_by_name['mdb_cluster_id'].containing_oneof = _POSTGRESCONNECTION.oneofs_by_name['connection']
_POSTGRESCONNECTION.oneofs_by_name['connection'].fields.append(
  _POSTGRESCONNECTION.fields_by_name['on_premise'])
_POSTGRESCONNECTION.fields_by_name['on_premise'].containing_oneof = _POSTGRESCONNECTION.oneofs_by_name['connection']
_POSTGRESSOURCE.fields_by_name['connection'].message_type = _POSTGRESCONNECTION
_POSTGRESSOURCE.fields_by_name['password'].message_type = yandex_dot_cloud_dot_datatransfer_dot_v1_dot_endpoint_dot_common__pb2._SECRET
_POSTGRESSOURCE.fields_by_name['object_transfer_settings'].message_type = _POSTGRESOBJECTTRANSFERSETTINGS
_POSTGRESTARGET.fields_by_name['connection'].message_type = _POSTGRESCONNECTION
_POSTGRESTARGET.fields_by_name['password'].message_type = yandex_dot_cloud_dot_datatransfer_dot_v1_dot_endpoint_dot_common__pb2._SECRET
# Register the message descriptors on the file descriptor and symbol database.
DESCRIPTOR.message_types_by_name['PostgresObjectTransferSettings'] = _POSTGRESOBJECTTRANSFERSETTINGS
DESCRIPTOR.message_types_by_name['OnPremisePostgres'] = _ONPREMISEPOSTGRES
DESCRIPTOR.message_types_by_name['PostgresConnection'] = _POSTGRESCONNECTION
DESCRIPTOR.message_types_by_name['PostgresSource'] = _POSTGRESSOURCE
DESCRIPTOR.message_types_by_name['PostgresTarget'] = _POSTGRESTARGET
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# Build the concrete Python message classes from the descriptors and register
# them with the symbol database.
PostgresObjectTransferSettings = _reflection.GeneratedProtocolMessageType('PostgresObjectTransferSettings', (_message.Message,), {
  'DESCRIPTOR' : _POSTGRESOBJECTTRANSFERSETTINGS,
  '__module__' : 'yandex.cloud.datatransfer.v1.endpoint.postgres_pb2'
  # @@protoc_insertion_point(class_scope:yandex.cloud.datatransfer.v1.endpoint.PostgresObjectTransferSettings)
  })
_sym_db.RegisterMessage(PostgresObjectTransferSettings)
OnPremisePostgres = _reflection.GeneratedProtocolMessageType('OnPremisePostgres', (_message.Message,), {
  'DESCRIPTOR' : _ONPREMISEPOSTGRES,
  '__module__' : 'yandex.cloud.datatransfer.v1.endpoint.postgres_pb2'
  # @@protoc_insertion_point(class_scope:yandex.cloud.datatransfer.v1.endpoint.OnPremisePostgres)
  })
_sym_db.RegisterMessage(OnPremisePostgres)
PostgresConnection = _reflection.GeneratedProtocolMessageType('PostgresConnection', (_message.Message,), {
  'DESCRIPTOR' : _POSTGRESCONNECTION,
  '__module__' : 'yandex.cloud.datatransfer.v1.endpoint.postgres_pb2'
  # @@protoc_insertion_point(class_scope:yandex.cloud.datatransfer.v1.endpoint.PostgresConnection)
  })
_sym_db.RegisterMessage(PostgresConnection)
PostgresSource = _reflection.GeneratedProtocolMessageType('PostgresSource', (_message.Message,), {
  'DESCRIPTOR' : _POSTGRESSOURCE,
  '__module__' : 'yandex.cloud.datatransfer.v1.endpoint.postgres_pb2'
  # @@protoc_insertion_point(class_scope:yandex.cloud.datatransfer.v1.endpoint.PostgresSource)
  })
_sym_db.RegisterMessage(PostgresSource)
PostgresTarget = _reflection.GeneratedProtocolMessageType('PostgresTarget', (_message.Message,), {
  'DESCRIPTOR' : _POSTGRESTARGET,
  '__module__' : 'yandex.cloud.datatransfer.v1.endpoint.postgres_pb2'
  # @@protoc_insertion_point(class_scope:yandex.cloud.datatransfer.v1.endpoint.PostgresTarget)
  })
_sym_db.RegisterMessage(PostgresTarget)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
| 61.290795 | 3,366 | 0.793631 | 3,678 | 29,297 | 5.97988 | 0.071778 | 0.040375 | 0.073429 | 0.103119 | 0.824225 | 0.799764 | 0.785078 | 0.768801 | 0.685096 | 0.634764 | 0 | 0.034491 | 0.095471 | 29,297 | 477 | 3,367 | 61.419287 | 0.795472 | 0.024678 | 0 | 0.646259 | 1 | 0.004535 | 0.271158 | 0.236983 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.011338 | 0.011338 | 0 | 0.011338 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
625702561f9ce753616e595faedf5161115b69ee | 11,100 | py | Python | app/controllers/web/admin/hosts.py | Clivern/Kraven | 5d8d2de26e170d853d7d5f2b1f2d453ab07e4401 | [
"Apache-2.0"
] | 3 | 2018-07-22T22:36:09.000Z | 2019-05-31T10:29:54.000Z | app/controllers/web/admin/hosts.py | Clivern/Kraven | 5d8d2de26e170d853d7d5f2b1f2d453ab07e4401 | [
"Apache-2.0"
] | 41 | 2018-07-22T22:07:52.000Z | 2018-11-14T11:07:48.000Z | app/controllers/web/admin/hosts.py | Clivern/Kraven | 5d8d2de26e170d853d7d5f2b1f2d453ab07e4401 | [
"Apache-2.0"
] | 1 | 2020-04-24T12:55:27.000Z | 2020-04-24T12:55:27.000Z | """
Hosts Web Controller
"""
# standard library
import os
# Django
from django.views import View
from django.shortcuts import render
from django.utils.translation import gettext as _
from django.http import Http404
# local Django
from app.modules.util.helpers import Helpers
from app.modules.core.context import Context
from app.modules.core.host import Host as Host_Module
from app.modules.core.decorators import login_if_not_authenticated
class Hosts_List(View):
    """Admin page listing all docker hosts."""

    template_name = 'templates/admin/hosts/docker/list.html'

    __host_module = Host_Module()

    @login_if_not_authenticated
    def get(self, request):
        """Render the hosts list page.

        The Context is created per request: the previous class-level
        ``__context = Context()`` was shared by every request served by this
        view class, so per-request data pushed into it (page title, user)
        could leak between concurrent requests.
        """
        context = Context()
        context.autoload_options()
        context.autoload_user(request.user.id if request.user.is_authenticated else None)

        context.push({
            # NOTE(review): "·" looks like mojibake of "·" (U+00B7) -- verify file encoding.
            "page_title": _("Hosts · %s") % context.get("app_name", os.getenv("APP_NAME", "Kraven"))
        })

        return render(request, self.template_name, context.get())
class Host_Create(View):
    """Admin page for creating a new docker host."""

    template_name = 'templates/admin/hosts/docker/create.html'

    __host_module = Host_Module()

    @login_if_not_authenticated
    def get(self, request):
        """Render the host creation form.

        A fresh Context is built per request; the old class-level Context()
        was shared across requests and could leak per-request state.
        """
        context = Context()
        context.autoload_options()
        context.autoload_user(request.user.id if request.user.is_authenticated else None)

        context.push({
            "page_title": _("Create a Host · %s") % context.get("app_name", os.getenv("APP_NAME", "Kraven"))
        })

        return render(request, self.template_name, context.get())
class Host_Edit(View):
    """Admin page for editing an existing docker host."""

    template_name = 'templates/admin/hosts/docker/edit.html'

    __host_module = Host_Module()
    __helpers = Helpers()

    @login_if_not_authenticated
    def get(self, request, host_slug):
        """Render the edit form for the host identified by *host_slug*.

        Raises Http404 when the host does not exist or is not owned by the
        requesting user.  The Context is built per request; the old
        class-level Context() was shared across requests and could leak
        per-request state.
        """
        host = self.__host_module.get_one_by_slug_user_id(host_slug, request.user.id)

        if not host or request.user.id != host.user.id:
            raise Http404("Host not found.")

        # Stored auth data is a JSON blob; decode it for the template.
        host.auth_data = self.__helpers.json_loads(host.auth_data)

        context = Context()
        context.autoload_options()
        context.autoload_user(request.user.id if request.user.is_authenticated else None)

        context.push({
            "page_title": _("Edit %s Host · %s") % (host.name, context.get("app_name", os.getenv("APP_NAME", "Kraven"))),
            "host": host
        })

        return render(request, self.template_name, context.get())
class Host_View(View):
    """Docker host detail page, 'main' screen."""

    template_name = 'templates/admin/hosts/docker/view.html'

    __host_module = Host_Module()

    @login_if_not_authenticated
    def get(self, request, host_slug):
        """Render the main screen for the host identified by *host_slug*.

        Raises Http404 when the host does not exist or is not owned by the
        requesting user.  The Context is built per request; the old
        class-level Context() was shared across requests and could leak
        per-request state.
        """
        host = self.__host_module.get_one_by_slug_user_id(host_slug, request.user.id)

        if not host or request.user.id != host.user.id:
            raise Http404("Host not found.")

        context = Context()
        context.autoload_options()
        context.autoload_user(request.user.id if request.user.is_authenticated else None)

        context.push({
            "page_title": _("%s Host · %s") % (host.name, context.get("app_name", os.getenv("APP_NAME", "Kraven"))),
            "host": host,
            "screen": "main"
        })

        return render(request, self.template_name, context.get())
class Host_Containers_View(View):
    """Docker host detail page, 'containers' screen."""

    template_name = 'templates/admin/hosts/docker/view.html'

    __host_module = Host_Module()

    @login_if_not_authenticated
    def get(self, request, host_slug):
        """Render the containers screen for the host identified by *host_slug*.

        Raises Http404 when the host does not exist or is not owned by the
        requesting user.  The Context is built per request; the old
        class-level Context() was shared across requests and could leak
        per-request state.
        """
        host = self.__host_module.get_one_by_slug_user_id(host_slug, request.user.id)

        if not host or request.user.id != host.user.id:
            raise Http404("Host not found.")

        context = Context()
        context.autoload_options()
        context.autoload_user(request.user.id if request.user.is_authenticated else None)

        context.push({
            "page_title": _("%s Host · %s") % (host.name, context.get("app_name", os.getenv("APP_NAME", "Kraven"))),
            "host": host,
            "screen": "containers"
        })

        return render(request, self.template_name, context.get())
class Host_Images_View(View):
    """Docker host detail page, 'images' screen."""

    template_name = 'templates/admin/hosts/docker/view.html'

    __host_module = Host_Module()

    @login_if_not_authenticated
    def get(self, request, host_slug):
        """Render the images screen for the host identified by *host_slug*.

        Raises Http404 when the host does not exist or is not owned by the
        requesting user.  The Context is built per request; the old
        class-level Context() was shared across requests and could leak
        per-request state.
        """
        host = self.__host_module.get_one_by_slug_user_id(host_slug, request.user.id)

        if not host or request.user.id != host.user.id:
            raise Http404("Host not found.")

        context = Context()
        context.autoload_options()
        context.autoload_user(request.user.id if request.user.is_authenticated else None)

        context.push({
            "page_title": _("%s Host · %s") % (host.name, context.get("app_name", os.getenv("APP_NAME", "Kraven"))),
            "host": host,
            "screen": "images"
        })

        return render(request, self.template_name, context.get())
class Host_Image_View(View):
    """Docker host detail page, single-image screen."""

    template_name = 'templates/admin/hosts/docker/view.html'

    __host_module = Host_Module()

    @login_if_not_authenticated
    def get(self, request, host_slug, image_id):
        """Render the view of one image (*image_id*) on the host *host_slug*.

        Raises Http404 when the host does not exist or is not owned by the
        requesting user.  The Context is built per request; the old
        class-level Context() was shared across requests and could leak
        per-request state.
        """
        host = self.__host_module.get_one_by_slug_user_id(host_slug, request.user.id)

        if not host or request.user.id != host.user.id:
            raise Http404("Host not found.")

        context = Context()
        context.autoload_options()
        context.autoload_user(request.user.id if request.user.is_authenticated else None)

        context.push({
            "page_title": _("%s Host · %s") % (host.name, context.get("app_name", os.getenv("APP_NAME", "Kraven"))),
            "host": host,
            "image_id": image_id,
            "screen": "image_view"
        })

        return render(request, self.template_name, context.get())
class Host_Images_Pull_View(View):
    """Docker host detail page, 'images_pull' screen."""

    template_name = 'templates/admin/hosts/docker/view.html'

    __host_module = Host_Module()

    @login_if_not_authenticated
    def get(self, request, host_slug):
        """Render the image-pull screen for the host identified by *host_slug*.

        Raises Http404 when the host does not exist or is not owned by the
        requesting user.  The Context is built per request; the old
        class-level Context() was shared across requests and could leak
        per-request state.
        """
        host = self.__host_module.get_one_by_slug_user_id(host_slug, request.user.id)

        if not host or request.user.id != host.user.id:
            raise Http404("Host not found.")

        context = Context()
        context.autoload_options()
        context.autoload_user(request.user.id if request.user.is_authenticated else None)

        context.push({
            "page_title": _("%s Host · %s") % (host.name, context.get("app_name", os.getenv("APP_NAME", "Kraven"))),
            "host": host,
            "screen": "images_pull"
        })

        return render(request, self.template_name, context.get())
class Host_Images_Build_View(View):
    """Docker host detail page, 'images_build' screen."""

    template_name = 'templates/admin/hosts/docker/view.html'

    __host_module = Host_Module()

    @login_if_not_authenticated
    def get(self, request, host_slug):
        """Render the image-build screen for the host identified by *host_slug*.

        Raises Http404 when the host does not exist or is not owned by the
        requesting user.  The Context is built per request; the old
        class-level Context() was shared across requests and could leak
        per-request state.
        """
        host = self.__host_module.get_one_by_slug_user_id(host_slug, request.user.id)

        if not host or request.user.id != host.user.id:
            raise Http404("Host not found.")

        context = Context()
        context.autoload_options()
        context.autoload_user(request.user.id if request.user.is_authenticated else None)

        context.push({
            "page_title": _("%s Host · %s") % (host.name, context.get("app_name", os.getenv("APP_NAME", "Kraven"))),
            "host": host,
            "screen": "images_build"
        })

        return render(request, self.template_name, context.get())
class Host_Networks_View(View):
    """Docker host detail page, 'networks' screen."""

    template_name = 'templates/admin/hosts/docker/view.html'

    __host_module = Host_Module()

    @login_if_not_authenticated
    def get(self, request, host_slug):
        """Render the networks screen for the host identified by *host_slug*.

        Raises Http404 when the host does not exist or is not owned by the
        requesting user.  The Context is built per request; the old
        class-level Context() was shared across requests and could leak
        per-request state.
        """
        host = self.__host_module.get_one_by_slug_user_id(host_slug, request.user.id)

        if not host or request.user.id != host.user.id:
            raise Http404("Host not found.")

        context = Context()
        context.autoload_options()
        context.autoload_user(request.user.id if request.user.is_authenticated else None)

        context.push({
            "page_title": _("%s Host · %s") % (host.name, context.get("app_name", os.getenv("APP_NAME", "Kraven"))),
            "host": host,
            "screen": "networks"
        })

        return render(request, self.template_name, context.get())
class Host_Services_View(View):
    """Docker host detail page, 'services' screen."""

    template_name = 'templates/admin/hosts/docker/view.html'

    __host_module = Host_Module()

    @login_if_not_authenticated
    def get(self, request, host_slug):
        """Render the services screen for the host identified by *host_slug*.

        Raises Http404 when the host does not exist or is not owned by the
        requesting user.  The Context is built per request; the old
        class-level Context() was shared across requests and could leak
        per-request state.
        """
        host = self.__host_module.get_one_by_slug_user_id(host_slug, request.user.id)

        if not host or request.user.id != host.user.id:
            raise Http404("Host not found.")

        context = Context()
        context.autoload_options()
        context.autoload_user(request.user.id if request.user.is_authenticated else None)

        context.push({
            "page_title": _("%s Host · %s") % (host.name, context.get("app_name", os.getenv("APP_NAME", "Kraven"))),
            "host": host,
            "screen": "services"
        })

        return render(request, self.template_name, context.get())
class Host_Volumes_View(View):
    """Docker host detail page, 'volumes' screen."""

    template_name = 'templates/admin/hosts/docker/view.html'

    __host_module = Host_Module()

    @login_if_not_authenticated
    def get(self, request, host_slug):
        """Render the volumes screen for the host identified by *host_slug*.

        Raises Http404 when the host does not exist or is not owned by the
        requesting user.  The Context is built per request; the old
        class-level Context() was shared across requests and could leak
        per-request state.
        """
        host = self.__host_module.get_one_by_slug_user_id(host_slug, request.user.id)

        if not host or request.user.id != host.user.id:
            raise Http404("Host not found.")

        context = Context()
        context.autoload_options()
        context.autoload_user(request.user.id if request.user.is_authenticated else None)

        context.push({
            "page_title": _("%s Host · %s") % (host.name, context.get("app_name", os.getenv("APP_NAME", "Kraven"))),
            "host": host,
            "screen": "volumes"
        })

        return render(request, self.template_name, context.get())
class Host_Actions_View(View):
    """Docker host detail page, 'actions' screen."""

    template_name = 'templates/admin/hosts/docker/view.html'

    __host_module = Host_Module()

    @login_if_not_authenticated
    def get(self, request, host_slug):
        """Render the actions screen for the host identified by *host_slug*.

        Raises Http404 when the host does not exist or is not owned by the
        requesting user.  The Context is built per request; the old
        class-level Context() was shared across requests and could leak
        per-request state.
        """
        host = self.__host_module.get_one_by_slug_user_id(host_slug, request.user.id)

        if not host or request.user.id != host.user.id:
            raise Http404("Host not found.")

        context = Context()
        context.autoload_options()
        context.autoload_user(request.user.id if request.user.is_authenticated else None)

        context.push({
            "page_title": _("%s Host · %s") % (host.name, context.get("app_name", os.getenv("APP_NAME", "Kraven"))),
            "host": host,
            "screen": "actions"
        })

        return render(request, self.template_name, context.get())
| 33.333333 | 128 | 0.65991 | 1,451 | 11,100 | 4.701585 | 0.057891 | 0.104808 | 0.066696 | 0.05277 | 0.909411 | 0.909411 | 0.909411 | 0.885811 | 0.885811 | 0.878628 | 0 | 0.004129 | 0.214595 | 11,100 | 332 | 129 | 33.433735 | 0.776898 | 0.005225 | 0 | 0.806306 | 0 | 0 | 0.130257 | 0.04496 | 0 | 0 | 0 | 0 | 0 | 1 | 0.058559 | false | 0 | 0.040541 | 0 | 0.396396 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
6556120c4b70e63b3c9e253894d2b8baa1adfb77 | 13,539 | py | Python | django/bossspatialdb/test/test_downsample_view.py | ArnaudGallardo/boss | c0d3bbca31575ac5442822b8d7f962def32d9072 | [
"Apache-2.0"
] | null | null | null | django/bossspatialdb/test/test_downsample_view.py | ArnaudGallardo/boss | c0d3bbca31575ac5442822b8d7f962def32d9072 | [
"Apache-2.0"
] | null | null | null | django/bossspatialdb/test/test_downsample_view.py | ArnaudGallardo/boss | c0d3bbca31575ac5442822b8d7f962def32d9072 | [
"Apache-2.0"
] | null | null | null | # Copyright 2016 The Johns Hopkins University Applied Physics Laboratory
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.conf import settings
from rest_framework.test import APITestCase, APIRequestFactory
from rest_framework.test import force_authenticate
from rest_framework import status
from bossspatialdb.views import Downsample
from bosscore.test.setup_db import SetupTestDB
from bosscore.error import BossError
import json
from unittest.mock import patch
# API version prefix used to build request URLs (e.g. '/<version>/downsample/...').
version = settings.BOSS_VERSION
def mock_sfn_status(a, b):
    """Patch stand-in for bossutils.aws.sfn_status: always report RUNNING."""
    return "RUNNING"
def mock_sfn_execute(a, b, c):
    """Patch stand-in for bossutils.aws.sfn_execute: return a fixed fake execution ARN."""
    return "ARN:abc123"
def mock_sfn_cancel(session, arn, error="Error", cause="Unknown Cause"):
    """Patch stand-in for bossutils.aws.sfn_cancel: do nothing."""
    pass
class DownsampleInterfaceViewMixin(object):
    """Shared tests for the downsample REST endpoint.

    Host test cases must provide ``self.user`` (a user to authenticate as)
    and ``self.dbsetup`` (a SetupTestDB instance).  The six property tests
    were near-identical 20-line bodies; the request building and the
    assertion boilerplate are factored into private helpers so each test
    states only what is specific to it.
    """

    # Expected per-resolution-level properties of the test channels.  Keys
    # are resolution levels (as strings), values are [x, y, z].
    ISO_VOXEL = {'0': [6.0, 6.0, 6.0], '3': [48.0, 48.0, 48.0], '5': [192.0, 192.0, 192.0]}
    ISO_EXTENT = {'0': [2000, 5000, 200], '3': [250, 625, 25], '5': [63, 157, 7]}
    ANISO_VOXEL = {'0': [4.0, 4.0, 35.0], '3': [32.0, 32.0, 35.0], '5': [128.0, 128.0, 35.0]}
    ANISO_EXTENT = {'0': [2000, 5000, 200], '3': [250, 625, 200], '5': [63, 157, 200]}
    # With ?iso=True an anisotropic channel reports different z values at the
    # higher resolution levels.
    ANISO_ISO_VOXEL = {'0': [4.0, 4.0, 35.0], '3': [32.0, 32.0, 35.0], '5': [128.0, 128.0, 140]}
    ANISO_ISO_EXTENT = {'0': [2000, 5000, 200], '3': [250, 625, 200], '5': [63, 157, 50]}

    def _downsample_request(self, method, experiment, query=''):
        """Issue an authenticated request against the downsample endpoint.

        Args:
            method (str): APIRequestFactory method name ('get', 'post', 'delete').
            experiment (str): Experiment name within collection col1.
            query (str): Optional query string (e.g. '?iso=True').

        Returns:
            The (un-rendered) response from the Downsample view.
        """
        factory = APIRequestFactory()
        request = getattr(factory, method)(
            '/' + version + '/downsample/col1/' + experiment + '/channel1/' + query,
            content_type='application/json')
        force_authenticate(request, user=self.user)
        return Downsample.as_view()(request, collection='col1', experiment=experiment,
                                    channel='channel1')

    def _assert_properties(self, experiment, query, voxel_size, extent):
        """GET the downsample properties and verify a NOT_DOWNSAMPLED channel."""
        response = self._downsample_request('get', experiment, query).render()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        data = response.data
        self.assertEqual(data["num_hierarchy_levels"], 8)
        self.assertEqual(data["status"], "NOT_DOWNSAMPLED")
        for level, expected in voxel_size.items():
            self.assertEqual(data["voxel_size"][level], expected)
        for level, expected in extent.items():
            self.assertEqual(data["extent"][level], expected)
        # Cuboid dimensions are the same at every checked resolution level.
        for level in ('0', '3', '5'):
            self.assertEqual(data["cuboid_size"][level], [512, 512, 16])

    def test_get_iso_properties_no_arg(self):
        """Test getting the properties of an isotropic channel."""
        self._assert_properties('exp_iso', '', self.ISO_VOXEL, self.ISO_EXTENT)

    def test_get_iso_properties_iso_false(self):
        """Test getting the properties of an isotropic channel with iso=False."""
        self._assert_properties('exp_iso', '?iso=False', self.ISO_VOXEL, self.ISO_EXTENT)

    def test_get_iso_properties_iso(self):
        """Test getting the properties of an isotropic channel with iso=True."""
        self._assert_properties('exp_iso', '?iso=True', self.ISO_VOXEL, self.ISO_EXTENT)

    def test_get_aniso_properties_no_arg(self):
        """Test getting the properties of an anisotropic channel."""
        self._assert_properties('exp_aniso', '', self.ANISO_VOXEL, self.ANISO_EXTENT)

    def test_get_aniso_properties_iso_false(self):
        """Test getting the properties of an anisotropic channel with iso=False."""
        self._assert_properties('exp_aniso', '?iso=False', self.ANISO_VOXEL, self.ANISO_EXTENT)

    def test_get_aniso_properties_iso(self):
        """Test getting the properties of an anisotropic channel with iso=True."""
        self._assert_properties('exp_aniso', '?iso=True', self.ANISO_ISO_VOXEL, self.ANISO_ISO_EXTENT)

    def test_start_and_cancel_downsample_aniso(self):
        """Start a downsample job, cancel it, and verify a second cancel conflicts."""
        self.dbsetup.insert_downsample_data()

        # Kick off the downsample job.
        response = self._downsample_request('post', 'exp_ds_aniso')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)

        # Make sure the status has changed.
        response = self._downsample_request('get', 'exp_ds_aniso').render()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data["num_hierarchy_levels"], 5)
        self.assertEqual(response.data["status"], "IN_PROGRESS")

        # Cancel the downsample job.
        response = self._downsample_request('delete', 'exp_ds_aniso')
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)

        # Status should be back to NOT_DOWNSAMPLED.
        response = self._downsample_request('get', 'exp_ds_aniso').render()
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data["status"], "NOT_DOWNSAMPLED")

        # Cancelling again must fail because nothing is in progress.
        response = self._downsample_request('delete', 'exp_ds_aniso')
        self.assertEqual(response.status_code, status.HTTP_409_CONFLICT)
# Patch the AWS step-function helpers so the mixin's downsample tests run
# against this concrete test case without making real AWS calls.
@patch('bossutils.aws.sfn_status', mock_sfn_status)
@patch('bossutils.aws.sfn_execute', mock_sfn_execute)
@patch('bossutils.aws.sfn_cancel', mock_sfn_cancel)
class TestDownsampleInterfaceView(DownsampleInterfaceViewMixin, APITestCase):

    def setUp(self):
        """Create a test user and populate the database with test data.

        Provides ``self.user`` and ``self.dbsetup`` as expected by the mixin.
        """
        # Create a user
        self.dbsetup = SetupTestDB()
        self.user = self.dbsetup.create_user('testuser')

        # Populate DB
        self.dbsetup.insert_spatialdb_test_data()
        self.dbsetup.insert_iso_data()
| 49.95941 | 120 | 0.643105 | 1,635 | 13,539 | 5.190826 | 0.127217 | 0.141393 | 0.216802 | 0.219512 | 0.81996 | 0.808177 | 0.808177 | 0.805821 | 0.805821 | 0.803464 | 0 | 0.057445 | 0.211832 | 13,539 | 270 | 121 | 50.144444 | 0.737888 | 0.117143 | 0 | 0.72093 | 0 | 0 | 0.154574 | 0.043585 | 0 | 0 | 0 | 0 | 0.465116 | 1 | 0.063953 | false | 0.005814 | 0.052326 | 0.011628 | 0.139535 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
02c559412399119a32c5381ff76c74172617beb6 | 2,615 | py | Python | dset_loaders/collate_fn.py | Luodian/Learning-Invariant-Representations-and-Risks | f3058fe50e86660ca0c17ba0df41ece9af64c557 | [
"MIT"
] | 17 | 2021-04-22T03:24:38.000Z | 2022-03-30T03:12:09.000Z | dset_loaders/collate_fn.py | Luodian/Learning-Invariant-Representations-and-Risks | f3058fe50e86660ca0c17ba0df41ece9af64c557 | [
"MIT"
] | 5 | 2021-12-10T10:12:26.000Z | 2022-03-31T00:01:58.000Z | dset_loaders/collate_fn.py | Luodian/Learning-Invariant-Representations-and-Risks | f3058fe50e86660ca0c17ba0df41ece9af64c557 | [
"MIT"
] | 3 | 2021-05-19T06:12:14.000Z | 2021-12-17T09:27:49.000Z | import torch
def cls_collate_fn(batch):
    r"""Collate classification samples into batched tensors.

    Each element of *batch* is a dict with keys 'sample_1_q', 'sample_1_k',
    'sample_2_q' and 'sample_2_k', mapping to (image_tensor, int_label)
    pairs.  For every key, the images are stacked along a new leading batch
    dimension and the labels are collected into a LongTensor.
    """
    batch_inputs = {}
    for key in ('sample_1_q', 'sample_1_k', 'sample_2_q', 'sample_2_k'):
        images = torch.stack([sample[key][0] for sample in batch])
        labels = torch.LongTensor([sample[key][1] for sample in batch])
        batch_inputs[key] = (images, labels)
    return batch_inputs
def reg_collate_fn(batch):
    r"""Puts each data field into a tensor with outer dimension batch size.

    Each element of ``batch`` is a dict with keys ``sample_1_q``,
    ``sample_1_k``, ``sample_2_q`` and ``sample_2_k``; each value is an
    ``(image, label, mask)`` triple of tensors (regression targets, so the
    label is stacked as a float tensor rather than cast to LongTensor).

    Returns a dict with the same keys, each mapping to
    ``(stacked_images, stacked_labels, stacked_masks)`` with outer dimension
    ``len(batch)``.
    """
    # The original repeated the same stack line twelve times; loop over the
    # four sample keys and the three fields (image, label, mask) instead.
    keys = ('sample_1_q', 'sample_1_k', 'sample_2_q', 'sample_2_k')
    batch_inputs = {}
    for key in keys:
        batch_inputs[key] = tuple(
            torch.stack([_sample_[key][field] for _sample_ in batch])
            for field in range(3))
    return batch_inputs
collate_fns = {'cls': cls_collate_fn, 'reg': reg_collate_fn} | 62.261905 | 90 | 0.71587 | 462 | 2,615 | 3.469697 | 0.077922 | 0.07486 | 0.137243 | 0.199626 | 0.91204 | 0.91204 | 0.807236 | 0.801622 | 0.798503 | 0.798503 | 0 | 0.039891 | 0.156405 | 2,615 | 42 | 91 | 62.261905 | 0.686763 | 0.05086 | 0 | 0.3 | 0 | 0 | 0.115696 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.05 | false | 0 | 0.025 | 0 | 0.125 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
02d03388b3efda7f4cdfd10232f5cbf0afa1646c | 435 | py | Python | bugtests/test116.py | doom38/jython_v2.2.1 | 0803a0c953c294e6d14f9fc7d08edf6a3e630a15 | [
"CNRI-Jython"
] | null | null | null | bugtests/test116.py | doom38/jython_v2.2.1 | 0803a0c953c294e6d14f9fc7d08edf6a3e630a15 | [
"CNRI-Jython"
] | null | null | null | bugtests/test116.py | doom38/jython_v2.2.1 | 0803a0c953c294e6d14f9fc7d08edf6a3e630a15 | [
"CNRI-Jython"
] | null | null | null | """
Check that UEE also matches IOError.
"""
# Regression test (Jython 2.x): a java.io.UnsupportedEncodingException
# raised by Java code must also be catchable as Python's IOError (per the
# module docstring, "UEE also matches IOError"). Uses Python 2 exception
# syntax ("except E, e:") because this targets the Jython 2.2 interpreter.
import support
import java
# First, an unknown charset name must raise the specific Java exception.
try:
    x = java.io.OutputStreamWriter(java.lang.System.out, "garbage")
except java.io.UnsupportedEncodingException, e:
    pass
else:
    raise support.TestError("Should raise an exception")
# Second, the same failure must be catchable through the IOError alias.
try:
    x = java.io.OutputStreamWriter(java.lang.System.out, "garbage")
except IOError, e:
    pass
else:
    raise support.TestError("Should raise an exception")
| 18.125 | 67 | 0.721839 | 56 | 435 | 5.607143 | 0.482143 | 0.057325 | 0.050955 | 0.063694 | 0.700637 | 0.700637 | 0.700637 | 0.700637 | 0.700637 | 0.700637 | 0 | 0 | 0.165517 | 435 | 23 | 68 | 18.913043 | 0.865014 | 0 | 0 | 0.714286 | 0 | 0 | 0.163683 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.142857 | 0.142857 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 8 |
02d305a3d9c0e1729339694ce898ff91aa1ab5aa | 30,539 | py | Python | pc_db.py | napo178/m | e69268a979b49f1ac3c9f21320ae6ba242db7303 | [
"MIT"
] | null | null | null | pc_db.py | napo178/m | e69268a979b49f1ac3c9f21320ae6ba242db7303 | [
"MIT"
] | null | null | null | pc_db.py | napo178/m | e69268a979b49f1ac3c9f21320ae6ba242db7303 | [
"MIT"
] | null | null | null | import plotly.express as px
import plotly.graph_objects as go
import streamlit as st
import pandas as pd
# Page <head> metadata plus the dashboard title banner, injected as raw HTML.
html_header="""
<head>
<title>PControlDB</title>
<meta charset="utf-8">
<meta name="keywords" content="project control, dashboard, management, EVA">
<meta name="description" content="project control dashboard">
<meta name="author" content="Larry Prato">
<meta name="viewport" content="width=device-width, initial-scale=1">
</head>
<h1 style="font-size:300%; color:#008080; font-family:Georgia"> PROJECT CONTROL <br>
<h2 style="color:#008080; font-family:Georgia"> DASHBOARD</h3> <br>
<hr style= " display: block;
margin-top: 0.5em;
margin-bottom: 0.5em;
margin-left: auto;
margin-right: auto;
border-style: inset;
border-width: 1.5px;"></h1>
"""
# Wide layout so the three KPI cards per row fit side by side.
st.set_page_config(page_title="Project Control Dashboard", page_icon="", layout="wide")
st.markdown('<style>body{background-color: #fbfff0}</style>',unsafe_allow_html=True)
st.markdown(html_header, unsafe_allow_html=True)
# Hide Streamlit's default hamburger menu and footer.
st.markdown(""" <style>
#MainMenu {visibility: hidden;}
footer {visibility: hidden;}
</style> """, unsafe_allow_html=True)
# Progress-curve data. Columns used below: Activity_name, Date, Progress,
# Baseline, Spend_Hours, Planned_Hours.
data=pd.read_excel('curva.xlsx')
# HTML card header/footer templates framing each KPI figure in row 1.
html_card_header1="""
<div class="card">
<div class="card-body" style="border-radius: 10px 10px 0px 0px; background: #eef9ea; padding-top: 5px; width: 350px;
height: 50px;">
<h3 class="card-title" style="background-color:#eef9ea; color:#008080; font-family:Georgia; text-align: center; padding: 0px 0;">Global Actual Progress</h3>
</div>
</div>
"""
html_card_footer1="""
<div class="card">
<div class="card-body" style="border-radius: 0px 0px 10px 10px; background: #eef9ea; padding-top: 1rem;; width: 350px;
height: 50px;">
<p class="card-title" style="background-color:#eef9ea; color:#008080; font-family:Georgia; text-align: center; padding: 0px 0;">Baseline 46%</p>
</div>
</div>
"""
html_card_header2="""
<div class="card">
<div class="card-body" style="border-radius: 10px 10px 0px 0px; background: #eef9ea; padding-top: 5px; width: 350px;
height: 50px;">
<h3 class="card-title" style="background-color:#eef9ea; color:#008080; font-family:Georgia; text-align: center; padding: 0px 0;">Global Spend Hours</h3>
</div>
</div>
"""
html_card_footer2="""
<div class="card">
<div class="card-body" style="border-radius: 0px 0px 10px 10px; background: #eef9ea; padding-top: 1rem;; width: 350px;
height: 50px;">
<p class="card-title" style="background-color:#eef9ea; color:#008080; font-family:Georgia; text-align: center; padding: 0px 0;">Baseline 92.700</p>
</div>
</div>
"""
html_card_header3="""
<div class="card">
<div class="card-body" style="border-radius: 10px 10px 0px 0px; background: #eef9ea; padding-top: 5px; width: 350px;
height: 50px;">
<h3 class="card-title" style="background-color:#eef9ea; color:#008080; font-family:Georgia; text-align: center; padding: 0px 0;">TCPI</h3>
</div>
</div>
"""
html_card_footer3="""
<div class="card">
<div class="card-body" style="border-radius: 0px 0px 10px 10px; background: #eef9ea; padding-top: 1rem;; width: 350px;
height: 50px;">
<p class="card-title" style="background-color:#eef9ea; color:#008080; font-family:Georgia; text-align: center; padding: 0px 0;">To Complete Performance Index ≤ 1.00</p>
</div>
</div>
"""
### Block 1: top KPI row (progress %, spend hours, TCPI) ###################
# NOTE(review): st.beta_container/st.beta_columns are the pre-1.0 Streamlit
# APIs; newer releases use st.container/st.columns — confirm pinned version.
with st.beta_container():
    # Odd-numbered columns (relative width 1) are spacers between the cards.
    col1, col2, col3, col4, col5, col6, col7 = st.beta_columns([1,15,1,15,1,15,1])
    with col1:
        st.write("")
    with col2:
        st.markdown(html_card_header1, unsafe_allow_html=True)
        # Actual progress (35%) with a delta against the 46% baseline.
        fig_c1 = go.Figure(go.Indicator(
            mode="number+delta",
            value=35,
            number={'suffix': "%", "font": {"size": 40, 'color': "#008080", 'family': "Arial"}},
            delta={'position': "bottom", 'reference': 46, 'relative': False},
            domain={'x': [0, 1], 'y': [0, 1]}))
        fig_c1.update_layout(autosize=False,
                             width=350, height=90, margin=dict(l=20, r=20, b=20, t=30),
                             paper_bgcolor="#fbfff0", font={'size': 20})
        st.plotly_chart(fig_c1)
        st.markdown(html_card_footer1, unsafe_allow_html=True)
    with col3:
        st.write("")
    with col4:
        st.markdown(html_card_header2, unsafe_allow_html=True)
        # Spend hours (73,500) vs the 92,700 baseline; delta colors are
        # flipped so spending *less* than planned shows green.
        fig_c2 = go.Figure(go.Indicator(
            mode="number+delta",
            value=73500,
            number={'suffix': " HH", "font": {"size": 40, 'color': "#008080", 'family': "Arial"}, 'valueformat': ',f'},
            delta={'position': "bottom", 'reference': 92700},
            domain={'x': [0, 1], 'y': [0, 1]}))
        fig_c2.update_layout(autosize=False,
                             width=350, height=90, margin=dict(l=20, r=20, b=20, t=30),
                             paper_bgcolor="#fbfff0", font={'size': 20})
        fig_c2.update_traces(delta_decreasing_color="#3D9970",
                             delta_increasing_color="#FF4136",
                             delta_valueformat='f',
                             selector=dict(type='indicator'))
        st.plotly_chart(fig_c2)
        st.markdown(html_card_footer2, unsafe_allow_html=True)
    with col5:
        st.write("")
    with col6:
        st.markdown(html_card_header3, unsafe_allow_html=True)
        # To-Complete Performance Index vs the 1.00 target.
        fig_c3 = go.Figure(go.Indicator(
            mode="number+delta",
            value=1.085,
            number={"font": {"size": 40, 'color': "#008080", 'family': "Arial"}},
            delta={'position': "bottom", 'reference': 1, 'relative': False},
            domain={'x': [0, 1], 'y': [0, 1]}))
        fig_c3.update_layout(autosize=False,
                             width=350, height=90, margin=dict(l=20, r=20, b=20, t=30),
                             paper_bgcolor="#fbfff0", font={'size': 20})
        fig_c3.update_traces(delta_decreasing_color="#3D9970",
                             delta_increasing_color="#FF4136",
                             delta_valueformat='.3f',
                             selector=dict(type='indicator'))
        st.plotly_chart(fig_c3)
        st.markdown(html_card_footer3, unsafe_allow_html=True)
    with col7:
        st.write("")
# Vertical spacer between rows.
html_br="""
<br>
"""
st.markdown(html_br, unsafe_allow_html=True)
# Card templates for the second row (monthly progress / spend-hours bars).
html_card_header4="""
<div class="card">
<div class="card-body" style="border-radius: 10px 10px 0px 0px; background: #eef9ea; padding-top: 5px; width: 250px;
height: 50px;">
<h4 class="card-title" style="background-color:#eef9ea; color:#008080; font-family:Georgia; text-align: center; padding: 10px 0;">Global Actual Progress</h4>
</div>
</div>
"""
html_card_footer4="""
<div class="card">
<div class="card-body" style="border-radius: 0px 0px 10px 10px; background: #eef9ea; padding-top: 1rem;; width: 250px;
height: 50px;">
<p class="card-title" style="background-color:#eef9ea; color:#008080; font-family:Georgia; text-align: center; padding: 0px 0;">Montly Value (%)</p>
</div>
</div>
"""
html_card_header5="""
<div class="card">
<div class="card-body" style="border-radius: 10px 10px 0px 0px; background: #eef9ea; padding-top: 5px; width: 250px;
height: 50px;">
<h4 class="card-title" style="background-color:#eef9ea; color:#008080; font-family:Georgia; text-align: center; padding: 10px 0;">Global Spend Hours</h4>
</div>
</div>
"""
html_card_footer5="""
<div class="card">
<div class="card-body" style="border-radius: 0px 0px 10px 10px; background: #eef9ea; padding-top: 1rem;; width: 250px;
height: 50px;">
<p class="card-title" style="background-color:#eef9ea; color:#008080; font-family:Georgia; text-align: center; padding: 0px 0;">Montly Relative Change (%)</p>
</div>
</div>
"""
### Block 2: monthly bars plus the global progress S-curve ##################
with st.beta_container():
    col1, col2, col3, col4, col5, col6, col7 = st.beta_columns([1,10,1,10,1,20,1])
    with col1:
        st.write("")
    with col2:
        st.markdown(html_card_header4, unsafe_allow_html=True)
        # Monthly progress (%) compared with previous/average/planned values.
        # NOTE(review): these y values are hard-coded, not read from `data`.
        x = ['Actual', 'Previous', 'Average', 'Planned']
        y = [5.5, 4.2, 6.3, 8.5]
        fig_m_prog = go.Figure([go.Bar(x=x, y=y, text=y, textposition='auto')])
        fig_m_prog.update_layout(paper_bgcolor="#fbfff0", plot_bgcolor="#fbfff0",
                                 font={'color': "#008080", 'family': "Arial"}, height=100, width=250,
                                 margin=dict(l=15, r=1, b=4, t=4))
        fig_m_prog.update_yaxes(title='y', visible=False, showticklabels=False)
        fig_m_prog.update_traces(marker_color='#17A2B8', selector=dict(type='bar'))
        st.plotly_chart(fig_m_prog)
        st.markdown(html_card_footer4, unsafe_allow_html=True)
    with col3:
        st.write("")
    with col4:
        st.markdown(html_card_header5, unsafe_allow_html=True)
        # Monthly spend-hours relative change vs previous/average/plan.
        x = ['Δ vs Prev', 'Δ vs Aver', 'Δ vs Plan']
        y = [10, 12, 8]
        fig_m_hh = go.Figure([go.Bar(x=x, y=y, text=y, textposition='auto')])
        fig_m_hh.update_layout(paper_bgcolor="#fbfff0", plot_bgcolor="#fbfff0",
                               font={'color': "#008080", 'family': "Arial"}, height=100, width=250,
                               margin=dict(l=15, r=1, b=1, t=1))
        fig_m_hh.update_yaxes(title='y', visible=False, showticklabels=False)
        fig_m_hh.update_traces(marker_color='#17A2B8', selector=dict(type='bar'))
        st.plotly_chart(fig_m_hh)
        st.markdown(html_card_footer5, unsafe_allow_html=True)
    with col5:
        st.write("")
    with col6:
        # Project-level S-curve: rows where Activity_name == 'Total'.
        y = data.loc[data.Activity_name == 'Total']
        # Create traces
        fig3 = go.Figure()
        fig3.add_trace(go.Scatter(x=y['Date'], y=y['Progress'],
                                  mode='lines',
                                  name='Progress',
                                  marker_color='#FF4136'))
        fig3.add_trace(go.Scatter(x=y['Date'], y=y['Baseline'],
                                  mode='lines',
                                  name='Baseline',
                                  marker_color='#17A2B8'))
        fig3.update_layout(title={'text': "Actual Progress vs Planned", 'x': 0.5}, paper_bgcolor="#fbfff0",
                           plot_bgcolor="#fbfff0", font={'color': "#008080", 'size': 12, 'family': "Georgia"}, height=220,
                           width=540,
                           legend=dict(orientation="h",
                                       yanchor="top",
                                       y=0.99,
                                       xanchor="left",
                                       x=0.01),
                           margin=dict(l=1, r=1, b=1, t=30))
        fig3.update_xaxes(showline=True, linewidth=1, linecolor='#F7F7F7', mirror=True, nticks=6, rangemode="tozero",
                          showgrid=False, gridwidth=0.5, gridcolor='#F7F7F7')
        fig3.update_yaxes(showline=True, linewidth=1, linecolor='#F7F7F7', mirror=True, nticks=10, rangemode="tozero",
                          showgrid=True, gridwidth=0.5, gridcolor='#F7F7F7')
        # Render the y axis as percentages.
        fig3.layout.yaxis.tickformat = ',.0%'
        st.plotly_chart(fig3)
    with col7:
        st.write("")
# Vertical spacer between rows.
html_br="""
<br>
"""
st.markdown(html_br, unsafe_allow_html=True)
# Card templates for the gauges row (cost / schedule variance).
html_card_header6="""
<div class="card">
<div class="card-body" style="border-radius: 10px 10px 0px 0px; background: #eef9ea; padding-top: 5px; width: 250px;
height: 50px;">
<h4 class="card-title" style="background-color:#eef9ea; color:#008080; font-family:Georgia; text-align: center; padding: 10px 0;">Cost Variance</h4>
</div>
</div>
"""
html_card_footer6="""
<div class="card">
<div class="card-body" style="border-radius: 0px 0px 10px 10px; background: #eef9ea; padding-top: 1rem;; width: 250px;
height: 50px;">
<p class="card-title" style="background-color:#eef9ea; color:#008080; font-family:Georgia; text-align: center; padding: 0px 0;">Montly Value </p>
</div>
</div>
"""
html_card_header7="""
<div class="card">
<div class="card-body" style="border-radius: 10px 10px 0px 0px; background: #eef9ea; padding-top: 5px; width: 250px;
height: 50px;">
<h4 class="card-title" style="background-color:#eef9ea; color:#008080; font-family:Georgia; text-align: center; padding: 10px 0;">Schedule Variance</h4>
</div>
</div>
"""
html_card_footer7="""
<div class="card">
<div class="card-body" style="border-radius: 0px 0px 10px 10px; background: #eef9ea; padding-top: 1rem;; width: 250px;
height: 50px;">
<p class="card-title" style="background-color:#eef9ea; color:#008080; font-family:Georgia; text-align: center; padding: 0px 0;">Montly Value</p>
</div>
</div>
"""
### Block 3: cost/schedule variance gauges plus spend-hours bar chart #######
with st.beta_container():
    col1, col2, col3, col4, col5, col6, col7 = st.beta_columns([1,10,1,10,1,20,1])
    with col1:
        st.write("")
    with col2:
        st.markdown(html_card_header6, unsafe_allow_html=True)
        # Cost-variance gauge: red band below 1.0, green band above.
        fig_cv = go.Figure(go.Indicator(
            mode="gauge+number+delta",
            value=1.05,
            number={"font": {"size": 22, 'color': "#008080", 'family': "Arial"}, "valueformat": "#,##0"},
            domain={'x': [0, 1], 'y': [0, 1]},
            gauge={
                'axis': {'range': [None, 1.5], 'tickwidth': 1, 'tickcolor': "black"},
                'bar': {'color': "#06282d"},
                'bgcolor': "white",
                'steps': [
                    {'range': [0, 1], 'color': '#FF4136'},
                    {'range': [1, 1.5], 'color': '#3D9970'}]}))
        fig_cv.update_layout(paper_bgcolor="#fbfff0", font={'color': "#008080", 'family': "Arial"}, height=135, width=250,
                             margin=dict(l=10, r=10, b=15, t=20))
        st.plotly_chart(fig_cv)
        st.markdown(html_card_footer6, unsafe_allow_html=True)
    with col3:
        st.write("")
    with col4:
        st.markdown(html_card_header7, unsafe_allow_html=True)
        # Schedule-variance gauge, same color coding as the cost gauge.
        fig_sv = go.Figure(go.Indicator(
            mode="gauge+number+delta",
            value=0.95,
            number={"font": {"size": 22, 'color': "#008080", 'family': "Arial"}, "valueformat": "#,##0"},
            domain={'x': [0, 1], 'y': [0, 1]},
            gauge={
                'axis': {'range': [None, 1.5], 'tickwidth': 1, 'tickcolor': "black"},
                'bar': {'color': "#06282d"},
                'bgcolor': "white",
                'steps': [
                    {'range': [0, 1], 'color': '#FF4136'},
                    {'range': [1, 1.5], 'color': '#3D9970'}]}))
        fig_sv.update_layout(paper_bgcolor="#fbfff0", font={'color': "#008080", 'family': "Arial"}, height=135, width=250,
                             margin=dict(l=10, r=10, b=15, t=20))
        st.plotly_chart(fig_sv)
        st.markdown(html_card_footer7, unsafe_allow_html=True)
    with col5:
        st.write("")
    with col6:
        # Grouped bars: spend hours vs planned hours for the whole project.
        # Fix: the original assigned this frame twice back to back; the
        # duplicate statement has been removed (no behavior change).
        y = data.loc[data.Activity_name == 'Total']
        fig_hh = go.Figure()
        fig_hh.add_trace(go.Bar(
            x=y['Date'],
            y=y['Spend_Hours'],
            name='Spend Hours',
            marker_color='#FF4136'
        ))
        fig_hh.add_trace(go.Bar(
            x=y['Date'],
            y=y['Planned_Hours'],
            name='Planned Hours',
            marker_color='#17A2B8'
        ))
        fig_hh.update_layout(barmode='group', title={'text': 'Spend Hours vs Planned', 'x': 0.5}, paper_bgcolor="#fbfff0",
                             plot_bgcolor="#fbfff0", font={'color': "#008080", 'family': "Georgia"}, height=250, width=540,
                             legend=dict(orientation="h",
                                         yanchor="top",
                                         y=0.99,
                                         xanchor="left",
                                         x=0.01),
                             margin=dict(l=5, r=1, b=1, t=25))
        fig_hh.update_xaxes(showline=True, linewidth=1, linecolor='#F7F7F7', mirror=True, nticks=6, rangemode="tozero",
                            showgrid=False, gridwidth=0.5, gridcolor='#F7F7F7')
        fig_hh.update_yaxes(showline=True, linewidth=1, linecolor='#F7F7F7', mirror=True, nticks=10, rangemode="tozero",
                            showgrid=False, gridwidth=0.5, gridcolor='#F7F7F7')
        st.plotly_chart(fig_hh)
    with col7:
        st.write("")
# Vertical spacer between rows.
html_br="""
<br>
"""
st.markdown(html_br, unsafe_allow_html=True)
# Section header for the per-discipline breakdown.
html_subtitle="""
<h2 style="color:#008080; font-family:Georgia;"> Details by Discipline: </h2>
"""
st.markdown(html_subtitle, unsafe_allow_html=True)
# Static per-discipline summary table (values are hard-coded here, not
# derived from `data`).
html_table="""
<table>
<tr style="background-color:#eef9ea; color:#008080; font-family:Georgia; font-size: 15px">
<th style="width:130px">Discipline</th>
<th style="width:90px">Baseline</th>
<th style="width:90px">Progress</th>
<th style="width:90px">Manpower</th>
<th style="width:90px">Cost Variance</th>
<th style="width:90px">Schedule Variance</th>
</tr>
<tr style="height: 40px; color:#008080; font-size: 14px">
<th>Civil</th>
<th>70,00%</th>
<th>68,50%</th>
<th>70.000</th>
<th>0,99</th>
<th>1,09</th>
</tr>
<tr style="background-color:#eef9ea; height: 40px; color:#008080; font-size: 14px">
<th>Mechanical</th>
<th>50,00%</th>
<th>45,50%</th>
<th>10.000</th>
<th>0,95</th>
<th>0,98</th>
</tr>
<tr style="height: 40px; color:#008080; font-size: 14px">
<th>Piping</th>
<th>30,00%</th>
<th>30,00%</th>
<th>60.000</th>
<th>0,99</th>
<th>1,01</th>
</tr>
<tr style="background-color:#eef9ea; height: 40px; color:#008080; font-size: 14px">
<th>Electricity</th>
<th>20,00%</th>
<th>15,00%</th>
<th>40.000</th>
<th>0,90</th>
<th>0,98</th>
</tr>
<tr style="height: 40px; color:#008080; font-size: 14px">
<th>Intrumentation</th>
<th>5,00%</th>
<th>0,00%</th>
<th>30.000</th>
<th>-</th>
<th>-</th>
</tr>
<tr style="background-color:#eef9ea; height: 40px; color:#008080; font-size: 14px">
<th>Commissioning</th>
<th>0,00%</th>
<th>0,00%</th>
<th>15.000</th>
<th>-</th>
<th>-</th>
</tr>
<tr style="height: 40px; color:#008080; font-size: 15px">
<th>Total</th>
<th>35,00%</th>
<th>46,00%</th>
<th>225.000</th>
<th>0,97</th>
<th>0,91</th>
</tr>
</table>
"""
### Block 4: discipline summary table plus Gantt chart ######################
with st.beta_container():
    col1, col2, col3 = st.beta_columns([12,1,12])
    with col1:
        st.markdown(html_table, unsafe_allow_html=True)
    with col2:
        st.write("")
    with col3:
        # *******Gantt Chart with the start/finish dates per discipline.
        df = pd.DataFrame([
            dict(Disc="Civ", Start='2021-01-04', Finish='2021-08-10'),
            dict(Disc="Mec", Start='2021-03-05', Finish='2021-09-15'),
            dict(Disc="Pip", Start='2021-04-20', Finish='2021-11-30'),
            dict(Disc="Ele", Start='2021-05-20', Finish='2021-12-05'),
            dict(Disc="Ins", Start='2021-06-20', Finish='2021-12-20'),
            dict(Disc="Com", Start='2021-07-20', Finish='2021-12-30')
        ])
        fig2 = px.timeline(df, x_start="Start", x_end="Finish", y='Disc')
        # Earliest discipline on top.
        fig2.update_yaxes(autorange="reversed")
        fig2.update_layout(title={'text': "Main dates", 'x': 0.5}, plot_bgcolor="#eef9ea", paper_bgcolor="#eef9ea",
                           font={'color': "#008080", 'family': "Georgia"}, height=340, width=550, margin=dict(
                               l=51, r=5, b=10, t=50))
        fig2.update_traces(marker_color='#17A2B8', selector=dict(type='bar'))
        st.plotly_chart(fig2)
# Discipline picker for the detail section below.
# NOTE(review): selected_disc is never referenced by the charts that follow;
# they always show the hard-coded/'Total' figures — confirm intent.
disciplinas= ['Civil', 'Mechanical', 'Piping', 'Electricity', 'Instrumentation', 'Commissioning']
selected_disc = st.selectbox(' Select discipline', disciplinas)
# Vertical spacer between rows.
html_br="""
<br>
"""
st.markdown(html_br, unsafe_allow_html=True)
# Card templates for the per-discipline row; these intentionally reassign
# html_card_header4/footer4/header5/footer5 (the global-row versions above
# have already been rendered).
html_card_header4="""
<div class="card">
<div class="card-body" style="border-radius: 10px 10px 0px 0px; background: #eef9ea; padding-top: 10px; width: 250px;
height: 50px;">
<h5 class="card-title" style="background-color:#eef9ea; color:#008080; font-family:Georgia; text-align: center; padding: 5px 0;">Progress For Selected Discipline</h5>
</div>
</div>
"""
html_card_footer4="""
<div class="card">
<div class="card-body" style="border-radius: 0px 0px 10px 10px; background: #eef9ea; padding-top: 1rem;; width: 250px;
height: 50px;">
<p class="card-title" style="background-color:#eef9ea; color:#008080; font-family:Georgia; text-align: center; padding: 0px 0;">Montly Value (%)</p>
</div>
</div>
"""
html_card_header5="""
<div class="card">
<div class="card-body" style="border-radius: 10px 10px 0px 0px; background: #eef9ea; padding-top: 10px; width: 250px;
height: 50px;">
<h5 class="card-title" style="background-color:#eef9ea; color:#008080; font-family:Georgia; text-align: center; padding: 5px 0;">Spend Hours For Selected Discipline</h5>
</div>
</div>
"""
html_card_footer5="""
<div class="card">
<div class="card-body" style="border-radius: 0px 0px 10px 10px; background: #eef9ea; padding-top: 1rem;; width: 250px;
height: 50px;">
<p class="card-title" style="background-color:#eef9ea; color:#008080; font-family:Georgia; text-align: center; padding: 0px 0;">Montly Relative Change (%)</p>
</div>
</div>
"""
### Block 5: per-discipline monthly bars plus S-curve #######################
# NOTE(review): this block mirrors Block 2 and currently plots the same
# hard-coded values / 'Total' rows regardless of selected_disc.
with st.beta_container():
    col1, col2, col3, col4, col5, col6, col7 = st.beta_columns([1,10,1,10,1,20,1])
    with col1:
        st.write("")
    with col2:
        st.markdown(html_card_header4, unsafe_allow_html=True)
        x = ['Actual', 'Previous', 'Average', 'Planned']
        y = [5.5, 4.2, 6.3, 8.5]
        fig_m_prog = go.Figure([go.Bar(x=x, y=y, text=y, textposition='auto')])
        fig_m_prog.update_layout(paper_bgcolor="#fbfff0", plot_bgcolor="#fbfff0",
                                 font={'color': "#008080", 'family': "Arial"}, height=100, width=250,
                                 margin=dict(l=15, r=1, b=4, t=4))
        fig_m_prog.update_yaxes(title='y', visible=False, showticklabels=False)
        fig_m_prog.update_traces(marker_color='#17A2B8', selector=dict(type='bar'))
        st.plotly_chart(fig_m_prog)
        st.markdown(html_card_footer4, unsafe_allow_html=True)
    with col3:
        st.write("")
    with col4:
        st.markdown(html_card_header5, unsafe_allow_html=True)
        x = ['Δ vs Prev', 'Δ vs Aver', 'Δ vs Plan']
        y = [10, 12, 8]
        fig_m_hh = go.Figure([go.Bar(x=x, y=y, text=y, textposition='auto')])
        fig_m_hh.update_layout(paper_bgcolor="#fbfff0", plot_bgcolor="#fbfff0",
                               font={'color': "#008080", 'family': "Arial"}, height=100, width=250,
                               margin=dict(l=15, r=1, b=1, t=1))
        fig_m_hh.update_yaxes(title='y', visible=False, showticklabels=False)
        fig_m_hh.update_traces(marker_color='#17A2B8', selector=dict(type='bar'))
        st.plotly_chart(fig_m_hh)
        st.markdown(html_card_footer5, unsafe_allow_html=True)
    with col5:
        st.write("")
    with col6:
        y = data.loc[data.Activity_name == 'Total']
        # Create traces
        fig3 = go.Figure()
        fig3.add_trace(go.Scatter(x=y['Date'], y=y['Progress'],
                                  mode='lines',
                                  name='Progress',
                                  marker_color='#FF4136'))
        fig3.add_trace(go.Scatter(x=y['Date'], y=y['Baseline'],
                                  mode='lines',
                                  name='Baseline',
                                  marker_color='#17A2B8'))
        fig3.update_layout(title={'text': "Actual Progress vs Planned", 'x': 0.5}, paper_bgcolor="#fbfff0",
                           plot_bgcolor="#fbfff0", font={'color': "#008080", 'size': 12, 'family': "Georgia"}, height=220,
                           width=540,
                           legend=dict(orientation="h",
                                       yanchor="top",
                                       y=0.99,
                                       xanchor="left",
                                       x=0.01),
                           margin=dict(l=1, r=1, b=1, t=30))
        fig3.update_xaxes(showline=True, linewidth=1, linecolor='#F7F7F7', mirror=True, nticks=6, rangemode="tozero",
                          showgrid=False, gridwidth=0.5, gridcolor='#F7F7F7')
        fig3.update_yaxes(showline=True, linewidth=1, linecolor='#F7F7F7', mirror=True, nticks=10, rangemode="tozero",
                          showgrid=True, gridwidth=0.5, gridcolor='#F7F7F7')
        fig3.layout.yaxis.tickformat = ',.0%'
        st.plotly_chart(fig3)
    with col7:
        st.write("")
# Vertical spacer between rows.
html_br="""
<br>
"""
st.markdown(html_br, unsafe_allow_html=True)
# Card templates for the per-discipline gauges row; these reassign
# html_card_header6/footer6/header7/footer7 after the global versions have
# already been rendered.
html_card_header6="""
<div class="card">
<div class="card-body" style="border-radius: 10px 10px 0px 0px; background: #eef9ea; padding-top: 10px; width: 250px;
height: 50px;">
<h5 class="card-title" style="background-color:#eef9ea; color:#008080; font-family:Georgia; text-align: center; padding: 5px 0;">Cost Variance For Selected Discipline</h5>
</div>
</div>
"""
html_card_footer6="""
<div class="card">
<div class="card-body" style="border-radius: 0px 0px 10px 10px; background: #eef9ea; padding-top: 1rem;; width: 250px;
height: 50px;">
<p class="card-title" style="background-color:#eef9ea; color:#008080; font-family:Georgia; text-align: center; padding: 0px 0;">Montly Value </p>
</div>
</div>
"""
html_card_header7="""
<div class="card">
<div class="card-body" style="border-radius: 10px 10px 0px 0px; background: #eef9ea; padding-top: 5px; width: 250px;
height: 50px;">
<h5 class="card-title" style="background-color:#eef9ea; color:#008080; font-family:Georgia; text-align: center; padding: 8px 0;">Schedule Variance For Selected Discipline</h5>
</div>
</div>
"""
html_card_footer7="""
<div class="card">
<div class="card-body" style="border-radius: 0px 0px 10px 10px; background: #eef9ea; padding-top: 1rem;; width: 250px;
height: 50px;">
<p class="card-title" style="background-color:#eef9ea; color:#008080; font-family:Georgia; text-align: center; padding: 0px 0;">Montly Value</p>
</div>
</div>
"""
html_card_header8="""
<div class="card">
<div class="card-body" style="border-radius: 10px 10px 0px 0px; background: #eef9ea; padding-top: 5px; width: 550px;
height: 50px;">
<h5 class="card-title" style="background-color:#eef9ea; color:#008080; font-family:Georgia; text-align: center; padding: 10px 0;">Main Issues By Discipline</h5>
</div>
</div>
"""
# Placeholder issue list (lorem ipsum).
html_list="""
<ul style="color:#008080; font-family:Georgia; font-size: 15px">
<li>Nulla volutpat aliquam velit</li>
<li>Maecenas sed diam eget risus varius blandit</li>
<li>Etiam porta sem malesuada magna mollis euismod</li>
<li>Fusce dapibus, tellus ac cursus commodo</li>
<li>Maecenas sed diam eget risus varius blandit</li>
</ul>
"""
### Block 6: per-discipline gauges plus issue list ##########################
# NOTE(review): gauge values mirror Block 3 and do not depend on
# selected_disc.
with st.beta_container():
    col1, col2, col3, col4, col5, col6, col7 = st.beta_columns([1,10,1,10,1,20,1])
    with col1:
        st.write("")
    with col2:
        st.markdown(html_card_header6, unsafe_allow_html=True)
        fig_cv = go.Figure(go.Indicator(
            mode="gauge+number+delta",
            value=1.05,
            number={"font": {"size": 22, 'color': "#008080", 'family': "Arial"}, "valueformat": "#,##0"},
            domain={'x': [0, 1], 'y': [0, 1]},
            gauge={
                'axis': {'range': [None, 1.5], 'tickwidth': 1, 'tickcolor': "black"},
                'bar': {'color': "#06282d"},
                'bgcolor': "white",
                'steps': [
                    {'range': [0, 1], 'color': '#FF4136'},
                    {'range': [1, 1.5], 'color': '#3D9970'}]}))
        fig_cv.update_layout(paper_bgcolor="#fbfff0", font={'color': "#008080", 'family': "Arial"}, height=135, width=250,
                             margin=dict(l=10, r=10, b=15, t=20))
        st.plotly_chart(fig_cv)
        st.markdown(html_card_footer6, unsafe_allow_html=True)
    with col3:
        st.write("")
    with col4:
        st.markdown(html_card_header7, unsafe_allow_html=True)
        fig_sv = go.Figure(go.Indicator(
            mode="gauge+number+delta",
            value=0.95,
            number={"font": {"size": 22, 'color': "#008080", 'family': "Arial"}, "valueformat": "#,##0"},
            domain={'x': [0, 1], 'y': [0, 1]},
            gauge={
                'axis': {'range': [None, 1.5], 'tickwidth': 1, 'tickcolor': "black"},
                'bar': {'color': "#06282d"},
                'bgcolor': "white",
                'steps': [
                    {'range': [0, 1], 'color': '#FF4136'},
                    {'range': [1, 1.5], 'color': '#3D9970'}]}))
        fig_sv.update_layout(paper_bgcolor="#fbfff0", font={'color': "#008080", 'family': "Arial"}, height=135, width=250,
                             margin=dict(l=10, r=10, b=15, t=20))
        st.plotly_chart(fig_sv)
        st.markdown(html_card_footer7, unsafe_allow_html=True)
    with col5:
        st.write("")
    with col6:
        st.markdown(html_card_header8, unsafe_allow_html=True)
        st.markdown(html_list, unsafe_allow_html=True)
    with col7:
        st.write("")
# Page footer: horizontal rule plus author credit.
html_line="""
<br>
<br>
<br>
<br>
<hr style= " display: block;
margin-top: 0.5em;
margin-bottom: 0.5em;
margin-left: auto;
margin-right: auto;
border-style: inset;
border-width: 1.5px;">
<p style="color:Gainsboro; text-align: right;">By: larryprato@gmail.com</p>
"""
st.markdown(html_line, unsafe_allow_html=True)
| 44.131503 | 180 | 0.555879 | 3,836 | 30,539 | 4.327424 | 0.099583 | 0.03741 | 0.033253 | 0.04006 | 0.855663 | 0.831265 | 0.823675 | 0.808373 | 0.787771 | 0.774157 | 0 | 0.078758 | 0.252038 | 30,539 | 691 | 181 | 44.195369 | 0.647929 | 0.003078 | 0 | 0.751524 | 0 | 0.082317 | 0.454689 | 0.063395 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.006098 | 0 | 0.006098 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
f31c5e2b121aa436b327e66e8b0788bdb1574de0 | 42,386 | py | Python | neural_network_lyapunov/relu_system.py | hongkai-dai/neural-network-lyapunov-1 | 8843c13f69f7f39cbb939ab250413e76f61843f6 | [
"MIT"
] | null | null | null | neural_network_lyapunov/relu_system.py | hongkai-dai/neural-network-lyapunov-1 | 8843c13f69f7f39cbb939ab250413e76f61843f6 | [
"MIT"
] | 3 | 2021-08-21T22:54:47.000Z | 2021-11-28T19:39:25.000Z | neural_network_lyapunov/relu_system.py | hongkai-dai/neural-network-lyapunov-1 | 8843c13f69f7f39cbb939ab250413e76f61843f6 | [
"MIT"
] | 1 | 2021-11-30T23:45:43.000Z | 2021-11-30T23:45:43.000Z | import torch
import neural_network_lyapunov.relu_to_optimization as relu_to_optimization
import neural_network_lyapunov.hybrid_linear_system as hybrid_linear_system
import neural_network_lyapunov.gurobi_torch_mip as gurobi_torch_mip
import neural_network_lyapunov.mip_utils as mip_utils
import gurobipy
class ReLUDynamicsConstraintReturn(
        hybrid_linear_system.DynamicsConstraintReturn):
    """Return value of add_dynamics_constraint for ReLU dynamics.

    Extends the base DynamicsConstraintReturn with the neural-network input
    variables and the input/output bounds propagated through the ReLU
    network that encodes the dynamics.
    """

    # Bound attributes mirrored one-to-one from
    # relu_to_optimization.ReLUMixedIntegerConstraintsReturn.
    _BOUND_FIELDS = ("nn_input_lo", "nn_input_up", "nn_output_lo",
                     "nn_output_up", "relu_input_lo", "relu_input_up",
                     "relu_output_lo", "relu_output_up")

    def __init__(self,
                 slack,
                 binary,
                 x_next_lb_IA=None,
                 x_next_ub_IA=None,
                 x_next_bound_prog=None,
                 x_next_bound_var=None):
        super(ReLUDynamicsConstraintReturn,
              self).__init__(slack, binary, x_next_lb_IA, x_next_ub_IA,
                             x_next_bound_prog, x_next_bound_var)
        # Network-specific fields start unset; from_mip_cnstr_return fills
        # them in.
        self.nn_input = None
        for field in self._BOUND_FIELDS:
            setattr(self, field, None)

    def from_mip_cnstr_return(self, mip_cnstr_return: relu_to_optimization.
                              ReLUMixedIntegerConstraintsReturn,
                              nn_input: list):
        """Copy the network input variables and bounds out of
        mip_cnstr_return."""
        assert (isinstance(
            mip_cnstr_return,
            relu_to_optimization.ReLUMixedIntegerConstraintsReturn))
        self.nn_input = nn_input
        for field in self._BOUND_FIELDS:
            setattr(self, field, getattr(mip_cnstr_return, field))
def _add_dynamics_constraint_autonmous(mip_cnstr_return,
                                       x_next_lb_IA,
                                       x_next_ub_IA,
                                       mip: gurobi_torch_mip.GurobiTorchMIP,
                                       x_var,
                                       x_next_var,
                                       slack_var_name,
                                       binary_var_name,
                                       network_bound_propagate_method,
                                       binary_var_type=gurobipy.GRB.BINARY):
    """
    Adds the mixed-integer encoding of the autonomous ReLU dynamics
    (relating x_var to x_next_var) to ``mip`` and packages the resulting
    slack/binary variables and next-state bounds.

    Args:
        mip_cnstr_return: Returned from mixed_integer_constraints() function.
        It encodes the constraint between x and x_next.
        x_next_lb_IA, x_next_ub_IA: interval-arithmetic bounds on x_next;
        used when the propagate method is IA or IA_MIP.
        network_bound_propagate_method: a mip_utils.PropagateBoundsMethod
        value selecting how x_next bounds are computed.

    Note: the function name contains a typo ("autonmous" for "autonomous");
    it is kept unchanged for backward compatibility with existing callers.
    """
    dtype = mip.dtype
    x_dim = len(x_var)
    # Add the MILP encoding of the ReLU network between x_var and x_next_var.
    slack, binary = mip.add_mixed_integer_linear_constraints(
        mip_cnstr_return, x_var, x_next_var, slack_var_name, binary_var_name,
        "dynamics_ineq", "dynamics_eq", "dynamics_output", binary_var_type)
    ret = ReLUDynamicsConstraintReturn(slack, binary)
    ret.from_mip_cnstr_return(mip_cnstr_return, x_var)
    if network_bound_propagate_method in (
            mip_utils.PropagateBoundsMethod.IA,
            mip_utils.PropagateBoundsMethod.IA_MIP):
        # Interval-arithmetic bounds are computed by the caller and simply
        # recorded here.
        ret.x_next_lb_IA = x_next_lb_IA
        ret.x_next_ub_IA = x_next_ub_IA
    if network_bound_propagate_method in (
            mip_utils.PropagateBoundsMethod.LP,
            mip_utils.PropagateBoundsMethod.MIP,
            mip_utils.PropagateBoundsMethod.IA_MIP):
        # Build a separate optimization program whose solutions bound x_next;
        # it re-encodes the same network constraints over fresh variables.
        ret.x_next_bound_prog = gurobi_torch_mip.GurobiTorchMILP(dtype)
        ret.x_next_bound_var = ret.x_next_bound_prog.addVars(
            x_dim, lb=-gurobipy.GRB.INFINITY)
        x_next_bound_x_var = ret.x_next_bound_prog.addVars(
            x_dim, lb=-gurobipy.GRB.INFINITY)
        # The LP method relaxes the binary variables to continuous ones.
        x_next_bound_prog_binary_type = gurobipy.GRB.CONTINUOUS if \
            network_bound_propagate_method == \
            mip_utils.PropagateBoundsMethod.LP else gurobipy.GRB.BINARY
        ret.x_next_bound_prog.add_mixed_integer_linear_constraints(
            mip_cnstr_return, x_next_bound_x_var, ret.x_next_bound_var, "", "",
            "", "", "", x_next_bound_prog_binary_type)
    return ret
class AutonomousReLUSystem:
"""
This system models an autonomous using a feedforward
neural network with ReLU activations
x[n+1] = relu(x[n])
or
x_dot = relu(x)
"""
def __init__(
self,
dtype,
x_lo,
x_up,
dynamics_relu,
network_bound_propagate_method=mip_utils.PropagateBoundsMethod.IA):
"""
@param dtype The torch datatype
@param x_lo, x_up torch tensor that lower and upper bound the state
@param dynamics_relu torch model that represents the dynamics
"""
assert (len(x_lo) == len(x_up))
assert (torch.all(x_up >= x_lo))
assert (dynamics_relu[0].in_features == dynamics_relu[-1].out_features)
self.dtype = dtype
self.x_lo = x_lo
self.x_up = x_up
self.x_dim = len(self.x_lo)
self.dynamics_relu = dynamics_relu
self.dynamics_relu_free_pattern = relu_to_optimization.ReLUFreePattern(
dynamics_relu, dtype)
self.network_bound_propagate_method = network_bound_propagate_method
@property
def x_lo_all(self):
return self.x_lo.detach().numpy()
@property
def x_up_all(self):
return self.x_up.detach().numpy()
def mixed_integer_constraints(self) ->\
gurobi_torch_mip.MixedIntegerConstraintsReturn:
"""
@return mixed-integer linear constraints MixedIntegerConstraintsReturn
Ain_x, Ain_s, Ain_gamma, rhs_in,
Aeq_x, Aeq_s, Aeq_gamma, rhs_eq
such that
x[n+1] = Aout_s @ s + Cout or x_dot = Aout_s @ s + Cout
s.t.
Ain_x @ x + Ain_s @ s + Ain_gamma @ gamma <= rhs_in
Aeq_x @ x + Aeq_s @ s + Aeq_gamma @ gamma == rhs_eq
"""
result = self.dynamics_relu_free_pattern.output_constraint(
self.x_lo, self.x_up, self.network_bound_propagate_method)
return result
def possible_dx(self, x):
assert (isinstance(x, torch.Tensor))
assert (len(x) == self.x_dim)
return [self.dynamics_relu(x)]
def step_forward(self, x_start):
assert (isinstance(x_start, torch.Tensor))
return self.dynamics_relu(x_start)
def add_dynamics_constraint(self,
mip: gurobi_torch_mip.GurobiTorchMIP,
x_var,
x_next_var,
slack_var_name,
binary_var_name,
binary_var_type=gurobipy.GRB.BINARY):
mip_cnstr_return = self.mixed_integer_constraints()
if self.network_bound_propagate_method in (
mip_utils.PropagateBoundsMethod.IA,
mip_utils.PropagateBoundsMethod.IA_MIP):
x_next_lb_IA = mip_cnstr_return.nn_output_lo
x_next_ub_IA = mip_cnstr_return.nn_output_up
else:
x_next_lb_IA = None
x_next_ub_IA = None
return _add_dynamics_constraint_autonmous(
mip_cnstr_return, x_next_lb_IA, x_next_ub_IA, mip, x_var,
x_next_var, slack_var_name, binary_var_name,
self.network_bound_propagate_method, binary_var_type)
class AutonomousReLUSystemGivenEquilibrium:
"""
This system models an autonomous system with known equilibirum x* using
a feedforward neural network with ReLU activations
For discrete-time system
x[n+1] = ϕ(x[n]) − ϕ(x*) + x*
For continuous-time system
ẋ = ϕ(x) − ϕ(x*)
where ϕ is a feedforward (leaky) ReLU network.
"""
def __init__(
self,
dtype,
x_lo,
x_up,
dynamics_relu,
x_equilibrium,
discrete_time_flag=True,
network_bound_propagate_method=mip_utils.PropagateBoundsMethod.IA):
"""
@param dtype The torch datatype
@param x_lo, x_up torch tensor that lower and upper bound the state
@param dynamics_relu torch model that represents the dynamics
@param x_equilibrium The equilibrium state.
"""
assert (len(x_lo) == len(x_up))
assert (torch.all(x_up >= x_lo))
assert (dynamics_relu[0].in_features == dynamics_relu[-1].out_features)
self.dtype = dtype
self.x_lo = x_lo
self.x_up = x_up
self.x_dim = len(self.x_lo)
self.dynamics_relu = dynamics_relu
self.dynamics_relu_free_pattern = relu_to_optimization.ReLUFreePattern(
dynamics_relu, dtype)
assert (x_equilibrium.shape == (self.x_dim, ))
self.x_equilibrium = x_equilibrium
self.discrete_time_flag = discrete_time_flag
self.network_bound_propagate_method = network_bound_propagate_method
@property
def x_lo_all(self):
return self.x_lo.detach().numpy()
@property
def x_up_all(self):
return self.x_up.detach().numpy()
def mixed_integer_constraints(self):
"""
@return mixed-integer linear constraints MixedIntegerConstraintsReturn
Ain_x, Ain_s, Ain_gamma, rhs_in,
Aeq_x, Aeq_s, Aeq_gamma, rhs_eq
such that
x[n+1] = Aout_s @ s + Cout or x_dot = Aout_s @ s + Cout
s.t.
Ain_x @ x + Ain_s @ s + Ain_gamma @ gamma <= rhs_in
Aeq_x @ x + Aeq_s @ s + Aeq_gamma @ gamma == rhs_eq
"""
result = self.dynamics_relu_free_pattern.output_constraint(
self.x_lo, self.x_up, self.network_bound_propagate_method)
if self.discrete_time_flag:
result.Cout += -self.dynamics_relu(self.x_equilibrium) +\
self.x_equilibrium
else:
result.Cout += -self.dynamics_relu(self.x_equilibrium)
return result
def possible_dx(self, x):
assert (isinstance(x, torch.Tensor))
assert (len(x) == self.x_dim)
return [self.step_forward(x)]
def step_forward(self, x_start):
assert (isinstance(x_start, torch.Tensor))
if self.discrete_time_flag:
return self.dynamics_relu(x_start) - \
self.dynamics_relu(self.x_equilibrium) + self.x_equilibrium
else:
return self.dynamics_relu(x_start) - \
self.dynamics_relu(self.x_equilibrium)
def add_dynamics_constraint(self,
mip: gurobi_torch_mip.GurobiTorchMIP,
x_var,
x_next_var,
slack_var_name,
binary_var_name,
binary_var_type=gurobipy.GRB.BINARY):
mip_cnstr_return = self.mixed_integer_constraints()
if self.network_bound_propagate_method in (
mip_utils.PropagateBoundsMethod.IA,
mip_utils.PropagateBoundsMethod.IA_MIP):
relu_at_equilibrium = self.dynamics_relu(self.x_equilibrium)
x_next_lb_IA = mip_cnstr_return.nn_output_lo - relu_at_equilibrium\
+ (self.x_equilibrium if self.discrete_time_flag else 0.)
x_next_ub_IA = mip_cnstr_return.nn_output_up - relu_at_equilibrium\
+ (self.x_equilibrium if self.discrete_time_flag else 0.)
else:
x_next_lb_IA = None
x_next_ub_IA = None
return _add_dynamics_constraint_autonmous(
mip_cnstr_return, x_next_lb_IA, x_next_ub_IA, mip, x_var,
x_next_var, slack_var_name, binary_var_name,
self.network_bound_propagate_method, binary_var_type)
class AutonomousResidualReLUSystemGivenEquilibrium:
"""
This system models an autonomous system with known equilibirum x* using
a feedforward neural network with ReLU activations. The neural network
learn the residual dynamics
Discrete-time system
x[n+1] = ϕ(x[n]) − ϕ(x*) + x[n]
Continuous-time system
ẋ = ϕ(x) − ϕ(x*)
where ϕ is a feedforward (leaky) ReLU network.
"""
def __init__(
self,
dtype,
x_lo,
x_up,
dynamics_relu,
x_equilibrium,
discrete_time_flag=True,
network_bound_propagate_method=mip_utils.PropagateBoundsMethod.IA):
"""
@param dtype The torch datatype
@param x_lo, x_up torch tensor that lower and upper bound the state
@param dynamics_relu torch model that represents the dynamics
@param x_equilibrium The equilibrium state.
"""
assert (len(x_lo) == len(x_up))
assert (torch.all(x_up >= x_lo))
assert (dynamics_relu[0].in_features == dynamics_relu[-1].out_features)
self.dtype = dtype
self.x_lo = x_lo
self.x_up = x_up
self.x_dim = len(self.x_lo)
self.dynamics_relu = dynamics_relu
self.dynamics_relu_free_pattern = relu_to_optimization.ReLUFreePattern(
dynamics_relu, dtype)
assert (x_equilibrium.shape == (self.x_dim, ))
self.x_equilibrium = x_equilibrium
self.discrete_time_flag = discrete_time_flag
self.network_bound_propagate_method = network_bound_propagate_method
@property
def x_lo_all(self):
return self.x_lo.detach().numpy()
@property
def x_up_all(self):
return self.x_up.detach().numpy()
def mixed_integer_constraints(self):
"""
@return mixed-integer linear constraints MixedIntegerConstraintsReturn
Ain_x, Ain_s, Ain_gamma, rhs_in,
Aeq_x, Aeq_s, Aeq_gamma, rhs_eq
such that
x[n+1] = Aout_input @ x + Aout_s @ s + Cout
or x_dot = Aout_input @ x + Aout_s @ s + Cout
s.t.
Ain_x @ x + Ain_s @ s + Ain_gamma @ gamma <= rhs_in
Aeq_x @ x + Aeq_s @ s + Aeq_gamma @ gamma == rhs_eq
"""
result = self.dynamics_relu_free_pattern.output_constraint(
self.x_lo, self.x_up, self.network_bound_propagate_method)
result.Cout += -self.dynamics_relu(self.x_equilibrium)
if self.discrete_time_flag:
if result.Aout_input is None:
result.Aout_input = torch.eye(self.x_dim, dtype=self.dtype)
else:
result.Aout_input += torch.eye(self.x_dim, dtype=self.dtype)
return result
def possible_dx(self, x):
assert (isinstance(x, torch.Tensor))
assert (len(x) == self.x_dim)
return [self.step_forward(x)]
def step_forward(self, x_start):
assert (isinstance(x_start, torch.Tensor))
if self.discrete_time_flag:
return self.dynamics_relu(x_start) - \
self.dynamics_relu(self.x_equilibrium) + x_start
else:
return self.dynamics_relu(x_start) - \
self.dynamics_relu(self.x_equilibrium)
def add_dynamics_constraint(self,
mip: gurobi_torch_mip.GurobiTorchMIP,
x_var,
x_next_var,
slack_var_name,
binary_var_name,
binary_var_type=gurobipy.GRB.BINARY):
mip_cnstr_return = self.mixed_integer_constraints()
if self.network_bound_propagate_method in (
mip_utils.PropagateBoundsMethod.IA,
mip_utils.PropagateBoundsMethod.IA_MIP):
relu_at_equilibrium = self.dynamics_relu(self.x_equilibrium)
x_next_lb_IA = mip_cnstr_return.nn_output_lo - relu_at_equilibrium\
+ (self.x_lo if self.discrete_time_flag else 0.)
x_next_ub_IA = mip_cnstr_return.nn_output_up - relu_at_equilibrium\
+ (self.x_up if self.discrete_time_flag else 0.)
else:
x_next_lb_IA = None
x_next_ub_IA = None
return _add_dynamics_constraint_autonmous(
mip_cnstr_return, x_next_lb_IA, x_next_ub_IA, mip, x_var,
x_next_var, slack_var_name, binary_var_name,
self.network_bound_propagate_method, binary_var_type)
class ReLUSystem:
"""
This class represents either a discrete-time controlled system, whose
dynamics is
x[n+1] = f(x[n], u[n])
or a continuous time system with dynamics
ẋ = f(x, u)
where f is a feed-forward neural network with (leaky) ReLU activation
functions. x[n+1], x[n], and u[n] (or ẋ, x, u) satisfy piecewise linear
relationship, hence when they are bounded, they satisfy some mixed-integer
linear constraint.
"""
def __init__(self, dtype, x_lo, x_up, u_lo, u_up, dynamics_relu):
"""
@param x_lo The lower bound of x[n] and x[n+1]. This is only used in
forming the mixed-integer linear constraints.
@param x_up The upper bound of x[n] and x[n+1]. This is only used in
forming the mixed-integer linear constraints.
@param u_lo The lower bound of u[n]. This is only used in
forming the mixed-integer linear constraints.
@param u_up The upper bound of u[n]. This is only used in
forming the mixed-integer linear constraints.
@param dynamics_relu A feedforward neural network with (leaky) ReLU
activation units. The input to the network is the concatenation
[x[n], u[n]], and the output of the network is x[n+1].
"""
assert (len(x_lo.shape) == 1)
assert (x_lo.shape == x_up.shape)
assert (torch.all(x_lo <= x_up))
self.dtype = dtype
self.x_dim = x_lo.numel()
self.x_lo = x_lo
self.x_up = x_up
assert (len(u_lo.shape) == 1)
assert (u_lo.shape == u_up.shape)
assert (torch.all(u_lo <= u_up))
self.u_dim = u_lo.numel()
self.u_lo = u_lo
self.u_up = u_up
assert (dynamics_relu[0].in_features == self.x_dim + self.u_dim)
assert (dynamics_relu[-1].out_features == self.x_dim)
self.dynamics_relu = dynamics_relu
self.dynamics_relu_free_pattern = relu_to_optimization.ReLUFreePattern(
dynamics_relu, dtype)
@property
def x_lo_all(self):
return self.x_lo.detach().numpy()
@property
def x_up_all(self):
return self.x_up.detach().numpy()
def mixed_integer_constraints(self, u_lo=None, u_up=None):
"""
@return mixed-integer linear constraints MixedIntegerConstraintsReturn
Ain_x, Ain_u, Ain_s, Ain_gamma, rhs_in,
Aeq_x, Aeq_u, Aeq_s, Aeq_gamma, rhs_eq
such that
x[n+1] = Aout_s @ s + Cout or x_dot = Aout_s @ s + Cout
s.t.
Ain_x @ x + Ain_u @ u + Ain_s @ s + Ain_gamma @ gamma <= rhs_in
Aeq_x @ x + Aeq_u @ u + Aeq_s @ s + Aeq_gamma @ gamma == rhs_eq
"""
if u_lo is None:
u_lo = self.u_lo
if u_up is None:
u_up = self.u_up
xu_lo = torch.cat((self.x_lo, u_lo))
xu_up = torch.cat((self.x_up, u_up))
result = self.dynamics_relu_free_pattern.output_constraint(
xu_lo, xu_up, mip_utils.PropagateBoundsMethod.IA)
return result
def step_forward(self, x_start, u_start):
assert (isinstance(x_start, torch.Tensor))
assert (isinstance(u_start, torch.Tensor))
return self.dynamics_relu(torch.cat((x_start, u_start), dim=-1))
def possible_dx(self, x, u):
assert (isinstance(x, torch.Tensor))
assert (isinstance(u, torch.Tensor))
return [self.dynamics_relu(torch.cat((x, u), dim=-1))]
def add_dynamics_constraint(self,
mip,
x_var,
x_next_var,
u_var,
slack_var_name,
binary_var_name,
additional_u_lo: torch.Tensor = None,
additional_u_up: torch.Tensor = None,
binary_var_type=gurobipy.GRB.BINARY):
return _add_dynamics_mip_constraints(mip, self, x_var, x_next_var,
u_var, slack_var_name,
binary_var_name, additional_u_lo,
additional_u_up, binary_var_type)
class ReLUSystemGivenEquilibrium:
"""
Represent a forward dynamical system with given equilibrium x*, u*. The
dynamics is
x[n+1] = ϕ(x[n], u[n]) − ϕ(x*, u*) + x*
where ϕ is a feedforward (leaky) ReLU network.
x[n+1], x[n] and u[n] satisfy a piecewise affine relationship. When x[n],
u[n] are bounded, we can write this relationship using mixed-integer linear
constraints.
"""
def __init__(self, dtype, x_lo, x_up, u_lo, u_up, dynamics_relu,
x_equilibrium, u_equilibrium):
"""
@param x_lo The lower bound of x[n] and x[n+1]. This is only used in
forming the mixed-integer linear constraints.
@param x_up The upper bound of x[n] and x[n+1]. This is only used in
forming the mixed-integer linear constraints.
@param u_lo The lower bound of u[n]. This is only used in
forming the mixed-integer linear constraints.
@param u_up The upper bound of u[n]. This is only used in
forming the mixed-integer linear constraints.
@param dynamics_relu A feedforward neural network with (leaky) ReLU
activation units. The input to the network is the concatenation
[x[n], u[n]], and the output of the network is x[n+1].
@param x_equilibrium The equilibrium state.
@param u_equilibrium The control at the equilibrium.
"""
assert (len(x_lo.shape) == 1)
assert (x_lo.shape == x_up.shape)
assert (torch.all(x_lo <= x_up))
self.dtype = dtype
self.x_dim = x_lo.numel()
self.x_lo = x_lo
self.x_up = x_up
assert (len(u_lo.shape) == 1)
assert (u_lo.shape == u_up.shape)
assert (torch.all(u_lo <= u_up))
self.u_dim = u_lo.numel()
self.u_lo = u_lo
self.u_up = u_up
assert (dynamics_relu[0].in_features == self.x_dim + self.u_dim)
assert (dynamics_relu[-1].out_features == self.x_dim)
self.dynamics_relu = dynamics_relu
self.dynamics_relu_free_pattern = relu_to_optimization.ReLUFreePattern(
dynamics_relu, dtype)
assert (x_equilibrium.shape == (self.x_dim, ))
assert (torch.all(x_lo <= x_equilibrium))
assert (torch.all(x_up >= x_equilibrium))
self.x_equilibrium = x_equilibrium
assert (u_equilibrium.shape == (self.u_dim, ))
assert (torch.all(u_lo <= u_equilibrium))
assert (torch.all(u_up >= u_equilibrium))
self.u_equilibrium = u_equilibrium
@property
def x_lo_all(self):
return self.x_lo.detach().numpy()
@property
def x_up_all(self):
return self.x_up.detach().numpy()
def mixed_integer_constraints(self, u_lo=None, u_up=None):
"""
The relationship between x[n], u[n] and x[n+1] can be captured by mixed
-integer linear constraints.
"""
if u_lo is None:
u_lo = self.u_lo
if u_up is None:
u_up = self.u_up
xu_lo = torch.cat((self.x_lo, u_lo))
xu_up = torch.cat((self.x_up, u_up))
result = self.dynamics_relu_free_pattern.output_constraint(
xu_lo, xu_up, mip_utils.PropagateBoundsMethod.IA)
result.Aout_slack = result.Aout_slack.reshape((self.x_dim, -1))
result.Cout = result.Cout.reshape((-1))
result.Cout += -self.dynamics_relu(
torch.cat((self.x_equilibrium, self.u_equilibrium))) +\
self.x_equilibrium
return result
def step_forward(self, x_start, u_start):
assert (isinstance(x_start, torch.Tensor))
assert (isinstance(u_start, torch.Tensor))
x_next = self.dynamics_relu(torch.cat((x_start, u_start), dim=-1)) - \
self.dynamics_relu(torch.cat(
(self.x_equilibrium, self.u_equilibrium))) + self.x_equilibrium
return x_next
def possible_dx(self, x, u):
assert (isinstance(x, torch.Tensor))
assert (isinstance(u, torch.Tensor))
return [self.step_forward(x, u)]
def add_dynamics_constraint(self,
mip,
x_var,
x_next_var,
u_var,
slack_var_name,
binary_var_name,
additional_u_lo: torch.Tensor = None,
additional_u_up: torch.Tensor = None,
binary_var_type=gurobipy.GRB.BINARY):
return _add_dynamics_mip_constraints(mip, self, x_var, x_next_var,
u_var, slack_var_name,
binary_var_name, additional_u_lo,
additional_u_up, binary_var_type)
class ReLUSecondOrderSystemGivenEquilibrium:
"""
For a second order system
q̇ = v
v̇ = f(q, v, u)
We use a fully connected network with (leaky) ReLU activation unit ϕ to
approximate its second order dynamics (in discrete time), as
v[n+1] = ϕ(q[n], v[n], u[n]) − ϕ(q*, v*, u*)
For the update on q, we use mid-point interpolation
q[n+1] = q[n] + (v[n] + v[n+1]) / 2 * dt
@note at the equilibrium, v should be 0.
"""
def __init__(self, dtype, x_lo: torch.Tensor, x_up: torch.Tensor,
u_lo: torch.Tensor, u_up: torch.Tensor, dynamics_relu,
q_equilibrium: torch.Tensor, u_equilibrium: torch.Tensor,
dt: float):
"""
@param x_lo The lower bound of state x = [q; v].
@param x_up The upper bound of state x = [q; v].
@param u_lo The lower bound of the input u.
@param u_up The upper bound of the input u.
@param dynamics_relu A fully connected network with (leaky) ReLU
activation units. The input to the network is (q, v, u), the output of
the network is of same dimension as v.
@param q_equilibrium The equilibrium position.
@param u_equilibrium The control at the equilibrium.
@param dt The integration time step.
"""
self.dtype = dtype
self.x_dim = x_lo.numel()
assert (isinstance(x_lo, torch.Tensor))
self.x_lo = x_lo
assert (isinstance(x_up, torch.Tensor))
assert (x_up.shape == (self.x_dim, ))
self.x_up = x_up
self.u_dim = u_lo.numel()
assert (isinstance(u_lo, torch.Tensor))
self.u_lo = u_lo
assert (isinstance(u_up, torch.Tensor))
assert (u_up.shape == (self.u_dim, ))
self.u_up = u_up
assert (dynamics_relu[0].in_features == self.x_dim + self.u_dim)
self.nv = dynamics_relu[-1].out_features
self.nq = self.x_dim - self.nv
self.dynamics_relu = dynamics_relu
self.dynamics_relu_free_pattern = relu_to_optimization.ReLUFreePattern(
dynamics_relu, dtype)
assert (isinstance(q_equilibrium, torch.Tensor))
assert (q_equilibrium.shape == (self.nq, ))
self.q_equilibrium = q_equilibrium
self.x_equilibrium = torch.cat(
(self.q_equilibrium, torch.zeros((self.nv, ), dtype=self.dtype)))
assert (torch.all(self.x_equilibrium >= self.x_lo))
assert (torch.all(self.x_equilibrium <= self.x_up))
assert (isinstance(u_equilibrium, torch.Tensor))
assert (u_equilibrium.shape == (self.u_dim, ))
self.u_equilibrium = u_equilibrium
assert (torch.all(self.u_equilibrium >= self.u_lo))
assert (torch.all(self.u_equilibrium <= self.u_up))
assert (isinstance(dt, float))
assert (dt > 0)
self.dt = dt
@property
def x_lo_all(self):
return self.x_lo.detach().numpy()
@property
def x_up_all(self):
return self.x_up.detach().numpy()
def mixed_integer_constraints(self, u_lo=None, u_up=None) ->\
gurobi_torch_mip.MixedIntegerConstraintsReturn:
"""
The relationship between x[n], u[n] and x[n+1] can be captured by mixed
-integer linear constraints.
Please refer to gurobi_torch_mip.MixedIntegerConstraintsReturn for the
meaning of each term in the output.
"""
# `result` contains the mixed integer linear constraint to write
# ϕ(q[n], v[n], u[n]) as a function
# ϕ(q[n], v[n], u[n]) = result.Aout_slack * s + result.Cout
# whee s is the slack variable.
# For the constraint v[n+1] = ϕ(q[n], v[n], u[n]) − ϕ(q*, v*, u*)
# This is equivalent to
# v[n+1] = result.Aout_slack * s + result.Cout - ϕ(q*, v*, u*)
if u_lo is None:
u_lo = self.u_lo
if u_up is None:
u_up = self.u_up
result = self.dynamics_relu_free_pattern.output_constraint(
torch.cat((self.x_lo, u_lo)), torch.cat((self.x_up, u_up)),
mip_utils.PropagateBoundsMethod.IA)
assert (result.Aout_input is None)
assert (result.Aout_binary is None)
if (len(result.Aout_slack.shape) == 1):
result.Aout_slack = result.Aout_slack.reshape((1, -1))
if (len(result.Cout.shape) == 0):
result.Cout = result.Cout.reshape((-1))
result.Cout -= self.dynamics_relu(
torch.cat((self.x_equilibrium, self.u_equilibrium)))
# We also need to add the output constraint
# q[n+1] = q[n] + (v[n] + v[n+1]) * dt / 2
# = [I dt/2*I 0] * [q[n]; v[n]; u[n]] +
# + (result.Aout_slack*dt/2) * s + result.Cout*dt/2
result.Aout_input = torch.cat(
(torch.cat((torch.eye(self.nq, dtype=self.dtype),
self.dt / 2 * torch.eye(self.nv, dtype=self.dtype),
torch.zeros((self.nq, self.u_dim), dtype=self.dtype)),
dim=1),
torch.zeros(
(self.nv, self.x_dim + self.u_dim), dtype=self.dtype)),
dim=0)
result.Aout_slack = torch.cat(
(result.Aout_slack * self.dt / 2, result.Aout_slack), dim=0)
result.Cout = torch.cat((result.Cout * self.dt / 2, result.Cout),
dim=0)
return result
def step_forward(self, x_start, u_start):
# Compute x[n+1] according to
# v[n+1] = ϕ(q[n], v[n], u[n]) − ϕ(q*, v*, u*)
# q[n+1] = q[n] + (v[n] + v[n+1]) * dt / 2
assert (isinstance(x_start, torch.Tensor))
assert (isinstance(u_start, torch.Tensor))
v_next = self.dynamics_relu(torch.cat((x_start, u_start), dim=-1)) - \
self.dynamics_relu(torch.cat(
(self.x_equilibrium, self.u_equilibrium)))
if len(x_start.shape) == 1:
q_next = x_start[:self.nq] + (x_start[self.nq:] + v_next) *\
self.dt / 2
else:
# batch of x_start and u_start
q_next = x_start[:, :self.nq] + (x_start[:, self.nq:] + v_next) *\
self.dt / 2
x_next = torch.cat((q_next, v_next), dim=-1)
return x_next
def possible_dx(self, x, u):
assert (isinstance(x, torch.Tensor))
assert (isinstance(u, torch.Tensor))
return [self.step_forward(x, u)]
def add_dynamics_constraint(self,
mip,
x_var,
x_next_var,
u_var,
slack_var_name,
binary_var_name,
additional_u_lo: torch.Tensor = None,
additional_u_up: torch.Tensor = None,
binary_var_type=gurobipy.GRB.BINARY):
return _add_dynamics_mip_constraints(mip, self, x_var, x_next_var,
u_var, slack_var_name,
binary_var_name, additional_u_lo,
additional_u_up, binary_var_type)
class ReLUSecondOrderResidueSystemGivenEquilibrium:
"""
A second order system whose dynamics is represented by
q[n+1] = q[n] + (v[n] + v[n+1]) * dt / 2
v[n+1] - v[n] = ϕ(x̅[n], u[n]) − ϕ(x̅*, u*)
Note that for the update on the velocity, we use the network to only
represent the "residue" part v[n+1] - v[n], not v[n+1] directly.
x̅ is a partial state of x. For many system, the "residue" part only
depends on part of the state. For example, for a system that is shift
invariant (such as a car), its acceleration doesn't depend on the location
of the car.
"""
def __init__(self, dtype, x_lo: torch.Tensor, x_up: torch.Tensor,
u_lo: torch.Tensor, u_up: torch.Tensor,
dynamics_relu: torch.nn.Sequential,
q_equilibrium: torch.Tensor, u_equilibrium: torch.Tensor,
dt: float, network_input_x_indices: list):
"""
@param dynamics_relu A fully connected network that takes the input as
a partial state and the control, and outputs the change of velocity
v[n+1] - v[n] = ϕ(x̅[n], u[n]) − ϕ(x̅*, u*)
@param q_equilibrium The equilibrium position.
@param u_equilibrium The control at the equilibrium.
@param dt The integration time step.
@param network_input_x_indices The partial state
x̅=x[network_input_x_indices]
"""
self.dtype = dtype
self.x_dim = x_lo.numel()
assert (isinstance(x_lo, torch.Tensor))
self.x_lo = x_lo
assert (isinstance(x_up, torch.Tensor))
assert (x_up.shape == (self.x_dim, ))
self.x_up = x_up
self.u_dim = u_lo.numel()
assert (isinstance(u_lo, torch.Tensor))
self.u_lo = u_lo
assert (isinstance(u_up, torch.Tensor))
assert (u_up.shape == (self.u_dim, ))
self.u_up = u_up
assert (isinstance(network_input_x_indices, list))
assert (dynamics_relu[0].in_features == len(network_input_x_indices) +
self.u_dim)
self.nv = dynamics_relu[-1].out_features
self.nq = self.x_dim - self.nv
self.dynamics_relu = dynamics_relu
self.dynamics_relu_free_pattern = relu_to_optimization.ReLUFreePattern(
dynamics_relu, dtype)
assert (isinstance(q_equilibrium, torch.Tensor))
assert (q_equilibrium.shape == (self.nq, ))
self.q_equilibrium = q_equilibrium
self.x_equilibrium = torch.cat(
(self.q_equilibrium, torch.zeros((self.nv, ), dtype=self.dtype)))
assert (torch.all(self.x_equilibrium >= self.x_lo))
assert (torch.all(self.x_equilibrium <= self.x_up))
assert (isinstance(u_equilibrium, torch.Tensor))
assert (u_equilibrium.shape == (self.u_dim, ))
self.u_equilibrium = u_equilibrium
assert (torch.all(self.u_equilibrium >= self.u_lo))
assert (torch.all(self.u_equilibrium <= self.u_up))
assert (isinstance(dt, float))
assert (dt > 0)
self.dt = dt
self._network_input_x_indices = network_input_x_indices
self.network_bound_propagate_method = \
mip_utils.PropagateBoundsMethod.IA
@property
def x_lo_all(self):
return self.x_lo.detach().numpy()
@property
def x_up_all(self):
return self.x_up.detach().numpy()
def step_forward(self, x_start, u_start):
"""
Compute x[n+1] according to
q[n+1] = q[n] + (v[n] + v[n+1]) * dt / 2
v[n+1] - v[n] = ϕ(x̅[n], u[n]) − ϕ(x̅*, u*)
"""
assert (isinstance(x_start, torch.Tensor))
assert (isinstance(u_start, torch.Tensor))
if len(x_start.shape) == 1:
q_start = x_start[:self.nq]
v_start = x_start[self.nq:]
v_next = v_start + self.dynamics_relu(torch.cat((
x_start[self._network_input_x_indices], u_start))) -\
self.dynamics_relu(torch.cat((self.x_equilibrium[
self._network_input_x_indices], self.u_equilibrium)))
q_next = q_start + (v_start + v_next) * self.dt / 2
return torch.cat((q_next, v_next))
elif len(x_start.shape) == 2:
# batch of data.
q_start = x_start[:, :self.nq]
v_start = x_start[:, self.nq:]
v_next = v_start + self.dynamics_relu(torch.cat((x_start[
:, self._network_input_x_indices], u_start), dim=-1)) -\
self.dynamics_relu(torch.cat((self.x_equilibrium[
self._network_input_x_indices], self.u_equilibrium)))
q_next = q_start + (v_start + v_next) * self.dt / 2
return torch.cat((q_next, v_next), dim=-1)
def possible_dx(self, x, u):
assert (isinstance(x, torch.Tensor))
assert (isinstance(u, torch.Tensor))
return [self.step_forward(x, u)]
def add_dynamics_constraint(self,
mip: gurobi_torch_mip.GurobiTorchMIP,
x_var,
x_next_var,
u_var,
slack_var_name,
binary_var_name,
additional_u_lo: torch.Tensor = None,
additional_u_up: torch.Tensor = None,
binary_var_type=gurobipy.GRB.BINARY):
u_lo = self.u_lo if additional_u_lo is None else torch.max(
self.u_lo, additional_u_lo)
u_up = self.u_up if additional_u_up is None else torch.min(
self.u_up, additional_u_up)
mip_cnstr_result = self.dynamics_relu_free_pattern.\
output_constraint(torch.cat((self.x_lo[
self._network_input_x_indices], u_lo)), torch.cat((
self.x_up[self._network_input_x_indices], u_up)),
self.network_bound_propagate_method)
# First add mip_cnstr_result, but don't impose the constraint on the
# output of the network (we will impose the constraint separately)
input_vars = [x_var[i] for i in self._network_input_x_indices] + u_var
forward_slack, forward_binary = \
mip.add_mixed_integer_linear_constraints(
mip_cnstr_result, input_vars, None, slack_var_name,
binary_var_name, "residue_forward_dynamics_ineq",
"residue_forward_dynamics_eq", None, binary_var_type)
# We want to impose the constraint
# v[n+1] = v[n] + ϕ(x̅[n], u[n]) − ϕ(x̅*, u*)
# = v[n] + Aout_slack * s + Cout - ϕ(x̅*, u*)
assert (mip_cnstr_result.Aout_input is None)
assert (mip_cnstr_result.Aout_binary is None)
if len(mip_cnstr_result.Aout_slack.shape) == 1:
mip_cnstr_result.Aout_slack = mip_cnstr_result.Aout_slack.reshape(
(1, -1))
if len(mip_cnstr_result.Cout.shape) == 0:
mip_cnstr_result.Cout = mip_cnstr_result.Cout.reshape((-1))
v_next = x_next_var[self.nq:]
v_curr = x_var[self.nq:]
mip.addMConstrs(
[
torch.eye(self.nv, dtype=self.dtype),
-torch.eye(self.nv, dtype=self.dtype),
-mip_cnstr_result.Aout_slack
], [v_next, v_curr, forward_slack],
b=mip_cnstr_result.Cout - self.dynamics_relu(
torch.cat((self.x_equilibrium[self._network_input_x_indices],
self.u_equilibrium))),
sense=gurobipy.GRB.EQUAL,
name="residue_forward_dynamics_output")
# Now add the constraint
# q[n+1] - q[n] = (v[n+1] + v[n]) * dt / 2
q_next = x_next_var[:self.nq]
q_curr = x_var[:self.nq]
mip.addMConstrs([
torch.eye(self.nq,
dtype=self.dtype), -torch.eye(self.nq, dtype=self.dtype),
-self.dt / 2 * torch.eye(self.nv, dtype=self.dtype),
-self.dt / 2 * torch.eye(self.nv, dtype=self.dtype)
], [q_next, q_curr, v_next, v_curr],
b=torch.zeros((self.nv), dtype=self.dtype),
sense=gurobipy.GRB.EQUAL,
name="update_q_next")
ret = ReLUDynamicsConstraintReturn(forward_slack, forward_binary)
ret.from_mip_cnstr_return(mip_cnstr_result, input_vars)
return ret
def _add_dynamics_mip_constraints(mip,
relu_system,
x_var,
x_next_var,
u_var,
slack_var_name,
binary_var_name,
additional_u_lo: torch.Tensor = None,
additional_u_up: torch.Tensor = None,
binary_var_type=gurobipy.GRB.BINARY):
u_lo = relu_system.u_lo if additional_u_lo is None else torch.max(
relu_system.u_lo, additional_u_lo)
u_up = relu_system.u_up if additional_u_up is None else torch.min(
relu_system.u_up, additional_u_up)
mip_cnstr = relu_system.mixed_integer_constraints(u_lo, u_up)
input_vars = x_var + u_var
slack, binary = mip.add_mixed_integer_linear_constraints(
mip_cnstr, input_vars, x_next_var, slack_var_name, binary_var_name,
"relu_forward_dynamics_ineq", "relu_forward_dynamics_eq",
"relu_forward_dynamics_output", binary_var_type)
ret = ReLUDynamicsConstraintReturn(slack, binary)
ret.from_mip_cnstr_return(mip_cnstr, input_vars)
return ret
| 43.696907 | 79 | 0.588944 | 5,678 | 42,386 | 4.104262 | 0.049137 | 0.026819 | 0.033642 | 0.027806 | 0.842688 | 0.803296 | 0.774931 | 0.738543 | 0.711723 | 0.693486 | 0 | 0.003914 | 0.318902 | 42,386 | 969 | 80 | 43.742002 | 0.802452 | 0.194074 | 0 | 0.703216 | 0 | 0 | 0.006606 | 0.005023 | 0 | 0 | 0 | 0 | 0.143275 | 1 | 0.076023 | false | 0 | 0.008772 | 0.024854 | 0.163743 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
b211dd393de044ea6536681ce81710e2ae12f024 | 7,979 | py | Python | nmutant_model/network.py | asplos2020/DRTest | c3de497142d9b226e518a1a0f95f7350d2f7acd6 | [
"MIT"
] | 1 | 2021-04-01T07:31:17.000Z | 2021-04-01T07:31:17.000Z | nmutant_model/network.py | Justobe/DRTest | 85c3c9b2a46cafa7184130f2596c5f9eb3b20bff | [
"MIT"
] | null | null | null | nmutant_model/network.py | Justobe/DRTest | 85c3c9b2a46cafa7184130f2596c5f9eb3b20bff | [
"MIT"
] | 1 | 2020-12-24T12:12:54.000Z | 2020-12-24T12:12:54.000Z | """
A pure TensorFlow implementation of a neural network. This can be
used as a drop-in replacement for a Keras model.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
import tensorflow as tf
from nmutant_model.model import Model
from nmutant_model.layer import *
class MLP(Model):
"""
An example of a bare bones multilayer perceptron (MLP) class.
"""
def __init__(self, layers, input_shape):
super(MLP, self).__init__()
self.layer_names = []
self.layers = layers
self.input_shape = input_shape
if isinstance(layers[-1], Softmax):
layers[-1].name = 'probs'
layers[-2].name = 'logits'
else:
layers[-1].name = 'logits'
for i, layer in enumerate(self.layers):
if hasattr(layer, 'name'):
name = layer.name
else:
name = layer.__class__.__name__ + str(i)
layer.name = name
self.layer_names.append(name)
layer.set_input_shape(input_shape)
input_shape = layer.get_output_shape()
def fprop(self, x, set_ref=False):
states = []
for layer in self.layers:
if set_ref:
layer.ref = x
x = layer.fprop(x)
assert x is not None
states.append(x)
states = dict(zip(self.get_layer_names(), states))
return states
class ResidualModel(Model):
    """
    A model with residual (skip) connections.

    ``layers`` may mix plain ``Layer`` objects with nested lists describing a
    residual block: a list whose elements are either parallel branches (each a
    list of ``Layer``) or a merge ``Layer`` that combines the two preceding
    branch outputs by addition.
    """

    def __init__(self, layers, input_shape):
        """
        :param layers: list of ``Layer`` objects and/or nested lists encoding
            residual blocks (see class docstring).
        :param input_shape: shape of the tensor fed to the first layer.
        """
        # Fix: call the base-class constructor, consistent with MLP above.
        super(ResidualModel, self).__init__()
        self.layer_names = []
        # Flattened view of every layer; layers_list keeps the nested
        # structure used by fprop to route residual connections.
        self.layers = []
        self.layers_list = layers
        self.input_shape = input_shape
        if isinstance(layers[-1], Softmax):
            layers[-1].name = 'probs'
            layers[-2].name = 'logits'
        else:
            layers[-1].name = 'logits'
        for i, layer in enumerate(self.layers_list):
            if isinstance(layer, Layer):
                # Plain sequential layer.
                self.layers.append(layer)
                if hasattr(layer, 'name'):
                    name = layer.name
                else:
                    name = layer.__class__.__name__ + str(i)
                    layer.name = name
                self.layer_names.append(name)

                layer.set_input_shape(input_shape)
                input_shape = layer.get_output_shape()
            else:
                # Residual block: branches (sub-lists) followed by a merge
                # Layer. output_shape tracks the last branch's output shape,
                # which the merge layer consumes.
                output_shape = None
                for j, l in enumerate(layer):
                    if isinstance(l, Layer):
                        # Merge layer of the residual block.
                        self.layers.append(l)
                        if hasattr(l, 'name'):
                            name = l.name
                        else:
                            name = l.__class__.__name__ + str(i) + '_' + str(j)
                            l.name = name
                        self.layer_names.append(name)

                        l.set_input_shape(output_shape)
                        input_shape = l.get_output_shape()
                    else:
                        # One branch: a chain of layers fed from the block
                        # input shape.
                        sl_input = input_shape
                        for k, sl in enumerate(l):
                            self.layers.append(sl)
                            if hasattr(sl, 'name'):
                                name = sl.name
                            else:
                                name = sl.__class__.__name__ + str(i) + '_' + str(j) + '_' + str(k)
                                sl.name = name
                            self.layer_names.append(name)

                            sl.set_input_shape(sl_input)
                            sl_input = sl.get_output_shape()
                        output_shape = sl_input

    def fprop(self, x):
        """
        Forward-propagate ``x`` through the (possibly residual) stack.

        :param x: input tensor.
        :return: dict mapping layer name -> that layer's output tensor.
        """
        states = []
        for layer in self.layers_list:
            if isinstance(layer, Layer):
                x = layer.fprop(x)
                assert x is not None
                states.append(x)
            else:
                # Residual block: run each branch from the current x, then
                # the merge Layer adds the first two branch outputs.
                # NOTE(review): assumes exactly two branches precede the
                # merge layer (temp[0] + temp[1]) — confirm with callers.
                temp = []
                for l in layer:
                    if isinstance(l, Layer):
                        x = temp[0] + temp[1]
                        x = l.fprop(x)
                        assert x is not None
                        states.append(x)
                        temp.append(x)
                    else:
                        t_x = x
                        for sl in l:
                            t_x = sl.fprop(t_x)
                            assert t_x is not None
                            states.append(t_x)
                        temp.append(t_x)
        states = dict(zip(self.get_layer_names(), states))
        return states
class EnsembleModel(Model):
    """A model whose layer list may contain parallel groups whose outputs are
    concatenated (inception-style blocks).

    ``layers`` is nested like in ``ResidualModel``: a top-level entry that is
    a list describes a parallel block whose elements are either a single
    ``Layer`` or a list of ``Layer`` objects (a sequential branch); every
    element is fed the same block input and the results are joined with
    ``tf.concat(temp, 3)``.

    NOTE(review): ``__init__`` does not call ``super().__init__()`` --
    confirm ``Model.__init__`` has no required side effects.
    """

    def __init__(self, layers, input_shape):
        self.layer_names = []
        # self.layers = layers
        # Flat list of every Layer instance vs. the nested block structure.
        self.layers = []
        self.layers_list = layers
        self.input_shape = input_shape
        # Name the terminal layer(s) so 'probs'/'logits' lookups resolve.
        if isinstance(layers[-1], Softmax):
            layers[-1].name = 'probs'
            layers[-2].name = 'logits'
        else:
            layers[-1].name = 'logits'
        for i, layer in enumerate(self.layers_list):
            if isinstance(layer, Layer):
                # Plain sequential layer: shapes chain directly.
                self.layers.append(layer)
                if hasattr(layer, 'name'):
                    name = layer.name
                else:
                    name = layer.__class__.__name__ + str(i)
                    layer.name = name
                self.layer_names.append(name)
                layer.set_input_shape(input_shape)
                input_shape = layer.get_output_shape()
            else:
                # Parallel block: ``last`` accumulates the size of the
                # concatenation axis across all parallel elements (assumed to
                # be the trailing/channel dimension -- TODO confirm layout).
                last = 0
                for j, l in enumerate(layer):
                    if isinstance(l, Layer):
                        # Single parallel layer: fed the block's common input
                        # shape (input_shape is deliberately not advanced).
                        self.layers.append(l)
                        if hasattr(l, 'name'):
                            name = l.name
                        else:
                            name = l.__class__.__name__ + str(i) + '_' + str(j)
                            l.name = name
                        self.layer_names.append(name)
                        l.set_input_shape(input_shape)
                        output_shape = l.get_output_shape()
                        last += output_shape[-1]
                    else:
                        # Sequential branch: chain shapes from the common input.
                        s_input = input_shape
                        for k, sl in enumerate(l):
                            self.layers.append(sl)
                            if hasattr(sl, 'name'):
                                name = sl.name
                            else:
                                name = sl.__class__.__name__ + str(i) + '_' + str(j) + '_' + str(k)
                                sl.name = name
                            self.layer_names.append(name)
                            sl.set_input_shape(s_input)
                            s_input = sl.get_output_shape()
                        output_shape = s_input
                        last += output_shape[-1]
                # The block's output shape is a branch output shape with its
                # last dimension replaced by the concatenated total.
                output_shape = list(output_shape)
                output_shape[-1] = last
                input_shape = tuple(output_shape)

    def fprop(self, x):
        """Forward-propagate ``x``; returns dict of layer name -> output."""
        states = []
        for layer in self.layers_list:
            if isinstance(layer, Layer):
                x = layer.fprop(x)
                assert x is not None
                states.append(x)
            else:
                temp = []  # per-element outputs of the parallel block
                for l in layer:
                    if isinstance(l, Layer):
                        # Parallel single layer applied to the common input.
                        t_x = l.fprop(x)
                        assert t_x is not None
                        states.append(t_x)
                        temp.append(t_x)
                    else:
                        # Sequential branch; every sub-layer output is
                        # recorded in ``states`` (one name per sub-layer).
                        t_x = x
                        for sl in l:
                            t_x = sl.fprop(t_x)
                            assert t_x is not None
                            states.append(t_x)
                        temp.append(t_x)
                # Join the parallel outputs on axis 3 (channels for a 4-D
                # NHWC tensor -- presumably; TODO confirm inputs are 4-D).
                x = tf.concat(temp, 3)
        states = dict(zip(self.get_layer_names(), states))
        return states
| 36.104072 | 99 | 0.442537 | 818 | 7,979 | 4.064792 | 0.112469 | 0.081203 | 0.049624 | 0.06015 | 0.80782 | 0.790075 | 0.766917 | 0.740451 | 0.740451 | 0.740451 | 0 | 0.004521 | 0.473242 | 7,979 | 220 | 100 | 36.268182 | 0.786581 | 0.027447 | 0 | 0.78836 | 0 | 0 | 0.010988 | 0 | 0 | 0 | 0 | 0 | 0.037037 | 1 | 0.031746 | false | 0 | 0.042328 | 0 | 0.10582 | 0.005291 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
b2625b36ec1c52dfef01907bfab377b13612cd1e | 13,614 | py | Python | api/tests/test_products.py | MurungaKibaara/Ecommerce-Django-Rest-API | 3133adc0fa119169b8c52a2fa4bf581c520ff2da | [
"MIT"
] | null | null | null | api/tests/test_products.py | MurungaKibaara/Ecommerce-Django-Rest-API | 3133adc0fa119169b8c52a2fa4bf581c520ff2da | [
"MIT"
] | null | null | null | api/tests/test_products.py | MurungaKibaara/Ecommerce-Django-Rest-API | 3133adc0fa119169b8c52a2fa4bf581c520ff2da | [
"MIT"
] | null | null | null | import io
import json
import base64
import pytest
import numpy
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
from io import BytesIO
from django.urls import reverse
from django.contrib.auth import get_user_model
from rest_framework import status
from rest_framework.test import APITestCase
from rest_framework.test import APIClient
from rest_framework.authtoken.models import Token
# Mock Image
from PIL import Image, ImageDraw
from base64 import decodestring
# Local Import
from ..models import Product
UserModel = get_user_model()
# TODO: UPDATE CATEGORY INFORMATION IN TESTS
class TestProduct(APITestCase):
    """Integration tests for the /api/v1/products endpoints.

    Exercises validation (missing fields, missing records), authentication
    (anonymous users cannot create) and ownership isolation (a user cannot
    read, update or delete another user's products), plus role checks that
    customers and traders may not create products.
    """

    # ---- fixtures -------------------------------------------------------

    @staticmethod
    def _user_payload(role, email):
        """Build a registration payload for ``role`` registered under ``email``."""
        return {
            "role": role,
            "email": email,
            "username": "murungaephantus",
            "name": "Murunga Kibaara",
            "password": "securepassword",
            "password2": "securepassword",
            "region": "Nairobi"
        }

    def setUp(self):
        self.url = '/api/v1/products'
        self.product_url = '/api/v1/products/1'
        self.client = APIClient()
        # One payload per role under test; user2 is a second manufacturer
        # used to verify that product owners are isolated from each other.
        self.user_data = self._user_payload("manufacturer", "murungaephantus@gmail.com")
        self.user3_data = self._user_payload("customer", "murungaephantusk@gmail.com")
        self.user2_data = self._user_payload("manufacturer", "murungakibaara@gmail.com")
        self.user4_data = self._user_payload("trader", "murungakibaara@gmail.com")
        self.user5_data = self._user_payload("wholesaler", "murungakibaara@gmail.com")
        self.category_data = {
            "name": "Food Items",
        }
        # NOTE(review): "image" is a placeholder string, not a real upload;
        # the PIL-based image fixture was disabled upstream.
        self.product_data = {
            "category": 1,
            "product_name": "1KG Sugar",
            "product_description": "Mumias Sugar",
            "product_price": 198,
            "product_quantity": 200,
            "image": "(imgStringIO1)"
        }
        # Same payload with the required "product_name" key removed.
        self.product_data_no_key = {
            "category": 1,
            "product_description": "Mumias Sugar",
            "product_price": 500,
            "product_quantity": 200,
            "image": "(imgStringIO1)"
        }
        self.product_data_update = {
            "category": 1,
            "product_name": "1KG Sugar",
            "product_description": "Mumias Sugar",
            "product_price": 500,
            "product_quantity": 200,
            "image": "(imgStringIO1)"
        }

    # ---- helpers --------------------------------------------------------

    def _register(self, payload):
        """Register ``payload`` and return the JWT access token."""
        response = self.client.post('/api/accounts/register/', data=json.dumps(payload), content_type='application/json')
        return response.data['access']

    def _authenticate(self, token):
        """Attach ``token`` as a Bearer credential to the test client."""
        self.client.credentials(HTTP_AUTHORIZATION='Bearer ' + token)

    def _create_category(self):
        """Create the fixture category (products reference category id 1)."""
        self.client.post('/api/v1/categories/', data=json.dumps(self.category_data), content_type='application/json')

    def _create_product(self):
        """POST the fixture product as the currently authenticated user."""
        self.client.post('/api/v1/products/', data=self.product_data, content_type='multipart/form-data', follow=True)

    # Kept as thin wrappers so existing callers/readers see the same API.
    @pytest.mark.django_db
    def create_user(self):
        return self._register(self.user_data)

    @pytest.mark.django_db
    def create_user2(self):
        return self._register(self.user2_data)

    @pytest.mark.django_db
    def create_user3(self):
        return self._register(self.user3_data)

    @pytest.mark.django_db
    def create_user4(self):
        return self._register(self.user4_data)

    @pytest.mark.django_db
    def create_user5(self):
        return self._register(self.user5_data)

    # ---- tests ----------------------------------------------------------
    # NOTE(review): the happy-path create/read/update/delete tests and the
    # wholesaler-can-create test were already commented out upstream; that
    # dead code was removed here rather than carried along.

    @pytest.mark.django_db
    def test_dont_update_products_that_dont_exist(self):
        self._authenticate(self.create_user())
        self._create_category()
        product_resp = self.client.put(self.product_url, data=self.product_data, content_type='multipart/form-data', follow=True)
        self.assertEqual(product_resp.status_code, status.HTTP_404_NOT_FOUND, 'Product does not exist')

    @pytest.mark.django_db
    def test_dont_create_with_missing_key(self):
        self._authenticate(self.create_user())
        self._create_category()
        product_resp = self.client.post('/api/v1/products/', data=self.product_data_no_key, content_type='multipart/form-data', follow=True)
        # Message was 'Product can be updated' -- a copy/paste error.
        self.assertEqual(product_resp.status_code, status.HTTP_400_BAD_REQUEST, 'Product cannot be created with a missing key')

    @pytest.mark.django_db
    def test_non_logged_in_cant_create(self):
        product_resp = self.client.post('/api/v1/products/', data=self.product_data_no_key, content_type='multipart/form-data', follow=True)
        self.assertEqual(product_resp.status_code, status.HTTP_400_BAD_REQUEST, 'Non user cant create products')

    @pytest.mark.django_db
    def test_empty_returns_404(self):
        self._authenticate(self.create_user())
        product_resp = self.client.get('/api/v1/products/1/')
        # Message was 'Product can be updated' -- a copy/paste error.
        self.assertEqual(product_resp.status_code, status.HTTP_404_NOT_FOUND, 'Product does not exist')

    @pytest.mark.django_db
    def test_different_product_owners_read(self):
        self._authenticate(self.create_user())
        self._create_category()
        self._create_product()
        self._authenticate(self.create_user2())
        product_resp = self.client.get('/api/v1/products/1/')
        self.assertEqual(product_resp.status_code, status.HTTP_404_NOT_FOUND, 'A user cannot access another users products')

    @pytest.mark.django_db
    def test_diffrent_product_owners_update(self):
        self._authenticate(self.create_user())
        self._create_category()
        self._create_product()
        self._authenticate(self.create_user2())
        product_resp = self.client.put('/api/v1/products/1/', data=self.product_data, content_type='multipart/form-data', follow=True)
        self.assertEqual(product_resp.status_code, status.HTTP_404_NOT_FOUND, 'A user cannot update another users products')

    @pytest.mark.django_db
    def test_delete_different_owner_products(self):
        self._authenticate(self.create_user())
        self._create_category()
        self._create_product()
        self._authenticate(self.create_user2())
        product_resp = self.client.delete('/api/v1/products/1/', data=self.product_data, content_type='multipart/form-data', follow=True)
        self.assertEqual(product_resp.status_code, status.HTTP_404_NOT_FOUND, 'A user cannot delete another users products')

    @pytest.mark.django_db
    def test_a_customer_cannot_create_a_product(self):
        self._authenticate(self.create_user3())
        self._create_category()
        response = self.client.post('/api/v1/products/', self.product_data, content_type='multipart/form-data', follow=True)
        # Message was 'A product cannot be be created' (typo).
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST, 'A customer cannot create a product')

    @pytest.mark.django_db
    def test_a_trader_cannot_create_a_product(self):
        self._authenticate(self.create_user4())
        self._create_category()
        response = self.client.post('/api/v1/products/', self.product_data, content_type='multipart/form-data', follow=True)
        # Message was 'A product cannot be be created' (typo).
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST, 'A trader cannot create a product')
| 45.07947 | 140 | 0.675481 | 1,652 | 13,614 | 5.369855 | 0.107748 | 0.066509 | 0.048923 | 0.051742 | 0.832037 | 0.812197 | 0.799121 | 0.784804 | 0.763612 | 0.758539 | 0 | 0.013581 | 0.194138 | 13,614 | 301 | 141 | 45.229236 | 0.795005 | 0.276774 | 0 | 0.571429 | 0 | 0 | 0.233555 | 0.024348 | 0 | 0 | 0 | 0.003322 | 0.047619 | 1 | 0.079365 | false | 0.05291 | 0.095238 | 0 | 0.206349 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 7 |
b27379319784ddd8d3a633ed291f71da0dfeb14b | 14,326 | py | Python | shariar12.py | shazada-shariar/addpro | 8f8dd2fde1c5f33a7338d20c36a3c39a19cc123f | [
"BSD-3-Clause"
] | null | null | null | shariar12.py | shazada-shariar/addpro | 8f8dd2fde1c5f33a7338d20c36a3c39a19cc123f | [
"BSD-3-Clause"
] | null | null | null | shariar12.py | shazada-shariar/addpro | 8f8dd2fde1c5f33a7338d20c36a3c39a19cc123f | [
"BSD-3-Clause"
] | null | null | null | import base64
exec(base64.b16decode('66726F6D2074656C6574686F6E20696D706F72742054656C656772616D436C69656E740A66726F6D2074656C6574686F6E2E746C2E66756E6374696F6E732E6D6573736167657320696D706F7274204765744469616C6F6773526571756573740A66726F6D2074656C6574686F6E2E746C2E747970657320696D706F727420496E70757450656572456D7074792C20496E707574506565724368616E6E656C2C20496E70757450656572557365720A66726F6D2074656C6574686F6E2E6572726F72732E7270636572726F726C69737420696D706F7274202850656572466C6F6F644572726F722C20557365724E6F744D757475616C436F6E746163744572726F72202C0A2020202020202020202020202020202020202020202020202020202020202020202020202020202020205573657250726976616379526573747269637465644572726F722C20557365724368616E6E656C73546F6F4D7563684572726F722C0A20202020202020202020202020202020202020202020202020202020202020202020202020202020202055736572426F744572726F722C20496E7075745573657244656163746976617465644572726F72290A66726F6D2074656C6574686F6E2E746C2E66756E6374696F6E732E6368616E6E656C7320696D706F727420496E76697465546F4368616E6E656C526571756573740A696D706F72742074696D652C206F732C207379732C206A736F6E0A0A7774203D20280A202020202727272020202020202020202020202020202020202020202020202020202020202020202020202020202020202020202020200A20205B5B67725D5D203A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A20202020202020200A5B5B72655D5D2020203A20202020202020E280A2202020202020202020203A2020202020202020202020E280A220202020202020203A0A205B5B79655D5D20203A2020202020202020202020202020202020203A20202020202020202020202020202020202020203A200A5B5B72655D5D2020203A2020532055204C20542041204E20202020203A20202020532048204120522049204120522020203A0A5B5B72655D5D2020203A2020202020202020202020202020202020203A20202020202020202020202020202020202020203A200A20205B5B67725D5D203A20202020202020C2B0202020202020202020203A2020202020202020202020C2B020202020202020203A0A205B5B72655D5D20203A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A3A200A0A20205B5B79655D5D20544849532049532
0434152455445442042592053484152494152205B5B72655D5D2054414E4B5320464F52205553452E2E2E2E2E2E2E2E2E2E2E2E2E2E2E2E2E2E2E2E2E2E2E2E2E2E2E2E2E2E2E2E2E2E2E2E2E2E2E2E2E2E2E2E2E2E2E2E2E2E2E2E2E2E2E2E0A20205B5B79655D5D20535F555F4C5F545F415F4E5F5F5F5F535F485F415F525F495F415F520A20205B5B72655D5D436172657465642042792028532055204C20542041204EE280A2E280A2532048204120522049204120522920200A202054656C656772616D3A205465726D757820436F6D6D616E64730A202020202727270A290A434F4C4F5253203D207B0A20202020227265223A20225C75303031625B33313B316D222C0A20202020226772223A20225C75303031625B33326D222C0A20202020227965223A20225C75303031625B33333B316D222C0A7D0A7265203D20225C75303031625B33313B316D220A6772203D20225C75303031625B33326D220A7965203D20225C75303031625B33333B316D220A64656620636F6C6F72546578742874657874293A0A20202020666F7220636F6C6F7220696E20434F4C4F52533A0A202020202020202074657874203D20746578742E7265706C61636528225B5B22202B20636F6C6F72202B20225D5D222C20434F4C4F52535B636F6C6F725D290A2020202072657475726E20746578740A636C65617254797065203D20696E7075742867722B27287465726D696E616C206F7220636D642E2028742F63293A2027292E6C6F77657228290A696620636C65617254797065203D3D202774273A0A20202020636C656172203D206C616D6264613A6F732E73797374656D2827636C65617227290A656C696620636C65617254797065203D3D202763273A0A20202020636C656172203D206C616D6264613A6F732E73797374656D2827636C7327290A656C73653A0A202020207072696E742827496E76616C696420696E70757421212127290A202020207379732E6578697428290A202020200A6966207379732E76657273696F6E5F696E666F5B305D203C20333A0A2020202074656C6574203D206C616D626461203A6F732E73797374656D282770697020696E7374616C6C202D552074656C6574686F6E27290A656C6966207379732E76657273696F6E5F696E666F5B305D203E3D20333A0A2020202074656C6574203D206C616D626461203A6F732E73797374656D28277069703320696E7374616C6C202D552074656C6574686F6E27290A0A74656C657428290A74696D652E736C6565702831290A636C65617228290A0A6966206F732E706174682E697366696C652827657266616E346C785F6C6F672E74787427293A0A2020202077697468206F70656E2827657266616E346C785
F6C6F672E747874272C202772272920617320723A0A202020202020202064617461203D20722E726561646C696E657328290A202020206170695F6964203D20646174615B305D0A202020206170695F68617368203D20646174615B315D0A0A656C73653A0A202020206170695F6964203D20696E7075742872652B27456E746572206170695F69643A2027290A202020206170695F68617368203D20696E7075742879652B27456E746572206170695F686173683A2027290A2020202077697468206F70656E2827657266616E346C785F6C6F672E747874272C202777272920617320613A0A2020202020202020612E7772697465286170695F6964202B20275C6E27202B206170695F68617368290A0A636C69656E74203D2054656C656772616D436C69656E742827657266616E346C78272C206170695F69642C206170695F68617368290A0A6173796E6320646566206D61696E28293A0A202020206173796E6320646566206765746D656D28293A0A2020202020202020636C65617228290A20202020202020207072696E7428636F6C6F725465787428777429290A20202020202020207072696E74282727290A20202020202020207072696E74282727290A20202020202020200A20202020202020207072696E742879652B275BC3975D2043686F6F736520796F7572206368616E6E656C20746F20416464206D656D626572732E27290A2020202020202020613D300A2020202020202020666F72206920696E206368616E6E656C3A0A2020202020202020202020207072696E742867722B275B272B7374722861292B275D272C20692E7469746C65290A20202020202020202020202061202B3D20310A20202020202020206F707431203D20696E7428696E7075742879652B27456E7465722061206E756D6265723A202729290A20202020202020206D795F7061727469636970616E7473203D20617761697420636C69656E742E6765745F7061727469636970616E7473286368616E6E656C5B6F7074315D290A20202020202020207461726765745F67726F75705F656E74697479203D20496E707574506565724368616E6E656C286368616E6E656C5B6F7074315D2E69642C206368616E6E656C5B6F7074315D2E6163636573735F68617368290A20202020202020206D795F7061727469636970616E74735F6964203D205B5D0A2020202020202020666F72206D795F7061727469636970616E7420696E206D795F7061727469636970616E74733A0A2020202020202020202020206D795F7061727469636970616E74735F69642E617070656E64286D795F7061727469636970616E742E6964290A202020202020202077697468206F70656E2827657266616E346C785
F6D656D626572732E747874272C202772272920617320723A0A2020202020202020202020207573657273203D206A736F6E2E6C6F61642872290A2020202020202020636F756E74203D20310A202020202020202069203D20300A2020202020202020666F72207573657220696E2075736572733A0A202020202020202020202020696620636F756E74253530203D3D20303A0A20202020202020202020202020202020636C65617228290A202020202020202020202020202020207072696E7428636F6C6F725465787428777429290A202020202020202020202020202020207072696E74282727290A202020202020202020202020202020207072696E74282727290A202020202020202020202020202020207072696E742879652B22706C65617365207761697420666F722031206D696E7574652E2E2E22290A2020202020202020202020202020202074696D652E736C656570283630290A202020202020202020202020656C696620636F756E74203E3D203330303A0A20202020202020202020202020202020617761697420636C69656E742E646973636F6E6E65637428290A20202020202020202020202020202020627265616B0A202020202020202020202020656C69662069203E3D20383A0A20202020202020202020202020202020617761697420636C69656E742E646973636F6E6E65637428290A20202020202020202020202020202020627265616B0A202020202020202020202020636F756E742B3D310A20202020202020202020202074696D652E736C6565702831290A202020202020202020202020696620757365725B27756964275D20696E206D795F7061727469636970616E74735F69643A0A202020202020202020202020202020207072696E742867722B27557365722070726573656E742E20536B697070696E672E27290A20202020202020202020202020202020636F6E74696E75650A202020202020202020202020656C73653A0A202020202020202020202020202020207472793A0A2020202020202020202020202020202020202020757365725F746F5F616464203D20496E707574506565725573657228757365725B27756964275D2C20757365725B276163636573735F68617368275D290A2020202020202020202020202020202020202020616464203D20617761697420636C69656E7428496E76697465546F4368616E6E656C52657175657374287461726765745F67726F75705F656E746974792C5B757365725F746F5F6164645D29290A20202020202020202020202020202020202020207072696E742867722B27416464656420272C2073747228757365725B27756964275D29290A2020202020202020202020202020202020202
0200A202020202020202020202020202020206578636570742050656572466C6F6F644572726F723A0A20202020202020202020202020202020202020207072696E742872652B2247657474696E6720466C6F6F64204572726F722066726F6D2074656C656772616D2E205363726970742069732073746F7070696E67206E6F772E20506C656173652074727920616761696E20616674657220736F6D652074696D652E22290A202020202020202020202020202020202020202069202B3D20310A20202020202020202020202020202020657863657074205573657250726976616379526573747269637465644572726F723A0A20202020202020202020202020202020202020207072696E742872652B225468652075736572277320707269766163792073657474696E677320646F206E6F7420616C6C6F7720796F7520746F20646F20746869732E20536B697070696E672E22290A202020202020202020202020202020202020202069203D20300A202020202020202020202020202020206578636570742055736572426F744572726F723A0A20202020202020202020202020202020202020207072696E742872652B2243616E27742061646420426F742E20536B697070696E672E22290A202020202020202020202020202020202020202069203D20300A2020202020202020202020202020202065786365707420496E7075745573657244656163746976617465644572726F723A0A20202020202020202020202020202020202020207072696E742872652B22546865207370656369666965642075736572207761732064656C657465642E20536B697070696E672E22290A202020202020202020202020202020202020202069203D20300A2020202020202020202020202020202065786365707420557365724368616E6E656C73546F6F4D7563684572726F723A0A20202020202020202020202020202020202020207072696E742872652B225573657220696E20746F6F206D756368206368616E6E656C2E20536B697070696E672E22290A2020202020202020202020202020202065786365707420557365724E6F744D757475616C436F6E746163744572726F723A0A20202020202020202020202020202020202020207072696E742872652B274D757475616C204E6F2E20536B69707065642E27290A202020202020202020202020202020202020202069203D20300A2020202020202020202020202020202065786365707420457863657074696F6E20617320653A0A20202020202020202020202020202020202020207072696E742872652B224572726F723A222C2065290A20202020202020202020202020202020202020207072696E742822547279696E67207
46F20636F6E74696E75652E2E2E22290A202020202020202020202020202020202020202069202B3D20310A2020202020202020202020202020202020202020636F6E74696E75650A202020200A202020207072696E7428636F6C6F725465787428777429290A202020206368617473203D205B5D0A202020206368616E6E656C203D205B5D0A20202020726573756C74203D20617761697420636C69656E74284765744469616C6F677352657175657374280A20202020202020206F66667365745F646174653D4E6F6E652C0A20202020202020206F66667365745F69643D302C0A20202020202020206F66667365745F706565723D496E70757450656572456D70747928292C0A20202020202020206C696D69743D3230302C0A2020202020202020686173683D300A2020202029290A2020202063686174732E657874656E6428726573756C742E6368617473290A20202020666F72206120696E2063686174733A0A20202020202020207472793A0A202020202020202020202020696620547275653A0A202020202020202020202020202020206368616E6E656C2E617070656E642861290A20202020202020206578636570743A0A202020202020202020202020636F6E74696E75650A0A2020202061203D20300A202020207072696E74282727290A202020207072696E74282727290A202020207072696E742879652B2743686F6F736520612067726F757020746F207363726170652A53554C54414E2A2E27290A20202020666F72206920696E206368616E6E656C3A0A20202020202020207072696E742867722B275B272B7374722861292B275D272C20692E7469746C65290A202020202020202061202B3D20310A202020206F70203D20696E7075742879652B27456E7465722061206E756D62657220286F7220707265737320454E54455220746F20736B6970293A2027290A202020206966206F70203D3D2027273A0A20202020202020207072696E742879652B274F6B2E20736B697070696E672E2E2E27290A202020202020202074696D652E736C6565702831290A20202020202020206177616974206765746D656D28290A20202020202020207379732E6578697428290A20202020656C73653A200A2020202020202020706173730A202020206F7074203D20696E74286F70290A202020207072696E74282727290A202020207072696E742879652B275B2B5D204665746368696E67204D656D626572732E2E2E27290A2020202074696D652E736C6565702831290A202020207461726765745F67726F7570203D206368616E6E656C5B6F70745D0A20202020616C6C5F7061727469636970616E7473203D205B5D0A202020206D656D5F64657461696C73203D205
B5D0A20202020616C6C5F7061727469636970616E7473203D20617761697420636C69656E742E6765745F7061727469636970616E7473287461726765745F67726F7570290A20202020666F72207573657220696E20616C6C5F7061727469636970616E74733A0A20202020202020207472793A0A202020202020202020202020696620757365722E757365726E616D653A0A20202020202020202020202020202020757365726E616D65203D20757365722E757365726E616D650A202020202020202020202020656C73653A0A20202020202020202020202020202020757365726E616D65203D2022220A202020202020202020202020696620757365722E66697273745F6E616D653A0A2020202020202020202020202020202066697273746E616D65203D20757365722E66697273745F6E616D650A202020202020202020202020656C73653A0A2020202020202020202020202020202066697273746E616D65203D2022220A202020202020202020202020696620757365722E6C6173745F6E616D653A0A202020202020202020202020202020206C6173746E616D65203D20757365722E6C6173745F6E616D650A202020202020202020202020656C73653A0A202020202020202020202020202020206C6173746E616D65203D2022220A0A2020202020202020202020206E65775F6D656D203D207B0A2020202020202020202020202020202027756964273A20757365722E69642C0A2020202020202020202020202020202027757365726E616D65273A20757365726E616D652C0A202020202020202020202020202020202766697273746E616D65273A2066697273746E616D652C0A20202020202020202020202020202020276C6173746E616D65273A206C6173746E616D652C0A20202020202020202020202020202020276163636573735F68617368273A20757365722E6163636573735F686173680A2020202020202020202020207D0A2020202020202020202020206D656D5F64657461696C732E617070656E64286E65775F6D656D290A20202020202020206578636570742056616C75654572726F723A0A202020202020202020202020636F6E74696E75650A202020200A2020202077697468206F70656E2827657266616E346C785F6D656D626572732E747874272C202777272920617320773A0A20202020202020206A736F6E2E64756D70286D656D5F64657461696C732C2077290A2020202074696D652E736C6565702831290A202020207072696E742879652B27506C6561736520776169742E2E2E2E2E27290A2020202074696D652E736C6565702833290A20202020646F6E65203D20696E7075742867722B275B2B5D204D656D626572732073637261706
564207375636365737366756C6C792E2028507265737320656E74657220746F20416464206D656D626572732927290A202020206177616974206765746D656D28290A0A20202020617761697420636C69656E742E646973636F6E6E65637428290A0A7769746820636C69656E743A0A20202020636C69656E742E6C6F6F702E72756E5F756E74696C5F636F6D706C657465286D61696E2829290A')) | 7,163 | 14,312 | 0.999372 | 6 | 14,326 | 2,386.166667 | 0.833333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.886484 | 0.00014 | 14,326 | 2 | 14,312 | 7,163 | 0.113027 | 0 | 0 | 0 | 0 | 0 | 0.997138 | 0.997138 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 0.5 | 0 | 1 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 12 |
b27b592a02c598722ab5458d9692ad8ca0ce2def | 6,299 | py | Python | src/abaqus/BoundaryCondition/VelocityBCState.py | Haiiliin/PyAbaqus | f20db6ebea19b73059fe875a53be370253381078 | [
"MIT"
] | 7 | 2022-01-21T09:15:45.000Z | 2022-02-15T09:31:58.000Z | src/abaqus/BoundaryCondition/VelocityBCState.py | Haiiliin/PyAbaqus | f20db6ebea19b73059fe875a53be370253381078 | [
"MIT"
] | null | null | null | src/abaqus/BoundaryCondition/VelocityBCState.py | Haiiliin/PyAbaqus | f20db6ebea19b73059fe875a53be370253381078 | [
"MIT"
] | null | null | null | from abaqusConstants import *
from .BoundaryConditionState import BoundaryConditionState
class VelocityBCState(BoundaryConditionState):
    """The VelocityBCState object stores the propagating data for a velocity boundary condition
    in a step. One instance of this object is created internally by the VelocityBC object
    for each step. The instance is also deleted internally by the VelocityBC object.
    The VelocityBCState object has no constructor or methods.
    The VelocityBCState object is derived from the BoundaryConditionState object.
    Attributes
    ----------
    v1: float
        A Float specifying the velocity component in the 1-direction.
    v2: float
        A Float specifying the velocity component in the 2-direction.
    v3: float
        A Float specifying the velocity component in the 3-direction.
    vr1: float
        A Float specifying the rotational velocity component about the 1-direction.
    vr2: float
        A Float specifying the rotational velocity component about the 2-direction.
    vr3: float
        A Float specifying the rotational velocity component about the 3-direction.
    v1State: SymbolicConstant
        A SymbolicConstant specifying the propagation state of the velocity component in the
        1-direction. Possible values are UNSET, SET, UNCHANGED, FREED, and MODIFIED.
    v2State: SymbolicConstant
        A SymbolicConstant specifying the propagation state of the velocity component in the
        2-direction. Possible values are UNSET, SET, UNCHANGED, FREED, and MODIFIED.
    v3State: SymbolicConstant
        A SymbolicConstant specifying the propagation state of the velocity component in the
        3-direction. Possible values are UNSET, SET, UNCHANGED, FREED, and MODIFIED.
    vr1State: SymbolicConstant
        A SymbolicConstant specifying the propagation state of the rotational velocity component
        about the 1-direction. Possible values are UNSET, SET, UNCHANGED, FREED, and MODIFIED.
    vr2State: SymbolicConstant
        A SymbolicConstant specifying the propagation state of the rotational velocity component
        about the 2-direction. Possible values are UNSET, SET, UNCHANGED, FREED, and MODIFIED.
    vr3State: SymbolicConstant
        A SymbolicConstant specifying the propagation state of the rotational velocity component
        about the 3-direction. Possible values are UNSET, SET, UNCHANGED, FREED, and MODIFIED.
    amplitudeState: SymbolicConstant
        A SymbolicConstant specifying the propagation state of the amplitude reference. Possible
        values are UNSET, SET, UNCHANGED, FREED, and MODIFIED.
    status: SymbolicConstant
        A SymbolicConstant specifying the propagation state of the :py:class:`~abaqus.BoundaryCondition.BoundaryConditionState.BoundaryConditionState` object. Possible values are:
        NOT_YET_ACTIVE
        CREATED
        PROPAGATED
        MODIFIED
        DEACTIVATED
        NO_LONGER_ACTIVE
        TYPE_NOT_APPLICABLE
        INSTANCE_NOT_APPLICABLE
        PROPAGATED_FROM_BASE_STATE
        MODIFIED_FROM_BASE_STATE
        DEACTIVATED_FROM_BASE_STATE
        BUILT_INTO_MODES
    amplitude: str
        A String specifying the name of the amplitude reference. The String is empty if the
        boundary condition has no amplitude reference.
    Notes
    -----
    This object can be accessed by:
    .. code-block:: python
        import load
        mdb.models[name].steps[name].boundaryConditionStates[name]
    The corresponding analysis keywords are:
    - BOUNDARY
    """
    # NOTE: all attributes default to None ('' for amplitude) and are populated
    # internally by the VelocityBC object (see class docstring); the annotations
    # describe the type of the populated value.
    # A Float specifying the velocity component in the 1-direction.
    v1: float = None
    # A Float specifying the velocity component in the 2-direction.
    v2: float = None
    # A Float specifying the velocity component in the 3-direction.
    v3: float = None
    # A Float specifying the rotational velocity component about the 1-direction.
    vr1: float = None
    # A Float specifying the rotational velocity component about the 2-direction.
    vr2: float = None
    # A Float specifying the rotational velocity component about the 3-direction.
    vr3: float = None
    # A SymbolicConstant specifying the propagation state of the velocity component in the
    # 1-direction. Possible values are UNSET, SET, UNCHANGED, FREED, and MODIFIED.
    v1State: SymbolicConstant = None
    # A SymbolicConstant specifying the propagation state of the velocity component in the
    # 2-direction. Possible values are UNSET, SET, UNCHANGED, FREED, and MODIFIED.
    v2State: SymbolicConstant = None
    # A SymbolicConstant specifying the propagation state of the velocity component in the
    # 3-direction. Possible values are UNSET, SET, UNCHANGED, FREED, and MODIFIED.
    v3State: SymbolicConstant = None
    # A SymbolicConstant specifying the propagation state of the rotational velocity component
    # about the 1-direction. Possible values are UNSET, SET, UNCHANGED, FREED, and MODIFIED.
    vr1State: SymbolicConstant = None
    # A SymbolicConstant specifying the propagation state of the rotational velocity component
    # about the 2-direction. Possible values are UNSET, SET, UNCHANGED, FREED, and MODIFIED.
    vr2State: SymbolicConstant = None
    # A SymbolicConstant specifying the propagation state of the rotational velocity component
    # about the 3-direction. Possible values are UNSET, SET, UNCHANGED, FREED, and MODIFIED.
    vr3State: SymbolicConstant = None
    # A SymbolicConstant specifying the propagation state of the amplitude reference. Possible
    # values are UNSET, SET, UNCHANGED, FREED, and MODIFIED.
    amplitudeState: SymbolicConstant = None
    # A SymbolicConstant specifying the propagation state of the BoundaryConditionState object. Possible values are:
    # NOT_YET_ACTIVE
    # CREATED
    # PROPAGATED
    # MODIFIED
    # DEACTIVATED
    # NO_LONGER_ACTIVE
    # TYPE_NOT_APPLICABLE
    # INSTANCE_NOT_APPLICABLE
    # PROPAGATED_FROM_BASE_STATE
    # MODIFIED_FROM_BASE_STATE
    # DEACTIVATED_FROM_BASE_STATE
    # BUILT_INTO_MODES
    status: SymbolicConstant = None
    # A String specifying the name of the amplitude reference. The String is empty if the
    # boundary condition has no amplitude reference.
    amplitude: str = ''
| 43.743056 | 179 | 0.73472 | 748 | 6,299 | 6.136364 | 0.147059 | 0.084967 | 0.094118 | 0.104575 | 0.846841 | 0.833333 | 0.833333 | 0.833333 | 0.833333 | 0.815468 | 0 | 0.009806 | 0.222893 | 6,299 | 143 | 180 | 44.048951 | 0.927886 | 0.825052 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.111111 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 9 |
b27cc5a4668e757ca0f7e04df2ad74b5ad64851d | 16,270 | py | Python | azure-cognitiveservices-vision-contentmoderator/azure/cognitiveservices/vision/contentmoderator/operations/list_management_image_operations.py | v-Ajnava/azure-sdk-for-python | a1f6f80eb5869c5b710e8bfb66146546697e2a6f | [
"MIT"
] | 4 | 2016-06-17T23:25:29.000Z | 2022-03-30T22:37:45.000Z | azure-cognitiveservices-vision-contentmoderator/azure/cognitiveservices/vision/contentmoderator/operations/list_management_image_operations.py | v-Ajnava/azure-sdk-for-python | a1f6f80eb5869c5b710e8bfb66146546697e2a6f | [
"MIT"
] | 2 | 2016-09-30T21:40:24.000Z | 2017-11-10T18:16:18.000Z | azure-cognitiveservices-vision-contentmoderator/azure/cognitiveservices/vision/contentmoderator/operations/list_management_image_operations.py | v-Ajnava/azure-sdk-for-python | a1f6f80eb5869c5b710e8bfb66146546697e2a6f | [
"MIT"
] | 3 | 2016-05-03T20:49:46.000Z | 2017-10-05T21:05:27.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.pipeline import ClientRawResponse
from .. import models
class ListManagementImageOperations(object):
    """ListManagementImageOperations operations.
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """
    # NOTE: AutoRest-generated client class (see module header); manual edits
    # may be lost when the client is regenerated.
    models = models  # expose the generated models module as a class attribute
    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self.config = config
    def add_image(
            self, list_id, tag=None, label=None, custom_headers=None, raw=False, **operation_config):
        """Add an image to the list with list Id equal to list Id passed.
        :param list_id: List Id of the image list.
        :type list_id: str
        :param tag: Tag for the image.
        :type tag: int
        :param label: The image label.
        :type label: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
        deserialized response
        :param operation_config: :ref:`Operation configuration
        overrides<msrest:optionsforoperations>`.
        :return: Image or ClientRawResponse if raw=true
        :rtype: ~azure.cognitiveservices.vision.contentmoderator.models.Image
        or ~msrest.pipeline.ClientRawResponse
        :raises:
        :class:`APIErrorException<azure.cognitiveservices.vision.contentmoderator.models.APIErrorException>`
        """
        # Construct URL
        url = '/contentmoderator/lists/v1.0/imagelists/{listId}/images'
        path_format_arguments = {
            'baseUrl': self._serialize.url("self.config.base_url", self.config.base_url, 'str', skip_quote=True),
            'listId': self._serialize.url("list_id", list_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters (tag/label are optional query args)
        query_parameters = {}
        if tag is not None:
            query_parameters['tag'] = self._serialize.query("tag", tag, 'int')
        if label is not None:
            query_parameters['label'] = self._serialize.query("label", label, 'str')
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if custom_headers:
            header_parameters.update(custom_headers)
        # Construct and send request
        request = self._client.post(url, query_parameters)
        response = self._client.send(request, header_parameters, stream=False, **operation_config)
        if response.status_code not in [200]:
            raise models.APIErrorException(self._deserialize, response)
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('Image', response)
        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response
        return deserialized
    def delete_all_images(
            self, list_id, custom_headers=None, raw=False, **operation_config):
        """Deletes all images from the list with list Id equal to list Id passed.
        :param list_id: List Id of the image list.
        :type list_id: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
        deserialized response
        :param operation_config: :ref:`Operation configuration
        overrides<msrest:optionsforoperations>`.
        :return: str or ClientRawResponse if raw=true
        :rtype: str or ~msrest.pipeline.ClientRawResponse
        :raises:
        :class:`APIErrorException<azure.cognitiveservices.vision.contentmoderator.models.APIErrorException>`
        """
        # Construct URL
        url = '/contentmoderator/lists/v1.0/imagelists/{listId}/images'
        path_format_arguments = {
            'baseUrl': self._serialize.url("self.config.base_url", self.config.base_url, 'str', skip_quote=True),
            'listId': self._serialize.url("list_id", list_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if custom_headers:
            header_parameters.update(custom_headers)
        # Construct and send request
        request = self._client.delete(url, query_parameters)
        response = self._client.send(request, header_parameters, stream=False, **operation_config)
        if response.status_code not in [200]:
            raise models.APIErrorException(self._deserialize, response)
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('str', response)
        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response
        return deserialized
    def get_all_image_ids(
            self, list_id, custom_headers=None, raw=False, **operation_config):
        """Gets all image Ids from the list with list Id equal to list Id passed.
        :param list_id: List Id of the image list.
        :type list_id: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
        deserialized response
        :param operation_config: :ref:`Operation configuration
        overrides<msrest:optionsforoperations>`.
        :return: ImageIds or ClientRawResponse if raw=true
        :rtype:
        ~azure.cognitiveservices.vision.contentmoderator.models.ImageIds or
        ~msrest.pipeline.ClientRawResponse
        :raises:
        :class:`APIErrorException<azure.cognitiveservices.vision.contentmoderator.models.APIErrorException>`
        """
        # Construct URL
        url = '/contentmoderator/lists/v1.0/imagelists/{listId}/images'
        path_format_arguments = {
            'baseUrl': self._serialize.url("self.config.base_url", self.config.base_url, 'str', skip_quote=True),
            'listId': self._serialize.url("list_id", list_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if custom_headers:
            header_parameters.update(custom_headers)
        # Construct and send request
        request = self._client.get(url, query_parameters)
        response = self._client.send(request, header_parameters, stream=False, **operation_config)
        if response.status_code not in [200]:
            raise models.APIErrorException(self._deserialize, response)
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('ImageIds', response)
        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response
        return deserialized
    def delete_image(
            self, list_id, image_id, custom_headers=None, raw=False, **operation_config):
        """Deletes an image from the list with list Id and image Id passed.
        :param list_id: List Id of the image list.
        :type list_id: str
        :param image_id: Id of the image.
        :type image_id: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
        deserialized response
        :param operation_config: :ref:`Operation configuration
        overrides<msrest:optionsforoperations>`.
        :return: str or ClientRawResponse if raw=true
        :rtype: str or ~msrest.pipeline.ClientRawResponse
        :raises:
        :class:`APIErrorException<azure.cognitiveservices.vision.contentmoderator.models.APIErrorException>`
        """
        # Construct URL
        url = '/contentmoderator/lists/v1.0/imagelists/{listId}/images/{ImageId}'
        path_format_arguments = {
            'baseUrl': self._serialize.url("self.config.base_url", self.config.base_url, 'str', skip_quote=True),
            'listId': self._serialize.url("list_id", list_id, 'str'),
            'ImageId': self._serialize.url("image_id", image_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if custom_headers:
            header_parameters.update(custom_headers)
        # Construct and send request
        request = self._client.delete(url, query_parameters)
        response = self._client.send(request, header_parameters, stream=False, **operation_config)
        if response.status_code not in [200]:
            raise models.APIErrorException(self._deserialize, response)
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('str', response)
        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response
        return deserialized
    def add_image_url_input(
            self, list_id, content_type, tag=None, label=None, data_representation="URL", value=None, custom_headers=None, raw=False, **operation_config):
        """Add an image to the list with list Id equal to list Id passed.
        :param list_id: List Id of the image list.
        :type list_id: str
        :param content_type: The content type.
        :type content_type: str
        :param tag: Tag for the image.
        :type tag: int
        :param label: The image label.
        :type label: str
        :param data_representation:
        :type data_representation: str
        :param value:
        :type value: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
        deserialized response
        :param operation_config: :ref:`Operation configuration
        overrides<msrest:optionsforoperations>`.
        :return: Image or ClientRawResponse if raw=true
        :rtype: ~azure.cognitiveservices.vision.contentmoderator.models.Image
        or ~msrest.pipeline.ClientRawResponse
        :raises:
        :class:`APIErrorException<azure.cognitiveservices.vision.contentmoderator.models.APIErrorException>`
        """
        # wrap the URL reference in the generated body model
        image_url = models.BodyModel(data_representation=data_representation, value=value)
        # Construct URL
        url = '/contentmoderator/lists/v1.0/imagelists/{listId}/images'
        path_format_arguments = {
            'baseUrl': self._serialize.url("self.config.base_url", self.config.base_url, 'str', skip_quote=True),
            'listId': self._serialize.url("list_id", list_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        if tag is not None:
            query_parameters['tag'] = self._serialize.query("tag", tag, 'int')
        if label is not None:
            query_parameters['label'] = self._serialize.query("label", label, 'str')
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if custom_headers:
            header_parameters.update(custom_headers)
        # NOTE: the caller-supplied content_type overwrites the JSON
        # Content-Type assigned above (and any custom_headers value).
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        # Construct body
        body_content = self._serialize.body(image_url, 'BodyModel')
        # Construct and send request
        request = self._client.post(url, query_parameters)
        response = self._client.send(
            request, header_parameters, body_content, stream=False, **operation_config)
        if response.status_code not in [200]:
            raise models.APIErrorException(self._deserialize, response)
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('Image', response)
        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response
        return deserialized
    def add_image_file_input(
            self, list_id, image_stream, tag=None, label=None, custom_headers=None, raw=False, callback=None, **operation_config):
        """Add an image to the list with list Id equal to list Id passed.
        :param list_id: List Id of the image list.
        :type list_id: str
        :param image_stream: The image file.
        :type image_stream: Generator
        :param tag: Tag for the image.
        :type tag: int
        :param label: The image label.
        :type label: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
        deserialized response
        :param callback: When specified, will be called with each chunk of
        data that is streamed. The callback should take two arguments, the
        bytes of the current chunk of data and the response object. If the
        data is uploading, response will be None.
        :type callback: Callable[Bytes, response=None]
        :param operation_config: :ref:`Operation configuration
        overrides<msrest:optionsforoperations>`.
        :return: Image or ClientRawResponse if raw=true
        :rtype: ~azure.cognitiveservices.vision.contentmoderator.models.Image
        or ~msrest.pipeline.ClientRawResponse
        :raises:
        :class:`APIErrorException<azure.cognitiveservices.vision.contentmoderator.models.APIErrorException>`
        """
        # Construct URL
        url = '/contentmoderator/lists/v1.0/imagelists/{listId}/images'
        path_format_arguments = {
            'baseUrl': self._serialize.url("self.config.base_url", self.config.base_url, 'str', skip_quote=True),
            'listId': self._serialize.url("list_id", list_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        if tag is not None:
            query_parameters['tag'] = self._serialize.query("tag", tag, 'int')
        if label is not None:
            query_parameters['label'] = self._serialize.query("label", label, 'str')
        # Construct headers
        # default Content-Type for the binary upload; custom_headers
        # (applied below) can override it.
        header_parameters = {}
        header_parameters['Content-Type'] = 'image/gif'
        if custom_headers:
            header_parameters.update(custom_headers)
        # Construct body (streamed upload, chunked through callback)
        body_content = self._client.stream_upload(image_stream, callback)
        # Construct and send request
        request = self._client.post(url, query_parameters)
        response = self._client.send(
            request, header_parameters, body_content, stream=False, **operation_config)
        if response.status_code not in [200]:
            raise models.APIErrorException(self._deserialize, response)
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('Image', response)
        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response
        return deserialized
| 40.982368 | 154 | 0.654026 | 1,799 | 16,270 | 5.749861 | 0.093385 | 0.027262 | 0.020108 | 0.013921 | 0.848511 | 0.843968 | 0.841937 | 0.841937 | 0.841937 | 0.825696 | 0 | 0.00442 | 0.24917 | 16,270 | 396 | 155 | 41.085859 | 0.842338 | 0.356853 | 0 | 0.796512 | 0 | 0 | 0.105307 | 0.035662 | 0 | 0 | 0 | 0 | 0 | 1 | 0.040698 | false | 0 | 0.011628 | 0 | 0.133721 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
a264b5eff2e7749014a56d8b704f4805b6c50f76 | 142 | py | Python | reinforcement_learning/sac/__init__.py | mizolotu/DonkeyCarExperiments | 3d6be742915efe51c0f5abda4c69a4349a555373 | [
"MIT"
] | null | null | null | reinforcement_learning/sac/__init__.py | mizolotu/DonkeyCarExperiments | 3d6be742915efe51c0f5abda4c69a4349a555373 | [
"MIT"
] | null | null | null | reinforcement_learning/sac/__init__.py | mizolotu/DonkeyCarExperiments | 3d6be742915efe51c0f5abda4c69a4349a555373 | [
"MIT"
] | null | null | null | from reinforcement_learning.sac.sac import SAC
from reinforcement_learning.sac.policies import MlpPolicy, CnnPolicy, LnMlpPolicy, LnCnnPolicy
| 47.333333 | 94 | 0.873239 | 17 | 142 | 7.176471 | 0.588235 | 0.278689 | 0.409836 | 0.459016 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.077465 | 142 | 2 | 95 | 71 | 0.931298 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
a272030c4bcac23a18317d69b637b66ce7f44e2a | 201,081 | py | Python | lib/kb_hmmer/kb_hmmerImpl.py | kbaseapps/kb_hmmer | 3527ec7cd6839ff217d8c96d2cc0b9bff4d62b08 | [
"MIT"
] | 1 | 2020-01-13T19:29:54.000Z | 2020-01-13T19:29:54.000Z | lib/kb_hmmer/kb_hmmerImpl.py | kbaseapps/kb_hmmer | 3527ec7cd6839ff217d8c96d2cc0b9bff4d62b08 | [
"MIT"
] | 47 | 2018-09-25T19:17:57.000Z | 2021-03-03T07:17:07.000Z | lib/kb_hmmer/kb_hmmerImpl.py | kbaseapps/kb_hmmer | 3527ec7cd6839ff217d8c96d2cc0b9bff4d62b08 | [
"MIT"
] | 5 | 2017-05-03T22:16:44.000Z | 2020-09-28T21:13:58.000Z | # -*- coding: utf-8 -*-
#BEGIN_HEADER
import os
import sys
import shutil
import hashlib
import subprocess
import requests
import re
import traceback
import uuid
from datetime import datetime
import json
from pprint import pprint, pformat
import numpy as np
import math
import gzip
from installed_clients.WorkspaceClient import Workspace
# SDK Utils
from installed_clients.KBaseDataObjectToFileUtilsClient import KBaseDataObjectToFileUtils
from installed_clients.DataFileUtilClient import DataFileUtil as DFUClient
from installed_clients.KBaseReportClient import KBaseReport
# HMMER Utils
from kb_hmmer.Utils.HmmerUtil import HmmerUtil
# silence whining
import requests
requests.packages.urllib3.disable_warnings()
#END_HEADER
class kb_hmmer:
    '''
    Module Name:
    kb_hmmer
    Module Description:
    ** A KBase module: kb_hmmer
    **
    ** This module contains HMMER Hidden Markov Model Sequence Search and Alignment
    **
    '''
    ######## WARNING FOR GEVENT USERS ####### noqa
    # Since asynchronous IO can lead to methods - even the same method -
    # interrupting each other, you must be *very* careful when using global
    # state. A method could easily clobber the state set by another while
    # the latter method is running.
    ######################################### noqa
    # module version / provenance metadata
    VERSION = "1.8.0"
    GIT_URL = "https://github.com/kbaseapps/kb_hmmer"
    GIT_COMMIT_HASH = "bd8914359e506b3cb3b8f49855d50d325463a664"
    #BEGIN_CLASS_HEADER
    # service endpoint placeholders; actual values are assigned in __init__
    workspaceURL = None
    shockURL = None
    handleURL = None
    callbackURL = None
    scratch = None
    # locations of the HMMER executables (installed under /kb/module/hmmer/bin)
    HMMER_BIN = os.path.join(os.sep, 'kb', 'module', 'hmmer', 'bin')
    HMMER_BUILD = os.path.join(HMMER_BIN, 'hmmbuild')  # construct profile HMM(s) from MSA(s)
    HMMER_MAKE_DB = os.path.join(HMMER_BIN, 'makehmmerdb')  # build a HMMER binary db from a seq file
    HMMER_SEARCH = os.path.join(HMMER_BIN, 'hmmsearch')  # search profile(s) against a sequence db
    HMMER_PHMMER = os.path.join(HMMER_BIN, 'phmmer')  # search protein sequence(s) against a protein sequence db
    HMMER_NHMMER = os.path.join(HMMER_BIN, 'nhmmer')  # search nuc sequence(s) against a nuc sequence db
    HMMER_JACKHAMMER = os.path.join(HMMER_BIN, 'jackhmmer')  # iteratively search sequence(s) against a protein db
    # currently unused HMMER tools kept for reference:
    #HMMER_ALIGN = '/kb/module/hmmer/binaries/hmmalign'   # align sequences to a profile HMM
    #HMMER_PRESS = '/kb/module/hmmer/binaries/hmmpress'   # prepare HMM db for hmmscan
    #HMMER_SCAN = '/kb/module/hmmer/binaries/hmmscan'   # scan prot sequence(s) against protein profile db
    #HMMER_NSCAN = '/kb/module/hmmer/binaries/nhmmscan'   # scan nuc sequence(s) against nuc profile db
    # target is a list for collecting log messages
def log(self, target, message):
# we should do something better here...
if target is not None:
target.append(message)
print(message)
sys.stdout.flush()
# Helper script borrowed from the transform service, logger removed
#
def _parse_genome_and_feature_id_from_hit_id(self,
hit_id,
target_type,
target_ref,
genome_id_feature_id_delim):
genome_ref = None
feature_id = None
if target_type == 'Genome' or target_type == 'AnnotatedMetagenomeAssembly':
genome_ref = target_ref
feature_id = hit_id
else:
[genome_ref, feature_id] = hit_id.split(genome_id_feature_id_delim)
return [genome_ref, feature_id]
def _check_MSA_sequence_type_correct(self, MSA_in, row_order, seq_type):
PROT_MSA_pattern = re.compile("^[\.\-_acdefghiklmnpqrstvwyACDEFGHIKLMNPQRSTVWYxX ]+$")
DNA_MSA_pattern = re.compile("^[\.\-_ACGTUXNRYSWKMBDHVacgtuxnryswkmbdhv \t\n]+$")
this_appropriate_sequence_found_in_MSA_input = True
msa_invalid_msgs = []
# Check for PROTEIN sequence type
#
if seq_type.startswith('P') or seq_type.startswith('p'):
if 'sequence_type' in MSA_in and (MSA_in['sequence_type'] == 'dna' or MSA_in['sequence_type'] == 'DNA'):
this_appropriate_sequence_found_in_MSA_input = False
else:
for row_id in row_order:
#self.log(console, row_id+": '"+MSA_in['alignment'][row_id]+"'") # DEBUG
if DNA_MSA_pattern.match(MSA_in['alignment'][row_id]):
self.log(msa_invalid_msgs,
"Finding nucleotide instead of protein sequences in MSA. " +
"BAD record for MSA row_id: " + row_id + "\n" + MSA_in['alignment'][row_id] + "\n")
this_appropriate_sequence_found_in_MSA_input = False
break
elif not PROT_MSA_pattern.match(MSA_in['alignment'][row_id]):
self.log(msa_invalid_msgs,
"Not finding protein sequence in MSA. " +
"BAD record for MSA row_id: " + row_id + "\n" + MSA_in['alignment'][row_id] + "\n")
this_appropriate_sequence_found_in_MSA_input = False
break
# Check for NUCLEOTIDE sequence type
#
elif seq_type.startswith('N') or seq_type.startswith('n'):
if 'sequence_type' in MSA_in and (MSA_in['sequence_type'] != 'dna' and MSA_in['sequence_type'] != 'DNA'):
this_appropriate_sequence_found_in_MSA_input = False
else:
for row_id in row_order:
#self.log(console, row_id+": '"+MSA_in['alignment'][row_id]+"'") # DEBUG
if not DNA_MSA_pattern.match(MSA_in['alignment'][row_id]):
self.log(msa_invalid_msgs,
"Not Finding nucleotide in MSA. " +
"BAD record for MSA row_id: " + row_id + "\n" + MSA_in['alignment'][row_id] + "\n")
this_appropriate_sequence_found_in_MSA_input = False
break
elif PROT_MSA_pattern.match(MSA_in['alignment'][row_id]):
self.log(msa_invalid_msgs,
"Finding protein sequence instead of nucleotide sequences in MSA. " +
"BAD record for MSA row_id: " + row_id + "\n" + MSA_in['alignment'][row_id] + "\n")
this_appropriate_sequence_found_in_MSA_input = False
break
else:
raise ValueError("Incorrectly formatted call of _check_MSA_sequence_type_correct() method")
# return sequence type check logical
#
return (this_appropriate_sequence_found_in_MSA_input, msa_invalid_msgs)
#END_CLASS_HEADER
# config contains contents of config file in a hash or None if it couldn't
# be found
def __init__(self, config):
#BEGIN_CONSTRUCTOR
self.config = config
self.config['SDK_CALLBACK_URL'] = os.environ.get('SDK_CALLBACK_URL')
self.config['KB_AUTH_TOKEN'] = os.environ.get('KB_AUTH_TOKEN')
self.workspaceURL = config['workspace-url']
self.shockURL = config['shock-url']
self.handleURL = config['handle-service-url']
self.serviceWizardURL = config['service-wizard-url']
# self.callbackURL = os.environ['SDK_CALLBACK_URL'] if os.environ['SDK_CALLBACK_URL'] != None else 'https://kbase.us/services/njs_wrapper'
self.callbackURL = os.environ.get('SDK_CALLBACK_URL')
if self.callbackURL == None:
raise ValueError("SDK_CALLBACK_URL not set in environment")
self.scratch = os.path.abspath(config['scratch'])
if self.scratch == None:
self.scratch = os.path.join('/kb', 'module', 'local_scratch')
if not os.path.exists(self.scratch):
os.makedirs(self.scratch)
# set i/o dirs
timestamp = int((datetime.utcnow() - datetime.utcfromtimestamp(0)).total_seconds() * 1000)
self.input_dir = os.path.join(self.scratch, 'input.' + str(timestamp))
self.output_dir = os.path.join(self.scratch, 'output.' + str(timestamp))
if not os.path.exists(self.input_dir):
os.makedirs(self.input_dir)
if not os.path.exists(self.output_dir):
os.makedirs(self.output_dir)
#END_CONSTRUCTOR
pass
def HMMER_MSA_Search(self, ctx, params):
"""
Method for HMMER search of an MSA against many sequences
**
** overloading as follows:
** input_msa_ref: MSA
** input_many_ref: SequenceSet, FeatureSet, Genome, GenomeSet, AMA (note: SequenceSet deactivated)
** output_name: SequenceSet (if input_many is SequenceSet), (else) FeatureSet
:param params: instance of type "HMMER_Params" (HMMER Input Params)
-> structure: parameter "workspace_name" of type "workspace_name"
(** The workspace object refs are of form: ** ** objects =
ws.get_objects([{'ref':
params['workspace_id']+'/'+params['obj_name']}]) ** ** "ref" means
the entire name combining the workspace id and the object name **
"id" is a numerical identifier of the workspace or object, and
should just be used for workspace ** "name" is a string identifier
of a workspace or object. This is received from Narrative.),
parameter "input_many_ref" of type "data_obj_ref", parameter
"input_msa_ref" of type "data_obj_ref", parameter
"output_filtered_name" of type "data_obj_name", parameter
"genome_disp_name_config" of String, parameter "e_value" of
Double, parameter "bitscore" of Double, parameter "model_cov_perc"
of Double, parameter "maxaccepts" of Double
:returns: instance of type "HMMER_Output" (HMMER Output) ->
structure: parameter "report_name" of type "data_obj_name",
parameter "report_ref" of type "data_obj_ref"
"""
# ctx is the context object
# return variables are: returnVal
#BEGIN HMMER_MSA_Search
console = []
invalid_msgs = []
msa_invalid_msgs = []
search_tool_name = 'HMMER_MSA_prot'
self.log(console, 'Running ' + search_tool_name + '_Search with params=')
self.log(console, "\n" + pformat(params))
report = ''
# report = 'Running '+search_tool_name+'_Search with params='
# report += "\n"+pformat(params)
#appropriate_sequence_found_in_one_input = False
appropriate_sequence_found_in_MSA_input = False
appropriate_sequence_found_in_many_input = False
genome_id_feature_id_delim = '.f:'
# set hmmer_dir
hmmer_dir = os.path.join(self.output_dir, 'hmmer_run')
if not os.path.exists(hmmer_dir):
os.makedirs(hmmer_dir)
#### do some basic checks
#
if 'workspace_name' not in params:
raise ValueError('workspace_name parameter is required')
# if 'input_one_ref' not in params:
# raise ValueError('input_one_ref parameter is required')
if 'input_msa_ref' not in params:
raise ValueError('input_msa_ref parameter is required')
if 'input_many_ref' not in params:
raise ValueError('input_many_ref parameter is required')
if 'genome_disp_name_config' not in params:
raise ValueError('genome_disp_name_config parameter is required')
if 'output_filtered_name' not in params:
raise ValueError('output_filtered_name parameter is required')
# set local names
# input_one_ref = params['input_one_ref']
input_msa_ref = params['input_msa_ref']
input_many_ref = params['input_many_ref']
#### Get the input_msa object
##
# if input_one_feature_id == None:
# self.log(invalid_msgs,"input_one_feature_id was not obtained from Query Object: "+input_one_name)
# master_row_idx = 0
try:
ws = Workspace(self.workspaceURL, token=ctx['token'])
#objects = ws.get_objects([{'ref': input_msa_ref}])
objects = ws.get_objects2({'objects': [{'ref': input_msa_ref}]})['data']
input_msa_data = objects[0]['data']
info = objects[0]['info']
input_msa_name = str(info[1])
msa_type_name = info[2].split('.')[1].split('-')[0]
except Exception as e:
raise ValueError('Unable to fetch input_msa_name object from workspace: ' + str(e))
#to get the full stack trace: traceback.format_exc()
if msa_type_name != 'MSA':
raise ValueError('Cannot yet handle input_msa type of: ' + msa_type_name)
else:
self.log(console, "\n\nPROCESSING MSA " + input_msa_name + "\n") # DEBUG
MSA_in = input_msa_data
row_order = []
default_row_labels = dict()
if 'row_order' in MSA_in.keys():
row_order = MSA_in['row_order']
else:
row_order = sorted(MSA_in['alignment'].keys())
if 'default_row_labels' in MSA_in.keys():
default_row_labels = MSA_in['default_row_labels']
else:
for row_id in row_order:
default_row_labels[row_id] = row_id
# determine row index of query sequence
# for row_id in row_order:
# master_row_idx += 1
# if row_id == input_one_feature_id:
# break
# if master_row_idx == 0:
# self.log(invalid_msgs,"Failed to find query id "+input_one_feature_id+" from Query Object "+input_one_name+" within MSA: "+input_msa_name)
# export features to CLUSTAL formatted MSA (HMMER BUILD seems to only take CLUSTAL)
input_MSA_file_path = os.path.join(hmmer_dir, input_msa_name + ".clustal")
self.log(console, 'writing MSA file: ' + input_MSA_file_path)
# set header
header = 'CLUSTAL W (1.81) multiple sequence alignment'
# get longest id
longest_row_id_len = 0
for row_id in row_order:
if len(row_id) > longest_row_id_len:
longest_row_id_len = len(row_id)
# make sure rows are all same length
row_id_0 = row_order[0]
row_len = len(MSA_in['alignment'][row_id_0])
for row_id in row_order:
if len(MSA_in['alignment'][row_id]) != row_len:
raise ValueError("MSA alignment rows are not constant length")
# get alignment line (just storing identity markers)
conservation_symbol = ''
for i in range(row_len):
first_seen_char = MSA_in['alignment'][row_id_0][i]
symbol = '*'
for row_id in row_order:
if MSA_in['alignment'][row_id][i] == '-' or MSA_in['alignment'][row_id][i] != first_seen_char:
symbol = ' '
break
conservation_symbol += symbol
# break up MSA into 60 char chunks
records = []
chunk_len = 60
whole_chunks = int(math.floor(row_len / chunk_len))
if whole_chunks > 0:
for j in range(whole_chunks):
records.append('')
for row_id in row_order:
padding = ''
if longest_row_id_len - len(row_id) > 0:
for i in range(0, longest_row_id_len - len(row_id)):
padding += ' '
records.append(row_id + padding + " " +
MSA_in['alignment'][row_id][j * chunk_len:(j + 1) * chunk_len])
records.append(''.join([' ' for s in range(longest_row_id_len)]) + " " +
conservation_symbol[j * chunk_len:(j + 1) * chunk_len])
# add final rows
if (row_len % chunk_len) != 0:
j = whole_chunks
records.append('')
for row_id in row_order:
padding = ''
if longest_row_id_len - len(row_id) > 0:
for i in range(0, longest_row_id_len - len(row_id)):
padding += ' '
records.append(row_id + padding + " " +
MSA_in['alignment'][row_id][j * chunk_len:row_len])
records.append(''.join([' ' for s in range(longest_row_id_len)]) + " " +
conservation_symbol[j * chunk_len:row_len])
# write that sucker
with open(input_MSA_file_path, 'w', 0) as input_MSA_file_handle:
input_MSA_file_handle.write(header + "\n")
input_MSA_file_handle.write("\n".join(records) + "\n")
# DEBUG
#report += "MSA:\n"
#report += header+"\n"
#report += "\n".join(records)+"\n"
#self.log(console,report)
# Determine whether nuc or protein sequences
#
self.log(console, "CHECKING MSA for PROTEIN seqs...") # DEBUG
(appropriate_sequence_found_in_MSA_input, these_msa_invalid_msgs) = \
self._check_MSA_sequence_type_correct(MSA_in, row_order, 'PROTEIN')
msa_invalid_msgs.extend(these_msa_invalid_msgs)
#### Get the input_many object
##
try:
ws = Workspace(self.workspaceURL, token=ctx['token'])
#objects = ws.get_objects([{'ref': input_many_ref}])
objects = ws.get_objects2({'objects': [{'ref': input_many_ref}]})['data']
input_many_data = objects[0]['data']
info = objects[0]['info']
input_many_name = str(info[1])
many_type_name = info[2].split('.')[1].split('-')[0]
except Exception as e:
raise ValueError('Unable to fetch input_many_name object from workspace: ' + str(e))
#to get the full stack trace: traceback.format_exc()
# Handle overloading (input_many can be SequenceSet, FeatureSet, Genome, or GenomeSet)
#
if many_type_name == 'SequenceSet':
try:
input_many_sequenceSet = input_many_data
except Exception as e:
print(traceback.format_exc())
raise ValueError('Unable to get SequenceSet: ' + str(e))
header_id = input_many_sequenceSet['sequences'][0]['sequence_id']
many_forward_reads_file_path = os.path.join(self.output_dir, header_id + '.fasta')
many_forward_reads_file_handle = open(many_forward_reads_file_path, 'w', 0)
self.log(console, 'writing reads file: ' + str(many_forward_reads_file_path))
for seq_obj in input_many_sequenceSet['sequences']:
header_id = seq_obj['sequence_id']
sequence_str = seq_obj['sequence']
PROT_pattern = re.compile("^[acdefghiklmnpqrstvwyACDEFGHIKLMNPQRSTVWYxX ]+$")
DNA_pattern = re.compile("^[acgtuACGTUnryNRY ]+$")
if DNA_pattern.match(sequence_str):
self.log(invalid_msgs,
"Require protein sequences for target. " +
"BAD nucleotide record for sequence_id: " + header_id + "\n" + sequence_str + "\n")
continue
elif not PROT_pattern.match(sequence_str):
self.log(invalid_msgs, "BAD record for sequence_id: " + header_id + "\n" + sequence_str + "\n")
continue
appropriate_sequence_found_in_many_input = True
many_forward_reads_file_handle.write('>' + header_id + "\n")
many_forward_reads_file_handle.write(sequence_str + "\n")
many_forward_reads_file_handle.close()
self.log(console, 'done')
# FeatureSet
#
if many_type_name == 'FeatureSet':
# retrieve sequences for features
input_many_featureSet = input_many_data
many_forward_reads_file_dir = self.output_dir
many_forward_reads_file = input_many_name + ".fasta"
# DEBUG
#beg_time = (datetime.utcnow() - datetime.utcfromtimestamp(0)).total_seconds()
FeatureSetToFASTA_params = {
'featureSet_ref': input_many_ref,
'file': many_forward_reads_file,
'dir': many_forward_reads_file_dir,
'console': console,
'invalid_msgs': invalid_msgs,
'residue_type': 'protein',
'feature_type': 'CDS',
'record_id_pattern': '%%genome_ref%%' + genome_id_feature_id_delim + '%%feature_id%%',
'record_desc_pattern': '[%%genome_ref%%]',
'case': 'upper',
'linewrap': 50,
'merge_fasta_files': 'TRUE'
}
#self.log(console,"callbackURL='"+self.callbackURL+"'") # DEBUG
#SERVICE_VER = 'release'
SERVICE_VER = 'dev'
DOTFU = KBaseDataObjectToFileUtils(url=self.callbackURL, token=ctx['token'], service_ver=SERVICE_VER)
FeatureSetToFASTA_retVal = DOTFU.FeatureSetToFASTA(FeatureSetToFASTA_params)
many_forward_reads_file_path = FeatureSetToFASTA_retVal['fasta_file_path']
feature_ids_by_genome_ref = FeatureSetToFASTA_retVal['feature_ids_by_genome_ref']
if len(feature_ids_by_genome_ref.keys()) > 0:
appropriate_sequence_found_in_many_input = True
# DEBUG
#end_time = (datetime.utcnow() - datetime.utcfromtimestamp(0)).total_seconds()
#self.log(console, "FeatureSetToFasta() took "+str(end_time-beg_time)+" secs")
# Genome
#
elif many_type_name == 'Genome':
many_forward_reads_file_dir = self.output_dir
many_forward_reads_file = input_many_name + ".fasta"
# DEBUG
#beg_time = (datetime.utcnow() - datetime.utcfromtimestamp(0)).total_seconds()
GenomeToFASTA_params = {
'genome_ref': input_many_ref,
'file': many_forward_reads_file,
'dir': many_forward_reads_file_dir,
'console': console,
'invalid_msgs': invalid_msgs,
'residue_type': 'protein',
'feature_type': 'CDS',
'record_id_pattern': '%%feature_id%%',
'record_desc_pattern': '[%%genome_id%%]',
'case': 'upper',
'linewrap': 50
}
#self.log(console,"callbackURL='"+self.callbackURL+"'") # DEBUG
#SERVICE_VER = 'release'
SERVICE_VER = 'dev'
DOTFU = KBaseDataObjectToFileUtils(url=self.callbackURL, token=ctx['token'], service_ver=SERVICE_VER)
GenomeToFASTA_retVal = DOTFU.GenomeToFASTA(GenomeToFASTA_params)
many_forward_reads_file_path = GenomeToFASTA_retVal['fasta_file_path']
feature_ids = GenomeToFASTA_retVal['feature_ids']
if len(feature_ids) > 0:
appropriate_sequence_found_in_many_input = True
# DEBUG
#end_time = (datetime.utcnow() - datetime.utcfromtimestamp(0)).total_seconds()
#self.log(console, "Genome2Fasta() took "+str(end_time-beg_time)+" secs")
# GenomeSet
#
elif many_type_name == 'GenomeSet':
input_many_genomeSet = input_many_data
many_forward_reads_file_dir = self.output_dir
many_forward_reads_file = input_many_name + ".fasta"
# DEBUG
#beg_time = (datetime.utcnow() - datetime.utcfromtimestamp(0)).total_seconds()
GenomeSetToFASTA_params = {
'genomeSet_ref': input_many_ref,
'file': many_forward_reads_file,
'dir': many_forward_reads_file_dir,
'console': console,
'invalid_msgs': invalid_msgs,
'residue_type': 'protein',
'feature_type': 'CDS',
'record_id_pattern': '%%genome_ref%%' + genome_id_feature_id_delim + '%%feature_id%%',
'record_desc_pattern': '[%%genome_ref%%]',
'case': 'upper',
'linewrap': 50,
'merge_fasta_files': 'TRUE'
}
#self.log(console,"callbackURL='"+self.callbackURL+"'") # DEBUG
#SERVICE_VER = 'release'
SERVICE_VER = 'dev'
DOTFU = KBaseDataObjectToFileUtils(url=self.callbackURL, token=ctx['token'], service_ver=SERVICE_VER)
GenomeSetToFASTA_retVal = DOTFU.GenomeSetToFASTA(GenomeSetToFASTA_params)
many_forward_reads_file_path = GenomeSetToFASTA_retVal['fasta_file_path_list'][0]
feature_ids_by_genome_id = GenomeSetToFASTA_retVal['feature_ids_by_genome_id']
if len(feature_ids_by_genome_id.keys()) > 0:
appropriate_sequence_found_in_many_input = True
# DEBUG
#end_time = (datetime.utcnow() - datetime.utcfromtimestamp(0)).total_seconds()
#self.log(console, "FeatureSetToFasta() took "+str(end_time-beg_time)+" secs")
# AnnotatedMetagenomeAssembly
#
elif many_type_name == 'AnnotatedMetagenomeAssembly':
many_forward_reads_file_dir = self.output_dir
many_forward_reads_file = input_many_name + ".fasta"
# DEBUG
#beg_time = (datetime.utcnow() - datetime.utcfromtimestamp(0)).total_seconds()
AnnotatedMetagenomeAssemblyToFASTA_params = {
'ama_ref': input_many_ref,
'file': many_forward_reads_file,
'dir': many_forward_reads_file_dir,
'console': console,
'invalid_msgs': invalid_msgs,
'residue_type': 'protein',
'feature_type': 'CDS',
'record_id_pattern': '%%feature_id%%',
'record_desc_pattern': '[%%genome_id%%]',
'case': 'upper',
'linewrap': 50
}
#self.log(console,"callbackURL='"+self.callbackURL+"'") # DEBUG
#SERVICE_VER = 'release'
SERVICE_VER = 'beta'
DOTFU = KBaseDataObjectToFileUtils(url=self.callbackURL, token=ctx['token'], service_ver=SERVICE_VER)
AnnotatedMetagenomeAssemblyToFASTA_retVal = DOTFU.AnnotatedMetagenomeAssemblyToFASTA (AnnotatedMetagenomeAssemblyToFASTA_params)
many_forward_reads_file_path = AnnotatedMetagenomeAssemblyToFASTA_retVal['fasta_file_path']
feature_ids = AnnotatedMetagenomeAssemblyToFASTA_retVal['feature_ids']
if len(feature_ids) > 0:
appropriate_sequence_found_in_many_input = True
genome_refs = [input_many_ref]
# DEBUG
#end_time = (datetime.utcnow() - datetime.utcfromtimestamp(0)).total_seconds()
#self.log(console, "Genome2Fasta() took "+str(end_time-beg_time)+" secs")
# Missing proper input_many_type
#
else:
raise ValueError('Cannot yet handle input_many type of: ' + many_type_name)
# Get total number of sequences in input_many search db
#
seq_total = 0
if many_type_name == 'SequenceSet':
seq_total = len(input_many_sequenceSet['sequences'])
elif many_type_name == 'FeatureSet':
seq_total = len(input_many_featureSet['elements'].keys())
elif many_type_name == 'Genome' or many_type_name == 'AnnotatedMetagenomeAssembly':
seq_total = len(feature_ids)
elif many_type_name == 'GenomeSet':
for genome_id in feature_ids_by_genome_id.keys():
seq_total += len(feature_ids_by_genome_id[genome_id])
# check for failed input file creation
#
# if not appropriate_sequence_found_in_one_input:
# self.log(invalid_msgs,"no protein sequences found in '"+input_one_name+"'")
if not appropriate_sequence_found_in_MSA_input:
self.log(invalid_msgs, "Protein sequences not found in '" + input_msa_name + "'")
if not appropriate_sequence_found_in_many_input:
self.log(invalid_msgs, "Protein sequences not found in '" + input_many_name + "'")
# input data failed validation. Need to return
#
if len(invalid_msgs) > 0:
# load the method provenance from the context object
#
self.log(console, "SETTING PROVENANCE") # DEBUG
provenance = [{}]
if 'provenance' in ctx:
provenance = ctx['provenance']
# add additional info to provenance here, in this case the input data object reference
provenance[0]['input_ws_objects'] = []
# provenance[0]['input_ws_objects'].append(input_one_ref)
provenance[0]['input_ws_objects'].append(input_msa_ref)
provenance[0]['input_ws_objects'].append(input_many_ref)
provenance[0]['service'] = 'kb_hmmer'
provenance[0]['method'] = search_tool_name + '_Search'
# build output report object
#
self.log(console, "BUILDING REPORT") # DEBUG
report += "FAILURE:\n\n" + "\n".join(invalid_msgs) + "\n"
reportObj = {
'objects_created': [],
'text_message': report
}
reportName = 'hmmer_report_' + str(uuid.uuid4())
ws = Workspace(self.workspaceURL, token=ctx['token'])
report_obj_info = ws.save_objects({
#'id':info[6],
'workspace': params['workspace_name'],
'objects': [
{
'type': 'KBaseReport.Report',
'data': reportObj,
'name': reportName,
'meta': {},
'hidden': 1,
'provenance': provenance # DEBUG
}
]
})[0]
self.log(console, "BUILDING RETURN OBJECT")
returnVal = {'report_name': reportName,
'report_ref': str(report_obj_info[6]) + '/' + str(report_obj_info[0]) + '/' + str(report_obj_info[4]),
}
self.log(console, search_tool_name + "_Search DONE")
return [returnVal]
# Set output paths
#output_aln_file_path = os.path.join(hmmer_dir, 'alnout.txt');
#output_extra_file_path = os.path.join(hmmer_dir, 'alnout_extra.txt');
#output_filtered_fasta_file_path = os.path.join(hmmer_dir, 'output_filtered.faa');
# Build HMM from MSA
#
# SYNTAX (from http://eddylab.org/software/hmmer3/3.1b2/Userguide.pdf)
#
# hmmbuild --informat fasta <hmmfile.out> <msafile>
#
hmmer_build_bin = self.HMMER_BUILD
hmmer_build_cmd = [hmmer_build_bin]
# check for necessary files
if not os.path.isfile(hmmer_build_bin):
raise ValueError("no such file '" + hmmer_build_bin + "'")
if not os.path.isfile(input_MSA_file_path):
raise ValueError("no such file '" + input_MSA_file_path + "'")
elif not os.path.getsize(input_MSA_file_path) > 0:
raise ValueError("empty file '" + input_MSA_file_path + "'")
HMM_file_path = input_MSA_file_path + ".HMM"
hmmer_build_cmd.append('--informat')
hmmer_build_cmd.append('CLUSTAL')
hmmer_build_cmd.append(HMM_file_path)
hmmer_build_cmd.append(input_MSA_file_path)
# Run HMMER_BUILD, capture output as it happens
#
self.log(console, 'RUNNING HMMER_BUILD:')
self.log(console, ' ' + ' '.join(hmmer_build_cmd))
# report += "\n"+'running HMMER_BUILD:'+"\n"
# report += ' '+' '.join(hmmer_build_cmd)+"\n"
p = subprocess.Popen(hmmer_build_cmd,
cwd=self.output_dir,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
shell=False)
while True:
line = p.stdout.readline()
if not line:
break
#self.log(console, line.replace('\n', ''))
p.stdout.close()
p.wait()
self.log(console, 'return code: ' + str(p.returncode))
if p.returncode != 0:
raise ValueError('Error running HMMER_BUILD, return code: ' + str(p.returncode) +
'\n\n' + '\n'.join(console))
# Check for HMM output
if not os.path.isfile(HMM_file_path):
raise ValueError("HMMER_BUILD failed to create HMM file '" + HMM_file_path + "'")
elif not os.path.getsize(HMM_file_path) > 0:
raise ValueError("HMMER_BUILD created empty HMM file '" + HMM_file_path + "'")
# get model len
model_len = 0
with open (HMM_file_path, 'r') as HMM_handle:
for HMM_line in HMM_handle.readlines():
if HMM_line.startswith('LENG '):
model_len = int(HMM_line.replace('LENG ','').strip())
break
if model_len == 0:
raise ValueError ("No length found in HMM file")
# DEBUG
#with open (HMM_file_path, 'r') as HMM_file_handle:
# for line in HMM_file_handle.readlines():
# self.log(console, "HMM_FILE: '"+str(line)+"'")
### Construct the HMMER_SEARCH command
#
# SYNTAX (from http://eddylab.org/software/hmmer3/3.1b2/Userguide.pdf)
#
# hmmsearch --tblout <TAB_out> -A <MSA_out> --noali --notextw -E <e_value> -T <bit_score> <hmmfile> <seqdb>
#
hmmer_search_bin = self.HMMER_SEARCH
hmmer_search_cmd = [hmmer_search_bin]
# check for necessary files
if not os.path.isfile(hmmer_search_bin):
raise ValueError("no such file '" + hmmer_search_bin + "'")
if not os.path.isfile(HMM_file_path):
raise ValueError("no such file '" + HMM_file_path + "'")
elif not os.path.getsize(HMM_file_path):
raise ValueError("empty file '" + HMM_file_path + "'")
if not os.path.isfile(many_forward_reads_file_path):
raise ValueError("no such file '" + many_forward_reads_file_path + "'")
elif not os.path.getsize(many_forward_reads_file_path):
raise ValueError("empty file '" + many_forward_reads_file_path + "'")
output_hit_TAB_file_path = os.path.join(hmmer_dir, 'hitout.txt')
output_hit_MSA_file_path = os.path.join(hmmer_dir, 'msaout.txt')
output_filtered_fasta_file_path = os.path.join(hmmer_dir, 'output_filtered.fasta')
# this is command for basic search mode
hmmer_search_cmd.append('--tblout')
hmmer_search_cmd.append(output_hit_TAB_file_path)
hmmer_search_cmd.append('-A')
hmmer_search_cmd.append(output_hit_MSA_file_path)
hmmer_search_cmd.append('--noali')
hmmer_search_cmd.append('--notextw')
hmmer_search_cmd.append('-E') # can't use -T with -E, so we'll use -E
hmmer_search_cmd.append(str(params['e_value']))
hmmer_search_cmd.append(HMM_file_path)
hmmer_search_cmd.append(many_forward_reads_file_path)
# options
# if 'maxaccepts' in params:
# if params['maxaccepts']:
# hmmer_search_cmd.append('-max_target_seqs')
# hmmer_search_cmd.append(str(params['maxaccepts']))
# Run HMMER, capture output as it happens
#
self.log(console, 'RUNNING HMMER_SEARCH:')
self.log(console, ' ' + ' '.join(hmmer_search_cmd))
# report += "\n"+'running HMMER_SEARCH:'+"\n"
# report += ' '+' '.join(hmmer_search_cmd)+"\n"
p = subprocess.Popen(hmmer_search_cmd,
cwd=self.output_dir,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
shell=False)
while True:
line = p.stdout.readline()
if not line:
break
#self.log(console, line.replace('\n', ''))
p.stdout.close()
p.wait()
self.log(console, 'return code: ' + str(p.returncode))
if p.returncode != 0:
raise ValueError('Error running HMMER_SEARCH, return code: ' + str(p.returncode) +
'\n\n' + '\n'.join(console))
# Check for output
if not os.path.isfile(output_hit_TAB_file_path):
raise ValueError("HMMER_SEARCH failed to create TAB file '" + output_hit_TAB_file_path + "'")
elif not os.path.getsize(output_hit_TAB_file_path) > 0:
raise ValueError("HMMER_SEARCH created empty TAB file '" + output_hit_TAB_file_path + "'")
if not os.path.isfile(output_hit_MSA_file_path):
raise ValueError("HMMER_SEARCH failed to create MSA file '" + output_hit_MSA_file_path + "'")
elif not os.path.getsize(output_hit_MSA_file_path) > 0:
raise ValueError("HMMER_SEARCH created empty MSA file '" + output_hit_MSA_file_path + "'")
# DEBUG
# report = "TAB:\n\n"
# with open (output_hit_TAB_file_path, 'r') as output_handle:
# for line in output_handle:
# report += line+"\n"
# report += "\n\nMSA:\n\n"
# with open (output_hit_MSA_file_path, 'r') as output_handle:
# for line in output_handle:
# report += line+"\n"
# Parse the hit beg and end positions from Stockholm format MSA output for overlap filtering
#
self.log(console, 'PARSING HMMER SEARCH MSA OUTPUT')
hit_beg = dict()
hit_end = dict()
longest_alnlen = dict()
with open(output_hit_MSA_file_path, 'r', 0) as output_hit_MSA_file_handle:
for MSA_out_line in output_hit_MSA_file_handle.readlines():
MSA_out_line = MSA_out_line.strip()
if MSA_out_line.startswith('#=GS '):
hit_rec = re.sub('#=GS ', '', MSA_out_line)
hit_rec = re.sub('\s+.*?$', '', hit_rec)
hit_range = re.sub('^.*\/', '', hit_rec)
hit_id = re.sub('\/[^\/]+$', '', hit_rec)
(beg_str, end_str) = hit_range.split('-')
beg = int(beg_str)
end = int(end_str)
this_alnlen = abs(end - beg) + 1
if hit_id in hit_beg:
if this_alnlen > longest_alnlen[hit_id]:
hit_beg[hit_id] = beg
hit_end[hit_id] = end
longest_alnlen[hit_id] = this_alnlen
else:
hit_beg[hit_id] = beg
hit_end[hit_id] = end
longest_alnlen[hit_id] = this_alnlen
# Measure length of hit sequences
#
self.log(console, 'MEASURING HIT GENES LENGTHS')
hit_seq_len = dict()
with open(many_forward_reads_file_path, 'r', 0) as many_forward_reads_file_handle:
last_id = None
last_buf = ''
for fasta_line in many_forward_reads_file_handle.readlines():
fasta_line = fasta_line.strip()
if fasta_line.startswith('>'):
if last_id != None:
id_untrans = last_id
id_trans = re.sub('\|', ':', id_untrans)
#if id_untrans in hit_order or id_trans in hit_order:
if id_untrans in hit_beg or id_trans in hit_beg:
hit_seq_len[last_id] = len(last_buf)
header = re.sub('^>', '', fasta_line)
last_id = re.sub('\s+.*?$', '', header)
last_buf = ''
else:
last_buf += fasta_line
if last_id != None:
id_untrans = last_id
id_trans = re.sub('\|', ':', id_untrans)
#if id_untrans in hit_order or id_trans in hit_order:
if id_untrans in hit_beg or id_trans in hit_beg:
hit_seq_len[last_id] = len(last_buf)
# DEBUG
for hit_id in hit_beg.keys():
print ("HIT_ID: '" + str(hit_id) + "' BEG: '" +
str(hit_beg[hit_id]) + "' END: '" + str(hit_end[hit_id]) + "' SEQLEN: '" + str(hit_seq_len[hit_id]) + "'")
# Parse the HMMER tabular output and store ids to filter many set to make filtered object to save back to KBase
#
self.log(console, 'PARSING HMMER SEARCH TAB OUTPUT')
hit_seq_ids = dict()
accept_fids = dict()
output_hit_TAB_file_handle = open(output_hit_TAB_file_path, "r", 0)
output_aln_buf = output_hit_TAB_file_handle.readlines()
output_hit_TAB_file_handle.close()
total_hit_cnt = 0
accepted_hit_cnt = 0
high_bitscore_line = dict()
high_bitscore_score = dict()
#high_bitscore_ident = dict()
#longest_alnlen = dict()
hit_order = []
hit_buf = []
#header_done = False
for line in output_aln_buf:
if line.startswith('#'):
#if not header_done:
# hit_buf.append(line)
continue
#header_done = True
#self.log(console,'HIT LINE: '+line) # DEBUG
hit_info = re.split('\s+', line)
hit_seq_id = hit_info[0]
hit_accession = hit_info[1]
query_name = hit_info[2]
query_accession = hit_info[3]
hit_e_value = float(hit_info[4])
hit_bitscore = float(hit_info[5])
hit_bias = float(hit_info[6])
hit_e_value_best_dom = float(hit_info[7])
hit_bitscore_best_dom = float(hit_info[8])
hit_bias_best_dom = float(hit_info[9])
hit_expected_dom_n = float(hit_info[10])
hit_regions = float(hit_info[11])
hit_regions_multidom = float(hit_info[12])
hit_overlaps = float(hit_info[13])
hit_envelopes = float(hit_info[14])
hit_dom_n = float(hit_info[15])
hit_doms_within_rep_thresh = float(hit_info[16])
hit_doms_within_inc_thresh = float(hit_info[17])
hit_desc = hit_info[18]
try:
if hit_bitscore > high_bitscore_score[hit_seq_id]:
high_bitscore_score[hit_seq_id] = hit_bitscore
high_bitscore_line[hit_seq_id] = line
except:
if hit_seq_id in hit_seq_len:
hit_order.append(hit_seq_id)
high_bitscore_score[hit_seq_id] = hit_bitscore
high_bitscore_line[hit_seq_id] = line
else:
self.log(console, "ALERT!!! HIT "+hit_seq_id+" not found in MSA alignment and is likely a very weak hit (E-value is "+str(hit_e_value)+" and bitscore is "+str(hit_bitscore)+". SKIPPING HIT.")
filtering_fields = dict()
total_hit_cnt = len(hit_order)
for hit_seq_id in hit_order:
hit_buf.append(high_bitscore_line[hit_seq_id])
filtering_fields[hit_seq_id] = dict()
filter = False
#self.log(console,"HIT_SEQ_ID: '"+hit_seq_id+"'")
#if 'ident_thresh' in params and float(params['ident_thresh']) > float(high_bitscore_ident[hit_seq_id]):
# continue
if 'bitscore' in params and float(params['bitscore']) > float(high_bitscore_score[hit_seq_id]):
filter = True
filtering_fields[hit_seq_id]['bitscore'] = True
if 'model_cov_perc' in params and float(params['model_cov_perc']) > 100.0 * float(longest_alnlen[hit_seq_id]) / float(model_len):
filter = True
filtering_fields[hit_seq_id]['model_cov_perc'] = True
if 'maxaccepts' in params and params['maxaccepts'] != None and accepted_hit_cnt == int(params['maxaccepts']):
filter = True
filtering_fields[hit_seq_id]['maxaccepts'] = True
if filter:
continue
accepted_hit_cnt += 1
hit_seq_ids[hit_seq_id] = True
self.log(console, "HIT: '" + hit_seq_id + "'") # DEBUG
self.log(console, "\t" + "BEG: " + str(hit_beg[hit_seq_id]) + ", END: " +
str(hit_end[hit_seq_id]) + " ,SEQLEN: " + str(hit_seq_len[hit_seq_id]))
#
### Create output objects
#
if accepted_hit_cnt == 0:
self.log(console, 'THERE WERE NO ACCEPTED HITS. NOT BUILDING OUTPUT OBJECT')
else:
self.log(console, 'EXTRACTING HITS FROM INPUT')
self.log(console, 'MANY_TYPE_NAME: ' + many_type_name) # DEBUG
# SequenceSet input -> SequenceSet output
#
if many_type_name == 'SequenceSet':
output_sequenceSet = dict()
if 'sequence_set_id' in input_many_sequenceSet and input_many_sequenceSet['sequence_set_id'] != None:
output_sequenceSet['sequence_set_id'] = input_many_sequenceSet['sequence_set_id'] + \
"." + search_tool_name + "_Search_filtered"
else:
output_sequenceSet['sequence_set_id'] = search_tool_name + "_Search_filtered"
if 'description' in input_many_sequenceSet and input_many_sequenceSet['description'] != None:
output_sequenceSet['description'] = input_many_sequenceSet['description'] + \
" - " + search_tool_name + "_Search filtered"
else:
output_sequenceSet['description'] = search_tool_anme + "_Search filtered"
self.log(console, "ADDING SEQUENCES TO SEQUENCESET")
output_sequenceSet['sequences'] = []
for seq_obj in input_many_sequenceSet['sequences']:
header_id = seq_obj['sequence_id']
#header_desc = seq_obj['description']
#sequence_str = seq_obj['sequence']
id_untrans = header_id
id_trans = re.sub('\|', ':', id_untrans)
if id_trans in hit_seq_ids or id_untrans in hit_seq_ids:
#self.log(console, 'FOUND HIT '+header_id) # DEBUG
accept_fids[id_untrans] = True
output_sequenceSet['sequences'].append(seq_obj)
# FeatureSet input -> FeatureSet output
#
elif many_type_name == 'FeatureSet':
output_featureSet = dict()
if 'description' in input_many_featureSet and input_many_featureSet['description'] != None:
output_featureSet['description'] = input_many_featureSet['description'] + \
" - " + search_tool_name + "_Search filtered"
else:
output_featureSet['description'] = search_tool_name + "_Search filtered"
output_featureSet['element_ordering'] = []
output_featureSet['elements'] = dict()
fId_list = input_many_featureSet['elements'].keys()
self.log(console, "ADDING FEATURES TO FEATURESET")
for fId in sorted(fId_list):
for genome_ref in input_many_featureSet['elements'][fId]:
id_untrans = genome_ref + genome_id_feature_id_delim + fId
id_trans = re.sub('\|', ':', id_untrans)
if id_trans in hit_seq_ids or id_untrans in hit_seq_ids:
#self.log(console, 'FOUND HIT '+fId) # DEBUG
accept_fids[id_untrans] = True
#fId = id_untrans # don't change fId for output FeatureSet
try:
this_genome_ref_list = output_featureSet['elements'][fId]
except:
output_featureSet['elements'][fId] = []
output_featureSet['element_ordering'].append(fId)
output_featureSet['elements'][fId].append(genome_ref)
# Parse Genome hits into FeatureSet
#
elif many_type_name == 'Genome':
output_featureSet = dict()
# if 'scientific_name' in input_many_genome and input_many_genome['scientific_name'] != None:
# output_featureSet['description'] = input_many_genome['scientific_name'] + " - "+search_tool_name+"_Search filtered"
# else:
# output_featureSet['description'] = search_tool_name+"_Search filtered"
output_featureSet['description'] = search_tool_name + "_Search filtered"
output_featureSet['element_ordering'] = []
output_featureSet['elements'] = dict()
for fid in feature_ids:
id_untrans = fid
id_trans = re.sub('\|', ':', id_untrans)
if id_trans in hit_seq_ids or id_untrans in hit_seq_ids:
#self.log(console, 'FOUND HIT '+fid) # DEBUG
#output_featureSet['element_ordering'].append(fid)
accept_fids[id_untrans] = True
#fid = input_many_ref+genome_id_feature_id_delim+id_untrans # don't change fId for output FeatureSet
output_featureSet['element_ordering'].append(fid)
output_featureSet['elements'][fid] = [input_many_ref]
# Parse GenomeSet hits into FeatureSet
#
elif many_type_name == 'GenomeSet':
output_featureSet = dict()
if 'description' in input_many_genomeSet and input_many_genomeSet['description'] != None:
output_featureSet['description'] = input_many_genomeSet['description'] + \
" - " + search_tool_name + "_Search filtered"
else:
output_featureSet['description'] = search_tool_name + "_Search filtered"
output_featureSet['element_ordering'] = []
output_featureSet['elements'] = dict()
self.log(console, "READING HITS FOR GENOMES") # DEBUG
for genome_id in feature_ids_by_genome_id.keys():
self.log(console, "READING HITS FOR GENOME " + genome_id) # DEBUG
genome_ref = input_many_genomeSet['elements'][genome_id]['ref']
for feature_id in feature_ids_by_genome_id[genome_id]:
id_untrans = genome_ref + genome_id_feature_id_delim + feature_id
id_trans = re.sub('\|', ':', id_untrans)
if id_trans in hit_seq_ids or id_untrans in hit_seq_ids:
#self.log(console, 'FOUND HIT: '+feature['id']) # DEBUG
#output_featureSet['element_ordering'].append(feature['id'])
accept_fids[id_untrans] = True
#feature_id = id_untrans # don't change fId for output FeatureSet
try:
this_genome_ref_list = output_featureSet['elements'][feature_id]
except:
output_featureSet['elements'][feature_id] = []
output_featureSet['element_ordering'].append(feature_id)
output_featureSet['elements'][feature_id].append(genome_ref)
# Parse AnnotatedMetagenomeAssembly hits into FeatureSet
#
elif many_type_name == 'AnnotatedMetagenomeAssembly':
seq_total = 0
output_featureSet = dict()
# if 'scientific_name' in input_many_genome and input_many_genome['scientific_name'] != None:
# output_featureSet['description'] = input_many_genome['scientific_name'] + " - "+search_tool_name+"_Search filtered"
# else:
# output_featureSet['description'] = search_tool_name+"_Search filtered"
output_featureSet['description'] = search_tool_name+"_Search filtered"
output_featureSet['element_ordering'] = []
output_featureSet['elements'] = dict()
for fid in feature_ids:
#if fid == 'AWN69_RS07145' or fid == 'AWN69_RS13375':
# self.log(console, 'CHECKING FID '+fid) # DEBUG
seq_total += 1
id_untrans = fid
id_trans = re.sub ('\|',':',id_untrans)
#print ("TESTING FEATURES: ID_UNTRANS: '"+id_untrans+"'") # DEBUG
#print ("TESTING FEATURES: ID_TRANS: '"+id_trans+"'") # DEBUG
if id_trans in hit_seq_ids or id_untrans in hit_seq_ids:
self.log(console, 'FOUND HIT '+fid) # DEBUG
#output_featureSet['element_ordering'].append(fid)
accept_fids[id_untrans] = True
#fid = input_many_ref+self.genome_id_feature_id_delim+id_untrans # don't change fId for output FeatureSet
ama_ref = params['input_many_ref']
output_featureSet['element_ordering'].append(fid)
output_featureSet['elements'][fid] = [ama_ref]
# load the method provenance from the context object
#
self.log(console, "SETTING PROVENANCE") # DEBUG
provenance = [{}]
if 'provenance' in ctx:
provenance = ctx['provenance']
# add additional info to provenance here, in this case the input data object reference
provenance[0]['input_ws_objects'] = []
# provenance[0]['input_ws_objects'].append(input_one_ref)
provenance[0]['input_ws_objects'].append(input_msa_ref)
provenance[0]['input_ws_objects'].append(input_many_ref)
provenance[0]['service'] = 'kb_blast'
provenance[0]['method'] = search_tool_name + '_Search'
# Upload results
#
self.log(console, "UPLOADING RESULTS") # DEBUG
# input many SequenceSet -> save SequenceSet
#
if many_type_name == 'SequenceSet':
new_obj_info = ws.save_objects({
'workspace': params['workspace_name'],
'objects': [{
'type': 'KBaseSequences.SequenceSet',
'data': output_sequenceSet,
'name': params['output_filtered_name'],
'meta': {},
'provenance': provenance
}]
})[0]
else: # input FeatureSet, Genome, and GenomeSet -> upload FeatureSet output
new_obj_info = ws.save_objects({
'workspace': params['workspace_name'],
'objects': [{
'type': 'KBaseCollections.FeatureSet',
'data': output_featureSet,
'name': params['output_filtered_name'],
'meta': {},
'provenance': provenance
}]
})[0]
# build output report object
#
self.log(console, "BUILDING REPORT") # DEBUG
if len(invalid_msgs) == 0 and total_hit_cnt > 0:
# text report
#
report += 'sequences in search db: ' + str(seq_total) + "\n"
report += 'sequences in hit set: ' + str(total_hit_cnt) + "\n"
report += 'sequences in accepted hit set: ' + str(accepted_hit_cnt) + "\n"
report += "\n"
for line in hit_buf:
report += line
self.log(console, report)
# build html report
if many_type_name == 'Genome':
feature_id_to_function = GenomeToFASTA_retVal['feature_id_to_function']
genome_ref_to_obj_name = GenomeToFASTA_retVal['genome_ref_to_obj_name']
genome_ref_to_sci_name = GenomeToFASTA_retVal['genome_ref_to_sci_name']
elif many_type_name == 'GenomeSet':
feature_id_to_function = GenomeSetToFASTA_retVal['feature_id_to_function']
genome_ref_to_obj_name = GenomeSetToFASTA_retVal['genome_ref_to_obj_name']
genome_ref_to_sci_name = GenomeSetToFASTA_retVal['genome_ref_to_sci_name']
elif many_type_name == 'FeatureSet':
feature_id_to_function = FeatureSetToFASTA_retVal['feature_id_to_function']
genome_ref_to_obj_name = FeatureSetToFASTA_retVal['genome_ref_to_obj_name']
genome_ref_to_sci_name = FeatureSetToFASTA_retVal['genome_ref_to_sci_name']
elif many_type_name == 'AnnotatedMetagenomeAssembly':
feature_id_to_function = AnnotatedMetagenomeAssemblyToFASTA_retVal['feature_id_to_function']
ama_ref_to_obj_name = AnnotatedMetagenomeAssemblyToFASTA_retVal['ama_ref_to_obj_name']
head_color = "#eeeeff"
border_head_color = "#ffccff"
accept_row_color = 'white'
#reject_row_color = '#ffeeee'
reject_row_color = '#eeeeee'
reject_cell_color = '#ffcccc'
text_fontsize = "2"
text_color = '#606060'
border_body_color = "#cccccc"
bar_width = 100
bar_height = 15
bar_color = "lightblue"
bar_line_color = "#cccccc"
bar_fontsize = "1"
bar_char = "."
cellpadding = "3"
cellspacing = "2"
border = "0"
html_report_lines = []
html_report_lines += ['<html>']
html_report_lines += ['<body bgcolor="white">']
html_report_lines += ['<table cellpadding=' + cellpadding +
' cellspacing = ' + cellspacing + ' border=' + border + '>']
html_report_lines += ['<tr bgcolor="' + head_color + '">']
html_report_lines += ['<td style="border-right:solid 2px ' + border_head_color + '; border-bottom:solid 2px ' + border_head_color +
'"><font color="' + text_color + '" size=' + text_fontsize + '>' + 'ALIGNMENT COVERAGE (HIT SEQ)' + '</font></td>']
html_report_lines += ['<td style="border-right:solid 2px ' + border_head_color + '; border-bottom:solid 2px ' +
border_head_color + '"><font color="' + text_color + '" size=' + text_fontsize + '>' + 'GENE ID' + '</font></td>']
html_report_lines += ['<td style="border-right:solid 2px ' + border_head_color + '; border-bottom:solid 2px ' +
border_head_color + '"><font color="' + text_color + '" size=' + text_fontsize + '>' + 'FUNCTION' + '</font></td>']
html_report_lines += ['<td style="border-right:solid 2px ' + border_head_color + '; border-bottom:solid 2px ' +
border_head_color + '"><font color="' + text_color + '" size=' + text_fontsize + '>' + 'GENOME' + '</font></td>']
# html_report_lines += ['<td align=center style="border-right:solid 2px '+border_head_color+'; border-bottom:solid 2px '+border_head_color+'"><font color="'+text_color+'" size='+text_fontsize+'>'+'IDENT'+'%</font></td>']
html_report_lines += ['<td align=center style="border-right:solid 2px ' + border_head_color + '; border-bottom:solid 2px ' +
border_head_color + '"><font color="' + text_color + '" size=' + text_fontsize + '>' + 'ALN_LEN' + '</font></td>']
html_report_lines += ['<td align=center style="border-right:solid 2px ' + border_head_color + '; border-bottom:solid 2px ' +
border_head_color + '"><font color="' + text_color + '" size=' + text_fontsize + '>' + 'E-VALUE' + '</font></td>']
html_report_lines += ['<td align=center style="border-right:solid 2px ' + border_head_color + '; border-bottom:solid 2px ' +
border_head_color + '"><font color="' + text_color + '" size=' + text_fontsize + '>' + 'BIT SCORE' + '</font></td>']
html_report_lines += ['<td align=center style="border-right:solid 2px ' + border_head_color + '; border-bottom:solid 2px ' +
border_head_color + '"><font color="' + text_color + '" size=' + text_fontsize + '>' + '<nobr>H_BEG-H_END</nobr>' + '</font></td>']
# html_report_lines += ['<td align=center style="border-right:solid 2px '+border_head_color+'; border-bottom:solid 2px '+border_head_color+'"><font color="'+text_color+'" size='+text_fontsize+'>'+'MIS MATCH'+'</font></td>']
# html_report_lines += ['<td align=center style="border-right:solid 2px '+border_head_color+'; border-bottom:solid 2px '+border_head_color+'"><font color="'+text_color+'" size='+text_fontsize+'>'+'GAP OPEN'+'</font></td>']
html_report_lines += ['</tr>']
for line in hit_buf:
line = line.strip()
if line == '' or line.startswith('#'):
continue
[hit_id, hit_accession, query_name, query_accession, e_value, bit_score, bias, e_value_best_dom, bit_score_best_dom, bias_best_dom, expected_dom_n,
regions, regions_multidom, overlaps, envelopes, dom_n, doms_within_rep_thresh, doms_within_inc_thresh, hit_desc] = re.split('\s+', line)[0:19]
# [query_id, hit_id, identity, aln_len, mismatches, gap_openings, q_beg, q_end, h_beg, h_end, e_value, bit_score] = line.split("\t")[0:12]
# identity = str(round(float(identity), 1))
# if identity == '100.0': identity = '100'
# get coords with respect to hit sequence
h_len = hit_seq_len[hit_id]
h_beg = hit_beg[hit_id]
h_end = hit_end[hit_id]
aln_len = abs(h_end - h_beg) + 1
aln_len_perc = round(100.0 * float(aln_len) / float(model_len), 1)
#if many_type_name == 'SingleEndLibrary':
# pass
#elif many_type_name == 'SequenceSet':
if many_type_name == 'SequenceSet':
pass
elif many_type_name == 'Genome' or \
many_type_name == 'AnnotatedMetagenomeAssembly' or \
many_type_name == 'GenomeSet' or \
many_type_name == 'FeatureSet':
if 'Set' in many_type_name:
[genome_ref, hit_fid] = hit_id.split(genome_id_feature_id_delim)
else:
genome_ref = input_many_ref
hit_fid = hit_id
# can't just use hit_fid because may have pipes translated and can't translate back
fid_lookup = None
for fid in feature_id_to_function[genome_ref].keys():
id_untrans = fid
id_trans = re.sub('\|', ':', id_untrans)
#self.log (console, "SCANNING FIDS. HIT_FID: '"+str(hit_fid)+"' FID: '"+str(fid)+"' TRANS: '"+str(id_trans)+"'") # DEBUG
if id_untrans == hit_fid or id_trans == hit_fid:
#self.log (console, "GOT ONE!") # DEBUG
if many_type_name == 'Genome' or many_type_name == 'AnnotatedMetagenomeAssembly':
accept_id = fid
elif many_type_name == 'GenomeSet' or many_type_name == 'FeatureSet':
accept_id = genome_ref + genome_id_feature_id_delim + fid
if accept_id in accept_fids:
row_color = accept_row_color
else:
row_color = reject_row_color
fid_lookup = fid
break
#self.log (console, "HIT_FID: '"+str(hit_fid)+"' FID_LOOKUP: '"+str(fid_lookup)+"'") # DEBUG
if fid_lookup == None:
raise ValueError("unable to find fid for hit_fid: '" + str(hit_fid))
elif fid_lookup not in feature_id_to_function[genome_ref]:
raise ValueError("unable to find function for fid: '" + str(fid_lookup))
fid_disp = re.sub(r"^.*\.([^\.]+)\.([^\.]+)$", r"\1.\2", fid_lookup)
func_disp = feature_id_to_function[genome_ref][fid_lookup]
# set genome_disp_name
if many_type_name == 'AnnotatedMetagenomeAssembly':
genome_disp_name = ama_ref_to_obj_name[genome_ref]
else:
genome_obj_name = genome_ref_to_obj_name[genome_ref]
genome_sci_name = genome_ref_to_sci_name[genome_ref]
[ws_id, obj_id, genome_obj_version] = genome_ref.split('/')
genome_disp_name = ''
if 'obj_name' in params['genome_disp_name_config']:
genome_disp_name += genome_obj_name
if 'ver' in params['genome_disp_name_config']:
genome_disp_name += '.v'+str(genome_obj_version)
if 'sci_name' in params['genome_disp_name_config']:
genome_disp_name += ': '+genome_sci_name
# build html report table line
html_report_lines += ['<tr bgcolor="' + row_color + '">']
#html_report_lines += ['<tr bgcolor="'+'white'+'">'] # DEBUG
# add overlap bar
# coverage graphic (with respect to hit seq)
html_report_lines += ['<td valign=middle align=center style="border-right:solid 1px ' +
border_body_color + '; border-bottom:solid 1px ' + border_body_color + '">']
html_report_lines += ['<table style="height:' +
str(bar_height) + 'px; width:' + str(bar_width) + 'px" border=0 cellpadding=0 cellspacing=0>']
full_len_pos = bar_width
aln_beg_pos = int(float(bar_width) * float(int(h_beg) - 1) / float(int(h_len) - 1))
aln_end_pos = int(float(bar_width) * float(int(h_end) - 1) / float(int(h_len) - 1))
cell_pix_height = str(int(round(float(bar_height) / 3.0, 0)))
cell_color = ['', '', '']
cell_width = []
cell_width.append(aln_beg_pos)
cell_width.append(aln_end_pos - aln_beg_pos)
cell_width.append(bar_width - aln_end_pos)
for row_i in range(3):
html_report_lines += ['<tr style="height:' + cell_pix_height + 'px">']
unalign_color = row_color
if row_i == 1:
unalign_color = bar_line_color
cell_color[0] = unalign_color
cell_color[1] = bar_color
cell_color[2] = unalign_color
for col_i in range(3):
cell_pix_width = str(cell_width[col_i])
cell_pix_color = cell_color[col_i]
html_report_lines += ['<td style="height:' + cell_pix_height +
'px; width:' + cell_pix_width + 'px" bgcolor="' + cell_pix_color + '"></td>']
html_report_lines += ['</tr>']
html_report_lines += ['</table>']
html_report_lines += ['</td>']
# add other cells
# fid
html_report_lines += ['<td style="border-right:solid 1px ' + border_body_color + '; border-bottom:solid 1px ' +
border_body_color + '"><font color="' + text_color + '" size=' + text_fontsize + '>' + str(fid_disp) + '</font></td>']
# html_report_lines += ['<td style="border-right:solid 1px '+border_body_color+'; border-bottom:solid 1px '+border_body_color+'"><font color="'+text_color+'" size='+text_fontsize+'>'+str(hit_accession)+'</font></td>']
# func
html_report_lines += ['<td style="border-right:solid 1px ' + border_body_color + '; border-bottom:solid 1px ' +
border_body_color + '"><font color="' + text_color + '" size=' + text_fontsize + '>' + func_disp + '</font></td>']
# genome name
html_report_lines += ['<td style="border-right:solid 1px ' + border_body_color + '; border-bottom:solid 1px ' +
border_body_color + '"><font color="' + text_color + '" size=' + text_fontsize + '>' + genome_disp_name + '</font></td>']
# ident
# if 'ident_thresh' in filtering_fields[hit_id]:
# this_cell_color = reject_cell_color
# else:
# this_cell_color = row_color
# html_report_lines += ['<td align=center bgcolor="'+this_cell_color+'" style="border-right:solid 1px '+border_body_color+'; border-bottom:solid 1px '+border_body_color+'"><font color="'+text_color+'" size='+text_fontsize+'>'+str(identity)+'%</font></td>']
# aln len
if 'model_cov_perc' in filtering_fields[hit_id]:
this_cell_color = reject_cell_color
else:
this_cell_color = row_color
html_report_lines += ['<td align=center bgcolor="' + str(this_cell_color) + '" style="border-right:solid 1px ' + border_body_color + '; border-bottom:solid 1px ' +
border_body_color + '"><font color="' + text_color + '" size=' + text_fontsize + '>' + str(aln_len) + ' (' + str(aln_len_perc) + '%)</font></td>']
# evalue
html_report_lines += ['<td align=center style="border-right:solid 1px ' + border_body_color + '; border-bottom:solid 1px ' +
border_body_color + '"><font color="' + text_color + '" size=' + text_fontsize + '><nobr>' + str(e_value) + '</nobr></font></td>']
# bit score
if 'bitscore' in filtering_fields[hit_id]:
this_cell_color = reject_cell_color
else:
this_cell_color = row_color
html_report_lines += ['<td align=center bgcolor="' + str(this_cell_color) + '" style="border-right:solid 1px ' + border_body_color + '; border-bottom:solid 1px ' +
border_body_color + '"><font color="' + text_color + '" size=' + text_fontsize + '><nobr>' + str(bit_score) + '</nobr></font></td>']
# bias
# html_report_lines += ['<td align=center style="border-right:solid 1px '+border_body_color+'; border-bottom:solid 1px '+border_body_color+'"><font color="'+text_color+'" size='+text_fontsize+'><nobr>'+str(bias)+'</nobr><br><nobr>('+str(bias_best_dom)+')</nobr></font></td>']
# aln coords only for hit seq
html_report_lines += ['<td align=center style="border-right:solid 1px ' + border_body_color + '; border-bottom:solid 1px ' + border_body_color +
'"><font color="' + text_color + '" size=' + text_fontsize + '><nobr>' + str(h_beg) + '-' + str(h_end) + '</nobr></font></td>']
# mismatches?
# html_report_lines += ['<td align=center style="border-right:solid 1px '+border_body_color+'; border-bottom:solid 1px '+border_body_color+'"><font color="'+text_color+'" size='+text_fontsize+'>'+str(mismatches)+'</font></td>']
# gaps?
# html_report_lines += ['<td align=center style="border-right:solid 1px '+border_body_color+'; border-bottom:solid 1px '+border_body_color+'"><font color="'+text_color+'" size='+text_fontsize+'>'+str(gap_openings)+'</font></td>']
# regions
# html_report_lines += ['<td align=center style="border-right:solid 1px '+border_body_color+'; border-bottom:solid 1px '+border_body_color+'"><font color="'+text_color+'" size='+text_fontsize+'>'+str(regions)+'</font></td>']
# regions_multidom
# html_report_lines += ['<td align=center style="border-right:solid 1px '+border_body_color+'; border-bottom:solid 1px '+border_body_color+'"><font color="'+text_color+'" size='+text_fontsize+'>'+str(regions_multidom)+'</font></td>']
# overlaps
# html_report_lines += ['<td align=center style="border-right:solid 1px '+border_body_color+'; border-bottom:solid 1px '+border_body_color+'"><font color="'+text_color+'" size='+text_fontsize+'>'+str(overlaps)+'</font></td>']
# envelopes
# html_report_lines += ['<td align=center style="border-right:solid 1px '+border_body_color+'; border-bottom:solid 1px '+border_body_color+'"><font color="'+text_color+'" size='+text_fontsize+'>'+str(envelopes)+'</font></td>']
# expected_dom_n
# html_report_lines += ['<td align=center style="border-right:solid 1px '+border_body_color+'; border-bottom:solid 1px '+border_body_color+'"><font color="'+text_color+'" size='+text_fontsize+'>'+str(expected_dom_n)+'</font></td>']
# doms
# html_report_lines += ['<td align=center style="border-right:solid 1px '+border_body_color+'; border-bottom:solid 1px '+border_body_color+'"><font color="'+text_color+'" size='+text_fontsize+'>'+str(dom_n)+','+str(doms_within_rep_thresh)+','+str(doms_within_inc_thresh)+'</font></td>']
# hit desc
# html_report_lines += ['<td align=center style="border-right:solid 1px '+border_body_color+'; border-bottom:solid 1px '+border_body_color+'"><font color="'+text_color+'" size='+text_fontsize+'>'+str(hit_desc)+'</font></td>']
html_report_lines += ['</tr>']
html_report_lines += ['</table>']
html_report_lines += ['</body>']
html_report_lines += ['</html>']
# write html to file and upload
#
html_report_str = "\n".join(html_report_lines)
html_output_dir = os.path.join(self.output_dir, 'html_output')
if not os.path.exists(html_output_dir):
os.makedirs(html_output_dir)
html_file = search_tool_name + '_Search.html'
html_path = os.path.join(html_output_dir, html_file)
with open(html_path, 'w', 0) as html_handle:
html_handle.write(html_report_str)
dfu = DFUClient(self.callbackURL)
try:
#HTML_upload_ret = dfu.file_to_shock({'file_path': html_path,
HTML_upload_ret = dfu.file_to_shock({'file_path': html_output_dir,
'make_handle': 0,
'pack': 'zip'})
except:
raise ValueError('Logging exception loading HTML file to shock')
try:
TAB_upload_ret = dfu.file_to_shock({'file_path': output_hit_TAB_file_path,
'make_handle': 0})
#'pack': 'zip'})
except:
raise ValueError('Logging exception loading TAB output to shock')
try:
MSA_upload_ret = dfu.file_to_shock({'file_path': output_hit_MSA_file_path,
'make_handle': 0})
#'pack': 'zip'})
except:
raise ValueError('Logging exception loading MSA output to shock')
# create report object
reportName = 'hmmer_report_' + str(uuid.uuid4())
reportObj = {'objects_created': [],
#'text_message': '', # or is it 'message'?
'message': '', # or is it 'text_message'?
'direct_html': None,
'direct_html_link_index': None,
'file_links': [],
'html_links': [],
'workspace_name': params['workspace_name'],
'report_object_name': reportName
}
#html_buf_lim = 16000 # really 16KB, but whatever
#if len(html_report_str) <= html_buf_lim:
# reportObj['direct_html'] = html_report_str
#else:
reportObj['direct_html_link_index'] = 0
reportObj['html_links'] = [{'shock_id': HTML_upload_ret['shock_id'],
'name': html_file,
'label': search_tool_name + ' HTML Report'}
#'description': search_tool_name + ' HTML Report'}
]
reportObj['file_links'] = [{'shock_id': TAB_upload_ret['shock_id'],
'name': search_tool_name + '_Search.TAB',
'label': search_tool_name + ' hits TABLE'},
{'shock_id': MSA_upload_ret['shock_id'],
'name': search_tool_name + '_Search.MSA',
'label': search_tool_name + ' hits MSA'},
]
# if extra_output:
# extension = 'txt'
# if params['output_extra_format'] == '5':
# extension = 'xml'
# elif params['output_extra_format'] == '8':
# extension = 'asn1txt'
# elif params['output_extra_format'] == '9':
# extension = 'asn1bin'
# elif params['output_extra_format'] == '10':
# extension = 'csv'
# elif params['output_extra_format'] == '11':
# extension = 'asn1arc'
# reportObj['file_links'].append({'shock_id': extra_upload_ret['shock_id'],
# 'name': search_tool_name+'_Search-m'+str(params['output_extra_format'])+'.'+extension,
# 'label': search_tool_name+' Results: m'+str(params['output_extra_format'])})
if accepted_hit_cnt > 0:
reportObj['objects_created'].append(
{'ref': str(params['workspace_name']) + '/' + params['output_filtered_name'], 'description': search_tool_name + ' hits'})
#reportObj['message'] = report
# save report object
#
SERVICE_VER = 'release'
reportClient = KBaseReport(self.callbackURL, token=ctx['token'], service_ver=SERVICE_VER)
#report_info = report.create({'report':reportObj, 'workspace_name':params['workspace_name']})
report_info = reportClient.create_extended_report(reportObj)
else:
if total_hit_cnt == 0: # no hits
report += "No hits were found\n"
else: # data validation error
report += "FAILURE\n\n" + "\n".join(invalid_msgs) + "\n"
reportObj = {
'objects_created': [],
'text_message': report
}
reportName = 'hmmer_report_' + str(uuid.uuid4())
report_obj_info = ws.save_objects({
# 'id':info[6],
'workspace': params['workspace_name'],
'objects': [
{
'type': 'KBaseReport.Report',
'data': reportObj,
'name': reportName,
'meta': {},
'hidden': 1,
'provenance': provenance
}
]
})[0]
report_info = dict()
report_info['name'] = report_obj_info[1]
report_info['ref'] = str(report_obj_info[6]) + '/' + str(report_obj_info[0]) + '/' + str(report_obj_info[4])
self.log(console, "BUILDING RETURN OBJECT")
# returnVal = { 'output_report_name': reportName,
# 'output_report_ref': str(report_obj_info[6]) + '/' + str(report_obj_info[0]) + '/' + str(report_obj_info[4]),
# 'output_filtered_ref': params['workspace_name']+'/'+params['output_filtered_name']
# }
returnVal = {'report_name': report_info['name'],
'report_ref': report_info['ref']
}
self.log(console, search_tool_name + "_Search DONE")
#END HMMER_MSA_Search
# At some point might do deeper type checking...
if not isinstance(returnVal, dict):
raise ValueError('Method HMMER_MSA_Search return value ' +
'returnVal is not type dict as required.')
# return the results
return [returnVal]
def HMMER_Local_MSA_Group_Search(self, ctx, params):
"""
Method for HMMER search of a Local MSA Group (found automatically within workspace) against many sequences
**
** overloading as follows:
** input_many_ref: SequenceSet, FeatureSet, Genome, GenomeSet, AMA (note: SeqeuenceSet deactivated)
** output_name: SequenceSet (if input_many is SequenceSet), (else) FeatureSet
:param params: instance of type "HMMER_Local_MSA_Group_Params" (HMMER
Local MSA Group Input Params) -> structure: parameter
"workspace_name" of type "workspace_name" (** The workspace object
refs are of form: ** ** objects = ws.get_objects([{'ref':
params['workspace_id']+'/'+params['obj_name']}]) ** ** "ref" means
the entire name combining the workspace id and the object name **
"id" is a numerical identifier of the workspace or object, and
should just be used for workspace ** "name" is a string identifier
of a workspace or object. This is received from Narrative.),
parameter "input_msa_refs" of type "data_obj_ref", parameter
"input_many_ref" of type "data_obj_ref", parameter
"output_filtered_name" of type "data_obj_name", parameter
"genome_disp_name_config" of String, parameter "coalesce_output"
of type "bool", parameter "e_value" of Double, parameter
"bitscore" of Double, parameter "model_cov_perc" of Double,
parameter "maxaccepts" of Double, parameter "heatmap" of type
"bool", parameter "low_val" of type "bool", parameter "vertical"
of type "bool", parameter "show_blanks" of type "bool"
:returns: instance of type "HMMER_Output" (HMMER Output) ->
structure: parameter "report_name" of type "data_obj_name",
parameter "report_ref" of type "data_obj_ref"
"""
# ctx is the context object
# return variables are: returnVal
#BEGIN HMMER_Local_MSA_Group_Search
console = []
invalid_msgs = []
msa_invalid_msgs = []
search_tool_name = 'HMMER_Local_MSA_Group_prot'
self.log(console, 'Running ' + search_tool_name + '_Search with params=')
self.log(console, "\n" + pformat(params))
report = ''
# report = 'Running '+search_tool_name+'_Search with params='
# report += "\n"+pformat(params)
#appropriate_sequence_found_in_one_input = False
appropriate_sequence_found_in_MSA_input = False
appropriate_sequence_found_in_many_input = False
genome_id_feature_id_delim = '.f:'
#### do some basic checks
#
if 'workspace_name' not in params:
raise ValueError('workspace_name parameter is required')
# if 'input_one_ref' not in params:
# raise ValueError('input_one_ref parameter is required')
# if 'input_msa_refs' not in params or len(params['input_msa_refs']) == 0:
# raise ValueError('input_msa_refs parameter is required if selecting local MSAs')
if 'input_many_ref' not in params:
raise ValueError('input_many_ref parameter is required')
if 'genome_disp_name_config' not in params:
raise ValueError('genome_disp_name_config parameter is required')
if 'output_filtered_name' not in params:
raise ValueError('output_filtered_name parameter is required')
#if 'coalesce_output' not in params:
# raise ValueError('coalesce_output parameter is required')
# never coalesce. what was I thinking!?!?!?!?!
params['coalesce_output'] = 0;
# set local names and ids
# input_one_ref = params['input_one_ref']
#input_msa_ref = params['input_msa_ref']
input_many_ref = params['input_many_ref']
ws_id = input_many_ref.split('/')[0]
#### Get the input_many object
##
try:
ws = Workspace(self.workspaceURL, token=ctx['token'])
#objects = ws.get_objects([{'ref': input_many_ref}])
objects = ws.get_objects2({'objects': [{'ref': input_many_ref}]})['data']
input_many_data = objects[0]['data']
info = objects[0]['info']
input_many_name = str(info[1])
many_type_name = info[2].split('.')[1].split('-')[0]
except Exception as e:
raise ValueError('Unable to fetch input_many_name object from workspace: ' + str(e))
#to get the full stack trace: traceback.format_exc()
# Handle overloading (input_many can be SequenceSet, FeatureSet, Genome, or GenomeSet)
#
if many_type_name == 'SequenceSet':
try:
input_many_sequenceSet = input_many_data
except Exception as e:
print(traceback.format_exc())
raise ValueError('Unable to get SequenceSet: ' + str(e))
header_id = input_many_sequenceSet['sequences'][0]['sequence_id']
many_forward_reads_file_path = os.path.join(self.output_dir, header_id + '.fasta')
many_forward_reads_file_handle = open(many_forward_reads_file_path, 'w', 0)
self.log(console, 'writing reads file: ' + str(many_forward_reads_file_path))
for seq_obj in input_many_sequenceSet['sequences']:
header_id = seq_obj['sequence_id']
sequence_str = seq_obj['sequence']
PROT_pattern = re.compile("^[acdefghiklmnpqrstvwyACDEFGHIKLMNPQRSTVWYxX ]+$")
DNA_pattern = re.compile("^[acgtuACGTUnryNRY ]+$")
if DNA_pattern.match(sequence_str):
self.log(invalid_msgs,
"Require protein sequences for target. " +
"BAD nucleotide record for sequence_id: " + header_id + "\n" + sequence_str + "\n")
continue
elif not PROT_pattern.match(sequence_str):
self.log(invalid_msgs, "BAD record for sequence_id: " + header_id + "\n" + sequence_str + "\n")
continue
appropriate_sequence_found_in_many_input = True
many_forward_reads_file_handle.write('>' + header_id + "\n")
many_forward_reads_file_handle.write(sequence_str + "\n")
many_forward_reads_file_handle.close()
self.log(console, 'done')
# we're going to profile genome refs for all target types, even if only one object.
# note: we are calling it 'genome_refs' even if the object is an AMA
#
genome_refs = []
# FeatureSet
#
if many_type_name == 'FeatureSet':
# retrieve sequences for features
input_many_featureSet = input_many_data
many_forward_reads_file_dir = self.output_dir
many_forward_reads_file = input_many_name + ".fasta"
# DEBUG
#beg_time = (datetime.utcnow() - datetime.utcfromtimestamp(0)).total_seconds()
FeatureSetToFASTA_params = {
'featureSet_ref': input_many_ref,
'file': many_forward_reads_file,
'dir': many_forward_reads_file_dir,
'console': console,
'invalid_msgs': invalid_msgs,
'residue_type': 'protein',
'feature_type': 'CDS',
'record_id_pattern': '%%genome_ref%%' + genome_id_feature_id_delim + '%%feature_id%%',
'record_desc_pattern': '[%%genome_ref%%]',
'case': 'upper',
'linewrap': 50,
'merge_fasta_files': 'TRUE'
}
#self.log(console,"callbackURL='"+self.callbackURL+"'") # DEBUG
#SERVICE_VER = 'release'
SERVICE_VER = 'dev'
DOTFU = KBaseDataObjectToFileUtils(url=self.callbackURL, token=ctx['token'], service_ver=SERVICE_VER)
FeatureSetToFASTA_retVal = DOTFU.FeatureSetToFASTA(FeatureSetToFASTA_params)
many_forward_reads_file_path = FeatureSetToFASTA_retVal['fasta_file_path']
feature_ids_by_genome_ref = FeatureSetToFASTA_retVal['feature_ids_by_genome_ref']
if len(feature_ids_by_genome_ref.keys()) > 0:
appropriate_sequence_found_in_many_input = True
genome_refs = sorted(feature_ids_by_genome_ref.keys())
# DEBUG
#end_time = (datetime.utcnow() - datetime.utcfromtimestamp(0)).total_seconds()
#self.log(console, "FeatureSetToFasta() took "+str(end_time-beg_time)+" secs")
# Genome
#
elif many_type_name == 'Genome':
many_forward_reads_file_dir = self.output_dir
many_forward_reads_file = input_many_name + ".fasta"
# DEBUG
#beg_time = (datetime.utcnow() - datetime.utcfromtimestamp(0)).total_seconds()
GenomeToFASTA_params = {
'genome_ref': input_many_ref,
'file': many_forward_reads_file,
'dir': many_forward_reads_file_dir,
'console': console,
'invalid_msgs': invalid_msgs,
'residue_type': 'protein',
'feature_type': 'CDS',
'record_id_pattern': '%%feature_id%%',
'record_desc_pattern': '[%%genome_id%%]',
'case': 'upper',
'linewrap': 50
}
#self.log(console,"callbackURL='"+self.callbackURL+"'") # DEBUG
#SERVICE_VER = 'release'
SERVICE_VER = 'dev'
DOTFU = KBaseDataObjectToFileUtils(url=self.callbackURL, token=ctx['token'], service_ver=SERVICE_VER)
GenomeToFASTA_retVal = DOTFU.GenomeToFASTA(GenomeToFASTA_params)
many_forward_reads_file_path = GenomeToFASTA_retVal['fasta_file_path']
feature_ids = GenomeToFASTA_retVal['feature_ids']
if len(feature_ids) > 0:
appropriate_sequence_found_in_many_input = True
genome_refs = [input_many_ref]
# DEBUG
#end_time = (datetime.utcnow() - datetime.utcfromtimestamp(0)).total_seconds()
#self.log(console, "Genome2Fasta() took "+str(end_time-beg_time)+" secs")
# GenomeSet
#
elif many_type_name == 'GenomeSet':
input_many_genomeSet = input_many_data
many_forward_reads_file_dir = self.output_dir
many_forward_reads_file = input_many_name + ".fasta"
for genome_id in input_many_genomeSet['elements']:
genome_ref = input_many_genomeSet['elements'][genome_id]['ref']
if genome_ref not in genome_refs:
genome_refs.append(genome_ref)
# DEBUG
#beg_time = (datetime.utcnow() - datetime.utcfromtimestamp(0)).total_seconds()
GenomeSetToFASTA_params = {
'genomeSet_ref': input_many_ref,
'file': many_forward_reads_file,
'dir': many_forward_reads_file_dir,
'console': console,
'invalid_msgs': invalid_msgs,
'residue_type': 'protein',
'feature_type': 'CDS',
'record_id_pattern': '%%genome_ref%%' + genome_id_feature_id_delim + '%%feature_id%%',
'record_desc_pattern': '[%%genome_ref%%]',
'case': 'upper',
'linewrap': 50,
'merge_fasta_files': 'TRUE'
}
#self.log(console,"callbackURL='"+self.callbackURL+"'") # DEBUG
#SERVICE_VER = 'release'
SERVICE_VER = 'dev'
DOTFU = KBaseDataObjectToFileUtils(url=self.callbackURL, token=ctx['token'], service_ver=SERVICE_VER)
GenomeSetToFASTA_retVal = DOTFU.GenomeSetToFASTA(GenomeSetToFASTA_params)
many_forward_reads_file_path = GenomeSetToFASTA_retVal['fasta_file_path_list'][0]
feature_ids_by_genome_id = GenomeSetToFASTA_retVal['feature_ids_by_genome_id']
if len(feature_ids_by_genome_id.keys()) > 0:
appropriate_sequence_found_in_many_input = True
# DEBUG
#end_time = (datetime.utcnow() - datetime.utcfromtimestamp(0)).total_seconds()
#self.log(console, "FeatureSetToFasta() took "+str(end_time-beg_time)+" secs")
# AnnotatedMetagenomeAssembly
#
elif many_type_name == 'AnnotatedMetagenomeAssembly':
many_forward_reads_file_dir = self.output_dir
many_forward_reads_file = input_many_name + ".fasta"
# DEBUG
#beg_time = (datetime.utcnow() - datetime.utcfromtimestamp(0)).total_seconds()
AnnotatedMetagenomeAssemblyToFASTA_params = {
'ama_ref': input_many_ref,
'file': many_forward_reads_file,
'dir': many_forward_reads_file_dir,
'console': console,
'invalid_msgs': invalid_msgs,
'residue_type': 'protein',
'feature_type': 'CDS',
'record_id_pattern': '%%feature_id%%',
'record_desc_pattern': '[%%genome_id%%]',
'case': 'upper',
'linewrap': 50
}
genome_refs = [input_many_ref]
#self.log(console,"callbackURL='"+self.callbackURL+"'") # DEBUG
#SERVICE_VER = 'release'
SERVICE_VER = 'beta'
DOTFU = KBaseDataObjectToFileUtils(url=self.callbackURL, token=ctx['token'], service_ver=SERVICE_VER)
AnnotatedMetagenomeAssemblyToFASTA_retVal = DOTFU.AnnotatedMetagenomeAssemblyToFASTA (AnnotatedMetagenomeAssemblyToFASTA_params)
many_forward_reads_file_path = AnnotatedMetagenomeAssemblyToFASTA_retVal['fasta_file_path']
feature_ids = AnnotatedMetagenomeAssemblyToFASTA_retVal['feature_ids']
if len(feature_ids) > 0:
appropriate_sequence_found_in_many_input = True
# DEBUG
#end_time = (datetime.utcnow() - datetime.utcfromtimestamp(0)).total_seconds()
#self.log(console, "Genome2Fasta() took "+str(end_time-beg_time)+" secs")
# Missing proper input_many_type
#
else:
raise ValueError('Cannot yet handle input_many type of: ' + many_type_name)
# Get total number of sequences in input_many search db
#
# seq_total is reported per-MSA in the text report; it counts the records
# written into the search FASTA for each supported input type.
seq_total = 0
if many_type_name == 'SequenceSet':
    seq_total = len(input_many_sequenceSet['sequences'])
elif many_type_name == 'FeatureSet':
    seq_total = len(input_many_featureSet['elements'].keys())
elif many_type_name == 'Genome' or many_type_name == 'AnnotatedMetagenomeAssembly':
    seq_total = len(feature_ids)
elif many_type_name == 'GenomeSet':
    # feature_ids_by_genome_id: genome_id -> list of feature ids (set above)
    for genome_id in feature_ids_by_genome_id.keys():
        seq_total += len(feature_ids_by_genome_id[genome_id])
#### Get the input_msa_refs
##
# If the caller supplied explicit MSA refs, use them; otherwise fall back
# to listing every KBaseTrees.MSA object in the workspace given by ws_id.
if 'input_msa_refs' in params and len(params['input_msa_refs']) != 0:
    input_msa_refs = params['input_msa_refs']
else:
    input_msa_refs = []
    ws = Workspace(self.workspaceURL, token=ctx['token'])
    try:
        msa_obj_info_list = ws.list_objects({'ids': [ws_id], 'type': "KBaseTrees.MSA"})
    except Exception as e:
        raise ValueError("Unable to list MSA objects from workspace: " +
                         str(params['workspace_name']) + " " + str(e))
    for info in msa_obj_info_list:
        # index constants into the workspace object_info tuple
        [OBJID_I, NAME_I, TYPE_I, SAVE_DATE_I, VERSION_I, SAVED_BY_I, WSID_I,
         WORKSPACE_I, CHSUM_I, SIZE_I, META_I] = range(11)  # object_info tuple
        # build a fully-qualified, versioned ref: <wsid>/<objid>/<version>
        input_msa_ref = str(info[WSID_I]) + '/' + str(info[OBJID_I]) + '/' + str(info[VERSION_I])
        input_msa_refs.append(input_msa_ref)
#### write the MSAs to file and collect names and desc
##
input_msa_names = []
input_msa_descs = []
appropriate_sequence_found_in_MSA_input = False
msa_needs_skipping = False
keep_msa = []
msa_invalid_msgs = []
for msa_i, input_msa_ref in enumerate(input_msa_refs):
keep_msa.append(False)
try:
ws = Workspace(self.workspaceURL, token=ctx['token'])
#objects = ws.get_objects([{'ref': input_msa_ref}])
objects = ws.get_objects2({'objects': [{'ref': input_msa_ref}]})['data']
input_msa_data = objects[0]['data']
info = objects[0]['info']
input_msa_name = str(info[1])
msa_type_name = info[2].split('.')[1].split('-')[0]
except Exception as e:
raise ValueError('Unable to fetch ' + input_msa_name + ' object from workspace: ' + str(e))
#to get the full stack trace: traceback.format_exc()
# set hmmer_dir
hmmer_dir = os.path.join(self.output_dir, input_msa_name)
if not os.path.exists(hmmer_dir):
os.makedirs(hmmer_dir)
if msa_type_name != 'MSA':
raise ValueError('Cannot yet handle input_msa type of: ' + msa_type_name)
else:
self.log(console, "\n\nPROCESSING MSA " + input_msa_name + "\n") # DEBUG
input_msa_names.append(input_msa_name)
MSA_in = input_msa_data
if 'description' in MSA_in and MSA_in['description'] != None and MSA_in['description'] != '':
input_msa_descs.append(MSA_in['description'])
else:
input_msa_descs.append(input_msa_name)
row_order = []
default_row_labels = dict()
if 'row_order' in MSA_in.keys():
row_order = MSA_in['row_order']
else:
row_order = sorted(MSA_in['alignment'].keys())
if 'default_row_labels' in MSA_in.keys():
default_row_labels = MSA_in['default_row_labels']
else:
for row_id in row_order:
default_row_labels[row_id] = row_id
# export features to CLUSTAL formatted MSA (HMMER BUILD seems to only take CLUSTAL)
input_MSA_file_path = os.path.join(hmmer_dir, input_msa_name + ".clustal")
self.log(console, 'writing MSA file: ' + input_MSA_file_path)
# set header
header = 'CLUSTAL W (1.81) multiple sequence alignment'
# get longest id
longest_row_id_len = 0
for row_id in row_order:
if len(row_id) > longest_row_id_len:
longest_row_id_len = len(row_id)
# make sure rows are all same length
row_id_0 = row_order[0]
row_len = len(MSA_in['alignment'][row_id_0])
for row_id in row_order:
if len(MSA_in['alignment'][row_id]) != row_len:
raise ValueError("MSA alignment rows are not constant length")
# get alignment line (just storing identity markers)
conservation_symbol = ''
for i in range(row_len):
first_seen_char = MSA_in['alignment'][row_id_0][i]
symbol = '*'
for row_id in row_order:
if MSA_in['alignment'][row_id][i] == '-' or MSA_in['alignment'][row_id][i] != first_seen_char:
symbol = ' '
break
conservation_symbol += symbol
# break up MSA into 60 char chunks
records = []
chunk_len = 60
whole_chunks = int(math.floor(row_len / chunk_len))
if whole_chunks > 0:
for j in range(whole_chunks):
records.append('')
for row_id in row_order:
padding = ''
if longest_row_id_len - len(row_id) > 0:
for i in range(0, longest_row_id_len - len(row_id)):
padding += ' '
records.append(row_id + padding + " " +
MSA_in['alignment'][row_id][j * chunk_len:(j + 1) * chunk_len])
records.append(''.join([' ' for s in range(longest_row_id_len)]) + " " +
conservation_symbol[j * chunk_len:(j + 1) * chunk_len])
# add final rows
if (row_len % chunk_len) != 0:
j = whole_chunks
records.append('')
for row_id in row_order:
padding = ''
if longest_row_id_len - len(row_id) > 0:
for i in range(0, longest_row_id_len - len(row_id)):
padding += ' '
records.append(row_id + padding + " " +
MSA_in['alignment'][row_id][j * chunk_len:row_len])
records.append(''.join([' ' for s in range(longest_row_id_len)]) + " " +
conservation_symbol[j * chunk_len:row_len])
# write that sucker
# NOTE(review): buffering=0 in text mode is a Python-2-only idiom; under
# Python 3, open(path, 'w', 0) raises ValueError ("can't have unbuffered
# text I/O") -- confirm target interpreter.
with open(input_MSA_file_path, 'w', 0) as input_MSA_file_handle:
    input_MSA_file_handle.write(header + "\n")
    input_MSA_file_handle.write("\n".join(records) + "\n")
# DEBUG
#report += "MSA:\n"
#report += header+"\n"
#report += "\n".join(records)+"\n"
#self.log(console,report)
# Determine whether nuc or protein sequences
#
self.log(console, "CHECKING MSA for PROTEIN seqs...") # DEBUG
(this_appropriate_sequence_found_in_MSA_input, these_msa_invalid_msgs) = \
self._check_MSA_sequence_type_correct(MSA_in, row_order, 'PROTEIN')
msa_invalid_msgs.extend(these_msa_invalid_msgs)
if this_appropriate_sequence_found_in_MSA_input:
keep_msa[msa_i] = True
appropriate_sequence_found_in_MSA_input = True
else:
keep_msa[msa_i] = False
msa_needs_skipping = True
self.log(msa_invalid_msgs, "no protein sequences found in '" + input_msa_name + "'")
# revise MSA lists to remove non-protein MSAs
# keep_msa[i] was set per-MSA above; here the ref/name/desc lists are
# compacted in lockstep so downstream loops only see protein MSAs.
if not appropriate_sequence_found_in_MSA_input:
    self.log(invalid_msgs, "no protein sequences found in any MSA")
    self.log(invalid_msgs, "\n".join(msa_invalid_msgs))
elif msa_needs_skipping:
    new_msa_refs = []
    new_msa_names = []
    new_msa_descs = []
    # NOTE(review): msa_i here is left over from the per-MSA loop above, so
    # this always names the *last* MSA processed rather than the one(s)
    # actually skipped -- probably belongs inside the loop below; confirm.
    self.log(console, "SKIPPING non-protein MSA " + input_msa_names[msa_i])
    self.log(console, "\n".join(msa_invalid_msgs))
    for msa_i, msa_ref in enumerate(input_msa_refs):
        if keep_msa[msa_i]:
            new_msa_refs.append(input_msa_refs[msa_i])
            new_msa_names.append(input_msa_names[msa_i])
            new_msa_descs.append(input_msa_descs[msa_i])
    input_msa_refs = new_msa_refs
    input_msa_names = new_msa_names
    input_msa_descs = new_msa_descs
# check for failed input file creation
#
# if not appropriate_sequence_found_in_one_input:
# self.log(invalid_msgs,"no protein sequences found in '"+input_one_name+"'")
if not appropriate_sequence_found_in_many_input:
self.log(invalid_msgs, "no protein sequences found in '" + input_many_name + "'")
# input data failed validation. Need to return
#
if len(invalid_msgs) > 0:
# load the method provenance from the context object
#
self.log(console, "SETTING PROVENANCE") # DEBUG
provenance = [{}]
if 'provenance' in ctx:
provenance = ctx['provenance']
# add additional info to provenance here, in this case the input data object reference
provenance[0]['input_ws_objects'] = []
# provenance[0]['input_ws_objects'].append(input_one_ref)
provenance[0]['input_ws_objects'].append(input_many_ref)
for input_msa_ref in input_msa_refs:
provenance[0]['input_ws_objects'].append(input_msa_ref)
provenance[0]['service'] = 'kb_hmmer'
provenance[0]['method'] = search_tool_name + '_Search'
# build output report object
#
self.log(console, "BUILDING REPORT") # DEBUG
report += "FAILURE:\n\n" + "\n".join(invalid_msgs) + "\n"
reportObj = {
'objects_created': [],
'text_message': report
}
reportName = 'hmmer_report_' + str(uuid.uuid4())
ws = Workspace(self.workspaceURL, token=ctx['token'])
report_obj_info = ws.save_objects({
#'id':info[6],
'workspace': params['workspace_name'],
'objects': [
{
'type': 'KBaseReport.Report',
'data': reportObj,
'name': reportName,
'meta': {},
'hidden': 1,
'provenance': provenance # DEBUG
}
]
})[0]
self.log(console, "BUILDING RETURN OBJECT")
returnVal = {'report_name': reportName,
'report_ref': str(report_obj_info[6]) + '/' + str(report_obj_info[0]) + '/' + str(report_obj_info[4]),
}
self.log(console, search_tool_name + "_Search DONE")
return [returnVal]
#### iterate through MSAs and scan input_many DBs
##
total_hit_cnts = []
accepted_hit_cnts = []
output_hit_TAB_file_paths = []
output_hit_MSA_file_paths = []
output_filtered_fasta_file_paths = []
output_hits_flags = []
objects_created_refs = []
coalesced_sequenceObjs = []
coalesce_featureIds_element_ordering = []
coalesce_featureIds_genome_ordering = []
html_report_chunks = []
hit_cnt_by_genome_and_model = dict()
model_len = []
for msa_i, input_msa_ref in enumerate(input_msa_refs):
# init hit counts
total_hit_cnts.append(0)
accepted_hit_cnts.append(0)
html_report_chunks.append(None)
### set paths
#
input_msa_name = input_msa_names[msa_i]
hmmer_dir = os.path.join(self.output_dir, input_msa_name) # this must match above
input_MSA_file_path = os.path.join(hmmer_dir, input_msa_name + ".clustal")
#output_aln_file_path = os.path.join(hmmer_dir, input_msa_name+'.alnout.txt');
#output_extra_file_path = os.path.join(hmmer_dir, input_msa_name+'.alnout_extra.txt');
#output_filtered_fasta_file_path = os.path.join(hmmer_dir, input_msa_name+'.output_filtered.faa');
### Build HMM from MSA
#
# SYNTAX (from http://eddylab.org/software/hmmer3/3.1b2/Userguide.pdf)
#
# hmmbuild --informat fasta <hmmfile.out> <msafile>
#
hmmer_build_bin = self.HMMER_BUILD
hmmer_build_cmd = [hmmer_build_bin]
# check for necessary files
if not os.path.isfile(hmmer_build_bin):
raise ValueError("no such file '" + hmmer_build_bin + "'")
if not os.path.isfile(input_MSA_file_path):
raise ValueError("no such file '" + input_MSA_file_path + "'")
elif not os.path.getsize(input_MSA_file_path) > 0:
raise ValueError("empty file '" + input_MSA_file_path + "'")
HMM_file_path = input_MSA_file_path + ".HMM"
hmmer_build_cmd.append('--informat')
hmmer_build_cmd.append('CLUSTAL')
hmmer_build_cmd.append(HMM_file_path)
hmmer_build_cmd.append(input_MSA_file_path)
# Run HMMER_BUILD, capture output as it happens
#
self.log(console, 'RUNNING HMMER_BUILD:')
self.log(console, ' ' + ' '.join(hmmer_build_cmd))
#report += "\n"+'running HMMER_BUILD:'+"\n"
#report += ' '+' '.join(hmmer_build_cmd)+"\n"
p = subprocess.Popen(hmmer_build_cmd,
cwd=self.output_dir,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
shell=False)
while True:
line = p.stdout.readline()
if not line:
break
#self.log(console, line.replace('\n', ''))
p.stdout.close()
p.wait()
self.log(console, 'return code: ' + str(p.returncode))
if p.returncode != 0:
raise ValueError('Error running HMMER_BUILD, return code: ' + str(p.returncode) +
'\n\n' + '\n'.join(console))
# Check for HMM output
if not os.path.isfile(HMM_file_path):
raise ValueError("HMMER_BUILD failed to create HMM file '" + HMM_file_path + "'")
elif not os.path.getsize(HMM_file_path) > 0:
raise ValueError("HMMER_BUILD created empty HMM file '" + HMM_file_path + "'")
# get model len
model_len.append(0)
with open (HMM_file_path, 'r') as HMM_handle:
for HMM_line in HMM_handle.readlines():
if HMM_line.startswith('LENG '):
model_len[msa_i] = int(HMM_line.replace('LENG ','').strip())
break
if model_len[msa_i] == 0:
raise ValueError ("No length found in HMM file")
### Construct the HMMER_SEARCH command
#
# SYNTAX (from http://eddylab.org/software/hmmer3/3.1b2/Userguide.pdf)
#
# hmmsearch --tblout <TAB_out> -A <MSA_out> --noali --notextw -E <e_value> -T <bit_score> <hmmfile> <seqdb>
#
hmmer_search_bin = self.HMMER_SEARCH
hmmer_search_cmd = [hmmer_search_bin]
# check for necessary files
if not os.path.isfile(hmmer_search_bin):
raise ValueError("no such file '" + hmmer_search_bin + "'")
if not os.path.isfile(HMM_file_path):
raise ValueError("no such file '" + HMM_file_path + "'")
elif not os.path.getsize(HMM_file_path):
raise ValueError("empty file '" + HMM_file_path + "'")
if not os.path.isfile(many_forward_reads_file_path):
raise ValueError("no such file '" + many_forward_reads_file_path + "'")
elif not os.path.getsize(many_forward_reads_file_path):
raise ValueError("empty file '" + many_forward_reads_file_path + "'")
output_hit_TAB_file_path = os.path.join(hmmer_dir, input_msa_name + '.hitout.txt')
output_hit_MSA_file_path = os.path.join(hmmer_dir, input_msa_name + '.msaout.txt')
output_filtered_fasta_file_path = os.path.join(hmmer_dir, input_msa_name + '.output_filtered.fasta')
output_hit_TAB_file_paths.append(output_hit_TAB_file_path)
output_hit_MSA_file_paths.append(output_hit_MSA_file_path)
output_filtered_fasta_file_paths.append(output_filtered_fasta_file_path)
# this is command for basic search mode
hmmer_search_cmd.append('--tblout')
hmmer_search_cmd.append(output_hit_TAB_file_path)
hmmer_search_cmd.append('-A')
hmmer_search_cmd.append(output_hit_MSA_file_path)
hmmer_search_cmd.append('--noali')
hmmer_search_cmd.append('--notextw')
hmmer_search_cmd.append('-E') # can't use -T with -E, so we'll use -E
hmmer_search_cmd.append(str(params['e_value']))
hmmer_search_cmd.append(HMM_file_path)
hmmer_search_cmd.append(many_forward_reads_file_path)
# options
#if 'maxaccepts' in params:
# if params['maxaccepts']:
# hmmer_search_cmd.append('-max_target_seqs')
# hmmer_search_cmd.append(str(params['maxaccepts']))
# Run HMMER, capture output as it happens
#
self.log(console, 'RUNNING HMMER_SEARCH:')
self.log(console, ' ' + ' '.join(hmmer_search_cmd))
#report += "\n"+'running HMMER_SEARCH:'+"\n"
#report += ' '+' '.join(hmmer_search_cmd)+"\n"
p = subprocess.Popen(hmmer_search_cmd,
cwd=self.output_dir,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
shell=False)
while True:
line = p.stdout.readline()
if not line:
break
#self.log(console, line.replace('\n', ''))
p.stdout.close()
p.wait()
self.log(console, 'return code: ' + str(p.returncode))
if p.returncode != 0:
raise ValueError('Error running HMMER_SEARCH, return code: ' + str(p.returncode) +
'\n\n' + '\n'.join(console))
# Check for output
if not os.path.isfile(output_hit_TAB_file_path):
raise ValueError("HMMER_SEARCH failed to create TAB file '" + output_hit_TAB_file_path + "'")
elif not os.path.getsize(output_hit_TAB_file_path) > 0:
raise ValueError("HMMER_SEARCH created empty TAB file '" + output_hit_TAB_file_path + "'")
if not os.path.isfile(output_hit_MSA_file_path):
raise ValueError("HMMER_SEARCH failed to create MSA file '" + output_hit_MSA_file_path + "'")
elif not os.path.getsize(output_hit_MSA_file_path) > 0:
#raise ValueError("HMMER_SEARCH created empty MSA file '"+output_hit_MSA_file_path+"'")
self.log(console, "HMMER_SEARCH created empty MSA file '" + output_hit_MSA_file_path + "'")
objects_created_refs.append(None)
continue
# DEBUG
#self.log(console, "DEBUG: output_hit_TAB_file_path: '"+str(output_hit_TAB_file_path))
#self.log(console, "DEBUG: output_hit_MSA_file_path: '"+str(output_hit_MSA_file_path))
#report = "TAB:\n\n"
#with open (output_hit_TAB_file_path, 'r') as output_handle:
# for line in output_handle:
# report += line+"\n"
#report += "\n\nMSA:\n\n"
#with open (output_hit_MSA_file_path, 'r') as output_handle:
# for line in output_handle:
# report += line+"\n"
#self.log(console, report)
# Get hit beg and end positions from Stockholm format MSA output
#
# Stockholm '#=GS' per-sequence annotation lines look like
# '#=GS <seq_id>/<beg>-<end> ...'; the id and coordinate range are
# recovered with regexes.  When the same hit id appears more than once
# (multiple aligned domains), only the longest aligned span is kept.
self.log(console, 'PARSING HMMER SEARCH MSA OUTPUT')
hit_beg = dict()           # hit_id -> start coord of longest aligned span
hit_end = dict()           # hit_id -> end coord of longest aligned span
longest_alnlen = dict()    # hit_id -> length of longest aligned span
# NOTE(review): buffering=0 with text mode is Python-2-only -- raises
# ValueError on Python 3; confirm target interpreter.
with open(output_hit_MSA_file_path, 'r', 0) as output_hit_MSA_file_handle:
    for MSA_out_line in output_hit_MSA_file_handle.readlines():
        MSA_out_line = MSA_out_line.strip()
        if MSA_out_line.startswith('#=GS '):
            hit_rec = re.sub('#=GS ', '', MSA_out_line)
            hit_rec = re.sub('\s+.*?$', '', hit_rec)   # drop trailing annotation
            hit_range = re.sub('^.*\/', '', hit_rec)   # '<beg>-<end>'
            hit_id = re.sub('\/[^\/]+$', '', hit_rec)  # id without the coord suffix
            (beg_str, end_str) = hit_range.split('-')
            beg = int(beg_str)
            end = int(end_str)
            this_alnlen = abs(end - beg) + 1
            if hit_id in hit_beg:
                # already seen: keep coords only if this span is longer
                if this_alnlen > longest_alnlen[hit_id]:
                    hit_beg[hit_id] = int(beg_str)
                    hit_end[hit_id] = int(end_str)
                    longest_alnlen[hit_id] = this_alnlen
            else:
                hit_beg[hit_id] = int(beg_str)
                hit_end[hit_id] = int(end_str)
                longest_alnlen[hit_id] = this_alnlen
# Measure length of hit sequences
#
self.log(console, 'MEASURING HIT GENES LENGTHS')
hit_seq_len = dict()
with open(many_forward_reads_file_path, 'r', 0) as many_forward_reads_file_handle:
last_id = None
last_buf = ''
for fasta_line in many_forward_reads_file_handle.readlines():
fasta_line = fasta_line.strip()
if fasta_line.startswith('>'):
if last_id != None:
id_untrans = last_id
id_trans = re.sub('\|', ':', id_untrans)
#if id_untrans in hit_order or id_trans in hit_order:
if id_untrans in hit_beg or id_trans in hit_beg:
hit_seq_len[last_id] = len(last_buf)
header = re.sub('^>', '', fasta_line)
last_id = re.sub('\s+.*?$', '', header)
last_buf = ''
else:
last_buf += fasta_line
if last_id != None:
id_untrans = last_id
id_trans = re.sub('\|', ':', id_untrans)
#if id_untrans in hit_order or id_trans in hit_order:
if id_untrans in hit_beg or id_trans in hit_beg:
hit_seq_len[last_id] = len(last_buf)
### Parse the HMMER tabular output and store ids to filter many set to make filtered object to save back to KBase
#
self.log(console, 'PARSING HMMER SEARCH TAB OUTPUT')
hit_seq_ids = dict()
accept_fids = dict()
output_hit_TAB_file_handle = open(output_hit_TAB_file_path, "r", 0)
output_aln_buf = output_hit_TAB_file_handle.readlines()
output_hit_TAB_file_handle.close()
accepted_hit_cnt = 0
high_bitscore_line = dict()
high_bitscore_score = dict()
#high_bitscore_ident = dict()
#longest_alnlen = dict()
hit_order = []
hit_buf = []
hit_accept_something = False
#header_done = False
for line in output_aln_buf:
if line.startswith('#'):
#if not header_done:
# hit_buf.append(line)
continue
#header_done = True
#self.log(console,'HIT LINE: '+line) # DEBUG
hit_info = re.split('\s+', line)
hit_seq_id = hit_info[0]
hit_accession = hit_info[1]
query_name = hit_info[2]
query_accession = hit_info[3]
hit_e_value = float(hit_info[4])
hit_bitscore = float(hit_info[5])
hit_bias = float(hit_info[6])
hit_e_value_best_dom = float(hit_info[7])
hit_bitscore_best_dom = float(hit_info[8])
hit_bias_best_dom = float(hit_info[9])
hit_expected_dom_n = float(hit_info[10])
hit_regions = float(hit_info[11])
hit_regions_multidom = float(hit_info[12])
hit_overlaps = float(hit_info[13])
hit_envelopes = float(hit_info[14])
hit_dom_n = float(hit_info[15])
hit_doms_within_rep_thresh = float(hit_info[16])
hit_doms_within_inc_thresh = float(hit_info[17])
hit_desc = hit_info[18]
try:
if hit_bitscore > high_bitscore_score[hit_seq_id]:
high_bitscore_score[hit_seq_id] = hit_bitscore
high_bitscore_line[hit_seq_id] = line
except:
if hit_seq_id in hit_seq_len:
hit_order.append(hit_seq_id)
high_bitscore_score[hit_seq_id] = hit_bitscore
high_bitscore_line[hit_seq_id] = line
else:
self.log(console, "ALERT!!! HIT "+hit_seq_id+" not found in MSA alignment and is likely a very weak hit (E-value is "+str(hit_e_value)+" and bitscore is "+str(hit_bitscore)+". SKIPPING HIT.")
# apply acceptance filters to the best-scoring record of each hit
filtering_fields = dict()   # hit_seq_id -> {<filter_name>: True} (report markup)
total_hit_cnts[msa_i] = len(hit_order)
for hit_seq_id in hit_order:
    hit_buf.append(high_bitscore_line[hit_seq_id])
    filtering_fields[hit_seq_id] = dict()
    filter = False   # NOTE(review): shadows the 'filter' builtin
    #self.log(console,"HIT_SEQ_ID: '"+hit_seq_id+"'")
    #if 'ident_thresh' in params and float(params['ident_thresh']) > float(high_bitscore_ident[hit_seq_id]):
    # continue
    # drop hits below the requested bit score
    if 'bitscore' in params and float(params['bitscore']) > float(high_bitscore_score[hit_seq_id]):
        filter = True
        filtering_fields[hit_seq_id]['bitscore'] = True
    # drop hits whose longest aligned span covers too little of the model
    if 'model_cov_perc' in params and float(params['model_cov_perc']) > 100.0 * float(longest_alnlen[hit_seq_id]) / float(model_len[msa_i]):
        filter = True
        filtering_fields[hit_seq_id]['model_cov_perc'] = True
    # stop accepting once the maxaccepts quota has been reached
    if 'maxaccepts' in params and params['maxaccepts'] != None and accepted_hit_cnt == int(params['maxaccepts']):
        filter = True
        filtering_fields[hit_seq_id]['maxaccepts'] = True
    if filter:
        continue
    hit_accept_something = True
    accepted_hit_cnt += 1
    hit_seq_ids[hit_seq_id] = True
    self.log(console, "HIT: '" + hit_seq_id + "'")  # DEBUG
    # capture accepted hit count by genome_ref and model
    if many_type_name == 'Genome' or many_type_name == 'AnnotatedMetagenomeAssembly':
        # single-object inputs: every hit belongs to the input object
        genome_ref = input_many_ref
    else:
        # multi-genome inputs: hit id is '<genome_ref><delim><feature_id>'
        genome_ref = hit_seq_id.split(genome_id_feature_id_delim)[0]
    self.log(console, "DEBUG: genome_ref: '" + str(genome_ref) + "'")
    self.log(console, "DEBUG: input_msa_name: '" + str(input_msa_name) + "'")
    if genome_ref not in hit_cnt_by_genome_and_model:
        hit_cnt_by_genome_and_model[genome_ref] = dict()
    if input_msa_name not in hit_cnt_by_genome_and_model[genome_ref]:
        hit_cnt_by_genome_and_model[genome_ref][input_msa_name] = 0
    hit_cnt_by_genome_and_model[genome_ref][input_msa_name] += 1
    # DEBUG
    self.log(console, "DEBUG: incrementing hit count for "+genome_ref+" MODEL: "+input_msa_name)
accepted_hit_cnts[msa_i] = accepted_hit_cnt
#
### Create output objects
#
if accepted_hit_cnt == 0:
self.log(console, 'THERE WERE NO ACCEPTED HITS. NOT BUILDING OUTPUT OBJECT')
else:
self.log(console, 'EXTRACTING ACCEPTED HITS FROM INPUT')
self.log(console, 'MANY_TYPE_NAME: ' + many_type_name) # DEBUG
# SequenceSet input -> SequenceSet output
#
# Build a filtered KBaseSequences.SequenceSet holding only the accepted
# hits.  The id/description of the input set are carried over with a
# search-tool suffix when present.
if many_type_name == 'SequenceSet':
    output_sequenceSet = dict()
    if 'sequence_set_id' in input_many_sequenceSet and input_many_sequenceSet['sequence_set_id'] is not None:
        output_sequenceSet['sequence_set_id'] = input_many_sequenceSet['sequence_set_id'] + \
            "." + search_tool_name + "_Search_filtered"
    else:
        output_sequenceSet['sequence_set_id'] = search_tool_name + "_Search_filtered"
    if 'description' in input_many_sequenceSet and input_many_sequenceSet['description'] is not None:
        output_sequenceSet['description'] = input_many_sequenceSet['description'] + \
            " - " + search_tool_name + "_Search filtered"
    else:
        # BUGFIX: was 'search_tool_anme' -- a NameError whenever the input
        # set carried no description
        output_sequenceSet['description'] = search_tool_name + "_Search filtered"
    self.log(console, "ADDING SEQUENCES TO SEQUENCESET")
    output_sequenceSet['sequences'] = []
    for seq_obj in input_many_sequenceSet['sequences']:
        header_id = seq_obj['sequence_id']
        #header_desc = seq_obj['description']
        #sequence_str = seq_obj['sequence']
        # hit ids may have had '|' translated to ':'; test both forms
        id_untrans = header_id
        id_trans = re.sub(r'\|', ':', id_untrans)
        if id_trans in hit_seq_ids or id_untrans in hit_seq_ids:
            #self.log(console, 'FOUND HIT '+header_id) # DEBUG
            accept_fids[id_untrans] = True
            output_sequenceSet['sequences'].append(seq_obj)
# FeatureSet input -> FeatureSet output
#
elif many_type_name == 'FeatureSet':
output_featureSet = dict()
if 'description' in input_many_featureSet and input_many_featureSet['description'] != None:
output_featureSet['description'] = input_many_featureSet['description'] + \
" - " + search_tool_name + "_Search filtered"
else:
output_featureSet['description'] = search_tool_name + "_Search filtered"
output_featureSet['element_ordering'] = []
output_featureSet['elements'] = dict()
fId_list = input_many_featureSet['elements'].keys()
self.log(console, "ADDING FEATURES TO FEATURESET")
for fId in sorted(fId_list):
for genome_ref in input_many_featureSet['elements'][fId]:
id_untrans = genome_ref + genome_id_feature_id_delim + fId
id_trans = re.sub('\|', ':', id_untrans)
if id_trans in hit_seq_ids or id_untrans in hit_seq_ids:
#self.log(console, 'FOUND HIT '+fId) # DEBUG
accept_fids[id_untrans] = True
#fId = id_untrans # don't change fId for output FeatureSet
try:
this_genome_ref_list = output_featureSet['elements'][fId]
except:
output_featureSet['elements'][fId] = []
output_featureSet['element_ordering'].append(fId)
output_featureSet['elements'][fId].append(genome_ref)
# Parse Genome hits into FeatureSet
#
elif many_type_name == 'Genome':
output_featureSet = dict()
# if 'scientific_name' in input_many_genome and input_many_genome['scientific_name'] != None:
# output_featureSet['description'] = input_many_genome['scientific_name'] + " - "+search_tool_name+"_Search filtered"
# else:
# output_featureSet['description'] = search_tool_name+"_Search filtered"
output_featureSet['description'] = search_tool_name + "_Search filtered"
output_featureSet['element_ordering'] = []
output_featureSet['elements'] = dict()
for fid in feature_ids:
id_untrans = fid
id_trans = re.sub('\|', ':', id_untrans)
if id_trans in hit_seq_ids or id_untrans in hit_seq_ids:
#self.log(console, 'FOUND HIT '+fid) # DEBUG
#output_featureSet['element_ordering'].append(fid)
accept_fids[id_untrans] = True
#fid = input_many_ref+genome_id_feature_id_delim+id_untrans # don't change fId for output FeatureSet
output_featureSet['element_ordering'].append(fid)
output_featureSet['elements'][fid] = [input_many_ref]
# Parse GenomeSet hits into FeatureSet
#
elif many_type_name == 'GenomeSet':
output_featureSet = dict()
if 'description' in input_many_genomeSet and input_many_genomeSet['description'] != None:
output_featureSet['description'] = input_many_genomeSet['description'] + \
" - " + search_tool_name + "_Search filtered"
else:
output_featureSet['description'] = search_tool_name + "_Search filtered"
output_featureSet['element_ordering'] = []
output_featureSet['elements'] = dict()
self.log(console, "READING HITS FOR GENOMES") # DEBUG
for genome_id in feature_ids_by_genome_id.keys():
self.log(console, "READING HITS FOR GENOME " + genome_id) # DEBUG
genome_ref = input_many_genomeSet['elements'][genome_id]['ref']
for feature_id in feature_ids_by_genome_id[genome_id]:
id_untrans = genome_ref + genome_id_feature_id_delim + feature_id
id_trans = re.sub('\|', ':', id_untrans)
if id_trans in hit_seq_ids or id_untrans in hit_seq_ids:
#self.log(console, 'FOUND HIT: '+feature['id']) # DEBUG
#output_featureSet['element_ordering'].append(feature['id'])
accept_fids[id_untrans] = True
#feature_id = id_untrans # don't change fId for output FeatureSet
try:
this_genome_ref_list = output_featureSet['elements'][feature_id]
except:
output_featureSet['elements'][feature_id] = []
output_featureSet['element_ordering'].append(feature_id)
output_featureSet['elements'][feature_id].append(genome_ref)
# Parse AnnotatedMetagenomeAssembly hits into FeatureSet
#
elif many_type_name == 'AnnotatedMetagenomeAssembly':
seq_total = 0
output_featureSet = dict()
# if 'scientific_name' in input_many_genome and input_many_genome['scientific_name'] != None:
# output_featureSet['description'] = input_many_genome['scientific_name'] + " - "+search_tool_name+"_Search filtered"
# else:
# output_featureSet['description'] = search_tool_name+"_Search filtered"
output_featureSet['description'] = search_tool_name+"_Search filtered"
output_featureSet['element_ordering'] = []
output_featureSet['elements'] = dict()
for fid in feature_ids:
#if fid == 'AWN69_RS07145' or fid == 'AWN69_RS13375':
# self.log(console, 'CHECKING FID '+fid) # DEBUG
seq_total += 1
id_untrans = fid
id_trans = re.sub ('\|',':',id_untrans)
#print ("TESTING FEATURES: ID_UNTRANS: '"+id_untrans+"'") # DEBUG
#print ("TESTING FEATURES: ID_TRANS: '"+id_trans+"'") # DEBUG
if id_trans in hit_seq_ids or id_untrans in hit_seq_ids:
self.log(console, 'FOUND HIT '+fid) # DEBUG
#output_featureSet['element_ordering'].append(fid)
accept_fids[id_untrans] = True
#fid = input_many_ref+self.genome_id_feature_id_delim+id_untrans # don't change fId for output FeatureSet
ama_ref = params['input_many_ref']
output_featureSet['element_ordering'].append(fid)
output_featureSet['elements'][fid] = [ama_ref]
# load the method provenance from the context object
#
self.log(console, "SETTING PROVENANCE") # DEBUG
provenance = [{}]
if 'provenance' in ctx:
provenance = ctx['provenance']
# add additional info to provenance here, in this case the input data object reference
provenance[0]['input_ws_objects'] = []
# provenance[0]['input_ws_objects'].append(input_one_ref)
provenance[0]['input_ws_objects'].append(input_msa_ref)
provenance[0]['input_ws_objects'].append(input_many_ref)
provenance[0]['service'] = 'kb_blast'
provenance[0]['method'] = search_tool_name + '_Search'
### Create output object
#
if 'coalesce_output' in params and int(params['coalesce_output']) == 1: # This is broken, but also never should have been offered and is now disabled.
if len(invalid_msgs) == 0:
if len(hit_seq_ids.keys()) == 0: # Note, this is after filtering, so there may be more unfiltered hits
self.log(console, "No Object to Upload for MSA " + input_msa_name) # DEBUG
objects_created_refs.append(None)
continue
# accumulate hits into coalesce object
#
if many_type_name == 'SequenceSet': # input many SequenceSet -> save SequenceSet
for seq_obj in output_sequenceSet['sequences']:
coalesced_sequenceObjs.append(seq_obj)
else: # input FeatureSet, Genome, and GenomeSet -> upload FeatureSet output
for fId in output_featureSet['element_ordering']:
coalesce_featureIds_element_ordering.append(fId)
#coalesce_featureIds_genome_ordering.append(output_featureSet['elements'][fId][0])
for this_genome_ref in output_featureSet['elements'][fId]:
coalesce_featureIds_genome_ordering.append(this_genome_ref)
else: # keep output separate Upload results if coalesce_output is 0
output_name = input_msa_name + '-' + params['output_filtered_name']
if len(invalid_msgs) == 0:
if len(hit_seq_ids.keys()) == 0: # Note, this is after filtering, so there may be more unfiltered hits
self.log(console, "No Object to Upload for MSA " + input_msa_name) # DEBUG
objects_created_refs.append(None)
continue
self.log(console, "Uploading results Object MSA " + input_msa_name) # DEBUG
# input many SequenceSet -> save SequenceSet
#
if many_type_name == 'SequenceSet':
new_obj_info = ws.save_objects({
'workspace': params['workspace_name'],
'objects': [{
'type': 'KBaseSequences.SequenceSet',
'data': output_sequenceSet,
'name': output_name,
'meta': {},
'provenance': provenance
}]
})[0]
else: # input FeatureSet, Genome, and GenomeSet -> upload FeatureSet output
new_obj_info = ws.save_objects({
'workspace': params['workspace_name'],
'objects': [{
'type': 'KBaseCollections.FeatureSet',
'data': output_featureSet,
'name': output_name,
'meta': {},
'provenance': provenance
}]
})[0]
[OBJID_I, NAME_I, TYPE_I, SAVE_DATE_I, VERSION_I, SAVED_BY_I, WSID_I,
WORKSPACE_I, CHSUM_I, SIZE_I, META_I] = range(11) # object_info tuple
objects_created_refs.append(str(new_obj_info[WSID_I]) + '/' + str(new_obj_info[OBJID_I]))
#### Build output report chunks
##
self.log(console, "BUILDING REPORT CHUNK for MSA[" + str(msa_i) + "] " + input_msa_names[msa_i]) # DEBUG
if len(invalid_msgs) == 0:
# text report
#
report += 'MSA[' + str(msa_i) + ']: ' + input_msa_names[msa_i] + "\n"
report += 'sequences in search db: ' + str(seq_total) + "\n"
report += 'sequences in hit set: ' + str(total_hit_cnts[msa_i]) + "\n"
report += 'sequences in accepted hit set: ' + str(accepted_hit_cnts[msa_i]) + "\n"
report += "\n"
#for line in hit_buf:
# report += line
self.log(console, report)
# build html report chunk
if many_type_name == 'Genome':
feature_id_to_function = GenomeToFASTA_retVal['feature_id_to_function']
genome_ref_to_obj_name = GenomeToFASTA_retVal['genome_ref_to_obj_name']
genome_ref_to_sci_name = GenomeToFASTA_retVal['genome_ref_to_sci_name']
elif many_type_name == 'GenomeSet':
feature_id_to_function = GenomeSetToFASTA_retVal['feature_id_to_function']
genome_ref_to_obj_name = GenomeSetToFASTA_retVal['genome_ref_to_obj_name']
genome_ref_to_sci_name = GenomeSetToFASTA_retVal['genome_ref_to_sci_name']
elif many_type_name == 'FeatureSet':
feature_id_to_function = FeatureSetToFASTA_retVal['feature_id_to_function']
genome_ref_to_obj_name = FeatureSetToFASTA_retVal['genome_ref_to_obj_name']
genome_ref_to_sci_name = FeatureSetToFASTA_retVal['genome_ref_to_sci_name']
elif many_type_name == 'AnnotatedMetagenomeAssembly':
feature_id_to_function = AnnotatedMetagenomeAssemblyToFASTA_retVal['feature_id_to_function']
ama_ref_to_obj_name = AnnotatedMetagenomeAssemblyToFASTA_retVal['ama_ref_to_obj_name']
head_color = "#eeeeff"
border_head_color = "#ffccff"
accept_row_color = 'white'
#reject_row_color = '#ffeeee'
reject_row_color = '#eeeeee'
reject_cell_color = '#ffcccc'
text_fontsize = "2"
text_color = '#606060'
border_body_color = "#cccccc"
bar_width = 100
bar_height = 15
bar_color = "lightblue"
bar_line_color = "#cccccc"
bar_fontsize = "1"
bar_char = "."
cellpadding = "3"
cellspacing = "2"
border = "0"
html_report_chunk = []
for line in hit_buf:
line = line.strip()
if line == '' or line.startswith('#'):
continue
[hit_id, hit_accession, query_name, query_accession, e_value, bit_score, bias, e_value_best_dom, bit_score_best_dom, bias_best_dom, expected_dom_n,
regions, regions_multidom, overlaps, envelopes, dom_n, doms_within_rep_thresh, doms_within_inc_thresh, hit_desc] = re.split('\s+', line)[0:19]
# [query_id, hit_id, identity, aln_len, mismatches, gap_openings, q_beg, q_end, h_beg, h_end, e_value, bit_score] = line.split("\t")[0:12]
# identity = str(round(float(identity), 1))
# if identity == '100.0': identity = '100'
# get coords with respect to hit sequence
h_len = hit_seq_len[hit_id]
h_beg = hit_beg[hit_id]
h_end = hit_end[hit_id]
aln_len = abs(h_end - h_beg) + 1
aln_len_perc = round(100.0 * float(aln_len) / float(model_len[msa_i]), 1)
#if many_type_name == 'SingleEndLibrary':
# pass
#elif many_type_name == 'SequenceSet':
if many_type_name == 'SequenceSet':
pass
elif many_type_name == 'Genome' or \
many_type_name == 'AnnotatedMetagenomeAssembly' or \
many_type_name == 'GenomeSet' or \
many_type_name == 'FeatureSet':
if 'Set' in many_type_name:
[genome_ref, hit_fid] = hit_id.split(genome_id_feature_id_delim)
else:
genome_ref = input_many_ref
hit_fid = hit_id
# can't just use hit_fid because may have pipes translated and can't translate back
fid_lookup = None
for fid in feature_id_to_function[genome_ref].keys():
id_untrans = fid
id_trans = re.sub('\|', ':', id_untrans)
#self.log (console, "SCANNING FIDS. HIT_FID: '"+str(hit_fid)+"' FID: '"+str(fid)+"' TRANS: '"+str(id_trans)+"'") # DEBUG
if id_untrans == hit_fid or id_trans == hit_fid:
#self.log (console, "GOT ONE!") # DEBUG
if many_type_name == 'Genome' or many_type_name == 'AnnotatedMetagenomeAssembly':
accept_id = fid
elif many_type_name == 'GenomeSet' or many_type_name == 'FeatureSet':
accept_id = genome_ref + genome_id_feature_id_delim + fid
if accept_id in accept_fids:
row_color = accept_row_color
else:
row_color = reject_row_color
fid_lookup = fid
break
#self.log (console, "HIT_FID: '"+str(hit_fid)+"' FID_LOOKUP: '"+str(fid_lookup)+"'") # DEBUG
if fid_lookup == None:
raise ValueError("unable to find fid for hit_fid: '" + str(hit_fid))
elif fid_lookup not in feature_id_to_function[genome_ref]:
raise ValueError("unable to find function for fid: '" + str(fid_lookup))
fid_disp = re.sub(r"^.*\.([^\.]+)\.([^\.]+)$", r"\1.\2", fid_lookup)
func_disp = feature_id_to_function[genome_ref][fid_lookup]
# set genome_disp_name
if many_type_name == 'AnnotatedMetagenomeAssembly':
genome_disp_name = ama_ref_to_obj_name[genome_ref]
else:
genome_obj_name = genome_ref_to_obj_name[genome_ref]
genome_sci_name = genome_ref_to_sci_name[genome_ref]
[ws_id, obj_id, genome_obj_version] = genome_ref.split('/')
genome_disp_name = ''
if 'obj_name' in params['genome_disp_name_config']:
genome_disp_name += genome_obj_name
if 'ver' in params['genome_disp_name_config']:
genome_disp_name += '.v'+str(genome_obj_version)
if 'sci_name' in params['genome_disp_name_config']:
genome_disp_name += ': '+genome_sci_name
# build html report table line
html_report_chunk += ['<tr bgcolor="' + row_color + '">']
#html_report_chunk += ['<tr bgcolor="'+'white'+'">'] # DEBUG
# add overlap bar
# coverage graphic (with respect to hit seq)
html_report_chunk += ['<td valign=middle align=center style="border-right:solid 1px ' +
border_body_color + '; border-bottom:solid 1px ' + border_body_color + '">']
html_report_chunk += ['<table style="height:' + str(bar_height) + 'px; width:' + str(
bar_width) + 'px" border=0 cellpadding=0 cellspacing=0>']
full_len_pos = bar_width
aln_beg_pos = int(float(bar_width) * float(int(h_beg) - 1) / float(int(h_len) - 1))
aln_end_pos = int(float(bar_width) * float(int(h_end) - 1) / float(int(h_len) - 1))
cell_pix_height = str(int(round(float(bar_height) / 3.0, 0)))
cell_color = ['', '', '']
cell_width = []
cell_width.append(aln_beg_pos)
cell_width.append(aln_end_pos - aln_beg_pos)
cell_width.append(bar_width - aln_end_pos)
for row_i in range(3):
html_report_chunk += ['<tr style="height:' + cell_pix_height + 'px">']
unalign_color = row_color
if row_i == 1:
unalign_color = bar_line_color
cell_color[0] = unalign_color
cell_color[1] = bar_color
cell_color[2] = unalign_color
for col_i in range(3):
cell_pix_width = str(cell_width[col_i])
cell_pix_color = cell_color[col_i]
html_report_chunk += ['<td style="height:' + cell_pix_height +
'px; width:' + cell_pix_width + 'px" bgcolor="' + cell_pix_color + '"></td>']
html_report_chunk += ['</tr>']
html_report_chunk += ['</table>']
html_report_chunk += ['</td>']
# add other cells
# fid
html_report_chunk += ['<td style="border-right:solid 1px ' + border_body_color + '; border-bottom:solid 1px ' +
border_body_color + '"><font color="' + text_color + '" size=' + text_fontsize + '>' + str(fid_disp) + '</font></td>']
# html_report_chunk += ['<td style="border-right:solid 1px '+border_body_color+'; border-bottom:solid 1px '+border_body_color+'"><font color="'+text_color+'" size='+text_fontsize+'>'+str(hit_accession)+'</font></td>']
# func
html_report_chunk += ['<td style="border-right:solid 1px ' + border_body_color + '; border-bottom:solid 1px ' +
border_body_color + '"><font color="' + text_color + '" size=' + text_fontsize + '>' + func_disp + '</font></td>']
# genome name
html_report_chunk += ['<td style="border-right:solid 1px ' + border_body_color + '; border-bottom:solid 1px ' +
border_body_color + '"><font color="' + text_color + '" size=' + text_fontsize + '>' + genome_disp_name + '</font></td>']
# ident
# if 'ident_thresh' in filtering_fields[hit_id]:
# this_cell_color = reject_cell_color
# else:
# this_cell_color = row_color
# html_report_chunk += ['<td align=center bgcolor="'+this_cell_color+'" style="border-right:solid 1px '+border_body_color+'; border-bottom:solid 1px '+border_body_color+'"><font color="'+text_color+'" size='+text_fontsize+'>'+str(identity)+'%</font></td>']
# aln len
if 'model_cov_perc' in filtering_fields[hit_id]:
this_cell_color = reject_cell_color
else:
this_cell_color = row_color
html_report_chunk += ['<td align=center bgcolor="' + str(this_cell_color) + '" style="border-right:solid 1px ' + border_body_color + '; border-bottom:solid 1px ' +
border_body_color + '"><font color="' + text_color + '" size=' + text_fontsize + '>' + str(aln_len) + ' (' + str(aln_len_perc) + '%)</font></td>']
# evalue
html_report_chunk += ['<td align=center style="border-right:solid 1px ' + border_body_color + '; border-bottom:solid 1px ' +
border_body_color + '"><font color="' + text_color + '" size=' + text_fontsize + '><nobr>' + str(e_value) + '</nobr></font></td>']
# bit score
if 'bitscore' in filtering_fields[hit_id]:
this_cell_color = reject_cell_color
else:
this_cell_color = row_color
html_report_chunk += ['<td align=center bgcolor="' + str(this_cell_color) + '" style="border-right:solid 1px ' + border_body_color + '; border-bottom:solid 1px ' +
border_body_color + '"><font color="' + text_color + '" size=' + text_fontsize + '><nobr>' + str(bit_score) + '</nobr></font></td>']
# bias
# html_report_chunk += ['<td align=center style="border-right:solid 1px '+border_body_color+'; border-bottom:solid 1px '+border_body_color+'"><font color="'+text_color+'" size='+text_fontsize+'><nobr>'+str(bias)+'</nobr><br><nobr>('+str(bias_best_dom)+')</nobr></font></td>']
# aln coords only for hit seq
html_report_chunk += ['<td align=center style="border-right:solid 1px ' + border_body_color + '; border-bottom:solid 1px ' + border_body_color +
'"><font color="' + text_color + '" size=' + text_fontsize + '><nobr>' + str(h_beg) + '-' + str(h_end) + '</nobr></font></td>']
# close chunk
html_report_chunk += ['</tr>']
# attach chunk
if total_hit_cnts[msa_i] == 0:
self.log(console, "NO HITS FOR MSA[" + str(msa_i) + "] " +
input_msa_names[msa_i] + ". NOT ADDING TO HTML HIT REPORT.")
html_report_chunk_str = '<tr><td colspan=table_col_width><blockquote><i>no hits found</i></td></tr>'
else:
html_report_chunk_str = "\n".join(html_report_chunk)
html_report_chunks[msa_i] = html_report_chunk_str
#self.log(console, "HTML_REPORT_CHUNK: '"+str(html_report_chunk_str)+"'") # DEBUG
#### Create and Upload output objects if coalesce_output is true
##
if 'coalesce_output' in params and int(params['coalesce_output']) == 1:
output_name = params['output_filtered_name']
if len(invalid_msgs) == 0:
if not hit_accept_something:
self.log(console, "No Object to Upload for all MSAs") # DEBUG
else:
self.log(console, "Uploading results Object") # DEBUG
if many_type_name == 'SequenceSet': # input many SequenceSet -> save SequenceSet
output_sequenceSet['sequences'] = coalesced_sequenceObjs
new_obj_info = ws.save_objects({
'workspace': params['workspace_name'],
'objects': [{
'type': 'KBaseSequences.SequenceSet',
'data': output_sequenceSet,
'name': output_name,
'meta': {},
'provenance': provenance
}]
})[0]
else: # input FeatureSet, Genome, and GenomeSet -> upload FeatureSet output
output_featureSet['element_ordering'] = coalesce_featureIds_element_ordering
output_featureSet['elements'] = dict()
for f_i, fId in enumerate(output_featureSet['element_ordering']):
output_featureSet['elements'][fId] = []
output_featureSet['elements'][fId].append(coalesce_featureIds_genome_ordering[f_i])
new_obj_info = ws.save_objects({
'workspace': params['workspace_name'],
'objects': [{
'type': 'KBaseCollections.FeatureSet',
'data': output_featureSet,
'name': output_name,
'meta': {},
'provenance': provenance
}]
})[0]
[OBJID_I, NAME_I, TYPE_I, SAVE_DATE_I, VERSION_I, SAVED_BY_I, WSID_I,
WORKSPACE_I, CHSUM_I, SIZE_I, META_I] = range(11) # object_info tuple
objects_created_refs.append(str(new_obj_info[WSID_I]) + '/' + str(new_obj_info[OBJID_I]))
#### Set paths for output HTML
##
html_output_dir = os.path.join(self.output_dir, 'html_output')
if not os.path.exists(html_output_dir):
os.makedirs(html_output_dir)
html_search_file = search_tool_name + '_Search.html'
html_search_path = os.path.join(html_output_dir, html_search_file)
html_profile_file = search_tool_name + '_Profile.html'
html_profile_path = os.path.join(html_output_dir, html_profile_file)
#### Build Search output report (and assemble html chunks)
##
self.log(console, "BUILDING SEARCH REPORT ") # DEBUG
if len(invalid_msgs) == 0:
# build html report
if many_type_name == 'Genome':
feature_id_to_function = GenomeToFASTA_retVal['feature_id_to_function']
genome_ref_to_obj_name = GenomeToFASTA_retVal['genome_ref_to_obj_name']
genome_ref_to_sci_name = GenomeToFASTA_retVal['genome_ref_to_sci_name']
elif many_type_name == 'GenomeSet':
feature_id_to_function = GenomeSetToFASTA_retVal['feature_id_to_function']
genome_ref_to_obj_name = GenomeSetToFASTA_retVal['genome_ref_to_obj_name']
genome_ref_to_sci_name = GenomeSetToFASTA_retVal['genome_ref_to_sci_name']
elif many_type_name == 'FeatureSet':
feature_id_to_function = FeatureSetToFASTA_retVal['feature_id_to_function']
genome_ref_to_obj_name = FeatureSetToFASTA_retVal['genome_ref_to_obj_name']
genome_ref_to_sci_name = FeatureSetToFASTA_retVal['genome_ref_to_sci_name']
elif many_type_name == 'AnnotatedMetagenomeAssembly':
feature_id_to_function = AnnotatedMetagenomeAssemblyToFASTA_retVal['feature_id_to_function']
ama_ref_to_obj_name = AnnotatedMetagenomeAssemblyToFASTA_retVal['ama_ref_to_obj_name']
sp = ' '
head_color = "#eeeeff"
border_head_color = "#ffccff"
accept_row_color = 'white'
#reject_row_color = '#ffeeee'
reject_row_color = '#eeeeee'
reject_cell_color = '#ffcccc'
text_fontsize = "2"
text_color = '#606060'
header_tab_fontsize = "3"
header_tab_color = '#606060'
border_body_color = "#cccccc"
bar_width = 100
bar_height = 15
bar_color = "lightblue"
bar_line_color = "#cccccc"
bar_fontsize = "1"
bar_char = "."
cellpadding = "3"
cellspacing = "2"
border = "0"
table_col_width = 8
html_report_lines = []
html_report_lines += ['<html>']
html_report_lines += ['<head>']
html_report_lines += ['<title>KBase HMMER Custom Model Search Hits</title>']
html_report_lines += ['</head>']
html_report_lines += ['<body bgcolor="white">']
if many_type_name == 'GenomeSet':
html_report_lines += ['<a href="' + html_profile_file + '"><font color="' + header_tab_color + '" size=' + header_tab_fontsize +
'>TABULAR PROFILE</font></a> | <font color="' + header_tab_color + '" size=' + header_tab_fontsize + '><b>SEARCH HITS</b></font>']
html_report_lines += ['<p>']
html_report_lines += ['<table cellpadding=' + cellpadding +
' cellspacing = ' + cellspacing + ' border=' + border + '>']
html_report_lines += ['<tr bgcolor="' + head_color + '">']
html_report_lines += ['<td style="border-right:solid 2px ' + border_head_color + '; border-bottom:solid 2px ' + border_head_color +
'"><font color="' + text_color + '" size=' + text_fontsize + '>' + 'ALIGNMENT COVERAGE (HIT SEQ)' + '</font></td>']
html_report_lines += ['<td style="border-right:solid 2px ' + border_head_color + '; border-bottom:solid 2px ' +
border_head_color + '"><font color="' + text_color + '" size=' + text_fontsize + '>' + 'GENE ID' + '</font></td>']
html_report_lines += ['<td style="border-right:solid 2px ' + border_head_color + '; border-bottom:solid 2px ' +
border_head_color + '"><font color="' + text_color + '" size=' + text_fontsize + '>' + 'FUNCTION' + '</font></td>']
html_report_lines += ['<td style="border-right:solid 2px ' + border_head_color + '; border-bottom:solid 2px ' +
border_head_color + '"><font color="' + text_color + '" size=' + text_fontsize + '>' + 'GENOME' + '</font></td>']
# html_report_lines += ['<td align=center style="border-right:solid 2px '+border_head_color+'; border-bottom:solid 2px '+border_head_color+'"><font color="'+text_color+'" size='+text_fontsize+'>'+'IDENT'+'%</font></td>']
html_report_lines += ['<td align=center style="border-right:solid 2px ' + border_head_color + '; border-bottom:solid 2px ' +
border_head_color + '"><font color="' + text_color + '" size=' + text_fontsize + '>' + 'ALN_LEN' + '</font></td>']
html_report_lines += ['<td align=center style="border-right:solid 2px ' + border_head_color + '; border-bottom:solid 2px ' +
border_head_color + '"><font color="' + text_color + '" size=' + text_fontsize + '>' + 'E-VALUE' + '</font></td>']
html_report_lines += ['<td align=center style="border-right:solid 2px ' + border_head_color + '; border-bottom:solid 2px ' +
border_head_color + '"><font color="' + text_color + '" size=' + text_fontsize + '>' + 'BIT SCORE' + '</font></td>']
html_report_lines += ['<td align=center style="border-right:solid 2px ' + border_head_color + '; border-bottom:solid 2px ' +
border_head_color + '"><font color="' + text_color + '" size=' + text_fontsize + '>' + '<nobr>H_BEG-H_END</nobr>' + '</font></td>']
# html_report_lines += ['<td align=center style="border-right:solid 2px '+border_head_color+'; border-bottom:solid 2px '+border_head_color+'"><font color="'+text_color+'" size='+text_fontsize+'>'+'MIS MATCH'+'</font></td>']
# html_report_lines += ['<td align=center style="border-right:solid 2px '+border_head_color+'; border-bottom:solid 2px '+border_head_color+'"><font color="'+text_color+'" size='+text_fontsize+'>'+'GAP OPEN'+'</font></td>']
html_report_lines += ['</tr>']
for msa_i, input_msa_name in enumerate(input_msa_names):
html_report_lines += ['<tr><td colspan=table_col_width>Hits to <b>' +
str(input_msa_name) + '</b></td></tr>']
if total_hit_cnts[msa_i] == 0 or html_report_chunks[msa_i] == None or html_report_chunks[msa_i] == '':
html_report_lines += ['<tr><td colspan=table_col_width><blockquote><i>no hits found</i></td></tr>']
else:
#html_report_lines.extend(html_report_chunks[msa_i])
html_report_lines += [html_report_chunks[msa_i]]
html_report_lines += ['<tr><td colspan=table_col_width>' + sp + '</td></tr>']
html_report_lines += ['</table>']
html_report_lines += ['</body>']
html_report_lines += ['</html>']
# write html to file
html_path = html_search_path
html_report_str = "\n".join(html_report_lines)
with open(html_path, 'w', 0) as html_handle:
html_handle.write(html_report_str)
#### Build Profile output report
##
self.log(console, "BUILDING PROFILE REPORT ") # DEBUG
#if len(invalid_msgs) == 0 and many_type_name == 'GenomeSet':
if len(invalid_msgs) == 0:
# calculate table
#
cats = input_msa_names
table_data = dict()
INSANE_VALUE = 10000000000000000
if params.get('low_val') and params['low_val'] != 'detect':
overall_low_val = float(params['low_val'])
else:
overall_low_val = INSANE_VALUE
overall_high_val = -INSANE_VALUE
cat_seen = dict()
# count raw
for genome_ref in genome_refs:
if genome_ref not in table_data:
table_data[genome_ref] = dict()
for cat in cats:
table_data[genome_ref][cat] = 0
if genome_ref not in hit_cnt_by_genome_and_model:
continue
for cat in cats:
if cat in hit_cnt_by_genome_and_model[genome_ref] and \
hit_cnt_by_genome_and_model[genome_ref][cat] != 0:
table_data[genome_ref][cat] = hit_cnt_by_genome_and_model[genome_ref][cat]
cat_seen[cat] = True
# determine high and low val
for genome_ref in genome_refs:
for cat in cats:
val = table_data[genome_ref][cat]
if val == 0:
continue
#self.log (console, "HIGH VAL SCAN CAT: '"+cat+"' VAL: '"+str(val)+"'") # DEBUG
if val > overall_high_val:
overall_high_val = val
if val < overall_low_val:
overall_low_val = val
if overall_high_val == -INSANE_VALUE:
error_msg = "unable to find any counts"
self.log(invalid_msgs, error_msg)
provenance = [{}]
if 'provenance' in ctx:
provenance = ctx['provenance']
# add additional info to provenance here, in this case the input data object reference
provenance[0]['input_ws_objects'] = []
provenance[0]['input_ws_objects'].append(input_msa_ref)
provenance[0]['input_ws_objects'].append(input_many_ref)
provenance[0]['service'] = 'kb_hmmer'
provenance[0]['method'] = search_tool_name + '_Search'
report += "FAILURE\n\n" + "\n".join(invalid_msgs) + "\n"
reportObj = {
'objects_created': [],
'text_message': report
}
reportName = 'hmmer_report_' + str(uuid.uuid4())
report_obj_info = ws.save_objects({
# 'id':info[6],
'workspace': params['workspace_name'],
'objects': [
{
'type': 'KBaseReport.Report',
'data': reportObj,
'name': reportName,
'meta': {},
'hidden': 1,
'provenance': provenance
}
]
})[0]
report_info = dict()
report_info['name'] = report_obj_info[1]
report_info['ref'] = str(report_obj_info[6]) + '/' + str(report_obj_info[0]) + '/' + str(report_obj_info[4])
self.log(console, "BUILDING RETURN OBJECT")
returnVal = {'report_name': report_info['name'],
'report_ref': report_info['ref']
}
return [returnVal]
# build html report
sp = ' '
text_color = "#606060"
text_color_2 = "#606060"
head_color_1 = "#eeeeee"
head_color_2 = "#eeeeee"
border_color = "#cccccc"
border_cat_color = "#ffccff"
#graph_color = "lightblue"
#graph_width = 100
#graph_char = "."
graph_char = sp
#color_list = ['0','1','2','3','4','5','6','7','8','9','a','b','c','d','e']
#color_list = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd']
color_list = [
"#333333",
"#222266",
"#222299",
"#2222bb",
"#2222dd",
"#2222ff",
"#4444ff",
"#6666ff",
"#8888ff",
"#aaaaff",
"#ccccff"]
max_color = len(color_list) - 1
cat_disp_trunc_len = 40
cell_width = '20'
cell_height = '18'
#corner_radius = str(int(0.2*int(cell_width)+0.5))
corner_radius = '5'
if len(genome_refs) > 20:
graph_gen_fontsize = "1"
# elif len(genome_refs) > 10:
# graph_gen_fontsize = "2"
else:
# graph_gen_fontsize = "3"
graph_gen_fontsize = "2"
if len(cats) > 20:
graph_cat_fontsize = "1"
# elif len(cats) > 5:
# graph_cat_fontsize = "2"
else:
# graph_cat_fontsize = "3"
graph_cat_fontsize = "2"
if int(graph_cat_fontsize) < int(graph_gen_fontsize):
cell_fontsize = graph_gen_fontsize = graph_cat_fontsize
else:
cell_fontsize = graph_cat_fontsize = graph_gen_fontsize
#graph_padding = "5"
graph_padding = "1"
graph_spacing = "3"
#border = "1"
border = "0"
#row_spacing = "-2"
num_rows = len(genome_refs)
show_groups = False
show_blanks = False
if 'show_blanks' in params and int(params['show_blanks']) == 1:
show_blanks = True
# build html buffer
html_report_lines = []
html_report_lines += ['<html>']
html_report_lines += ['<head>']
html_report_lines += ['<title>KBase HMMER Custom Model Profile</title>']
html_report_lines += ['<style>']
html_report_lines += [
".horz-text {\ndisplay: inline-block;\nfont-family: Tahoma, Geneva, sans-serif;\ntext-decoration: none;\n}"]
html_report_lines += [
".vertical-text {\ndisplay: inline-block;\nfont-family: Tahoma, Geneva, sans-serif;\ntext-decoration: none;\nwidth: 0.65em;\n}\n.vertical-text__inner {\ndisplay: inline-block;\nwhite-space: nowrap;\nline-height: 1.1;\ntransform: translate(0,100%) rotate(-90deg);\ntransform-origin: 0 0;\n}\n.vertical-text__inner:after {\ncontent: \"\";\ndisplay: block;\nmargin: 0.0em 0 100%;\n}"]
html_report_lines += [
".vertical-text_title {\ndisplay: inline-block;\nwidth: 1.0em;\n}\n.vertical-text__inner_title {\ndisplay: inline-block;\nwhite-space: nowrap;\nline-height: 1.0;\ntransform: translate(0,100%) rotate(-90deg);\ntransform-origin: 0 0;\n}\n.vertical-text__inner_title:after {\ncontent: \"\";\ndisplay: block;\nmargin: 0.0em 0 100%;\n}"]
# add colors as style for DIV
for color_i,color_val in enumerate(color_list):
html_report_lines += [".heatmap_cell-"+str(color_i)+" {\nwidth: "+str(cell_width)+"px;\nheight: "+str(cell_height)+"px;\nborder-radius: "+str(corner_radius)+"px;\nbackground-color: "+str(color_val)+";\btext-align: center;\n}"]
html_report_lines += ['</style>']
html_report_lines += ['</head>']
html_report_lines += ['<body bgcolor="white">']
html_report_lines += ['<font color="' + header_tab_color + '" size=' + header_tab_fontsize + '><b>TABULAR PROFILE</b></font> | <a href="' +
html_search_file + '"><font color="' + header_tab_color + '" size=' + header_tab_fontsize + '>SEARCH HITS</font></a>']
html_report_lines += ['<p>']
# genomes as rows
if 'vertical' in params and int(params['vertical']) == 1:
# table header
html_report_lines += ['<table cellpadding=' + graph_padding +
' cellspacing=' + graph_spacing + ' border=' + border + '>']
corner_rowspan = "1"
label = ''
html_report_lines += ['<tr>']
html_report_lines += ['<td valign=bottom align=right rowspan=' + corner_rowspan +
'><div class="vertical-text_title"><div class="vertical-text__inner_title"><font color="' + text_color + '">' + sp + label + '</font></div></div></td>']
# column headers
for cat_i, cat in enumerate(cats):
if not cat_seen.get(cat) and not show_blanks:
continue
cat_disp = cat
cell_title = input_msa_descs[cat_i]
if len(cat_disp) > cat_disp_trunc_len + 1:
cat_disp = cat_disp[0:cat_disp_trunc_len] + '*'
if cat_disp.lower().endswith('.msa'):
cat_disp = re.sub ("(?i).msa$", "", cat_disp)
html_report_lines += ['<td style="border-right:solid 2px ' + border_cat_color + '; border-bottom:solid 2px ' +
border_cat_color + '" bgcolor="' + head_color_2 + '"title="' + cell_title + '" valign=bottom align=center>']
html_report_lines += ['<div class="vertical-text"><div class="vertical-text__inner">']
html_report_lines += ['<font color="' + text_color_2 + '" size=' + graph_cat_fontsize + '><b>']
#for c_i,c in enumerate(cat_disp):
# if c_i < len(cat_disp)-1:
# html_report_lines += [c+'<br>']
# else:
# html_report_lines += [c]
html_report_lines += ['<nobr>'+sp]
html_report_lines += [cat_disp]
html_report_lines += ['</nobr></b></font>']
html_report_lines += ['</div></div>']
html_report_lines += ['</td>']
html_report_lines += ['</tr>']
# rest of rows
for genome_ref in genome_refs:
# set genome_disp_name
if many_type_name == 'AnnotatedMetagenomeAssembly':
genome_disp_name = ama_ref_to_obj_name[genome_ref]
else:
genome_obj_name = genome_ref_to_obj_name[genome_ref]
genome_sci_name = genome_ref_to_sci_name[genome_ref]
[ws_id, obj_id, genome_obj_version] = genome_ref.split('/')
genome_disp_name = ''
if 'obj_name' in params['genome_disp_name_config']:
genome_disp_name += genome_obj_name
if 'ver' in params['genome_disp_name_config']:
genome_disp_name += '.v'+str(genome_obj_version)
if 'sci_name' in params['genome_disp_name_config']:
genome_disp_name += ': '+genome_sci_name
# build html report table line
html_report_lines += ['<tr>']
html_report_lines += ['<td align=right><div class="horz-text"><font color="' + text_color + '" size=' +
graph_gen_fontsize + '><b><nobr>' + genome_disp_name + sp + '</nobr></b></font></div></td>']
for cat in cats:
if not cat_seen.get(cat) and not show_blanks:
continue
val = table_data[genome_ref][cat]
if not cat_seen.get(cat) or val == 0:
html_report_lines += ['<td bgcolor=white></td>']
continue
elif overall_high_val == overall_low_val:
cell_color_i = 0
else:
cell_color_i = max_color - \
int(round(max_color * (val - overall_low_val) / float(overall_high_val - overall_low_val)))
cell_val = str(table_data[genome_ref][cat]) # the key line
if 'heatmap' in params and params['heatmap'] == '1':
html_report_lines += ['<td title="'+cell_val+'" align=center valign=middle bgcolor=white><div class="heatmap_cell-'+str(cell_color_i)+'"></div></td>']
else:
html_report_lines += ['<td align=center valign=middle style="' + cell_width + 'px; border-right:solid 2px ' + border_color +
'; border-bottom:solid 2px ' + border_color + '"><font color="' + text_color + '" size=' + cell_fontsize + '>' + cell_val + '</font></td>']
html_report_lines += ['</tr>']
html_report_lines += ['</table>']
# genomes as columns
else:
raise ValueError("Do not yet support Genomes as columns")
# key table
html_report_lines += ['<p>']
html_report_lines += ['<table cellpadding=3 cellspacing=2 border=' + border + '>']
html_report_lines += ['<tr><td valign=middle align=left colspan=2 style="border-bottom:solid 4px ' +
border_color + '"><font color="' + text_color + '"><b>KEY</b></font></td></tr>']
for cat_i, cat in enumerate(cats):
cell_color = 'white'
if not cat_seen.get(cat) and not show_blanks:
cell_color = "#eeeeee"
desc = input_msa_descs[cat_i]
cat_disp = cat
if len(cat_disp) > cat_disp_trunc_len + 1:
cat_disp = cat_disp[0:cat_disp_trunc_len] + '*'
if cat_disp.lower().endswith('.msa'):
cat_disp = re.sub ("(?i).msa$", "", cat_disp)
html_report_lines += ['<tr>']
html_report_lines += ['<td valign=middle align=left bgcolor="' + cell_color + '" style="border-right:solid 4px ' +
border_color + '"><div class="horz-text"><font color="' + text_color + '" size=' + graph_cat_fontsize + '>' + cat_disp + '</font></div></td>']
html_report_lines += ['<td valign=middle align=left bgcolor="' + cell_color +
'"><div class="horz-text"><font color="' + text_color + '" size=' + graph_cat_fontsize + '>' + desc + '</font></div></td>']
html_report_lines += ['</tr>']
html_report_lines += ['</table>']
# close
html_report_lines += ['</body>']
html_report_lines += ['</html>']
# write html to file and upload
html_path = html_profile_path
html_report_str = "\n".join(html_report_lines)
with open(html_path, 'w', 0) as html_handle:
html_handle.write(html_report_str)
#### Upload HTML reports
##
self.log(console, "UPLOADING HTML REPORT(s)") # DEBUG
if len(invalid_msgs) == 0:
# Upload HTML Report dir
#
dfu = DFUClient(self.callbackURL)
# upload output html
try:
#HTML_upload_ret = dfu.file_to_shock({'file_path': html_path,
HTML_upload_ret = dfu.file_to_shock({'file_path': html_output_dir,
'make_handle': 0,
'pack': 'zip'})
except:
raise ValueError('Logging exception loading HTML file to shock')
#### Upload output files
##
self.log(console, "UPLOADING OUTPUT FILES") # DEBUG
if len(invalid_msgs) == 0:
output_hit_TAB_dir = os.path.join(self.output_dir, 'HMMER_output_TAB')
output_hit_MSA_dir = os.path.join(self.output_dir, 'HMMER_output_MSA')
if not os.path.exists(output_hit_TAB_dir):
os.makedirs(output_hit_TAB_dir)
if not os.path.exists(output_hit_MSA_dir):
os.makedirs(output_hit_MSA_dir)
for msa_i, input_msa_name in enumerate(input_msa_names):
if total_hit_cnts[msa_i] == 0:
self.log(console, 'SKIPPING UPLOAD OF EMPTY HMMER OUTPUT FOR MSA ' + input_msa_name)
continue
else:
self.log(console, 'PREPPING UPLOAD OF HMMER OUTPUT FOR MSA ' + input_msa_name)
new_hit_TAB_file_path = os.path.join(output_hit_TAB_dir, input_msa_name + '.hitout.txt')
new_hit_MSA_file_path = os.path.join(output_hit_MSA_dir, input_msa_name + '.msaout.txt')
shutil.copy(output_hit_TAB_file_paths[msa_i], new_hit_TAB_file_path)
shutil.copy(output_hit_MSA_file_paths[msa_i], new_hit_MSA_file_path)
# Upload output dirs
TAB_upload_ret = None
MSA_upload_ret = None
self.log(console, 'UPLOADING OF HMMER OUTPUT FOR MSA ' + input_msa_name)
try:
TAB_upload_ret = dfu.file_to_shock({'file_path': output_hit_TAB_dir,
'make_handle': 0,
'pack': 'zip'})
except:
raise ValueError('Logging exception loading TAB output to shock')
try:
MSA_upload_ret = dfu.file_to_shock({'file_path': output_hit_MSA_dir,
'make_handle': 0,
'pack': 'zip'})
except:
raise ValueError('Logging exception loading MSA output to shock')
#### Create report object
##
self.log(console, "CREATING REPORT OBJECT") # DEBUG
if len(invalid_msgs) == 0:
reportName = 'hmmer_report_' + str(uuid.uuid4())
reportObj = {'objects_created': [],
#'text_message': '', # or is it 'message'?
'message': '', # or is it 'text_message'?
'direct_html': None,
'direct_html_link_index': None,
'file_links': [],
'html_links': [],
'workspace_name': params['workspace_name'],
'report_object_name': reportName
}
#html_buf_lim = 16000 # really 16KB, but whatever
#if len(html_report_str) <= html_buf_lim:
# reportObj['direct_html'] = html_report_str
#else:
reportObj['direct_html_link_index'] = 0
reportObj['html_links'] = [{'shock_id': HTML_upload_ret['shock_id'],
'name': html_profile_file,
'label': search_tool_name + ' HTML Report'}
#'description': search_tool_name + ' HTML Report'}
]
if TAB_upload_ret != None:
reportObj['file_links'] += [{'shock_id': TAB_upload_ret['shock_id'],
'name': search_tool_name + '_Search.TAB.zip',
'label': search_tool_name + '-' + ' hits TABLE'}]
if MSA_upload_ret != None:
reportObj['file_links'] += [{'shock_id': MSA_upload_ret['shock_id'],
'name': search_tool_name + '_Search.MSA.zip',
'label': search_tool_name + ' hits MSA'}
]
if hit_accept_something:
if 'coalesce_output' in params and int(params['coalesce_output']) == 1:
for object_created_ref in objects_created_refs:
reportObj['objects_created'].append(
{'ref': object_created_ref, 'description': 'Coalesced' + ' ' + search_tool_name + ' hits'})
else:
#for msa_i, input_msa_name in enumerate(input_msa_names): # DEBUG double check correct alignment of msa_i # FIXME
for msa_i,object_created_ref in enumerate(objects_created_refs):
if object_created_ref == None:
continue
input_msa_name = input_msa_names[msa_i]
if total_hit_cnts[msa_i] == 0:
continue
reportObj['objects_created'].append(
{'ref': objects_created_refs[msa_i], 'description': input_msa_name + ' ' + search_tool_name + ' hits'})
# save report object
#
SERVICE_VER = 'release'
reportClient = KBaseReport(self.callbackURL, token=ctx['token'], service_ver=SERVICE_VER)
#report_info = report.create({'report':reportObj, 'workspace_name':params['workspace_name']})
report_info = reportClient.create_extended_report(reportObj)
#### data validation error
##
if len(invalid_msgs) > 0:
report += "FAILURE\n\n" + "\n".join(invalid_msgs) + "\n"
reportObj = {
'objects_created': [],
'text_message': report
}
reportName = 'hmmer_report_' + str(uuid.uuid4())
report_obj_info = ws.save_objects({
# 'id':info[6],
'workspace': params['workspace_name'],
'objects': [
{
'type': 'KBaseReport.Report',
'data': reportObj,
'name': reportName,
'meta': {},
'hidden': 1,
'provenance': provenance
}
]
})[0]
report_info = dict()
report_info['name'] = report_obj_info[1]
report_info['ref'] = str(report_obj_info[6]) + '/' + str(report_obj_info[0]) + '/' + str(report_obj_info[4])
#### Return Report
##
self.log(console, "BUILDING RETURN OBJECT")
# returnVal = { 'output_report_name': reportName,
# 'output_report_ref': str(report_obj_info[6]) + '/' + str(report_obj_info[0]) + '/' + str(report_obj_info[4]),
# 'output_filtered_ref': params['workspace_name']+'/'+params['output_filtered_name']
# }
returnVal = {'report_name': report_info['name'],
'report_ref': report_info['ref']
}
self.log(console, search_tool_name + "_Search DONE")
#END HMMER_Local_MSA_Group_Search
# At some point might do deeper type checking...
if not isinstance(returnVal, dict):
raise ValueError('Method HMMER_Local_MSA_Group_Search return value ' +
'returnVal is not type dict as required.')
# return the results
return [returnVal]
def HMMER_dbCAN_Search(self, ctx, params):
    """HMMER search of dbCAN Markov Models of CAZy families.

    Thin dispatcher: tags *params* with the 'dbCAN' model group and
    hands the search off to HmmerUtil.run_HMMER_Model_Group_Search().
    The input object (``input_many_refs``) may be a FeatureSet, Genome,
    GenomeSet or AMA (SequenceSet handling is deactivated); the output
    is a FeatureSet.

    :param ctx: KBase call context object.
    :param params: HMMER_dbCAN_Params mapping — ``workspace_name``, the
        per-family ``input_dbCAN_*_ids`` model selections,
        ``input_many_refs``, ``output_filtered_name``, score thresholds
        (``e_value``, ``bitscore``, ``model_cov_perc``, ``maxaccepts``)
        and report/display flags.
    :returns: one-element list holding an HMMER_Output dict with
        ``report_name`` and ``report_ref``.
    :raises ValueError: if the delegate does not return a dict.
    """
    # ctx is the context object
    # return variables are: returnVal
    #BEGIN HMMER_dbCAN_Search
    print('--->\nRunning kb_hmmer.HMMER_dbCAN_Search\nparams:')
    print(json.dumps(params, indent=1))
    # All model-group apps share one runner; only the group tag differs.
    params['model_group'] = 'dbCAN'
    util = HmmerUtil(self.config, ctx)
    returnVal = util.run_HMMER_Model_Group_Search(params)
    #END HMMER_dbCAN_Search
    # At some point might do deeper type checking...
    if not isinstance(returnVal, dict):
        raise ValueError('Method HMMER_dbCAN_Search return value ' +
                         'returnVal is not type dict as required.')
    # return the results
    return [returnVal]
def HMMER_EnvBioelement_Search(self, ctx, params):
    """HMMER search of Markov Models of environmental bioelement families.

    Dispatches to HmmerUtil.run_HMMER_Model_Group_Search() with the
    'EnvBioelement' model group.  The input object
    (``input_many_refs``) may be a FeatureSet, Genome, GenomeSet or AMA
    (SequenceSet handling is deactivated); the output is a FeatureSet.

    :param ctx: KBase call context object.
    :param params: HMMER_EnvBioelement_Params mapping —
        ``workspace_name``, the per-element
        ``input_EnvBioelement_*_ids`` model selections,
        ``input_many_refs``, ``output_filtered_name``, score thresholds
        (``e_value``, ``bitscore``, ``model_cov_perc``, ``maxaccepts``)
        and report/display flags.
    :returns: one-element list holding an HMMER_Output dict with
        ``report_name`` and ``report_ref``.
    :raises ValueError: if the delegate does not return a dict.
    """
    # ctx is the context object
    # return variables are: returnVal
    #BEGIN HMMER_EnvBioelement_Search
    print('--->\nRunning kb_hmmer.HMMER_EnvBioelement_Search\nparams:')
    print(json.dumps(params, indent=1))
    helper = HmmerUtil(self.config, ctx)
    # Select the bioelement model group before delegating.
    params['model_group'] = 'EnvBioelement'
    returnVal = helper.run_HMMER_Model_Group_Search(params)
    #END HMMER_EnvBioelement_Search
    # At some point might do deeper type checking...
    if not isinstance(returnVal, dict):
        raise ValueError('Method HMMER_EnvBioelement_Search return value ' +
                         'returnVal is not type dict as required.')
    # return the results
    return [returnVal]
def HMMER_PhyloMarkers_Search(self, ctx, params):
    """HMMER search of Markov Models of phylogenetic marker families.

    Dispatches to HmmerUtil.run_HMMER_Model_Group_Search() with the
    'PhyloMarkers' model group.  The input object
    (``input_many_refs``) may be a FeatureSet, Genome, GenomeSet or AMA
    (SequenceSet handling is deactivated); the output is a FeatureSet.

    :param ctx: KBase call context object.
    :param params: HMMER_PhyloMarkers_Params mapping —
        ``workspace_name``, the ``input_PhyloMarkers_*_ids`` model
        selections (universal, bacterial and archaeal groups),
        ``input_many_refs``, ``output_filtered_name``, score thresholds
        (``e_value``, ``bitscore``, ``model_cov_perc``, ``maxaccepts``)
        and report/display flags.
    :returns: one-element list holding an HMMER_Output dict with
        ``report_name`` and ``report_ref``.
    :raises ValueError: if the delegate does not return a dict.
    """
    # ctx is the context object
    # return variables are: returnVal
    #BEGIN HMMER_PhyloMarkers_Search
    print('--->\nRunning kb_hmmer.HMMER_PhyloMarkers_Search\nparams:')
    print(json.dumps(params, indent=1))
    params['model_group'] = 'PhyloMarkers'
    runner = HmmerUtil(self.config, ctx)
    returnVal = runner.run_HMMER_Model_Group_Search(params)
    #END HMMER_PhyloMarkers_Search
    # At some point might do deeper type checking...
    if not isinstance(returnVal, dict):
        raise ValueError('Method HMMER_PhyloMarkers_Search return value ' +
                         'returnVal is not type dict as required.')
    # return the results
    return [returnVal]
def status(self, ctx):
    """Standard KBase status check.

    :param ctx: KBase call context object (unused).
    :returns: one-element list with the service state plus version and
        git provenance taken from the server's class-level constants.
    """
    #BEGIN_STATUS
    returnVal = dict(state="OK",
                     message="",
                     version=self.VERSION,
                     git_url=self.GIT_URL,
                     git_commit_hash=self.GIT_COMMIT_HASH)
    #END_STATUS
    return [returnVal]
| 53.721881 | 397 | 0.542175 | 21,834 | 201,081 | 4.668682 | 0.041587 | 0.018051 | 0.01964 | 0.013734 | 0.887292 | 0.863561 | 0.846894 | 0.835004 | 0.827323 | 0.809292 | 0 | 0.006924 | 0.351411 | 201,081 | 3,742 | 398 | 53.736237 | 0.77468 | 0.211785 | 0 | 0.793273 | 0 | 0.002461 | 0.156633 | 0.026641 | 0.008614 | 0 | 0 | 0.000267 | 0 | 1 | 0.004102 | false | 0.001231 | 0.008614 | 0 | 0.02379 | 0.004512 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
a2a3d3aecb600fd100fee8a5706db119ceca6262 | 41,915 | py | Python | views/AntdLayout.py | RuixiangS/feffery-antd-docs | c48d34ed657ec8d6893440c0ee6382598c564922 | [
"MIT"
] | 10 | 2021-05-20T06:52:42.000Z | 2022-03-29T08:36:58.000Z | views/AntdLayout.py | RuixiangS/feffery-antd-docs | c48d34ed657ec8d6893440c0ee6382598c564922 | [
"MIT"
] | null | null | null | views/AntdLayout.py | RuixiangS/feffery-antd-docs | c48d34ed657ec8d6893440c0ee6382598c564922 | [
"MIT"
] | 2 | 2021-09-14T07:07:00.000Z | 2021-12-10T01:03:25.000Z | import dash
from dash import html
from dash import dcc
import feffery_antd_components as fac
import feffery_utils_components as fuc
from dash.dependencies import Input, Output, State
from server import app
docs_content = html.Div(
[
html.Div(
[
html.H2(
'AntdLayout(children, id, className, style, **kwargs)',
style={
'borderLeft': '4px solid grey',
'padding': '3px 0 3px 10px',
'backgroundColor': '#f5f5f5'
}
),
fac.AntdBackTop(
containerId='docs-content',
duration=0.6
),
html.Span(
'主要参数说明',
id='主要参数说明',
style={
'borderLeft': '4px solid grey',
'padding': '3px 0 3px 10px',
'backgroundColor': '#f5f5f5',
'fontWeight': 'bold',
'fontSize': '1.2rem'
}
),
fuc.FefferyMarkdown(
markdownStr=open('documents/AntdLayout.md', encoding='utf-8').read()
),
html.Div(
html.Span(
'使用示例',
id='使用示例',
style={
'borderLeft': '4px solid grey',
'padding': '3px 0 3px 10px',
'backgroundColor': '#f5f5f5',
'fontWeight': 'bold',
'fontSize': '1.2rem'
}
),
style={
'marginBottom': '10px'
}
),
html.Div(
[
html.Div(
[
fac.AntdLayout(
[
fac.AntdHeader(
fac.AntdTitle(
'页首示例',
level=2,
style={
'color': 'white',
'margin': '0'
}
),
style={
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
fac.AntdLayout(
[
fac.AntdSider(
html.Div(
fac.AntdTitle(
'侧边栏示例',
level=2,
style={
'margin': '0'
}
),
style={
'alignItems': 'center',
'display': 'flex',
'height': '100%'
}
),
style={
'backgroundColor': 'rgb(240, 242, 245)',
'display': 'flex',
'justifyContent': 'center'
}
),
fac.AntdLayout(
[
fac.AntdContent(
html.Div(
fac.AntdTitle(
'内容区示例',
level=2,
style={
'margin': '0'
}
),
style={
'display': 'flex',
'height': '100%',
'justifyContent': 'center',
'alignItems': 'center'
}
),
style={
'backgroundColor': 'white'
}
),
fac.AntdFooter(
html.Div(
fac.AntdTitle(
'页尾示例',
level=2,
style={
'margin': '0'
}
),
style={
'display': 'flex',
'height': '100%',
'justifyContent': 'center',
'alignItems': 'center'
}
),
style={
'backgroundColor': 'rgb(193, 193, 193)',
'height': '40px'
}
)
]
)
],
style={
'height': '536px'
}
)
]
)
],
style={
'height': '600px',
'border': '1px solid rgb(241, 241, 241)'
}
),
fac.AntdDivider(
'经典布局',
lineColor='#f0f0f0',
innerTextOrientation='left'
),
fac.AntdCollapse(
fuc.FefferySyntaxHighlighter(
showLineNumbers=True,
showInlineLineNumbers=True,
language='python',
codeStyle='coy-without-shadows',
codeString='''
html.Div(
[
fac.AntdLayout(
[
fac.AntdHeader(
fac.AntdTitle(
'页首示例',
level=2,
style={
'color': 'white',
'margin': '0'
}
),
style={
'display': 'flex',
'justifyContent': 'center',
'alignItems': 'center'
}
),
fac.AntdLayout(
[
fac.AntdSider(
html.Div(
fac.AntdTitle(
'侧边栏示例',
level=2,
style={
'margin': '0'
}
),
style={
'alignItems': 'center',
'display': 'flex',
'height': '100%'
}
),
style={
'backgroundColor': 'rgb(240, 242, 245)',
'display': 'flex',
'justifyContent': 'center'
}
),
fac.AntdLayout(
[
fac.AntdContent(
html.Div(
fac.AntdTitle(
'内容区示例',
level=2,
style={
'margin': '0'
}
),
style={
'display': 'flex',
'height': '100%',
'justifyContent': 'center',
'alignItems': 'center'
}
),
style={
'backgroundColor': 'white'
}
),
fac.AntdFooter(
html.Div(
fac.AntdTitle(
'页尾示例',
level=2,
style={
'margin': '0'
}
),
style={
'display': 'flex',
'height': '100%',
'justifyContent': 'center',
'alignItems': 'center'
}
),
style={
'backgroundColor': 'rgb(193, 193, 193)',
'height': '40px'
}
)
]
)
],
style={
'height': '536px'
}
)
]
)
],
style={
'height': '600px',
'border': '1px solid rgb(241, 241, 241)'
}
)'''
),
title='点击查看代码',
is_open=False,
ghost=True
)
],
style={
'marginBottom': '40px',
'padding': '10px 10px 20px 10px',
'border': '1px solid #f0f0f0'
},
id='经典布局',
className='div-highlight'
),
html.Div(
[
html.Div(
[
fac.AntdLayout(
[
fac.AntdContent(
html.Div(
fac.AntdTitle(
'内容区示例',
level=2,
style={
'margin': '0'
}
),
style={
'display': 'flex',
'height': '100%',
'justifyContent': 'center',
'alignItems': 'center'
}
),
style={
'backgroundColor': 'white'
}
),
fac.AntdSider(
html.Div(
fac.AntdTitle(
'右侧侧边栏',
level=2,
style={
'margin': '0'
}
),
style={
'display': 'flex',
'height': '100%',
'justifyContent': 'center',
'alignItems': 'center'
}
),
style={
'backgroundColor': 'rgb(240, 242, 245)'
}
)
],
style={
'height': '600px'
}
)
],
style={
'height': '600px',
'border': '1px solid rgb(241, 241, 241)'
}
),
fac.AntdDivider(
'不同的侧边栏位置',
lineColor='#f0f0f0',
innerTextOrientation='left'
),
fac.AntdParagraph(
[
fac.AntdText(' 若需要侧边栏位于右侧,只需要将'),
fac.AntdText('AntdSider', strong=True),
fac.AntdText('在'),
fac.AntdText('AntdLayout', strong=True),
fac.AntdText('中的位置从列表第一调整至列表最后即可'),
]
),
fac.AntdCollapse(
fuc.FefferySyntaxHighlighter(
showLineNumbers=True,
showInlineLineNumbers=True,
language='python',
codeStyle='coy-without-shadows',
codeString='''
html.Div(
[
fac.AntdLayout(
[
fac.AntdContent(
html.Div(
fac.AntdTitle(
'内容区示例',
level=2,
style={
'margin': '0'
}
),
style={
'display': 'flex',
'height': '100%',
'justifyContent': 'center',
'alignItems': 'center'
}
),
style={
'backgroundColor': 'white'
}
),
fac.AntdSider(
html.Div(
fac.AntdTitle(
'右侧侧边栏',
level=2,
style={
'margin': '0'
}
),
style={
'display': 'flex',
'height': '100%',
'justifyContent': 'center',
'alignItems': 'center'
}
),
style={
'backgroundColor': 'rgb(240, 242, 245)'
}
)
],
style={
'height': '600px'
}
)
],
style={
'height': '600px',
'border': '1px solid rgb(241, 241, 241)'
}
)'''
),
title='点击查看代码',
is_open=False,
ghost=True
)
],
style={
'marginBottom': '40px',
'padding': '10px 10px 20px 10px',
'border': '1px solid #f0f0f0'
},
id='不同的侧边栏位置',
className='div-highlight'
),
html.Div(
[
html.Div(
[
fac.AntdLayout(
[
fac.AntdSider(
collapsible=True,
style={
'backgroundColor': 'rgb(240, 242, 245)'
}
),
fac.AntdContent(
html.Div(
fac.AntdTitle(
'内容区示例',
level=2,
style={
'margin': '0'
}
),
style={
'display': 'flex',
'height': '100%',
'justifyContent': 'center',
'alignItems': 'center'
}
),
style={
'backgroundColor': 'white'
}
)
],
style={
'height': '600px'
}
)
],
id='sider-demo',
style={
'height': '600px',
'border': '1px solid rgb(241, 241, 241)'
}
),
fac.AntdDivider(
'可折叠的侧边栏',
lineColor='#f0f0f0',
innerTextOrientation='left'
),
fac.AntdCollapse(
fuc.FefferySyntaxHighlighter(
showLineNumbers=True,
showInlineLineNumbers=True,
language='python',
codeStyle='coy-without-shadows',
codeString='''
html.Div(
[
fac.AntdLayout(
[
fac.AntdSider(
collapsible=True,
style={
'backgroundColor': 'rgb(240, 242, 245)'
}
),
fac.AntdContent(
html.Div(
fac.AntdTitle(
'内容区示例',
level=2,
style={
'margin': '0'
}
),
style={
'display': 'flex',
'height': '100%',
'justifyContent': 'center',
'alignItems': 'center'
}
),
style={
'backgroundColor': 'white'
}
)
],
style={
'height': '600px'
}
)
],
id='sider-demo',
style={
'height': '600px',
'border': '1px solid rgb(241, 241, 241)'
}
)'''
),
title='点击查看代码',
is_open=False,
ghost=True
)
],
style={
'marginBottom': '40px',
'padding': '10px 10px 20px 10px',
'border': '1px solid #f0f0f0'
},
id='可折叠的侧边栏',
className='div-highlight'
),
html.Div(
[
html.Div(
[
fac.AntdLayout(
[
fac.AntdSider(
[
html.Div(
fac.AntdInput(placeholder='输入搜索内容', mode='search'),
style={
'padding': '5px'
}
),
html.Div(
[
fac.AntdMenu(
menuItems=[
{
'component': 'Item',
'props': {
'key': f'图标{icon}',
'title': f'图标{icon}',
'icon': icon
}
}
for icon in [
'home',
'upload',
'bar-chart',
'pie-chart',
'dot-chart',
'line-chart',
'apartment',
'app-store',
'app-store-add',
'bell',
'calculator',
'calendar',
'database',
'history'
]
],
mode='inline'
)
],
style={
'height': '100%',
'overflowY': 'auto'
}
)
],
collapsible=True,
style={
'backgroundColor': 'rgb(240, 242, 245)'
}
),
fac.AntdContent(
html.Div(
fac.AntdTitle(
'内容区示例',
level=2,
style={
'margin': '0'
}
),
style={
'display': 'flex',
'height': '100%',
'justifyContent': 'center',
'alignItems': 'center'
}
),
style={
'backgroundColor': 'white'
}
)
],
style={
'height': '600px'
}
)
],
id='sider-demo',
style={
'height': '600px',
'border': '1px solid rgb(241, 241, 241)'
}
),
fac.AntdDivider(
'将其他组件整合入侧边栏',
lineColor='#f0f0f0',
innerTextOrientation='left'
),
fac.AntdParagraph(
[
fac.AntdText(' 侧边栏中放入的其他组件,在侧边栏折叠时会自适应压缩,典型如'),
fac.AntdText('AntdMenu', strong=True),
]
),
fac.AntdCollapse(
fuc.FefferySyntaxHighlighter(
showLineNumbers=True,
showInlineLineNumbers=True,
language='python',
codeStyle='coy-without-shadows',
codeString='''
html.Div(
[
fac.AntdLayout(
[
fac.AntdSider(
[
html.Div(
fac.AntdInput(placeholder='输入搜索内容', mode='search'),
style={
'padding': '5px'
}
),
html.Div(
[
fac.AntdMenu(
menuItems=[
{
'component': 'Item',
'props': {
'key': f'图标{icon}',
'title': f'图标{icon}',
'icon': icon
}
}
for icon in [
'home',
'upload',
'bar-chart',
'pie-chart',
'dot-chart',
'line-chart',
'apartment',
'app-store',
'app-store-add',
'bell',
'calculator',
'calendar',
'database',
'history'
]
],
mode='inline'
)
],
style={
'height': '100%',
'overflowY': 'auto'
}
)
],
collapsible=True,
style={
'backgroundColor': 'rgb(240, 242, 245)'
}
),
fac.AntdContent(
html.Div(
fac.AntdTitle(
'内容区示例',
level=2,
style={
'margin': '0'
}
),
style={
'display': 'flex',
'height': '100%',
'justifyContent': 'center',
'alignItems': 'center'
}
),
style={
'backgroundColor': 'white'
}
)
],
style={
'height': '600px'
}
)
],
id='sider-demo',
style={
'height': '600px',
'border': '1px solid rgb(241, 241, 241)'
}
)'''
),
title='点击查看代码',
is_open=False,
ghost=True
)
],
style={
'marginBottom': '40px',
'padding': '10px 10px 20px 10px',
'border': '1px solid #f0f0f0'
},
id='将其他组件整合入侧边栏',
className='div-highlight'
),
html.Div(
[
fac.AntdSpin(
html.Div(
[
fac.AntdLayout(
[
fac.AntdSider(
id='sider-custom-trigger-demo',
collapsible=True,
trigger=None,
style={
'backgroundColor': 'rgb(240, 242, 245)'
}
),
fac.AntdContent(
fac.AntdButton(
'自定义折叠按钮',
id='sider-custom-trigger-button-demo',
type='primary'
),
style={
'backgroundColor': 'white'
}
)
],
style={
'height': '600px'
}
)
],
style={
'height': '600px',
'border': '1px solid rgb(241, 241, 241)'
}
),
text='回调中'
),
fac.AntdDivider(
'自定义侧边栏折叠触发器',
lineColor='#f0f0f0',
innerTextOrientation='left'
),
fac.AntdCollapse(
fuc.FefferySyntaxHighlighter(
showLineNumbers=True,
showInlineLineNumbers=True,
language='python',
codeStyle='coy-without-shadows',
codeString='''
fac.AntdSpin(
html.Div(
[
fac.AntdLayout(
[
fac.AntdSider(
id='sider-custom-trigger-demo',
collapsible=True,
trigger=None,
style={
'backgroundColor': 'rgb(240, 242, 245)'
}
),
fac.AntdContent(
fac.AntdButton(
'自定义折叠按钮',
id='sider-custom-trigger-button-demo',
type='primary'
),
style={
'backgroundColor': 'white'
}
)
],
style={
'height': '600px'
}
)
],
style={
'height': '600px',
'border': '1px solid rgb(241, 241, 241)'
}
),
text='回调中'
)
...
@app.callback(
Output('sider-custom-trigger-demo', 'collapsed'),
Input('sider-custom-trigger-button-demo', 'nClicks'),
State('sider-custom-trigger-demo', 'collapsed'),
prevent_initial_call=True
)
def sider_custom_trigger_demo(nClicks, collapsed):
if nClicks:
return not collapsed
return dash.no_update'''
),
title='点击查看代码',
is_open=False,
ghost=True
)
],
style={
'marginBottom': '40px',
'padding': '10px 10px 20px 10px',
'border': '1px solid #f0f0f0'
},
id='自定义侧边栏折叠触发器',
className='div-highlight'
),
html.Div(style={'height': '100px'})
],
style={
'flex': 'auto'
}
),
html.Div(
fac.AntdAnchor(
linkDict=[
{'title': '主要参数说明', 'href': '#主要参数说明'},
{
'title': '使用示例',
'href': '#使用示例',
'children': [
{'title': '经典布局', 'href': '#经典布局'},
{'title': '不同的侧边栏位置', 'href': '#不同的侧边栏位置'},
{'title': '可折叠的侧边栏', 'href': '#可折叠的侧边栏'},
{'title': '将其他组件整合入侧边栏', 'href': '#将其他组件整合入侧边栏'},
{'title': '自定义侧边栏折叠触发器', 'href': '#自定义侧边栏折叠触发器'},
]
},
],
containerId='docs-content',
targetOffset=200
),
style={
'flex': 'none',
'margin': '20px'
}
)
],
style={
'display': 'flex'
}
)
@app.callback(
    Output('sider-custom-trigger-demo', 'collapsed'),
    Input('sider-custom-trigger-button-demo', 'nClicks'),
    State('sider-custom-trigger-demo', 'collapsed'),
    prevent_initial_call=True
)
def sider_custom_trigger_demo(nClicks, collapsed):
    """Toggle the demo sider's collapsed state on each button click."""
    # Guard: no click recorded yet -> leave the prop untouched.
    if not nClicks:
        return dash.no_update
    return not collapsed
| 44.260824 | 104 | 0.184874 | 1,362 | 41,915 | 5.673275 | 0.146109 | 0.034425 | 0.038825 | 0.034425 | 0.870584 | 0.863336 | 0.863336 | 0.86243 | 0.845347 | 0.845347 | 0 | 0.045794 | 0.747847 | 41,915 | 946 | 105 | 44.307611 | 0.685306 | 0 | 0 | 0.735619 | 0 | 0 | 0.380818 | 0.01107 | 0 | 0 | 0 | 0 | 0 | 1 | 0.001106 | false | 0 | 0.007743 | 0 | 0.013274 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
a2d3c5dbc38be10d7307c5e155d4f45a23b1c2b2 | 89 | py | Python | usage.py | Dharma-Sagar/dict-utils | 162c9676b277dd7585f3855346fb4c72817f4b53 | [
"Apache-2.0"
] | null | null | null | usage.py | Dharma-Sagar/dict-utils | 162c9676b277dd7585f3855346fb4c72817f4b53 | [
"Apache-2.0"
] | null | null | null | usage.py | Dharma-Sagar/dict-utils | 162c9676b277dd7585f3855346fb4c72817f4b53 | [
"Apache-2.0"
] | null | null | null | from dict_utils import batch_conv_xlsx_to_dsl
batch_conv_xlsx_to_dsl('input', 'output')
| 22.25 | 45 | 0.842697 | 16 | 89 | 4.125 | 0.6875 | 0.272727 | 0.393939 | 0.454545 | 0.545455 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.078652 | 89 | 3 | 46 | 29.666667 | 0.804878 | 0 | 0 | 0 | 0 | 0 | 0.123596 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 0.5 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 7 |
0c14843e41782a2476b0cbe324a0d1c876043af5 | 35,898 | py | Python | resources/mgltools_x86_64Linux2_1.5.6/MGLToolsPckgs/Pmv/hostappInterface/cinema4d/test/lipid.py | J-E-J-S/aaRS-Pipeline | 43f59f28ab06e4b16328c3bc405cdddc6e69ac44 | [
"MIT"
] | 8 | 2021-12-14T21:30:01.000Z | 2022-02-14T11:30:03.000Z | resources/mgltools_x86_64Linux2_1.5.6/MGLToolsPckgs/Pmv/hostappInterface/cinema4d/test/lipid.py | J-E-J-S/aaRS-Pipeline | 43f59f28ab06e4b16328c3bc405cdddc6e69ac44 | [
"MIT"
] | null | null | null | resources/mgltools_x86_64Linux2_1.5.6/MGLToolsPckgs/Pmv/hostappInterface/cinema4d/test/lipid.py | J-E-J-S/aaRS-Pipeline | 43f59f28ab06e4b16328c3bc405cdddc6e69ac44 | [
"MIT"
] | null | null | null | #execfile('/Library/MGLTools/1.5.6/MGLToolsPckgs/mglutil/hostappli/lipid.py')
import numpy.oldnumeric as Numeric
from mglutil.hostappli import pdb_c4d as epmv
self=epmv.start(debug=1)
self.browseCommands('superimposeCommandsNew', commands=None, log=0, package='Pmv')
self.readMolecule('/Library/MGLTools/1.5.6/MGLToolsPckgs/mglutil/hostappli/2abm.pdb', ask=0, parser=None, log=1)
self.readMolecule('/Library/MGLTools/1.5.6/MGLToolsPckgs/mglutil/hostappli/2ABM0.pdb', ask=0, parser=None, log=1)
atoms1=self.select("2ABM0: :GLY621:CA/+/2ABM0: :ASP381:CA/+/2ABM0: :PRO518:CA/+/2ABM0: :GLY714:CA/+/2ABM0: :ILE903:CA/+/2ABM0: :GLY425:CA/+/2ABM0: :LEU853:CA/+/2ABM0: :ALA87:CA/+/2ABM0: :LEU453:CA/+/2ABM0: :ARG224:CA/+/2ABM0: :GLY59:CA/+/2ABM0: :VAL829:CA/+/2ABM0: :ASP608:CA/+/2ABM0: :LEU226:CA/+/2ABM0: :SER858:CA/+/2ABM0: :VAL266:CA/+/2ABM0: :ALA188:CA/+/2ABM0: :GLY809:CA/+/2ABM0: :PHE10:CA/+/2ABM0: :THR153:CA/+/2ABM0: :ILE217:CA/+/2ABM0: :HIS515:CA/+/2ABM0: :TYR538:CA/+/2ABM0: :ALA708:CA/+/2ABM0: :PRO531:CA/+/2ABM0: :GLY699:CA/+/2ABM0: :LYS155:CA/+/2ABM0: :HIS288:CA/+/2ABM0: :THR500:CA/+/2ABM0: :ILE393:CA/+/2ABM0: :HIS510:CA/+/2ABM0: :TRP663:CA/+/2ABM0: :ILE632:CA/+/2ABM0: :ALA97:CA/+/2ABM0: :THR748:CA/+/2ABM0: :MET50:CA/+/2ABM0: :ALA323:CA/+/2ABM0: :LEU147:CA/+/2ABM0: :LEU366:CA/+/2ABM0: :SER184:CA/+/2ABM0: :PRO30:CA/+/2ABM0: :GLU235:CA/+/2ABM0: :ALA838:CA/+/2ABM0: :LYS79:CA/+/2ABM0: :LEU325:CA/+/2ABM0: :VAL211:CA/+/2ABM0: :GLY605:CA/+/2ABM0: :GLY475:CA/+/2ABM0: :TYR450:CA/+/2ABM0: :GLY318:CA/+/2ABM0: :GLY319:CA/+/2ABM0: :TYR554:CA/+/2ABM0: :PHE471:CA/+/2ABM0: :SER785:CA/+/2ABM0: :THR906:CA/+/2ABM0: :ILE213:CA/+/2ABM0: :SER814:CA/+/2ABM0: :VAL420:CA/+/2ABM0: :LEU205:CA/+/2ABM0: :THR191:CA/+/2ABM0: :ALA642:CA/+/2ABM0: :THR637:CA/+/2ABM0: :PHE698:CA/+/2ABM0: :ALA264:CA/+/2ABM0: :GLY354:CA/+/2ABM0: :PHE43:CA/+/2ABM0: :ILE894:CA/+/2ABM0: :LEU629:CA/+/2ABM0: :GLY576:CA/+/2ABM0: :GLY741:CA/+/2ABM0: :VAL478:CA/+/2ABM0: :GLY335:CA/+/2ABM0: :GLY301:CA/+/2ABM0: :GLY582:CA/+/2ABM0: :LYS382:CA/+/2ABM0: :GLY246:CA/+/2ABM0: :VAL535:CA/+/2ABM0: :VAL214:CA/+/2ABM0: :CYS247:CA/+/2ABM0: :CYS690:CA/+/2ABM0: :ARG529:CA/+/2ABM0: :PHE52:CA/+/2ABM0: :ILE449:CA/+/2ABM0: :ARG416:CA/+/2ABM0: :ALA784:CA/+/2ABM0: :PHE717:CA/+/2ABM0: :TRP654:CA/+/2ABM0: :GLY896:CA/+/2ABM0: :LEU908:CA/+/2ABM0: :ALA192:CA/+/2ABM0: :ALA201:CA/+/2ABM0: :ALA623:CA/+/2ABM0: :PRO77:CA/+/2ABM0: :LEU459:CA/+/2ABM0: :LYS533:CA/+/2ABM0: :LEU272:CA/+/2ABM0: :MET358:CA/+/2ABM0: :VAL866:CA/+/2ABM0: :PHE279:CA/+/2ABM0: 
:PRO64:CA/+/2ABM0: :LEU827:CA/+/2ABM0: :ALA494:CA/+/2ABM0: :SER249:CA/+/2ABM0: :HIS174:CA/+/2ABM0: :ALA392:CA/+/2ABM0: :ALA250:CA/+/2ABM0: :TYR575:CA/+/2ABM0: :GLY773:CA/+/2ABM0: :ASP154:CA/+/2ABM0: :THR239:CA/+/2ABM0: :GLY465:CA/+/2ABM0: :GLU365:CA/+/2ABM0: :GLY296:CA/+/2ABM0: :GLY33:CA/+/2ABM0: :GLY443:CA/+/2ABM0: :PHE888:CA/+/2ABM0: :PHE372:CA/+/2ABM0: :VAL441:CA/+/2ABM0: :MET731:CA/+/2ABM0: :VAL317:CA/+/2ABM0: :PHE464:CA/+/2ABM0: :PRO390:CA/+/2ABM0: :MET682:CA/+/2ABM0: :GLY581:CA/+/2ABM0: :TYR677:CA/+/2ABM0: :ASN636:CA/+/2ABM0: :ALA753:CA/+/2ABM0: :ASN867:CA/+/2ABM0: :CYS9:CA/+/2ABM0: :THR872:CA/+/2ABM0: :ARG230:CA/+/2ABM0: :ALA565:CA/+/2ABM0: :ALA875:CA/+/2ABM0: :VAL82:CA/+/2ABM0: :VAL90:CA/+/2ABM0: :GLU819:CA/+/2ABM0: :GLY755:CA/+/2ABM0: :GLY897:CA/+/2ABM0: :LEU849:CA/+/2ABM0: :SER865:CA/+/2ABM0: :GLY282:CA/+/2ABM0: :LEU479:CA/+/2ABM0: :GLY446:CA/+/2ABM0: :VAL438:CA/+/2ABM0: :PHE563:CA/+/2ABM0: :GLN431:CA/+/2ABM0: :ALA768:CA/+/2ABM0: :PHE270:CA/+/2ABM0: :LEU242:CA/+/2ABM0: :ALA792:CA/+/2ABM0: :ILE854:CA/+/2ABM0: :ALA843:CA/+/2ABM0: :PRO439:CA/+/2ABM0: :TRP887:CA/+/2ABM0: :GLY527:CA/+/2ABM0: :LEU589:CA/+/2ABM0: :SER579:CA/+/2ABM0: :ALA292:CA/+/2ABM0: :ALA833:CA/+/2ABM0: :LEU524:CA/+/2ABM0: :SER411:CA/+/2ABM0: :GLY347:CA/+/2ABM0: :GLY801:CA/+/2ABM0: :ALA734:CA/+/2ABM0: :ILE522:CA/+/2ABM0: :TRP427:CA/+/2ABM0: :VAL590:CA/+/2ABM0: :ILE857:CA/+/2ABM0: :LEU368:CA/+/2ABM0: :ILE845:CA/+/2ABM0: :ALA330:CA/+/2ABM0: :PHE161:CA/+/2ABM0: :GLY265:CA/+/2ABM0: :GLU577:CA/+/2ABM0: :HIS377:CA/+/2ABM0: :LEU780:CA/+/2ABM0: :GLU350:CA/+/2ABM0: :ALA23:CA/+/2ABM0: :PRO257:CA/+/2ABM0: :LEU373:CA/+/2ABM0: :LEU816:CA/+/2ABM0: :HIS604:CA/+/2ABM0: :LEU813:CA/+/2ABM0: :THR410:CA/+/2ABM0: :MET455:CA/+/2ABM0: :ALA550:CA/+/2ABM0: :VAL81:CA/+/2ABM0: :SER644:CA/+/2ABM0: :VAL891:CA/+/2ABM0: :GLY255:CA/+/2ABM0: :LEU696:CA/+/2ABM0: :LYS106:CA/+/2ABM0: :ALA619:CA/+/2ABM0: :ILE329:CA/+/2ABM0: :LYS306:CA/+/2ABM0: :ALA873:CA/+/2ABM0: :ALA7:CA/+/2ABM0: :LEU856:CA/+/2ABM0: :LEU98:CA/+/2ABM0: 
:VAL39:CA/+/2ABM0: :SER331:CA/+/2ABM0: :PRO745:CA/+/2ABM0: :LEU429:CA/+/2ABM0: :PHE435:CA/+/2ABM0: :GLY60:CA/+/2ABM0: :ALA117:CA/+/2ABM0: :GLN542:CA/+/2ABM0: :VAL54:CA/+/2ABM0: :PHE650:CA/+/2ABM0: :THR183:CA/+/2ABM0: :PHE237:CA/+/2ABM0: :GLY653:CA/+/2ABM0: :GLY528:CA/+/2ABM0: :TRP525:CA/+/2ABM0: :VAL735:CA/+/2ABM0: :LEU751:CA/+/2ABM0: :VAL539:CA/+/2ABM0: :VAL602:CA/+/2ABM0: :LEU146:CA/+/2ABM0: :TRP200:CA/+/2ABM0: :GLY287:CA/+/2ABM0: :LEU172:CA/+/2ABM0: :ILE899:CA/+/2ABM0: :GLY492:CA/+/2ABM0: :ALA344:CA/+/2ABM0: :ALA389:CA/+/2ABM0: :LEU681:CA/+/2ABM0: :PRO868:CA/+/2ABM0: :LEU622:CA/+/2ABM0: :LEU15:CA/+/2ABM0: :LEU374:CA/+/2ABM0: :THR107:CA/+/2ABM0: :GLY38:CA/+/2ABM0: :PHE467:CA/+/2ABM0: :TYR904:CA/+/2ABM0: :GLY151:CA/+/2ABM0: :GLY378:CA/+/2ABM0: :LYS560:CA/+/2ABM0: :GLY248:CA/+/2ABM0: :ILE774:CA/+/2ABM0: :GLY300:CA/+/2ABM0: :ILE715:CA/+/2ABM0: :GLY796:CA/+/2ABM0: :GLU761:CA/+/2ABM0: :ASN63:CA/+/2ABM0: :TRP14:CA/+/2ABM0: :THR730:CA/+/2ABM0: :SER130:CA/+/2ABM0: :ILE403:CA/+/2ABM0: :PHE724:CA/+/2ABM0: :LEU729:CA/+/2ABM0: :VAL24:CA/+/2ABM0: :VAL817:CA/+/2ABM0: :LEU499:CA/+/2ABM0: :ILE630:CA/+/2ABM0: :ALA551:CA/+/2ABM0: :PRO580:CA/+/2ABM0: :ALA169:CA/+/2ABM0: :PHE483:CA/+/2ABM0: :GLU203:CA/+/2ABM0: :VAL375:CA/+/2ABM0: :PHE757:CA/+/2ABM0: :MET131:CA/+/2ABM0: :TYR311:CA/+/2ABM0: :GLY670:CA/+/2ABM0: :VAL591:CA/+/2ABM0: :ALA777:CA/+/2ABM0: :PRO807:CA/+/2ABM0: :ILE34:CA/+/2ABM0: :ALA112:CA/+/2ABM0: :ALA162:CA/+/2ABM0: :ALA613:CA/+/2ABM0: :LEU486:CA/+/2ABM0: :LEU656:CA/+/2ABM0: :GLY28:CA/+/2ABM0: :THR645:CA/+/2ABM0: :PHE506:CA/+/2ABM0: :LEU362:CA/+/2ABM0: :ILE166:CA/+/2ABM0: :ARG451:CA/+/2ABM0: :VAL193:CA/+/2ABM0: :ALA324:CA/+/2ABM0: :ILE511:CA/+/2ABM0: :HIS150:CA/+/2ABM0: :HIS805:CA/+/2ABM0: :LEU170:CA/+/2ABM0: :VAL775:CA/+/2ABM0: :ARG643:CA/+/2ABM0: :SER58:CA/+/2ABM0: :VAL818:CA/+/2ABM0: :GLY387:CA/+/2ABM0: :GLN885:CA/+/2ABM0: :PHE2:CA/+/2ABM0: :GLY44:CA/+/2ABM0: :ILE547:CA/+/2ABM0: :LEU101:CA/+/2ABM0: :VAL321:CA/+/2ABM0: :GLU307:CA/+/2ABM0: :PRO187:CA/+/2ABM0: 
:VAL308:CA/+/2ABM0: :PHE13:CA/+/2ABM0: :ILE405:CA/+/2ABM0: :LEU432:CA/+/2ABM0: :ILE178:CA/+/2ABM0: :THR273:CA/+/2ABM0: :ILE672:CA/+/2ABM0: :ALA370:CA/+/2ABM0: :GLY509:CA/+/2ABM0: :ILE93:CA/+/2ABM0: :ALA655:CA/+/2ABM0: :PHE229:CA/+/2ABM0: :GLY725:CA/+/2ABM0: :LEU626:CA/+/2ABM0: :HIS737:CA/+/2ABM0: :ASN800:CA/+/2ABM0: :VAL668:CA/+/2ABM0: :PHE710:CA/+/2ABM0: :ILE783:CA/+/2ABM0: :ALA566:CA/+/2ABM0: :TYR765:CA/+/2ABM0: :SER345:CA/+/2ABM0: :ALA113:CA/+/2ABM0: :GLN878:CA/+/2ABM0: :THR852:CA/+/2ABM0: :ILE488:CA/+/2ABM0: :GLU657:CA/+/2ABM0: :PHE208:CA/+/2ABM0: :GLY559:CA/+/2ABM0: :LEU680:CA/+/2ABM0: :VAL770:CA/+/2ABM0: :VAL771:CA/+/2ABM0: :THR380:CA/+/2ABM0: :GLY808:CA/+/2ABM0: :THR625:CA/+/2ABM0: :GLY537:CA/+/2ABM0: :ALA152:CA/+/2ABM0: :ILE620:CA/+/2ABM0: :PHE29:CA/+/2ABM0: :THR561:CA/+/2ABM0: :VAL520:CA/+/2ABM0: :LEU851:CA/+/2ABM0: :ALA723:CA/+/2ABM0: :GLY442:CA/+/2ABM0: :GLY18:CA/+/2ABM0: :SER341:CA/+/2ABM0: :ALA96:CA/+/2ABM0: :LEU600:CA/+/2ABM0: :GLY574:CA/+/2ABM0: :MET1:CA/+/2ABM0: :GLY498:CA/+/2ABM0: :PHE615:CA/+/2ABM0: :LYS836:CA/+/2ABM0: :ALA269:CA/+/2ABM0: :SER871:CA/+/2ABM0: :GLY21:CA/+/2ABM0: :GLY426:CA/+/2ABM0: :LEU552:CA/+/2ABM0: :TRP206:CA/+/2ABM0: :PRO163:CA/+/2ABM0: :PHE889:CA/+/2ABM0: :THR862:CA/+/2ABM0: :ARG870:CA/+/2ABM0: :VAL639:CA/+/2ABM0: :SER417:CA/+/2ABM0: :ALA53:CA/+/2ABM0: :LEU907:CA/+/2ABM0: :ARG678:CA/+/2ABM0: :ALA78:CA/+/2ABM0: :ALA606:CA/+/2ABM0: :GLY482:CA/+/2ABM0: :ALA505:CA/+/2ABM0: :PHE388:CA/+/2ABM0: :GLY473:CA/+/2ABM0: :PHE842:CA/+/2ABM0: :GLY789:CA/+/2ABM0: :ALA588:CA/+/2ABM0: :ILE86:CA/+/2ABM0: :SER638:CA/+/2ABM0: :GLY736:CA/+/2ABM0: :ASN744:CA/+/2ABM0: :ASP835:CA/+/2ABM0: :GLY120:CA/+/2ABM0: :ALA526:CA/+/2ABM0: :THR67:CA/+/2ABM0: :LEU268:CA/+/2ABM0: :ALA233:CA/+/2ABM0: :THR398:CA/+/2ABM0: :LEU328:CA/+/2ABM0: :ALA111:CA/+/2ABM0: :ALA267:CA/+/2ABM0: :ALA338:CA/+/2ABM0: :ILE556:CA/+/2ABM0: :TYR129:CA/+/2ABM0: :THR418:CA/+/2ABM0: :SER142:CA/+/2ABM0: :ARG457:CA/+/2ABM0: :SER190:CA/+/2ABM0: :VAL508:CA/+/2ABM0: :GLY83:CA/+/2ABM0: 
:PHE17:CA/+/2ABM0: :ALA704:CA/+/2ABM0: :PHE733:CA/+/2ABM0: :PHE516:CA/+/2ABM0: :PHE303:CA/+/2ABM0: :LEU232:CA/+/2ABM0: :GLU138:CA/+/2ABM0: :GLY332:CA/+/2ABM0: :LEU624:CA/+/2ABM0: :SER795:CA/+/2ABM0: :LEU495:CA/+/2ABM0: :SER357:CA/+/2ABM0: :GLY55:CA/+/2ABM0: :PRO304:CA/+/2ABM0: :LEU553:CA/+/2ABM0: :PHE599:CA/+/2ABM0: :ALA746:CA/+/2ABM0: :SER799:CA/+/2ABM0: :VAL312:CA/+/2ABM0: :THR834:CA/+/2ABM0: :GLY673:CA/+/2ABM0: :HIS351:CA/+/2ABM0: :GLY122:CA/+/2ABM0: :PRO711:CA/+/2ABM0: :LEU395:CA/+/2ABM0: :PRO641:CA/+/2ABM0: :GLU430:CA/+/2ABM0: :VAL367:CA/+/2ABM0: :LEU359:CA/+/2ABM0: :ALA159:CA/+/2ABM0: :TYR802:CA/+/2ABM0: :ARG684:CA/+/2ABM0: :GLU31:CA/+/2ABM0: :GLY310:CA/+/2ABM0: :PHE610:CA/+/2ABM0: :THR181:CA/+/2ABM0: :SER352:CA/+/2ABM0: :ALA415:CA/+/2ABM0: :CYS236:CA/+/2ABM0: :ALA6:CA/+/2ABM0: :GLY342:CA/+/2ABM0: :SER369:CA/+/2ABM0: :VAL148:CA/+/2ABM0: :ALA322:CA/+/2ABM0: :ILE618:CA/+/2ABM0: :LYS760:CA/+/2ABM0: :VAL47:CA/+/2ABM0: :ILE391:CA/+/2ABM0: :ALA305:CA/+/2ABM0: :LEU469:CA/+/2ABM0: :ILE102:CA/+/2ABM0: :GLN315:CA/+/2ABM0: :SER177:CA/+/2ABM0: :HIS628:CA/+/2ABM0: :PHE490:CA/+/2ABM0: :ALA340:CA/+/2ABM0: :VAL180:CA/+/2ABM0: :ILE627:CA/+/2ABM0: :ALA648:CA/+/2ABM0: :THR46:CA/+/2ABM0: :ALA386:CA/+/2ABM0: :GLY740:CA/+/2ABM0: :ALA314:CA/+/2ABM0: :ILE847:CA/+/2ABM0: :ASN409:CA/+/2ABM0: :MET228:CA/+/2ABM0: :PHE196:CA/+/2ABM0: :CYS463:CA/+/2ABM0: :ALA361:CA/+/2ABM0: :GLY92:CA/+/2ABM0: :ILE261:CA/+/2ABM0: :HIS283:CA/+/2ABM0: :ALA815:CA/+/2ABM0: :VAL634:CA/+/2ABM0: :ALA824:CA/+/2ABM0: :ALA157:CA/+/2ABM0: :LEU175:CA/+/2ABM0: :GLY569:CA/+/2ABM0: :ILE830:CA/+/2ABM0: :GLY215:CA/+/2ABM0: :GLY199:CA/+/2ABM0: :GLY198:CA/+/2ABM0: :ALA567:CA/+/2ABM0: :LYS787:CA/+/2ABM0: :VAL412:CA/+/2ABM0: :MET504:CA/+/2ABM0: :LEU70:CA/+/2ABM0: :ASN640:CA/+/2ABM0: :ALA460:CA/+/2ABM0: :LEU601:CA/+/2ABM0: :SER125:CA/+/2ABM0: :ALA27:CA/+/2ABM0: :LEU722:CA/+/2ABM0: :ALA428:CA/+/2ABM0: :HIS578:CA/+/2ABM0: :GLY74:CA/+/2ABM0: :THR521:CA/+/2ABM0: :LEU593:CA/+/2ABM0: :ILE222:CA/+/2ABM0: :SER512:CA/+/2ABM0: 
:LEU902:CA/+/2ABM0: :TRP433:CA/+/2ABM0: :TYR781:CA/+/2ABM0: :SER739:CA/+/2ABM0: :ASN573:CA/+/2ABM0: :ALA481:CA/+/2ABM0: :PRO617:CA/+/2ABM0: :ALA95:CA/+/2ABM0: :SER587:CA/+/2ABM0: :GLU462:CA/+/2ABM0: :ALA687:CA/+/2ABM0: :VAL16:CA/+/2ABM0: :GLY447:CA/+/2ABM0: :VAL89:CA/+/2ABM0: :GLY220:CA/+/2ABM0: :PHE240:CA/+/2ABM0: :ALA480:CA/+/2ABM0: :PHE877:CA/+/2ABM0: :GLY901:CA/+/2ABM0: :LEU275:CA/+/2ABM0: :GLY69:CA/+/2ABM0: :VAL243:CA/+/2ABM0: :PHE383:CA/+/2ABM0: :ALA532:CA/+/2ABM0: :VAL747:CA/+/2ABM0: :LEU168:CA/+/2ABM0: :SER568:CA/+/2ABM0: :ALA299:CA/+/2ABM0: :THR788:CA/+/2ABM0: :GLY900:CA/+/2ABM0: :ILE876:CA/+/2ABM0: :TRP752:CA/+/2ABM0: :TYR223:CA/+/2ABM0: :GLY286:CA/+/2ABM0: :THR408:CA/+/2ABM0: :SER823:CA/+/2ABM0: :LEU397:CA/+/2ABM0: :VAL274:CA/+/2ABM0: :GLU258:CA/+/2ABM0: :ALA421:CA/+/2ABM0: :ALA339:CA/+/2ABM0: :THR679:CA/+/2ABM0: :GLY825:CA/+/2ABM0: :LEU883:CA/+/2ABM0: :GLY144:CA/+/2ABM0: :VAL501:CA/+/2ABM0: :LEU706:CA/+/2ABM0: :ALA616:CA/+/2ABM0: :LEU555:CA/+/2ABM0: :PRO291:CA/+/2ABM0: :GLY614:CA/+/2ABM0: :VAL281:CA/+/2ABM0: :VAL861:CA/+/2ABM0: :VAL892:CA/+/2ABM0: :THR864:CA/+/2ABM0: :PRO633:CA/+/2ABM0: :GLY523:CA/+/2ABM0: :ALA794:CA/+/2ABM0: :PRO844:CA/+/2ABM0: :ILE649:CA/+/2ABM0: :GLY832:CA/+/2ABM0: :SER104:CA/+/2ABM0: :VAL470:CA/+/2ABM0: :GLY514:CA/+/2ABM0: :SER404:CA/+/2ABM0: :GLY772:CA/+/2ABM0: :ILE176:CA/+/2ABM0: :ALA278:CA/+/2ABM0: :ILE295:CA/+/2ABM0: :GLY719:CA/+/2ABM0: :VAL762:CA/+/2ABM0: :ILE859:CA/+/2ABM0: :ASN863:CA/+/2ABM0: :ILE195:CA/+/2ABM0: :TRP890:CA/+/2ABM0: :PHE76:CA/+/2ABM0: :PRO758:CA/+/2ABM0: :SER572:CA/+/2ABM0: :ILE738:CA/+/2ABM0: :SER584:CA/+/2ABM0: :GLY11:CA/+/2ABM0: :GLY803:CA/+/2ABM0: :GLU689:CA/+/2ABM0: :LEU326:CA/+/2ABM0: :PHE837:CA/+/2ABM0: :GLY489:CA/+/2ABM0: :ALA234:CA/+/2ABM0: :PRO414:CA/+/2ABM0: :ALA280:CA/+/2ABM0: :LEU297:CA/+/2ABM0: :THR294:CA/+/2ABM0: :ALA37:CA/+/2ABM0: :TYR356:CA/+/2ABM0: :ALA134:CA/+/2ABM0: :GLY394:CA/+/2ABM0: :LEU99:CA/+/2ABM0: :LEU202:CA/+/2ABM0: :VAL763:CA/+/2ABM0: :TYR583:CA/+/2ABM0: :VAL874:CA/+/2ABM0: 
:PHE797:CA/+/2ABM0: :PHE661:CA/+/2ABM0: :GLU592:CA/+/2ABM0: :ASN119:CA/+/2ABM0: :SER596:CA/+/2ABM0: :PHE36:CA/+/2ABM0: :ASN290:CA/+/2ABM0: :VAL720:CA/+/2ABM0: :ALA40:CA/+/2ABM0: :THR49:CA/+/2ABM0: :PHE434:CA/+/2ABM0: :GLY115:CA/+/2ABM0: :PHE423:CA/+/2ABM0: :ALA384:CA/+/2ABM0: :PHE145:CA/+/2ABM0: :LEU132:CA/+/2ABM0: :VAL821:CA/+/2ABM0: :THR607:CA/+/2ABM0: :GLU534:CA/+/2ABM0: :ALA42:CA/+/2ABM0: :MET277:CA/+/2ABM0: :GLY764:CA/+/2ABM0: :ALA707:CA/+/2ABM0: :GLU712:CA/+/2ABM0: :ILE767:CA/+/2ABM0: :ILE676:CA/+/2ABM0: :ARG189:CA/+/2ABM0: :GLY262:CA/+/2ABM0: :VAL407:CA/+/2ABM0: :CYS474:CA/+/2ABM0: :TYR84:CA/+/2ABM0: :THR503:CA/+/2ABM0: :PRO839:CA/+/2ABM0: :GLN769:CA/+/2ABM0: :LEU886:CA/+/2ABM0: :ALA840:CA/+/2ABM0: :GLY245:CA/+/2ABM0: :ILE284:CA/+/2ABM0: :ALA597:CA/+/2ABM0: :VAL210:CA/+/2ABM0: :ILE376:CA/+/2ABM0: :SER703:CA/+/2ABM0: :PHE691:CA/+/2ABM0: :PRO385:CA/+/2ABM0: :VAL316:CA/+/2ABM0: :GLY848:CA/+/2ABM0: :VAL536:CA/+/2ABM0: :GLY674:CA/+/2ABM0: :ALA491:CA/+/2ABM0: :ALA688:CA/+/2ABM0: :VAL85:CA/+/2ABM0: :GLN197:CA/+/2ABM0: :GLN658:CA/+/2ABM0: :TRP881:CA/+/2ABM0: :ALA165:CA/+/2ABM0: :GLY371:CA/+/2ABM0: :TRP71:CA/+/2ABM0: :ALA732:CA/+/2ABM0: :ALA718:CA/+/2ABM0: :GLY652:CA/+/2ABM0: :VAL728:CA/+/2ABM0: :SER631:CA/+/2ABM0: :VAL664:CA/+/2ABM0: :ILE218:CA/+/2ABM0: :LEU45:CA/+/2ABM0: :PHE683:CA/+/2ABM0: :LEU820:CA/+/2ABM0: :ARG302:CA/+/2ABM0: :PRO484:CA/+/2ABM0: :ILE671:CA/+/2ABM0: :PRO893:CA/+/2ABM0: :THR466:CA/+/2ABM0: :LEU659:CA/+/2ABM0: :PHE244:CA/+/2ABM0: :PHE289:CA/+/2ABM0: :ALA549:CA/+/2ABM0: :GLY702:CA/+/2ABM0: :LEU782:CA/+/2ABM0: :GLY355:CA/+/2ABM0: :HIS855:CA/+/2ABM0: :GLU804:CA/+/2ABM0: :GLU8:CA/+/2ABM0: :ILE320:CA/+/2ABM0: :ALA461:CA/+/2ABM0: :LEU586:CA/+/2ABM0: :ASN413:CA/+/2ABM0: :GLY219:CA/+/2ABM0: :VAL251:CA/+/2ABM0: :THR452:CA/+/2ABM0: :LEU141:CA/+/2ABM0: :LEU675:CA/+/2ABM0: :PHE743:CA/+/2ABM0: :ALA793:CA/+/2ABM0: :GLY598:CA/+/2ABM0: :ASN517:CA/+/2ABM0: :VAL647:CA/+/2ABM0: :ALA379:CA/+/2ABM0: :PHE790:CA/+/2ABM0: :VAL363:CA/+/2ABM0: :HIS401:CA/+/2ABM0: 
:SER133:CA/+/2ABM0: :PHE343:CA/+/2ABM0: :ALA496:CA/+/2ABM0: :GLY545:CA/+/2ABM0: :GLY841:CA/+/2ABM0: :ALA759:CA/+/2ABM0: :TYR327:CA/+/2ABM0: :LEU139:CA/+/2ABM0: :ILE444:CA/+/2ABM0: :LYS458:CA/+/2ABM0: :ALA72:CA/+/2ABM0: :ALA507:CA/+/2ABM0: :GLU80:CA/+/2ABM0: :ALA477:CA/+/2ABM0: :TRP660:CA/+/2ABM0: :PHE207:CA/+/2ABM0: :GLY108:CA/+/2ABM0: :GLY562:CA/+/2ABM0: :ILE313:CA/+/2ABM0: :GLY127:CA/+/2ABM0: :LEU41:CA/+/2ABM0: :ILE149:CA/+/2ABM0: :GLY546:CA/+/2ABM0: :VAL544:CA/+/2ABM0: :GLY487:CA/+/2ABM0: :ALA65:CA/+/2ABM0: :VAL766:CA/+/2ABM0: :LEU595:CA/+/2ABM0: :LYS685:CA/+/2ABM0: :SER558:CA/+/2ABM0: :THR727:CA/+/2ABM0: :GLY19:CA/+/2ABM0: :TRP241:CA/+/2ABM0: :PHE826:CA/+/2ABM0: :GLY128:CA/+/2ABM0: :THR334:CA/+/2ABM0: :TYR810:CA/+/2ABM0: :GLY750:CA/+/2ABM0: :GLU884:CA/+/2ABM0: :ALA26:CA/+/2ABM0: :PRO126:CA/+/2ABM0: :GLY35:CA/+/2ABM0: :VAL293:CA/+/2ABM0: :PHE694:CA/+/2ABM0: :ARG3:CA/+/2ABM0: :GLY513:CA/+/2ABM0: :TRP436:CA/+/2ABM0: :VAL543:CA/+/2ABM0: :PHE256:CA/+/2ABM0: :PRO612:CA/+/2ABM0: :PHE497:CA/+/2ABM0: :PRO158:CA/+/2ABM0: :TRP468:CA/+/2ABM0: :GLY160:CA/+/2ABM0: :GLY700:CA/+/2ABM0: :HIS61:CA/+/2ABM0: :ASP110:CA/+/2ABM0: :ASN346:CA/+/2ABM0: :GLN88:CA/+/2ABM0: :PHE116:CA/+/2ABM0: :ALA194:CA/+/2ABM0: :GLY716:CA/+/2ABM0: :SER118:CA/+/2ABM0: :THR693:CA/+/2ABM0: :VAL66:CA/+/2ABM0: :MET585:CA/+/2ABM0: :PHE662:CA/+/2ABM0: :ALA253:CA/+/2ABM0: :LEU454:CA/+/2ABM0: :GLY709:CA/+/2ABM0: :PHE109:CA/+/2ABM0: :TYR348:CA/+/2ABM0: :VAL548:CA/+/2ABM0: :GLY260:CA/+/2ABM0: :LEU259:CA/+/2ABM0: :LEU399:CA/+/2ABM0: :VAL697:CA/+/2ABM0: :ALA778:CA/+/2ABM0: :LYS609:CA/+/2ABM0: :ASP337:CA/+/2ABM0: :MET812:CA/+/2ABM0: :VAL309:CA/+/2ABM0: :LEU686:CA/+/2ABM0: :LEU32:CA/+/2ABM0: :LEU448:CA/+/2ABM0: :ILE540:CA/+/2ABM0: :ILE445:CA/+/2ABM0: :THR276:CA/+/2ABM0: :VAL136:CA/+/2ABM0: :LEU48:CA/+/2ABM0: :SER22:CA/+/2ABM0: :ALA254:CA/+/2ABM0: :TRP298:CA/+/2ABM0: :LEU135:CA/+/2ABM0: :GLN424:CA/+/2ABM0: :GLY669:CA/+/2ABM0: :ALA882:CA/+/2ABM0: :ALA646:CA/+/2ABM0: :ILE440:CA/+/2ABM0: :LYS231:CA/+/2ABM0: 
:ALA557:CA/+/2ABM0: :ALA571:CA/+/2ABM0: :ILE749:CA/+/2ABM0: :LEU713:CA/+/2ABM0: :ALA611:CA/+/2ABM0: :ALA103:CA/+/2ABM0: :GLY238:CA/+/2ABM0: :GLY472:CA/+/2ABM0: :ILE400:CA/+/2ABM0: :PHE530:CA/+/2ABM0: :VAL437:CA/+/2ABM0: :ILE173:CA/+/2ABM0: :GLN651:CA/+/2ABM0: :GLY692:CA/+/2ABM0: :PRO179:CA/+/2ABM0: :ASN182:CA/+/2ABM0: :ALA850:CA/+/2ABM0: :SER806:CA/+/2ABM0: :VAL705:CA/+/2ABM0: :GLY167:CA/+/2ABM0: :HIS56:CA/+/2ABM0: :LEU502:CA/+/2ABM0: :ALA721:CA/+/2ABM0: :ASP564:CA/+/2ABM0: :LEU402:CA/+/2ABM0: :TYR100:CA/+/2ABM0: :GLY349:CA/+/2ABM0: :SER360:CA/+/2ABM0: :LEU25:CA/+/2ABM0: :GLY880:CA/+/2ABM0: :ILE57:CA/+/2ABM0: :PRO353:CA/+/2ABM0: :LYS4:CA/+/2ABM0: :ARG905:CA/+/2ABM0: :PRO212:CA/+/2ABM0: :GLY879:CA/+/2ABM0: :LEU5:CA/+/2ABM0: :LEU828:CA/+/2ABM0: :PRO406:CA/+/2ABM0: :SER811:CA/+/2ABM0: :PHE156:CA/+/2ABM0: :VAL140:CA/+/2ABM0: :CYS701:CA/+/2ABM0: :VAL94:CA/+/2ABM0: :GLY73:CA/+/2ABM0: :PRO666:CA/+/2ABM0: :GLY754:CA/+/2ABM0: :HIS831:CA/+/2ABM0: :TYR121:CA/+/2ABM0: :LEU822:CA/+/2ABM0: :VAL185:CA/+/2ABM0: :GLY105:CA/+/2ABM0: :ALA846:CA/+/2ABM0: :ALA51:CA/+/2ABM0: :TRP209:CA/+/2ABM0: :GLN204:CA/+/2ABM0: :ALA419:CA/+/2ABM0: :SER476:CA/+/2ABM0: :TRP695:CA/+/2ABM0: :VAL137:CA/+/2ABM0: :HIS124:CA/+/2ABM0: :PHE570:CA/+/2ABM0: :VAL364:CA/+/2ABM0: :ARG756:CA/+/2ABM0: :ASP791:CA/+/2ABM0: :LEU227:CA/+/2ABM0: :GLU485:CA/+/2ABM0: :ILE422:CA/+/2ABM0: :PHE336:CA/+/2ABM0: :SER114:CA/+/2ABM0: :PRO860:CA/+/2ABM0: :VAL895:CA/+/2ABM0: :THR12:CA/+/2ABM0: :ILE667:CA/+/2ABM0: :VAL493:CA/+/2ABM0: :ALA541:CA/+/2ABM0: :LEU779:CA/+/2ABM0: :PHE263:CA/+/2ABM0: :VAL665:CA/+/2ABM0: :LYS333:CA/+/2ABM0: :THR225:CA/+/2ABM0: :ALA776:CA/+/2ABM0: :LEU221:CA/+/2ABM0: :PHE62:CA/+/2ABM0: :GLY216:CA/+/2ABM0: :ILE898:CA/+/2ABM0: :GLY91:CA/+/2ABM0: :ALA143:CA/+/2ABM0: :LEU726:CA/+/2ABM0: :ASN186:CA/+/2ABM0: :GLY271:CA/+/2ABM0: :GLU123:CA/+/2ABM0: :HIS742:CA/+/2ABM0: :VAL594:CA/+/2ABM0: :SER285:CA/+/2ABM0: :ALA396:CA/+/2ABM0: :CYS20:CA/+/2ABM0: :ALA519:CA/+/2ABM0: :ILE68:CA/+/2ABM0: :ALA798:CA/+/2ABM0: 
:THR171:CA/+/2ABM0: :ALA869:CA/+/2ABM0: :ILE603:CA/+/2ABM0: :GLY786:CA/+/2ABM0: :ILE164:CA/+/2ABM0: :LEU252:CA/+/2ABM0: :PHE456:CA/+/2ABM0: :THR635:CA/+/2ABM0: :ARG75:CA",negate=False, only=True, xor=False, log=0, intersect=False)
atoms2=self.select("2abm:G:GLY167:CA/+/2abm:F:ASP154:CA/+/2abm:G:PRO64:CA/+/2abm:H:GLY33:CA/+/2abm:H:ILE222:CA/+/2abm:F:GLY198:CA/+/2abm:H:LEU172:CA/+/2abm:E:ALA87:CA/+/2abm:F:LEU226:CA/+/2abm:E:ARG224:CA/+/2abm:E:GLY59:CA/+/2abm:H:VAL148:CA/+/2abm:G:ASP154:CA/+/2abm:E:LEU226:CA/+/2abm:H:SER177:CA/+/2abm:F:VAL39:CA/+/2abm:E:ALA188:CA/+/2abm:H:GLY128:CA/+/2abm:E:PHE10:CA/+/2abm:E:THR153:CA/+/2abm:E:ILE217:CA/+/2abm:G:HIS61:CA/+/2abm:G:TYR84:CA/+/2abm:H:ALA27:CA/+/2abm:G:PRO77:CA/+/2abm:H:GLY18:CA/+/2abm:E:LYS155:CA/+/2abm:F:HIS61:CA/+/2abm:G:THR46:CA/+/2abm:F:ILE166:CA/+/2abm:G:HIS56:CA/+/2abm:G:TRP209:CA/+/2abm:G:ILE178:CA/+/2abm:E:ALA97:CA/+/2abm:H:THR67:CA/+/2abm:E:MET50:CA/+/2abm:F:ALA96:CA/+/2abm:E:LEU147:CA/+/2abm:F:LEU139:CA/+/2abm:E:SER184:CA/+/2abm:E:PRO30:CA/+/2abm:F:GLU8:CA/+/2abm:H:ALA157:CA/+/2abm:E:LYS79:CA/+/2abm:F:LEU98:CA/+/2abm:E:VAL211:CA/+/2abm:G:GLY151:CA/+/2abm:G:GLY21:CA/+/2abm:F:TYR223:CA/+/2abm:F:GLY91:CA/+/2abm:F:GLY92:CA/+/2abm:G:TYR100:CA/+/2abm:G:PHE17:CA/+/2abm:H:SER104:CA/+/2abm:H:THR225:CA/+/2abm:E:ILE213:CA/+/2abm:H:SER133:CA/+/2abm:F:VAL193:CA/+/2abm:E:LEU205:CA/+/2abm:E:THR191:CA/+/2abm:G:ALA188:CA/+/2abm:G:THR183:CA/+/2abm:H:PHE17:CA/+/2abm:F:ALA37:CA/+/2abm:F:GLY127:CA/+/2abm:E:PHE43:CA/+/2abm:H:ILE213:CA/+/2abm:G:LEU175:CA/+/2abm:G:GLY122:CA/+/2abm:H:GLY60:CA/+/2abm:G:VAL24:CA/+/2abm:F:GLY108:CA/+/2abm:F:GLY74:CA/+/2abm:G:GLY128:CA/+/2abm:F:LYS155:CA/+/2abm:F:GLY19:CA/+/2abm:G:VAL81:CA/+/2abm:E:VAL214:CA/+/2abm:F:CYS20:CA/+/2abm:H:CYS9:CA/+/2abm:G:ARG75:CA/+/2abm:E:PHE52:CA/+/2abm:F:ILE222:CA/+/2abm:F:ARG189:CA/+/2abm:H:ALA103:CA/+/2abm:H:PHE36:CA/+/2abm:G:TRP200:CA/+/2abm:H:GLY215:CA/+/2abm:H:LEU227:CA/+/2abm:E:ALA192:CA/+/2abm:E:ALA201:CA/+/2abm:G:ALA169:CA/+/2abm:E:PRO77:CA/+/2abm:G:LEU5:CA/+/2abm:G:LYS79:CA/+/2abm:F:LEU45:CA/+/2abm:F:MET131:CA/+/2abm:H:VAL185:CA/+/2abm:F:PHE52:CA/+/2abm:E:PRO64:CA/+/2abm:H:LEU146:CA/+/2abm:G:ALA40:CA/+/2abm:F:SER22:CA/+/2abm:E:HIS174:CA/+/2abm:F:ALA165:CA/+/2abm:F:ALA23:CA/+/2abm:G:TYR121:CA/
+/2abm:H:GLY92:CA/+/2abm:E:ASP154:CA/+/2abm:F:THR12:CA/+/2abm:G:GLY11:CA/+/2abm:F:GLU138:CA/+/2abm:F:GLY69:CA/+/2abm:E:GLY33:CA/+/2abm:F:GLY216:CA/+/2abm:H:PHE207:CA/+/2abm:F:PHE145:CA/+/2abm:F:VAL214:CA/+/2abm:H:MET50:CA/+/2abm:F:VAL90:CA/+/2abm:G:PHE10:CA/+/2abm:F:PRO163:CA/+/2abm:H:MET1:CA/+/2abm:G:GLY127:CA/+/2abm:G:TYR223:CA/+/2abm:G:ASN182:CA/+/2abm:H:ALA72:CA/+/2abm:H:ASN186:CA/+/2abm:E:CYS9:CA/+/2abm:H:THR191:CA/+/2abm:F:ARG3:CA/+/2abm:G:ALA111:CA/+/2abm:H:ALA194:CA/+/2abm:E:VAL82:CA/+/2abm:E:VAL90:CA/+/2abm:H:GLU138:CA/+/2abm:H:GLY74:CA/+/2abm:H:GLY216:CA/+/2abm:H:LEU168:CA/+/2abm:H:SER184:CA/+/2abm:F:GLY55:CA/+/2abm:G:LEU25:CA/+/2abm:F:GLY219:CA/+/2abm:F:VAL211:CA/+/2abm:G:PHE109:CA/+/2abm:F:GLN204:CA/+/2abm:H:ALA87:CA/+/2abm:F:PHE43:CA/+/2abm:F:LEU15:CA/+/2abm:H:ALA111:CA/+/2abm:H:ILE173:CA/+/2abm:H:ALA162:CA/+/2abm:F:PRO212:CA/+/2abm:H:TRP206:CA/+/2abm:G:GLY73:CA/+/2abm:G:LEU135:CA/+/2abm:G:SER125:CA/+/2abm:F:ALA65:CA/+/2abm:H:ALA152:CA/+/2abm:G:LEU70:CA/+/2abm:F:SER184:CA/+/2abm:F:GLY120:CA/+/2abm:H:GLY120:CA/+/2abm:H:ALA53:CA/+/2abm:G:ILE68:CA/+/2abm:F:TRP200:CA/+/2abm:G:VAL136:CA/+/2abm:H:ILE176:CA/+/2abm:F:LEU141:CA/+/2abm:H:ILE164:CA/+/2abm:F:ALA103:CA/+/2abm:E:PHE161:CA/+/2abm:F:GLY38:CA/+/2abm:G:GLU123:CA/+/2abm:F:HIS150:CA/+/2abm:H:LEU99:CA/+/2abm:F:GLU123:CA/+/2abm:E:ALA23:CA/+/2abm:F:PRO30:CA/+/2abm:F:LEU146:CA/+/2abm:H:LEU135:CA/+/2abm:G:HIS150:CA/+/2abm:H:LEU132:CA/+/2abm:F:THR183:CA/+/2abm:G:MET1:CA/+/2abm:G:ALA96:CA/+/2abm:E:VAL81:CA/+/2abm:G:SER190:CA/+/2abm:H:VAL210:CA/+/2abm:F:GLY28:CA/+/2abm:H:LEU15:CA/+/2abm:E:LYS106:CA/+/2abm:G:ALA165:CA/+/2abm:F:ILE102:CA/+/2abm:F:LYS79:CA/+/2abm:H:ALA192:CA/+/2abm:E:ALA7:CA/+/2abm:H:LEU175:CA/+/2abm:E:LEU98:CA/+/2abm:E:VAL39:CA/+/2abm:F:SER104:CA/+/2abm:H:PRO64:CA/+/2abm:F:LEU202:CA/+/2abm:F:PHE208:CA/+/2abm:E:GLY60:CA/+/2abm:E:ALA117:CA/+/2abm:G:GLN88:CA/+/2abm:E:VAL54:CA/+/2abm:G:PHE196:CA/+/2abm:E:THR183:CA/+/2abm:F:PHE10:CA/+/2abm:G:GLY199:CA/+/2abm:G:GLY74:CA/+/2abm:G:TRP71:CA/+/2abm:H:VAL54:CA
/+/2abm:H:LEU70:CA/+/2abm:G:VAL85:CA/+/2abm:G:VAL148:CA/+/2abm:E:LEU146:CA/+/2abm:E:TRP200:CA/+/2abm:F:GLY60:CA/+/2abm:E:LEU172:CA/+/2abm:H:ILE218:CA/+/2abm:G:GLY38:CA/+/2abm:F:ALA117:CA/+/2abm:F:ALA162:CA/+/2abm:G:LEU227:CA/+/2abm:H:PRO187:CA/+/2abm:G:LEU168:CA/+/2abm:E:LEU15:CA/+/2abm:F:LEU147:CA/+/2abm:E:THR107:CA/+/2abm:E:GLY38:CA/+/2abm:G:PHE13:CA/+/2abm:H:TYR223:CA/+/2abm:E:GLY151:CA/+/2abm:F:GLY151:CA/+/2abm:G:LYS106:CA/+/2abm:F:GLY21:CA/+/2abm:H:ILE93:CA/+/2abm:F:GLY73:CA/+/2abm:H:ILE34:CA/+/2abm:H:GLY115:CA/+/2abm:H:GLU80:CA/+/2abm:E:ASN63:CA/+/2abm:E:TRP14:CA/+/2abm:H:THR49:CA/+/2abm:E:SER130:CA/+/2abm:F:ILE176:CA/+/2abm:H:PHE43:CA/+/2abm:H:LEU48:CA/+/2abm:E:VAL24:CA/+/2abm:H:VAL136:CA/+/2abm:G:LEU45:CA/+/2abm:G:ILE176:CA/+/2abm:G:ALA97:CA/+/2abm:G:PRO126:CA/+/2abm:E:ALA169:CA/+/2abm:G:PHE29:CA/+/2abm:E:GLU203:CA/+/2abm:F:VAL148:CA/+/2abm:H:PHE76:CA/+/2abm:E:MET131:CA/+/2abm:F:TYR84:CA/+/2abm:G:GLY216:CA/+/2abm:G:VAL137:CA/+/2abm:H:ALA96:CA/+/2abm:H:PRO126:CA/+/2abm:E:ILE34:CA/+/2abm:E:ALA112:CA/+/2abm:E:ALA162:CA/+/2abm:G:ALA159:CA/+/2abm:G:LEU32:CA/+/2abm:G:LEU202:CA/+/2abm:E:GLY28:CA/+/2abm:G:THR191:CA/+/2abm:G:PHE52:CA/+/2abm:F:LEU135:CA/+/2abm:E:ILE166:CA/+/2abm:F:ARG224:CA/+/2abm:E:VAL193:CA/+/2abm:F:ALA97:CA/+/2abm:G:ILE57:CA/+/2abm:E:HIS150:CA/+/2abm:H:HIS124:CA/+/2abm:E:LEU170:CA/+/2abm:H:VAL94:CA/+/2abm:G:ARG189:CA/+/2abm:E:SER58:CA/+/2abm:H:VAL137:CA/+/2abm:F:GLY160:CA/+/2abm:H:GLN204:CA/+/2abm:E:PHE2:CA/+/2abm:E:GLY44:CA/+/2abm:G:ILE93:CA/+/2abm:E:LEU101:CA/+/2abm:F:VAL94:CA/+/2abm:F:GLU80:CA/+/2abm:E:PRO187:CA/+/2abm:F:VAL81:CA/+/2abm:E:PHE13:CA/+/2abm:F:ILE178:CA/+/2abm:F:LEU205:CA/+/2abm:E:ILE178:CA/+/2abm:F:THR46:CA/+/2abm:G:ILE218:CA/+/2abm:F:ALA143:CA/+/2abm:G:GLY55:CA/+/2abm:E:ILE93:CA/+/2abm:G:ALA201:CA/+/2abm:F:PHE2:CA/+/2abm:H:GLY44:CA/+/2abm:G:LEU172:CA/+/2abm:H:HIS56:CA/+/2abm:H:ASN119:CA/+/2abm:G:VAL214:CA/+/2abm:H:PHE29:CA/+/2abm:H:ILE102:CA/+/2abm:G:ALA112:CA/+/2abm:H:TYR84:CA/+/2abm:F:SER118:CA/+/2abm:E:ALA113:CA/+/2abm:H:GLN197
:CA/+/2abm:H:THR171:CA/+/2abm:G:ILE34:CA/+/2abm:G:GLU203:CA/+/2abm:E:PHE208:CA/+/2abm:G:GLY105:CA/+/2abm:G:LEU226:CA/+/2abm:H:VAL89:CA/+/2abm:H:VAL90:CA/+/2abm:F:THR153:CA/+/2abm:H:GLY127:CA/+/2abm:G:THR171:CA/+/2abm:G:GLY83:CA/+/2abm:E:ALA152:CA/+/2abm:G:ILE166:CA/+/2abm:E:PHE29:CA/+/2abm:G:THR107:CA/+/2abm:G:VAL66:CA/+/2abm:H:LEU170:CA/+/2abm:H:ALA42:CA/+/2abm:F:GLY215:CA/+/2abm:E:GLY18:CA/+/2abm:F:SER114:CA/+/2abm:E:ALA96:CA/+/2abm:G:LEU146:CA/+/2abm:G:GLY120:CA/+/2abm:E:MET1:CA/+/2abm:G:GLY44:CA/+/2abm:G:PHE161:CA/+/2abm:H:LYS155:CA/+/2abm:F:ALA42:CA/+/2abm:H:SER190:CA/+/2abm:E:GLY21:CA/+/2abm:F:GLY199:CA/+/2abm:G:LEU98:CA/+/2abm:E:TRP206:CA/+/2abm:E:PRO163:CA/+/2abm:H:PHE208:CA/+/2abm:H:THR181:CA/+/2abm:H:ARG189:CA/+/2abm:G:VAL185:CA/+/2abm:F:SER190:CA/+/2abm:E:ALA53:CA/+/2abm:H:LEU226:CA/+/2abm:G:ARG224:CA/+/2abm:E:ALA78:CA/+/2abm:G:ALA152:CA/+/2abm:G:GLY28:CA/+/2abm:G:ALA51:CA/+/2abm:F:PHE161:CA/+/2abm:G:GLY19:CA/+/2abm:H:PHE161:CA/+/2abm:H:GLY108:CA/+/2abm:G:ALA134:CA/+/2abm:E:ILE86:CA/+/2abm:G:SER184:CA/+/2abm:H:GLY55:CA/+/2abm:H:ASN63:CA/+/2abm:H:ASP154:CA/+/2abm:E:GLY120:CA/+/2abm:G:ALA72:CA/+/2abm:E:THR67:CA/+/2abm:F:LEU41:CA/+/2abm:F:ALA6:CA/+/2abm:F:THR171:CA/+/2abm:F:LEU101:CA/+/2abm:E:ALA111:CA/+/2abm:F:ALA40:CA/+/2abm:F:ALA111:CA/+/2abm:G:ILE102:CA/+/2abm:E:TYR129:CA/+/2abm:F:THR191:CA/+/2abm:E:SER142:CA/+/2abm:G:ARG3:CA/+/2abm:E:SER190:CA/+/2abm:G:VAL54:CA/+/2abm:E:GLY83:CA/+/2abm:E:PHE17:CA/+/2abm:H:ALA23:CA/+/2abm:H:PHE52:CA/+/2abm:G:PHE62:CA/+/2abm:F:PHE76:CA/+/2abm:F:LEU5:CA/+/2abm:E:GLU138:CA/+/2abm:F:GLY105:CA/+/2abm:G:LEU170:CA/+/2abm:H:SER114:CA/+/2abm:G:LEU41:CA/+/2abm:F:SER130:CA/+/2abm:E:GLY55:CA/+/2abm:F:PRO77:CA/+/2abm:G:LEU99:CA/+/2abm:G:PHE145:CA/+/2abm:H:ALA65:CA/+/2abm:H:SER118:CA/+/2abm:F:VAL85:CA/+/2abm:H:THR153:CA/+/2abm:G:GLY219:CA/+/2abm:F:HIS124:CA/+/2abm:E:GLY122:CA/+/2abm:H:PRO30:CA/+/2abm:F:LEU168:CA/+/2abm:G:PRO187:CA/+/2abm:F:GLU203:CA/+/2abm:F:VAL140:CA/+/2abm:F:LEU132:CA/+/2abm:E:ALA159:CA/+/2abm:H:TYR121:CA/+/2abm:H:A
RG3:CA/+/2abm:E:GLU31:CA/+/2abm:F:GLY83:CA/+/2abm:G:PHE156:CA/+/2abm:E:THR181:CA/+/2abm:F:SER125:CA/+/2abm:F:ALA188:CA/+/2abm:F:CYS9:CA/+/2abm:E:ALA6:CA/+/2abm:F:GLY115:CA/+/2abm:F:SER142:CA/+/2abm:E:VAL148:CA/+/2abm:F:ALA95:CA/+/2abm:G:ILE164:CA/+/2abm:H:LYS79:CA/+/2abm:E:VAL47:CA/+/2abm:F:ILE164:CA/+/2abm:F:ALA78:CA/+/2abm:G:LEU15:CA/+/2abm:E:ILE102:CA/+/2abm:F:GLN88:CA/+/2abm:E:SER177:CA/+/2abm:G:HIS174:CA/+/2abm:G:PHE36:CA/+/2abm:F:ALA113:CA/+/2abm:E:VAL180:CA/+/2abm:G:ILE173:CA/+/2abm:G:ALA194:CA/+/2abm:E:THR46:CA/+/2abm:F:ALA159:CA/+/2abm:H:GLY59:CA/+/2abm:F:ALA87:CA/+/2abm:H:ILE166:CA/+/2abm:F:ASN182:CA/+/2abm:F:MET1:CA/+/2abm:E:PHE196:CA/+/2abm:G:CYS9:CA/+/2abm:F:ALA134:CA/+/2abm:E:GLY92:CA/+/2abm:F:ILE34:CA/+/2abm:F:HIS56:CA/+/2abm:H:ALA134:CA/+/2abm:G:VAL180:CA/+/2abm:H:ALA143:CA/+/2abm:E:ALA157:CA/+/2abm:E:LEU175:CA/+/2abm:G:GLY115:CA/+/2abm:H:ILE149:CA/+/2abm:E:GLY215:CA/+/2abm:E:GLY199:CA/+/2abm:E:GLY198:CA/+/2abm:G:ALA113:CA/+/2abm:H:LYS106:CA/+/2abm:F:VAL185:CA/+/2abm:G:MET50:CA/+/2abm:E:LEU70:CA/+/2abm:G:ASN186:CA/+/2abm:G:ALA6:CA/+/2abm:G:LEU147:CA/+/2abm:E:SER125:CA/+/2abm:E:ALA27:CA/+/2abm:H:LEU41:CA/+/2abm:F:ALA201:CA/+/2abm:G:HIS124:CA/+/2abm:E:GLY74:CA/+/2abm:G:THR67:CA/+/2abm:G:LEU139:CA/+/2abm:E:ILE222:CA/+/2abm:G:SER58:CA/+/2abm:H:LEU221:CA/+/2abm:F:TRP206:CA/+/2abm:H:TYR100:CA/+/2abm:H:SER58:CA/+/2abm:G:ASN119:CA/+/2abm:G:ALA27:CA/+/2abm:G:PRO163:CA/+/2abm:E:ALA95:CA/+/2abm:G:SER133:CA/+/2abm:G:GLU8:CA/+/2abm:H:ALA6:CA/+/2abm:E:VAL16:CA/+/2abm:F:GLY220:CA/+/2abm:E:VAL89:CA/+/2abm:E:GLY220:CA/+/2abm:F:PHE13:CA/+/2abm:G:ALA26:CA/+/2abm:H:PHE196:CA/+/2abm:H:GLY220:CA/+/2abm:F:LEU48:CA/+/2abm:E:GLY69:CA/+/2abm:F:VAL16:CA/+/2abm:F:PHE156:CA/+/2abm:G:ALA78:CA/+/2abm:H:VAL66:CA/+/2abm:E:LEU168:CA/+/2abm:G:SER114:CA/+/2abm:F:ALA72:CA/+/2abm:H:THR107:CA/+/2abm:H:GLY219:CA/+/2abm:H:ILE195:CA/+/2abm:H:TRP71:CA/+/2abm:E:TYR223:CA/+/2abm:F:GLY59:CA/+/2abm:F:THR181:CA/+/2abm:H:SER142:CA/+/2abm:F:LEU170:CA/+/2abm:F:VAL47:CA/+/2abm:F:GLU31:CA/+/2abm:F:ALA1
94:CA/+/2abm:F:ALA112:CA/+/2abm:G:THR225:CA/+/2abm:H:GLY144:CA/+/2abm:H:LEU202:CA/+/2abm:E:GLY144:CA/+/2abm:G:VAL47:CA/+/2abm:H:LEU25:CA/+/2abm:G:ALA162:CA/+/2abm:G:LEU101:CA/+/2abm:F:PRO64:CA/+/2abm:G:GLY160:CA/+/2abm:F:VAL54:CA/+/2abm:H:VAL180:CA/+/2abm:H:VAL211:CA/+/2abm:H:THR183:CA/+/2abm:G:PRO179:CA/+/2abm:G:GLY69:CA/+/2abm:H:ALA113:CA/+/2abm:H:PRO163:CA/+/2abm:G:ILE195:CA/+/2abm:H:GLY151:CA/+/2abm:E:SER104:CA/+/2abm:G:VAL16:CA/+/2abm:G:GLY60:CA/+/2abm:F:SER177:CA/+/2abm:H:GLY91:CA/+/2abm:E:ILE176:CA/+/2abm:F:ALA51:CA/+/2abm:F:ILE68:CA/+/2abm:H:GLY38:CA/+/2abm:H:VAL81:CA/+/2abm:H:ILE178:CA/+/2abm:H:ASN182:CA/+/2abm:E:ILE195:CA/+/2abm:H:TRP209:CA/+/2abm:E:PHE76:CA/+/2abm:H:PRO77:CA/+/2abm:G:SER118:CA/+/2abm:H:ILE57:CA/+/2abm:G:SER130:CA/+/2abm:E:GLY11:CA/+/2abm:H:GLY122:CA/+/2abm:H:GLU8:CA/+/2abm:F:LEU99:CA/+/2abm:H:PHE156:CA/+/2abm:G:GLY35:CA/+/2abm:F:ALA7:CA/+/2abm:F:PRO187:CA/+/2abm:F:ALA53:CA/+/2abm:F:LEU70:CA/+/2abm:F:THR67:CA/+/2abm:E:ALA37:CA/+/2abm:F:TYR129:CA/+/2abm:E:ALA134:CA/+/2abm:F:GLY167:CA/+/2abm:E:LEU99:CA/+/2abm:E:LEU202:CA/+/2abm:H:VAL82:CA/+/2abm:G:TYR129:CA/+/2abm:H:VAL193:CA/+/2abm:H:PHE116:CA/+/2abm:G:PHE207:CA/+/2abm:G:GLU138:CA/+/2abm:E:ASN119:CA/+/2abm:G:SER142:CA/+/2abm:E:PHE36:CA/+/2abm:F:ASN63:CA/+/2abm:H:VAL39:CA/+/2abm:E:ALA40:CA/+/2abm:E:THR49:CA/+/2abm:F:PHE207:CA/+/2abm:E:GLY115:CA/+/2abm:F:PHE196:CA/+/2abm:F:ALA157:CA/+/2abm:E:PHE145:CA/+/2abm:E:LEU132:CA/+/2abm:H:VAL140:CA/+/2abm:G:THR153:CA/+/2abm:G:GLU80:CA/+/2abm:E:ALA42:CA/+/2abm:F:MET50:CA/+/2abm:H:GLY83:CA/+/2abm:H:ALA26:CA/+/2abm:H:GLU31:CA/+/2abm:H:ILE86:CA/+/2abm:G:ILE222:CA/+/2abm:E:ARG189:CA/+/2abm:F:GLY35:CA/+/2abm:F:VAL180:CA/+/2abm:G:CYS20:CA/+/2abm:E:TYR84:CA/+/2abm:G:THR49:CA/+/2abm:H:PRO158:CA/+/2abm:H:GLN88:CA/+/2abm:H:LEU205:CA/+/2abm:H:ALA159:CA/+/2abm:F:GLY18:CA/+/2abm:F:ILE57:CA/+/2abm:G:ALA143:CA/+/2abm:E:VAL210:CA/+/2abm:F:ILE149:CA/+/2abm:H:SER22:CA/+/2abm:H:PHE10:CA/+/2abm:F:PRO158:CA/+/2abm:F:VAL89:CA/+/2abm:H:GLY167:CA/+/2abm:G:VAL82:CA/+/2abm:G:GLY
220:CA/+/2abm:G:ALA37:CA/+/2abm:H:ALA7:CA/+/2abm:E:VAL85:CA/+/2abm:E:GLN197:CA/+/2abm:G:GLN204:CA/+/2abm:H:TRP200:CA/+/2abm:E:ALA165:CA/+/2abm:F:GLY144:CA/+/2abm:E:TRP71:CA/+/2abm:H:ALA51:CA/+/2abm:H:ALA37:CA/+/2abm:G:GLY198:CA/+/2abm:H:VAL47:CA/+/2abm:G:SER177:CA/+/2abm:G:VAL210:CA/+/2abm:E:ILE218:CA/+/2abm:E:LEU45:CA/+/2abm:H:PHE2:CA/+/2abm:H:LEU139:CA/+/2abm:F:ARG75:CA/+/2abm:G:PRO30:CA/+/2abm:G:ILE217:CA/+/2abm:H:PRO212:CA/+/2abm:G:THR12:CA/+/2abm:G:LEU205:CA/+/2abm:F:PHE17:CA/+/2abm:F:PHE62:CA/+/2abm:G:ALA95:CA/+/2abm:H:GLY21:CA/+/2abm:H:LEU101:CA/+/2abm:F:GLY128:CA/+/2abm:H:HIS174:CA/+/2abm:H:GLU123:CA/+/2abm:E:GLU8:CA/+/2abm:F:ILE93:CA/+/2abm:G:ALA7:CA/+/2abm:G:LEU132:CA/+/2abm:F:ASN186:CA/+/2abm:E:GLY219:CA/+/2abm:F:VAL24:CA/+/2abm:F:THR225:CA/+/2abm:E:LEU141:CA/+/2abm:G:LEU221:CA/+/2abm:H:PHE62:CA/+/2abm:H:ALA112:CA/+/2abm:G:GLY144:CA/+/2abm:G:ASN63:CA/+/2abm:G:VAL193:CA/+/2abm:F:ALA152:CA/+/2abm:H:PHE109:CA/+/2abm:F:VAL136:CA/+/2abm:F:HIS174:CA/+/2abm:E:SER133:CA/+/2abm:F:PHE116:CA/+/2abm:G:ALA42:CA/+/2abm:G:GLY91:CA/+/2abm:H:GLY160:CA/+/2abm:H:ALA78:CA/+/2abm:F:TYR100:CA/+/2abm:E:LEU139:CA/+/2abm:F:ILE217:CA/+/2abm:G:LYS4:CA/+/2abm:E:ALA72:CA/+/2abm:G:ALA53:CA/+/2abm:E:GLU80:CA/+/2abm:G:ALA23:CA/+/2abm:G:TRP206:CA/+/2abm:E:PHE207:CA/+/2abm:E:GLY108:CA/+/2abm:G:GLY108:CA/+/2abm:F:ILE86:CA/+/2abm:E:GLY127:CA/+/2abm:E:LEU41:CA/+/2abm:E:ILE149:CA/+/2abm:G:GLY92:CA/+/2abm:G:VAL90:CA/+/2abm:G:GLY33:CA/+/2abm:E:ALA65:CA/+/2abm:H:VAL85:CA/+/2abm:G:LEU141:CA/+/2abm:H:LYS4:CA/+/2abm:G:SER104:CA/+/2abm:H:THR46:CA/+/2abm:E:GLY19:CA/+/2abm:F:TRP14:CA/+/2abm:H:PHE145:CA/+/2abm:E:GLY128:CA/+/2abm:F:THR107:CA/+/2abm:H:TYR129:CA/+/2abm:H:GLY69:CA/+/2abm:H:GLU203:CA/+/2abm:E:ALA26:CA/+/2abm:E:PRO126:CA/+/2abm:E:GLY35:CA/+/2abm:F:VAL66:CA/+/2abm:H:PHE13:CA/+/2abm:E:ARG3:CA/+/2abm:G:GLY59:CA/+/2abm:F:TRP209:CA/+/2abm:G:VAL89:CA/+/2abm:F:PHE29:CA/+/2abm:G:PRO158:CA/+/2abm:G:PHE43:CA/+/2abm:E:PRO158:CA/+/2abm:G:TRP14:CA/+/2abm:E:GLY160:CA/+/2abm:H:GLY19:CA/+/2abm:E:HIS61:CA/+/
2abm:E:ASP110:CA/+/2abm:F:ASN119:CA/+/2abm:E:GLN88:CA/+/2abm:E:PHE116:CA/+/2abm:E:ALA194:CA/+/2abm:H:GLY35:CA/+/2abm:E:SER118:CA/+/2abm:H:THR12:CA/+/2abm:E:VAL66:CA/+/2abm:G:MET131:CA/+/2abm:G:PHE208:CA/+/2abm:F:ALA26:CA/+/2abm:F:LEU227:CA/+/2abm:H:GLY28:CA/+/2abm:E:PHE109:CA/+/2abm:F:TYR121:CA/+/2abm:G:VAL94:CA/+/2abm:F:GLY33:CA/+/2abm:F:LEU32:CA/+/2abm:F:LEU172:CA/+/2abm:H:VAL16:CA/+/2abm:H:ALA97:CA/+/2abm:G:LYS155:CA/+/2abm:F:ASP110:CA/+/2abm:H:MET131:CA/+/2abm:F:VAL82:CA/+/2abm:H:LEU5:CA/+/2abm:E:LEU32:CA/+/2abm:F:LEU221:CA/+/2abm:G:ILE86:CA/+/2abm:F:ILE218:CA/+/2abm:F:THR49:CA/+/2abm:E:VAL136:CA/+/2abm:E:LEU48:CA/+/2abm:E:SER22:CA/+/2abm:F:ALA27:CA/+/2abm:F:TRP71:CA/+/2abm:E:LEU135:CA/+/2abm:F:GLN197:CA/+/2abm:G:GLY215:CA/+/2abm:H:ALA201:CA/+/2abm:G:ALA192:CA/+/2abm:F:ILE213:CA/+/2abm:F:LYS4:CA/+/2abm:G:ALA103:CA/+/2abm:G:ALA117:CA/+/2abm:H:ILE68:CA/+/2abm:H:LEU32:CA/+/2abm:G:ALA157:CA/+/2abm:E:ALA103:CA/+/2abm:F:GLY11:CA/+/2abm:G:GLY18:CA/+/2abm:F:ILE173:CA/+/2abm:G:PHE76:CA/+/2abm:F:VAL210:CA/+/2abm:E:ILE173:CA/+/2abm:G:GLN197:CA/+/2abm:H:GLY11:CA/+/2abm:E:PRO179:CA/+/2abm:E:ASN182:CA/+/2abm:H:ALA169:CA/+/2abm:H:SER125:CA/+/2abm:H:VAL24:CA/+/2abm:E:GLY167:CA/+/2abm:E:HIS56:CA/+/2abm:G:LEU48:CA/+/2abm:H:ALA40:CA/+/2abm:G:ASP110:CA/+/2abm:F:LEU175:CA/+/2abm:E:TYR100:CA/+/2abm:F:GLY122:CA/+/2abm:F:SER133:CA/+/2abm:E:LEU25:CA/+/2abm:H:GLY199:CA/+/2abm:E:ILE57:CA/+/2abm:F:PRO126:CA/+/2abm:E:LYS4:CA/+/2abm:H:ARG224:CA/+/2abm:E:PRO212:CA/+/2abm:H:GLY198:CA/+/2abm:E:LEU5:CA/+/2abm:H:LEU147:CA/+/2abm:F:PRO179:CA/+/2abm:H:SER130:CA/+/2abm:E:PHE156:CA/+/2abm:E:VAL140:CA/+/2abm:H:CYS20:CA/+/2abm:E:VAL94:CA/+/2abm:E:GLY73:CA/+/2abm:G:PRO212:CA/+/2abm:H:GLY73:CA/+/2abm:H:HIS150:CA/+/2abm:E:TYR121:CA/+/2abm:H:LEU141:CA/+/2abm:E:VAL185:CA/+/2abm:E:GLY105:CA/+/2abm:H:ALA165:CA/+/2abm:E:ALA51:CA/+/2abm:E:TRP209:CA/+/2abm:E:GLN204:CA/+/2abm:F:ALA192:CA/+/2abm:G:SER22:CA/+/2abm:H:TRP14:CA/+/2abm:E:VAL137:CA/+/2abm:E:HIS124:CA/+/2abm:G:PHE116:CA/+/2abm:F:VAL137:CA/+/2abm:H:ARG75:
CA/+/2abm:H:ASP110:CA/+/2abm:E:LEU227:CA/+/2abm:G:GLU31:CA/+/2abm:F:ILE195:CA/+/2abm:F:PHE109:CA/+/2abm:E:SER114:CA/+/2abm:H:PRO179:CA/+/2abm:H:VAL214:CA/+/2abm:E:THR12:CA/+/2abm:G:ILE213:CA/+/2abm:G:VAL39:CA/+/2abm:G:ALA87:CA/+/2abm:H:LEU98:CA/+/2abm:F:PHE36:CA/+/2abm:G:VAL211:CA/+/2abm:F:LYS106:CA/+/2abm:E:THR225:CA/+/2abm:H:ALA95:CA/+/2abm:E:LEU221:CA/+/2abm:E:PHE62:CA/+/2abm:E:GLY216:CA/+/2abm:H:ILE217:CA/+/2abm:E:GLY91:CA/+/2abm:E:ALA143:CA/+/2abm:H:LEU45:CA/+/2abm:E:ASN186:CA/+/2abm:F:GLY44:CA/+/2abm:E:GLU123:CA/+/2abm:H:HIS61:CA/+/2abm:G:VAL140:CA/+/2abm:F:SER58:CA/+/2abm:F:ALA169:CA/+/2abm:E:CYS20:CA/+/2abm:G:ALA65:CA/+/2abm:E:ILE68:CA/+/2abm:H:ALA117:CA/+/2abm:E:THR171:CA/+/2abm:H:ALA188:CA/+/2abm:G:ILE149:CA/+/2abm:H:GLY105:CA/+/2abm:E:ILE164:CA/+/2abm:F:LEU25:CA/+/2abm:G:PHE2:CA/+/2abm:G:THR181:CA/+/2abm:E:ARG75:CA",negate=False, only=True, xor=False, log=0, intersect=False)
self.superimposeAtoms(atoms1,atoms2)
self.superimposeAtoms.rigidfitAligner
rotMat = Numeric.identity(4).astype('d')
rotMat[:3,:3] = self.superimposeAtoms.rigidfitAligner.rotationMatrix
transMat = Numeric.array(self.superimposeAtoms.rigidfitAligner.translationMatrix)
#need to apply it on our geometry....but which level
print rotMat
print transMat
| 1,329.555556 | 18,130 | 0.664967 | 6,497 | 35,898 | 3.674003 | 0.150993 | 0.265982 | 0.066569 | 0.002178 | 0.013364 | 0.013364 | 0.013364 | 0.011605 | 0.009636 | 0.009636 | 0 | 0.219447 | 0.027634 | 35,898 | 27 | 18,131 | 1,329.555556 | 0.46439 | 0.003538 | 0 | 0 | 0 | 0.133333 | 0.979061 | 0.474364 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.133333 | null | null | 0.133333 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
0c51787448c85a5f5ad754d702b55839a8069b65 | 5,147 | gyp | Python | ash/ash_resources.gyp | maidiHaitai/haitaibrowser | a232a56bcfb177913a14210e7733e0ea83a6b18d | [
"BSD-3-Clause"
] | 1 | 2020-09-15T08:43:34.000Z | 2020-09-15T08:43:34.000Z | ash/ash_resources.gyp | maidiHaitai/haitaibrowser | a232a56bcfb177913a14210e7733e0ea83a6b18d | [
"BSD-3-Clause"
] | null | null | null | ash/ash_resources.gyp | maidiHaitai/haitaibrowser | a232a56bcfb177913a14210e7733e0ea83a6b18d | [
"BSD-3-Clause"
] | null | null | null | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'targets': [
{
# GN version: //ash/resources
'target_name': 'ash_resources',
'type': 'none',
'variables': {
'grit_out_dir': '<(SHARED_INTERMEDIATE_DIR)/ash/resources',
},
'actions': [
{
'action_name': 'ash_resources',
'variables': {
'grit_grd_file': 'resources/ash_resources.grd',
},
'includes': [ '../build/grit_action.gypi' ],
},
],
'includes': [ '../build/grit_target.gypi' ],
},
{
# Creates a 100% pak file containing resources for ash_unittests, etc.
# TODO(msw): Use ui_test.pak instead of its pieces? (no 200% support?)
# TODO(msw): Add copy steps and mac handling like repack_ui_test_pack?
# GN version: //ash/resources:ash_test_resources_100_percent
'target_name': 'ash_test_resources_100_percent',
'type': 'none',
'dependencies': [
'<(DEPTH)/ui/resources/ui_resources.gyp:ui_resources',
'ash_resources',
'../ash/wm/common/ash_wm_common_resources.gyp:ash_wm_common_resources',
],
'actions': [
{
'action_name': 'repack_ash_test_resources',
'variables': {
'pak_output': '<(PRODUCT_DIR)/ash_test_resources_100_percent.pak',
'pak_inputs': [
'<(SHARED_INTERMEDIATE_DIR)/ash/resources/ash_resources_100_percent.pak',
'<(SHARED_INTERMEDIATE_DIR)/ash/wm/common/resources/ash_wm_common_resources_100_percent.pak',
'<(SHARED_INTERMEDIATE_DIR)/ui/resources/ui_resources_100_percent.pak',
'<(SHARED_INTERMEDIATE_DIR)/ui/resources/webui_resources.pak',
],
'conditions': [
['chromeos==1', {
'pak_inputs': [
'<(SHARED_INTERMEDIATE_DIR)/ui/chromeos/resources/ui_chromeos_resources_100_percent.pak',
],
}],
['toolkit_views==1', {
'pak_inputs': [
# TODO(msw): This seems bad, but follows repack_ui_test_pack's example.
'<(SHARED_INTERMEDIATE_DIR)/blink/public/resources/blink_resources.pak',
'<(SHARED_INTERMEDIATE_DIR)/ui/app_list/resources/app_list_resources_100_percent.pak',
'<(SHARED_INTERMEDIATE_DIR)/ui/views/resources/views_resources_100_percent.pak',
],
}],
],
},
'includes': [ '../build/repack_action.gypi' ],
},
],
'conditions': [
['chromeos==1', {
'dependencies': [
'<(DEPTH)/ui/chromeos/ui_chromeos.gyp:ui_chromeos_resources',
],
}],
['toolkit_views==1', {
'dependencies': [
'<(DEPTH)/ui/views/resources/views_resources.gyp:views_resources',
],
}],
],
},
{
# Creates a 200% pak file containing resources for ash_unittests, etc.
# TODO(msw): Use ui_test.pak instead of its pieces? (no 200% support?)
# TODO(msw): Add copy steps and mac handling like repack_ui_test_pack?
# GN version: //ash/resources:ash_test_resources_200_percent
'target_name': 'ash_test_resources_200_percent',
'type': 'none',
'dependencies': [
'<(DEPTH)/ui/resources/ui_resources.gyp:ui_resources',
'ash_resources',
'../ash/wm/common/ash_wm_common_resources.gyp:ash_wm_common_resources',
],
'actions': [
{
'action_name': 'repack_ash_test_resources',
'variables': {
'pak_output': '<(PRODUCT_DIR)/ash_test_resources_200_percent.pak',
'pak_inputs': [
'<(SHARED_INTERMEDIATE_DIR)/ash/resources/ash_resources_200_percent.pak',
'<(SHARED_INTERMEDIATE_DIR)/ash/wm/common/resources/ash_wm_common_resources_200_percent.pak',
'<(SHARED_INTERMEDIATE_DIR)/ui/resources/ui_resources_200_percent.pak',
],
'conditions': [
['chromeos==1', {
'pak_inputs': [
'<(SHARED_INTERMEDIATE_DIR)/ui/chromeos/resources/ui_chromeos_resources_200_percent.pak',
],
}],
['toolkit_views==1', {
'pak_inputs': [
'<(SHARED_INTERMEDIATE_DIR)/ui/app_list/resources/app_list_resources_200_percent.pak',
'<(SHARED_INTERMEDIATE_DIR)/ui/views/resources/views_resources_200_percent.pak',
],
}],
],
},
'includes': [ '../build/repack_action.gypi' ],
},
],
'conditions': [
['chromeos==1', {
'dependencies': [
'<(DEPTH)/ui/chromeos/ui_chromeos.gyp:ui_chromeos_resources',
],
}],
['toolkit_views==1', {
'dependencies': [
'<(DEPTH)/ui/views/resources/views_resources.gyp:views_resources',
],
}],
],
},
],
}
| 38.410448 | 107 | 0.563046 | 519 | 5,147 | 5.236994 | 0.188825 | 0.099338 | 0.115894 | 0.076159 | 0.817881 | 0.798013 | 0.773731 | 0.747976 | 0.744665 | 0.688006 | 0 | 0.021637 | 0.299592 | 5,147 | 133 | 108 | 38.699248 | 0.732316 | 0.152516 | 0 | 0.638655 | 0 | 0 | 0.574517 | 0.445032 | 0 | 0 | 0 | 0.007519 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
a79b308877b4c9806a09da668e4d98e48d729b72 | 38,224 | py | Python | optimization/first_sdEta_mjj_optimization/sdEta_mistake_analyses/sdEta_mmjj_gridsearch/analysis_deltaeta6.1_mmjj_750/Output/Histos/MadAnalysis5job_0/selection_0.py | sheride/axion_pheno | 7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5 | [
"MIT"
] | null | null | null | optimization/first_sdEta_mjj_optimization/sdEta_mistake_analyses/sdEta_mmjj_gridsearch/analysis_deltaeta6.1_mmjj_750/Output/Histos/MadAnalysis5job_0/selection_0.py | sheride/axion_pheno | 7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5 | [
"MIT"
] | null | null | null | optimization/first_sdEta_mjj_optimization/sdEta_mistake_analyses/sdEta_mmjj_gridsearch/analysis_deltaeta6.1_mmjj_750/Output/Histos/MadAnalysis5job_0/selection_0.py | sheride/axion_pheno | 7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5 | [
"MIT"
] | null | null | null | def selection_0():
# Library import
import numpy
import matplotlib
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
# Library version
matplotlib_version = matplotlib.__version__
numpy_version = numpy.__version__
# Histo binning
xBinning = numpy.linspace(0.0,1000.0,201,endpoint=True)
# Creating data sequence: middle of each bin
xData = numpy.array([2.5,7.5,12.5,17.5,22.5,27.5,32.5,37.5,42.5,47.5,52.5,57.5,62.5,67.5,72.5,77.5,82.5,87.5,92.5,97.5,102.5,107.5,112.5,117.5,122.5,127.5,132.5,137.5,142.5,147.5,152.5,157.5,162.5,167.5,172.5,177.5,182.5,187.5,192.5,197.5,202.5,207.5,212.5,217.5,222.5,227.5,232.5,237.5,242.5,247.5,252.5,257.5,262.5,267.5,272.5,277.5,282.5,287.5,292.5,297.5,302.5,307.5,312.5,317.5,322.5,327.5,332.5,337.5,342.5,347.5,352.5,357.5,362.5,367.5,372.5,377.5,382.5,387.5,392.5,397.5,402.5,407.5,412.5,417.5,422.5,427.5,432.5,437.5,442.5,447.5,452.5,457.5,462.5,467.5,472.5,477.5,482.5,487.5,492.5,497.5,502.5,507.5,512.5,517.5,522.5,527.5,532.5,537.5,542.5,547.5,552.5,557.5,562.5,567.5,572.5,577.5,582.5,587.5,592.5,597.5,602.5,607.5,612.5,617.5,622.5,627.5,632.5,637.5,642.5,647.5,652.5,657.5,662.5,667.5,672.5,677.5,682.5,687.5,692.5,697.5,702.5,707.5,712.5,717.5,722.5,727.5,732.5,737.5,742.5,747.5,752.5,757.5,762.5,767.5,772.5,777.5,782.5,787.5,792.5,797.5,802.5,807.5,812.5,817.5,822.5,827.5,832.5,837.5,842.5,847.5,852.5,857.5,862.5,867.5,872.5,877.5,882.5,887.5,892.5,897.5,902.5,907.5,912.5,917.5,922.5,927.5,932.5,937.5,942.5,947.5,952.5,957.5,962.5,967.5,972.5,977.5,982.5,987.5,992.5,997.5])
# Creating weights for histo: y1_PT_0
y1_PT_0_weights = numpy.array([0.0,0.0,0.0,0.0,3.53319457632,8.40515280693,12.7326011035,15.1562990293,16.1880061464,18.304648335,18.82459989,20.2493386708,22.0057011677,22.8122364775,23.0824442462,24.1059673703,24.6300109218,25.7067540003,27.221564704,27.3607645849,28.4456956564,29.1785350292,29.440558805,29.5756626894,30.1856821673,30.6728777504,31.4425650917,31.6718328955,31.8560687378,32.5930041071,32.6257560791,32.1917804505,32.7444839775,33.0146917463,33.3545034555,33.3667834449,33.2808075185,33.1661756166,33.1457036341,34.2306347057,33.7311591331,34.406682555,33.7884750841,33.6574631962,33.9931789089,33.7761910946,33.0638237042,33.3954434204,33.4609473644,33.0638237042,32.8632118759,33.1989275886,32.699448016,32.7608599635,32.0976165311,33.2316795606,32.3023243559,31.7373408394,32.154936482,31.692304878,31.64317692,31.7168688569,31.0372494385,31.123225365,30.6769697469,30.3207860517,30.6810657434,30.2389061218,29.952318367,29.6370746368,29.1949110152,28.912419257,29.1539710502,29.0311511554,28.4825436248,27.8356761784,28.261459814,27.3239166164,27.708760287,27.0414248581,27.246128683,26.3945574117,26.7098051419,26.5010053206,25.6576260424,24.9411586555,25.010758596,25.1827104488,25.0312305785,23.7170277031,23.6515237592,24.4048351145,24.0568394123,23.2339281166,23.4713839134,23.2175521306,22.8490844459,21.8173733288,21.5799175321,22.2267809785,21.9238212378,20.7324422573,21.133661914,21.3588377213,20.6300903449,20.2370586813,20.572770394,19.4796513295,19.2258195467,19.459179347,18.9924557464,18.4929761738,18.4151882404,18.3005563385,17.9484646398,18.1286044857,17.182869295,17.1787772985,16.9904494597,16.7529896629,16.6752057295,16.0733742445,16.069278248,16.0078703006,15.4633547666,15.6844345774,14.963875194,15.1235470574,14.7182314042,14.460303625,14.4398356425,14.1450598948,14.1614358807,13.7233682556,13.273020641,12.7571650825,13.0232808548,13.2361726726,12.5811212332,12.224933538,12.5565572542,12.4009813873,11.8032458989,11.655858025,11.3897422527,11.1973
184174,11.2873903403,11.1318144735,10.6937468484,11.3774582633,10.566830957,10.3416551497,9.96499947203,9.8749315491,9.98547145451,9.74391966122,9.69479170327,9.44505191699,8.86369241452,9.06839623933,9.24034809218,9.08477222532,9.17074815174,8.52797670182,8.45837676138,8.17179300664,8.34783685598,8.13494503817,7.84835728343,7.72553738854,7.75419336402,7.5535855357,7.40210566533,7.49217358825,7.217869823,7.42257364782,6.91900207877,6.8780621138,6.59966235206,6.665170296,6.43180649571,6.25985464286,6.25575864637,6.07561880053,6.29670261133,5.95279890564,5.6621191544,5.94870290914,5.81359902476,5.60889519995,5.54339125601,5.46560332258,5.13807560287,5.12988760988,4.83101986565,5.01525170798,5.04391168346,4.79007990068,4.8801478236,4.7777959112])
# Creating weights for histo: y1_PT_1
y1_PT_1_weights = numpy.array([0.0,0.0,0.0,0.0,65.0884608056,206.192957741,351.333693747,477.549922582,603.371355931,703.55992331,627.170441736,430.623095569,259.698481115,136.118922128,58.8904200345,15.1389488129,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y1_PT_2
y1_PT_2_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,149.567532082,408.09490162,617.05484814,750.806991886,829.863686247,849.531685067,821.643274286,775.690121869,730.382817641,682.362372521,574.876374684,437.127244666,321.926191449,232.553859386,169.866834079,122.139396605,88.6951211878,63.3307661232,45.4727950782,32.1700454491,21.3952770372,14.2270439239,9.57878400613,5.25061769076,2.76071672406,0.612410774045,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y1_PT_3
y1_PT_3_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,58.7229570322,154.901423901,227.072446524,279.192150822,307.506320361,316.934234003,316.461513925,312.516478805,296.679381166,285.862232752,270.634641759,256.491511901,239.391809645,225.621907645,212.156982409,196.136906836,181.37886712,168.388408865,155.979953691,144.567030102,125.953087946,104.126631789,85.5308086728,70.2887549474,56.70333403,47.6943927296,40.1470323604,34.1026138978,28.5853579489,23.8433166151,19.801722448,17.1939781424,14.8126121534,12.81568264,10.8251151035,9.37817565715,8.29506329341,7.05101241212,6.03935650643,5.42904140977,4.47702802272,3.50345135249,3.02517043501,2.56351515906,2.22184408787,1.75477950645,1.55682533298,1.18808132778,0.934843768041,0.781195929861,0.616000656561,0.428952439179,0.258553873088,0.15950049138,0.0934251446045,0.0219956457445,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y1_PT_4
y1_PT_4_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,7.13607366506,19.4381983514,29.9377039982,37.3900664195,41.6986249046,45.2657518478,46.0053275361,46.4555423825,46.4142967382,45.6721156399,44.5225285855,43.3382294534,41.7675680612,40.2014761574,38.8513166253,37.3186461192,35.3593617728,34.1667694203,32.6133718756,31.3480044096,28.153871969,24.1029123738,20.5955695691,17.7615087976,15.2054733303,13.5533788468,11.821302285,10.4832599135,9.3485677876,8.46315728131,7.58796799883,7.00054424566,6.35030606179,5.65362343858,5.02784404203,4.65791590657,4.31146852762,3.81418674931,3.49827477545,3.08590130458,2.86777998765,2.65567075447,2.36435425176,2.11075967655,1.93026528987,1.76153533137,1.56024456215,1.46456268398,1.30358642562,1.22870052917,1.08551561356,0.953287849867,0.874372787073,0.767754199431,0.718423767498,0.620788633504,0.586162333895,0.533846102113,0.503321519493,0.393769721201,0.367104993358,0.316760175107,0.231937285035,0.210200509815,0.236850607278,0.162845339404,0.150011210039,0.12334231354,0.099674568768,0.0947176359693,0.0621636794664,0.0335593922349,0.0345439806788,0.0207222606362,0.0148002879852,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y1_PT_5
y1_PT_5_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.45977311125,4.00325430469,6.27416006981,7.96481829624,9.11565898536,9.84380310995,10.2961310045,10.592816397,10.6607452009,10.6568402349,10.5501818503,10.394239274,10.1673031357,9.92047807741,9.66317042595,9.51062068897,9.19188184098,8.83458145383,8.60136776261,8.25675451394,7.52395323728,6.61548450491,5.57898009972,4.85784170817,4.20085918718,3.70455121304,3.30552610424,2.95761323932,2.61786079309,2.43559210439,2.17670366188,1.96342570226,1.85121914518,1.70859706405,1.57296391181,1.43550950904,1.35940748331,1.25507927513,1.15805687422,1.08718654268,1.02697164702,1.0020110724,0.919296210821,0.873224013715,0.812194516549,0.799818254972,0.727979683912,0.693226286793,0.659695192041,0.633960826005,0.605229318551,0.565935998369,0.560611881316,0.497338629101,0.482716211881,0.433583257621,0.431295491685,0.40332497248,0.366514746554,0.333243356047,0.317348544069,0.277290434697,0.274999828059,0.267451760856,0.233930548549,0.223337047983,0.205198560976,0.196103151,0.18377878226,0.157043688669,0.148983894878,0.141163000185,0.126796006151,0.106376394852,0.104612878602,0.101092727805,0.0930337742219,0.0781530531048,0.0768795380691,0.0695656087865,0.0531884374851,0.045623446096,0.0416028915117,0.0388286852427,0.0335303465666,0.028232996135,0.0272265991776,0.021426780448,0.0166343429191,0.0128626458959,0.0103371051405,0.0115944121667,0.00756321494965,0.00252088920475,0.00252425483732,0.000756128647259,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y1_PT_6
y1_PT_6_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.425254121717,1.16325971125,1.86968469649,2.28210432435,2.68374756782,2.90402666133,3.08055743296,3.24501625489,3.23053411383,3.33116535075,3.28321943722,3.31185682901,3.27162932586,3.22385335531,3.172905449,3.14328838591,3.04501635745,2.9323111715,2.83401815009,2.79087962363,2.74531091252,2.66329343045,2.58195272132,2.50497754749,2.36609414467,2.34439842387,2.20891188137,2.22264427397,2.13156083365,2.05251835343,1.96425096828,1.92784018457,1.85730584976,1.77399380203,1.72427048485,1.72630980063,1.63745661203,1.55137049504,1.51404101957,1.46568624322,1.36513198048,1.20549554058,1.09852842944,0.942887149111,0.891375631895,0.800469732005,0.682524404195,0.622627100519,0.573528373779,0.545848760649,0.519921859461,0.458939719761,0.448020483307,0.421095716916,0.377629300463,0.362135998668,0.339534681703,0.326715582677,0.295468266558,0.296841805718,0.265980160201,0.264556737777,0.258258950767,0.247704591891,0.217610488854,0.221344635999,0.202921017362,0.202406989825,0.200402562337,0.191805646714,0.162599445773,0.172320284306,0.172346775418,0.151467180793,0.159142205722,0.135393273798,0.133401342117,0.129978090662,0.128585557875,0.119923364153,0.117926634082,0.112528745144,0.101910507701,0.103885345117,0.0962016131089,0.0996425086438,0.0775722634978,0.0764433522622,0.0730260788018,0.0767291563712,0.0707361171137,0.0647091592364,0.0618299252568,0.0549642387838,0.0515395378155,0.05868815936,0.0498154562672,0.0483949728579,0.0449361833267,0.0466636037547,0.0432291660531,0.0355028483331,0.0406631669792,0.034918254472,0.0335082475476,0.0317794975657,0.0300596745886,0.0340671500286,0.033755494593,0.0257730028227,0.030052716923,0.02318
14823115,0.0211780844775,0.0211836126227,0.0140297228457,0.0229099734063,0.0177404478493,0.0188837642518,0.0143207252106,0.0174595121069])
# Creating weights for histo: y1_PT_7
y1_PT_7_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.058952634796,0.164191845946,0.257071021864,0.337077148291,0.38781262822,0.438859998689,0.478369346971,0.492822678192,0.511520185053,0.526463438395,0.531748646156,0.533351684747,0.534818517446,0.53448240321,0.525804201871,0.525666319597,0.515666711544,0.500721781821,0.502366729919,0.491852054041,0.479395710765,0.475773891285,0.463552241241,0.45541928257,0.443828370587,0.436992092021,0.427800080142,0.412705575394,0.41291784704,0.397994668351,0.385766312786,0.372878343511,0.365086821583,0.357116136519,0.342417006047,0.335728123213,0.327896200522,0.320608430852,0.313168362038,0.298569772471,0.284407586585,0.250141746106,0.222132869094,0.197519793598,0.171760828399,0.159374935002,0.144609419873,0.13080987694,0.122389754337,0.108609364048,0.104160334745,0.0927933033409,0.0872937703686,0.0799916676479,0.0753286903183,0.0730558120972,0.0651273718142,0.0607341659726,0.0590728312583,0.0563660639249,0.0520117920141,0.0494105947256,0.0449388085609,0.0439587966803,0.0425391959946,0.0408377496924,0.0396221896261,0.0346927265138,0.035190707833,0.0326964342663,0.0323247681972,0.0306456009881,0.0287656243827,0.0280947160612,0.0246123546521,0.0253497146858,0.0238535579062,0.0239685198712,0.0232120281481,0.0207444132179])
# Creating weights for histo: y1_PT_8
y1_PT_8_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0115345699134,0.037131312779,0.054947469898,0.0720011862713,0.0901486095199,0.101584166381,0.107075034641,0.11361451137,0.116586803697,0.124172290206,0.119228201135,0.122643844797,0.12493392907,0.126389135975,0.128825340865,0.127772663589,0.12487819831,0.127105082447,0.124108020894,0.123885201803,0.121405160728,0.114909995373,0.11460825036,0.119473354109,0.112053797658,0.110761610278,0.107263254032,0.103949672058,0.104333015932,0.103521110981,0.101115941873,0.0988254863579,0.0995887141354,0.0953834548577,0.0937347866324,0.0880003394628,0.0894376822313,0.0882761421266,0.084644406269,0.0852531976226])
# Creating weights for histo: y1_PT_9
y1_PT_9_weights = numpy.array([0.0,0.0,0.0,0.0,24284.9498946,62738.5042731,83728.7175448,98151.6521433,107455.876917,111225.53202,94876.6453111,65075.8942335,38296.4543337,18644.821422,8291.28401339,1921.07291905,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y1_PT_10
y1_PT_10_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,18032.4138292,47611.0210328,73762.1116719,89623.3844815,94082.1875475,91112.9482895,82811.6523573,73061.7311894,63686.7596343,55124.2109703,44743.3202077,33723.1182315,24637.3253629,17510.0314892,12468.2616132,9213.94522115,6906.97971435,4982.8014023,3686.37146568,2799.61159143,2125.42294579,1509.30127882,1041.61079386,718.325351627,374.992706028,129.559654425,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y1_PT_11
y1_PT_11_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2966.68822558,7941.78511997,11931.2014369,14498.1422993,15608.5610339,15645.2089481,14925.2299747,14257.7868506,13330.4270983,12494.5142275,11516.5820443,10777.5682873,9911.77764544,9263.41925498,8406.35064767,7827.05770664,7184.42050973,6469.50569562,5909.38586128,5383.87751893,4624.66860184,3867.13877244,3117.41266643,2590.559517,2076.5204579,1725.71062437,1439.35047814,1199.11375674,1044.13473052,891.639370857,760.314348515,671.437201145,573.529097873,489.923978496,420.398989269,385.586928909,355.873339582,301.528225315,258.444986631,228.947719129,188.86455293,183.360949157,161.705098059,132.893605323,118.165047621,108.491350779,91.9038086119,86.6073819764,71.8629939734,58.0442174736,47.9106734557,40.9974044768,26.9561968856,20.0415177805,12.8966192297,5.298090354,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y1_PT_12
y1_PT_12_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,211.643248619,585.864523948,929.50181634,1184.4215597,1356.52258659,1420.74072555,1438.89657272,1444.03443933,1417.65762087,1384.02161821,1323.87183862,1275.13616344,1221.59232226,1154.85507056,1112.07247272,1056.60890452,984.753791382,948.011985867,897.229243053,847.760301397,769.255007639,644.282702075,543.445096912,454.969148799,381.115172799,327.341156735,285.168254964,247.416996197,224.792647762,198.963432402,180.98970924,168.88919662,154.688448779,144.050698901,136.796092803,127.380928197,123.943770455,115.421413565,103.48390462,95.6431470011,84.2366214709,74.5715076371,71.5225271625,68.5065936921,60.7568596028,55.0218770996,51.9208062365,46.5474172058,44.0004895897,42.0603342099,38.0728842162,31.7073887243,29.1592454093,30.9034539826,27.8028332359,25.1147191228,23.5941337411,21.072104869,19.3560805044,17.8042947497,17.4718588144,14.9252312758,14.3169009444,12.432875459,11.2976050665,9.91394354133,9.1097625822,8.49982414279,7.72598948977,6.11983583464,5.03889487101,4.59633816162,3.04578157074,2.52034382107,1.66134099808,0.609007429508,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y1_PT_13
y1_PT_13_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,38.6522856981,109.694413151,172.499809492,224.082722198,261.084768306,282.070611236,298.199806283,301.248887656,304.976830496,298.334219276,293.633648241,289.823055064,284.747644726,272.884529955,264.934744793,252.973202088,243.45039047,235.407942266,225.817954492,217.639758268,199.415358974,168.920115384,144.698226552,121.932306542,100.679336752,87.4185695873,75.8859347979,67.1765190067,59.8867228793,52.3779701105,45.7822729691,41.3685568593,36.4369957311,34.1988282479,30.4961295631,28.7221571992,26.676737341,24.4076214937,21.7074313452,20.8079201782,19.185849667,17.7225988781,15.9895511744,15.487143928,15.4547816485,14.6684079912,13.3984752481,13.4295935248,12.9257238165,11.9978008377,11.7157823532,11.9171105638,12.0677077306,11.956372508,11.9071706775,12.0887040717,11.7159158559,11.3529218872,10.7975383235,9.87013115077,9.40755020113,8.72030233341,8.70052572196,7.48029241054,7.42987691867,6.75469891293,6.50274281972,5.81762432081,5.90774231594,5.45478084605,5.33267559276,4.89957022914,4.36548884926,4.42564033236,4.2344619715,3.46778967868,3.1149583031,3.14532289604,2.78290359591,2.82271593553,2.65221168869,2.48026561218,2.33924544698,2.12712535979,1.97622052985,1.76418479211,1.38120849629,1.13954243071,1.19967085425,1.31077001971,0.736125650677,0.685423734734,0.625155740146,0.594810019632,0.312478653259,0.0906632324841,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y1_PT_14
y1_PT_14_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,10.6042127884,30.6142149489,48.327795487,63.625846148,74.0160195786,81.4914303343,86.2475655407,89.1999944026,91.0456375641,90.9803469418,90.2612267164,89.5854668512,88.5472420755,87.0571769528,84.5761333032,83.3544253348,80.733066482,78.2724141287,76.5895165962,74.278797982,71.1686251298,68.9355858124,66.9493581287,65.0062984345,62.4582713002,60.4706970214,58.2149194849,55.6787808611,53.3808741368,52.4564174049,50.1461220064,48.8818616347,47.4342719669,45.3513972335,43.1641036733,41.9507830682,40.8329552886,39.1782593037,38.1211014018,35.7922883977,32.9455018404,29.3985705056,25.502013082,21.9855186425,19.2639346446,17.2325190787,14.9156522964,13.9337575149,12.334952919,11.2122004489,10.0716344502,9.19198085898,8.55223279264,8.03759879516,7.21747624694,6.73875401546,6.21270858687,5.88190404939,5.58775382889,5.04153255919,4.83214472533,4.33161534903,4.37109366819,4.36531869917,4.17313265509,3.88733901867,3.65829320568,3.48528229022,3.34710509526,3.16572605348,3.12911790547,3.12329407429,3.03271709028,3.0949620994,3.2111228502,3.0865793529,3.19985685136,2.88570084547,2.78648141449,2.60572873178,2.58036311338,2.48668011172,2.26889914335,2.22935541815,2.05965751124,1.92944869623,1.88704595739,1.886928996,1.73415740399,1.73993160352,1.53903924539,1.57303923118,1.33246387146,1.3240999773,1.22506022057,1.26471859871,1.26466588914,1.13160152317,1.1401239308,0.984520641796,0.919525885605,0.928044061085,0.939285821219,0.831805991522,0.874219887861,0.792190704417,0.843071605625,0.611137538642,0.684587757459,0.687457159118,0.574341636808,0.622455862322,0.695885689842,0.605447982228,0.475319578185,0.534695570388,0.506433618434,0.500
837169448,0.446979138977,0.441409237151])
# Creating weights for histo: y1_PT_15
y1_PT_15_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.62223918695,4.36270648813,6.79671930815,8.68546988546,10.5664109817,11.7829742755,12.8590803485,13.0701573035,13.7400427458,13.8044186765,14.1816346892,14.2943280173,13.9799990647,13.8957006268,13.9307245362,13.9606201135,13.534283186,13.1379481836,13.0265664797,12.8129962572,12.6325356819,12.0642833857,11.8441195933,11.719902879,11.3442643603,11.0515512098,10.9309621408,10.5669722623,10.1426866677,9.83149852323,9.8877967399,9.56697583867,9.5425205475,8.95307547757,8.64673797911,8.30973565358,8.064791618,8.0130427283,7.57962775702,7.69193705102,6.924843717,6.05717835257,5.50341536745,4.66543997431,4.18120015845,3.59052499777,3.17296413557,3.00455986756,2.70084916231,2.33825953127,2.30599830401,1.96235514275,1.89036470214,1.68900085565,1.51687795596,1.43966110673,1.24865436436,1.22631657812,1.15308044907,1.00989812244,0.979335862021,0.939873109251,0.828533353637,0.872793460321,0.757073352738,0.69171275858,0.638245404931,0.577279933284,0.609442611485,0.609121204489,0.528659444405,0.493355839965,0.461361782273,0.428039557434,0.460067291962,0.411280664025,0.415736995663,0.368789359301,0.344425645499,0.359598182685])
# Creating weights for histo: y1_PT_16
y1_PT_16_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.31869230334,0.860029746775,1.41917288124,1.89769006834,2.28034144849,2.55601918943,2.77291885124,2.93844873644,3.07626141827,3.22033202835,3.21869138743,3.25752759172,3.29572294473,3.3246594601,3.30514045468,3.25782336924,3.21528763521,3.20856254795,3.16366714094,3.15150252026,3.08469569905,3.03556466555,2.98294630728,2.9621814161,2.85470634157,2.84015855517,2.77508364997,2.72708257902,2.69125306103,2.58925487723,2.52599276472,2.51484372388,2.4608589351,2.33993407117,2.31567030141,2.2552250076,2.19686787225,2.1679629373,2.11200706865,2.06975748396])
# Creating a new Canvas
fig = plt.figure(figsize=(12,6),dpi=80)
frame = gridspec.GridSpec(1,1,right=0.7)
pad = fig.add_subplot(frame[0])
# Creating a new Stack
pad.hist(x=xData, bins=xBinning, weights=y1_PT_0_weights+y1_PT_1_weights+y1_PT_2_weights+y1_PT_3_weights+y1_PT_4_weights+y1_PT_5_weights+y1_PT_6_weights+y1_PT_7_weights+y1_PT_8_weights+y1_PT_9_weights+y1_PT_10_weights+y1_PT_11_weights+y1_PT_12_weights+y1_PT_13_weights+y1_PT_14_weights+y1_PT_15_weights+y1_PT_16_weights,\
label="$bg\_dip\_1600\_inf$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#e5e5e5", linewidth=4, linestyle="dashdot",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y1_PT_0_weights+y1_PT_1_weights+y1_PT_2_weights+y1_PT_3_weights+y1_PT_4_weights+y1_PT_5_weights+y1_PT_6_weights+y1_PT_7_weights+y1_PT_8_weights+y1_PT_9_weights+y1_PT_10_weights+y1_PT_11_weights+y1_PT_12_weights+y1_PT_13_weights+y1_PT_14_weights+y1_PT_15_weights,\
label="$bg\_dip\_1200\_1600$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#f2f2f2", linewidth=4, linestyle="dashdot",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y1_PT_0_weights+y1_PT_1_weights+y1_PT_2_weights+y1_PT_3_weights+y1_PT_4_weights+y1_PT_5_weights+y1_PT_6_weights+y1_PT_7_weights+y1_PT_8_weights+y1_PT_9_weights+y1_PT_10_weights+y1_PT_11_weights+y1_PT_12_weights+y1_PT_13_weights+y1_PT_14_weights,\
label="$bg\_dip\_800\_1200$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#ccc6aa", linewidth=4, linestyle="dashdot",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y1_PT_0_weights+y1_PT_1_weights+y1_PT_2_weights+y1_PT_3_weights+y1_PT_4_weights+y1_PT_5_weights+y1_PT_6_weights+y1_PT_7_weights+y1_PT_8_weights+y1_PT_9_weights+y1_PT_10_weights+y1_PT_11_weights+y1_PT_12_weights+y1_PT_13_weights,\
label="$bg\_dip\_600\_800$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#ccc6aa", linewidth=4, linestyle="dashdot",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y1_PT_0_weights+y1_PT_1_weights+y1_PT_2_weights+y1_PT_3_weights+y1_PT_4_weights+y1_PT_5_weights+y1_PT_6_weights+y1_PT_7_weights+y1_PT_8_weights+y1_PT_9_weights+y1_PT_10_weights+y1_PT_11_weights+y1_PT_12_weights,\
label="$bg\_dip\_400\_600$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#c1bfa8", linewidth=4, linestyle="dashdot",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y1_PT_0_weights+y1_PT_1_weights+y1_PT_2_weights+y1_PT_3_weights+y1_PT_4_weights+y1_PT_5_weights+y1_PT_6_weights+y1_PT_7_weights+y1_PT_8_weights+y1_PT_9_weights+y1_PT_10_weights+y1_PT_11_weights,\
label="$bg\_dip\_200\_400$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#bab5a3", linewidth=4, linestyle="dashdot",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y1_PT_0_weights+y1_PT_1_weights+y1_PT_2_weights+y1_PT_3_weights+y1_PT_4_weights+y1_PT_5_weights+y1_PT_6_weights+y1_PT_7_weights+y1_PT_8_weights+y1_PT_9_weights+y1_PT_10_weights,\
label="$bg\_dip\_100\_200$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#b2a596", linewidth=4, linestyle="dashdot",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y1_PT_0_weights+y1_PT_1_weights+y1_PT_2_weights+y1_PT_3_weights+y1_PT_4_weights+y1_PT_5_weights+y1_PT_6_weights+y1_PT_7_weights+y1_PT_8_weights+y1_PT_9_weights,\
label="$bg\_dip\_0\_100$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#b7a39b", linewidth=4, linestyle="dashdot",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y1_PT_0_weights+y1_PT_1_weights+y1_PT_2_weights+y1_PT_3_weights+y1_PT_4_weights+y1_PT_5_weights+y1_PT_6_weights+y1_PT_7_weights+y1_PT_8_weights,\
label="$bg\_vbf\_1600\_inf$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#ad998c", linewidth=4, linestyle="dashdot",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y1_PT_0_weights+y1_PT_1_weights+y1_PT_2_weights+y1_PT_3_weights+y1_PT_4_weights+y1_PT_5_weights+y1_PT_6_weights+y1_PT_7_weights,\
label="$bg\_vbf\_1200\_1600$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#9b8e82", linewidth=4, linestyle="dashdot",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y1_PT_0_weights+y1_PT_1_weights+y1_PT_2_weights+y1_PT_3_weights+y1_PT_4_weights+y1_PT_5_weights+y1_PT_6_weights,\
label="$bg\_vbf\_800\_1200$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#876656", linewidth=4, linestyle="dashdot",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y1_PT_0_weights+y1_PT_1_weights+y1_PT_2_weights+y1_PT_3_weights+y1_PT_4_weights+y1_PT_5_weights,\
label="$bg\_vbf\_600\_800$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#afcec6", linewidth=4, linestyle="dashdot",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y1_PT_0_weights+y1_PT_1_weights+y1_PT_2_weights+y1_PT_3_weights+y1_PT_4_weights,\
label="$bg\_vbf\_400\_600$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#84c1a3", linewidth=4, linestyle="dashdot",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y1_PT_0_weights+y1_PT_1_weights+y1_PT_2_weights+y1_PT_3_weights,\
label="$bg\_vbf\_200\_400$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#89a8a0", linewidth=4, linestyle="dashdot",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y1_PT_0_weights+y1_PT_1_weights+y1_PT_2_weights,\
label="$bg\_vbf\_100\_200$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#829e8c", linewidth=4, linestyle="dashdot",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y1_PT_0_weights+y1_PT_1_weights,\
label="$bg\_vbf\_0\_100$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#adbcc6", linewidth=4, linestyle="dashdot",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y1_PT_0_weights,\
label="$signal$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#7a8e99", linewidth=3, linestyle="dashed",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
# Axis
plt.rc('text',usetex=False)
plt.xlabel(r"p_{T} [ j_{1} ] ( GeV ) ",\
fontsize=16,color="black")
plt.ylabel(r"$\mathrm{Events}$ $(\mathcal{L}_{\mathrm{int}} = 40.0\ \mathrm{fb}^{-1})$ ",\
fontsize=16,color="black")
# Boundary of y-axis
ymax=(y1_PT_0_weights+y1_PT_1_weights+y1_PT_2_weights+y1_PT_3_weights+y1_PT_4_weights+y1_PT_5_weights+y1_PT_6_weights+y1_PT_7_weights+y1_PT_8_weights+y1_PT_9_weights+y1_PT_10_weights+y1_PT_11_weights+y1_PT_12_weights+y1_PT_13_weights+y1_PT_14_weights+y1_PT_15_weights+y1_PT_16_weights).max()*1.1
#ymin=0 # linear scale
ymin=min([x for x in (y1_PT_0_weights+y1_PT_1_weights+y1_PT_2_weights+y1_PT_3_weights+y1_PT_4_weights+y1_PT_5_weights+y1_PT_6_weights+y1_PT_7_weights+y1_PT_8_weights+y1_PT_9_weights+y1_PT_10_weights+y1_PT_11_weights+y1_PT_12_weights+y1_PT_13_weights+y1_PT_14_weights+y1_PT_15_weights+y1_PT_16_weights) if x])/100. # log scale
plt.gca().set_ylim(ymin,ymax)
# Log/Linear scale for X-axis
plt.gca().set_xscale("linear")
#plt.gca().set_xscale("log",nonposx="clip")
# Log/Linear scale for Y-axis
#plt.gca().set_yscale("linear")
plt.gca().set_yscale("log",nonposy="clip")
# Legend
plt.legend(bbox_to_anchor=(1.05,1), loc=2, borderaxespad=0.)
# Saving the image
plt.savefig('../../HTML/MadAnalysis5job_0/selection_0.png')
plt.savefig('../../PDF/MadAnalysis5job_0/selection_0.png')
plt.savefig('../../DVI/MadAnalysis5job_0/selection_0.eps')
# Running!
if __name__ == '__main__':
selection_0()
| 197.030928 | 2,757 | 0.729934 | 8,989 | 38,224 | 3.022583 | 0.198576 | 0.321237 | 0.478763 | 0.634376 | 0.378837 | 0.378837 | 0.371881 | 0.36901 | 0.367611 | 0.367611 | 0 | 0.566135 | 0.043297 | 38,224 | 193 | 2,758 | 198.051813 | 0.176844 | 0.02522 | 0 | 0.185841 | 0 | 0.00885 | 0.028825 | 0.005373 | 0 | 0 | 0 | 0 | 0 | 1 | 0.00885 | false | 0 | 0.035398 | 0 | 0.044248 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
a7a544e5819c6914af651d079554c0c428c689f4 | 56 | py | Python | financescraper/__init__.py | LukasBudach/yahoo-finance-scraper | 629dad4067ea689a7f535ce913a7c98b460e6511 | [
"MIT"
] | 1 | 2019-12-01T14:02:42.000Z | 2019-12-01T14:02:42.000Z | financescraper/__init__.py | LukasBudach/FinanceScraper | 629dad4067ea689a7f535ce913a7c98b460e6511 | [
"MIT"
] | null | null | null | financescraper/__init__.py | LukasBudach/FinanceScraper | 629dad4067ea689a7f535ce913a7c98b460e6511 | [
"MIT"
] | null | null | null | from .core import scraper
from .core import conversions
| 18.666667 | 29 | 0.821429 | 8 | 56 | 5.75 | 0.625 | 0.347826 | 0.608696 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.142857 | 56 | 2 | 30 | 28 | 0.958333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
ac69b4c42f520795392d0f7fa2262b982f3892b2 | 2,470 | py | Python | test/docstrings/oneline1.py | kylebarron/MagicPython | da6fa0793e2c85d3bf7709ff1d4f65ccf468db11 | [
"MIT"
] | 1,482 | 2015-10-16T21:59:32.000Z | 2022-03-30T11:44:40.000Z | test/docstrings/oneline1.py | kylebarron/MagicPython | da6fa0793e2c85d3bf7709ff1d4f65ccf468db11 | [
"MIT"
] | 226 | 2015-10-15T15:53:44.000Z | 2022-03-25T03:08:27.000Z | test/docstrings/oneline1.py | kylebarron/MagicPython | da6fa0793e2c85d3bf7709ff1d4f65ccf468db11 | [
"MIT"
] | 129 | 2015-10-20T02:41:49.000Z | 2022-03-22T01:44:36.000Z | '''>>> print("""docstring""")'''
async
""">>> print('''docstring''')"""
await
"""\n>>> print('''docstring''')"""
await
""" >>> print('''docstring''')"""
await
""" 1 >>> print('''docstring''')"""
await
''' : punctuation.definition.string.begin.python, source.python, string.quoted.docstring.multi.python
>>> : keyword.control.flow.python, source.python, string.quoted.docstring.multi.python
print("""docstring""") : source.python, string.quoted.docstring.multi.python
''' : punctuation.definition.string.end.python, source.python, string.quoted.docstring.multi.python
async : keyword.control.flow.python, source.python
""" : punctuation.definition.string.begin.python, source.python, string.quoted.docstring.multi.python
>>> : keyword.control.flow.python, source.python, string.quoted.docstring.multi.python
print('''docstring''') : source.python, string.quoted.docstring.multi.python
""" : punctuation.definition.string.end.python, source.python, string.quoted.docstring.multi.python
await : keyword.control.flow.python, source.python
""" : punctuation.definition.string.begin.python, source.python, string.quoted.docstring.multi.python
\n : constant.character.escape.python, source.python, string.quoted.docstring.multi.python
>>> print('''docstring''') : source.python, string.quoted.docstring.multi.python
""" : punctuation.definition.string.end.python, source.python, string.quoted.docstring.multi.python
await : keyword.control.flow.python, source.python
""" : punctuation.definition.string.begin.python, source.python, string.quoted.docstring.multi.python
: source.python, string.quoted.docstring.multi.python
>>> : keyword.control.flow.python, source.python, string.quoted.docstring.multi.python
print('''docstring''') : source.python, string.quoted.docstring.multi.python
""" : punctuation.definition.string.end.python, source.python, string.quoted.docstring.multi.python
await : keyword.control.flow.python, source.python
""" : punctuation.definition.string.begin.python, source.python, string.quoted.docstring.multi.python
1 >>> print('''docstring''') : source.python, string.quoted.docstring.multi.python
""" : punctuation.definition.string.end.python, source.python, string.quoted.docstring.multi.python
await : keyword.control.flow.python, source.python
| 63.333333 | 111 | 0.698381 | 270 | 2,470 | 6.388889 | 0.077778 | 0.173913 | 0.208696 | 0.278261 | 0.926377 | 0.926377 | 0.926377 | 0.926377 | 0.926377 | 0.926377 | 0 | 0.000938 | 0.136437 | 2,470 | 38 | 112 | 65 | 0.807782 | 0 | 0 | 0.866667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
3ba41a3a050e58b20dd49215c826cdf4b8075a08 | 2,364 | py | Python | app/users/operator/courses/forms.py | trinanda/AQUR | 2a415b05ba4c0113b05b6fa14fb454af2bad52ec | [
"MIT"
] | null | null | null | app/users/operator/courses/forms.py | trinanda/AQUR | 2a415b05ba4c0113b05b6fa14fb454af2bad52ec | [
"MIT"
] | null | null | null | app/users/operator/courses/forms.py | trinanda/AQUR | 2a415b05ba4c0113b05b6fa14fb454af2bad52ec | [
"MIT"
] | null | null | null | from flask_wtf import FlaskForm
from flask_babel import _, lazy_gettext as _l
from wtforms.fields import StringField, SubmitField, FileField, FloatField
from wtforms import ValidationError
from wtforms.fields.html5 import IntegerField
from wtforms.validators import InputRequired, Length
from app.models import Course
class AddCourseForm(FlaskForm):
    """Operator form for creating a new Course.

    All fields are required; labels use ``_l`` (lazy gettext) so they are
    translated at render time rather than at import time.
    """
    name = StringField(_l('Course name'), validators=[InputRequired(), Length(1, 100)])
    image = FileField(_l('Course Image'), validators=[InputRequired()])
    # Per-minute charge rates for the two class types.
    private_class_charge_per_minutes = FloatField(_l('Private class charge per minutes'), validators=[InputRequired()])
    regular_class_charge_per_minutes = FloatField(_l('Regular class charge per minutes'), validators=[InputRequired()])
    # Minimum session durations and minimum per-meeting charges.
    min_private_class_duration = FloatField(_l('Min private class duration'), validators=[InputRequired()])
    min_regular_class_duration = FloatField(_l('Min regular class duration'), validators=[InputRequired()])
    min_private_class_charge_per_meet = FloatField(_l('Min private class charge per meet'), validators=[InputRequired()])
    min_regular_class_charge_per_meet = FloatField(_l('Min regular class charge per meet'), validators=[InputRequired()])
    submit = SubmitField(_l('Add course'))

    # WTForms inline validator: invoked automatically for the ``name`` field.
    def validate_name(form, field):
        # Course names must be unique; reject a name that already exists.
        if Course.query.filter_by(name=field.data).first():
            raise ValidationError(_('Course name already registered!'))
class EditCourseForm(FlaskForm):
    """Operator form for editing an existing Course.

    NOTE(review): unlike AddCourseForm there is no ``validate_name`` uniqueness
    check here — presumably so a course can be re-saved under its unchanged
    name; confirm that renaming to another course's name is rejected elsewhere.
    """
    name = StringField(_l('Course name'), validators=[InputRequired(), Length(1, 100)])
    image = FileField(_l('Course Image'), validators=[InputRequired()])
    # Per-minute charge rates for the two class types.
    private_class_charge_per_minutes = FloatField(_l('Private class charge per minutes'), validators=[InputRequired()])
    regular_class_charge_per_minutes = FloatField(_l('Regular class charge per minutes'), validators=[InputRequired()])
    # Minimum session durations and minimum per-meeting charges.
    min_private_class_duration = FloatField(_l('Min private class duration'), validators=[InputRequired()])
    min_regular_class_duration = FloatField(_l('Min regular class duration'), validators=[InputRequired()])
    min_private_class_charge_per_meet = FloatField(_l('Min private class charge per meet'), validators=[InputRequired()])
    min_regular_class_charge_per_meet = FloatField(_l('Min regular class charge per meet'), validators=[InputRequired()])
    submit = SubmitField(_l('Edit course'))
| 63.891892 | 121 | 0.769036 | 280 | 2,364 | 6.228571 | 0.2 | 0.211009 | 0.12844 | 0.09633 | 0.751147 | 0.751147 | 0.751147 | 0.751147 | 0.751147 | 0.751147 | 0 | 0.004317 | 0.11802 | 2,364 | 36 | 122 | 65.666667 | 0.832134 | 0 | 0 | 0.533333 | 0 | 0 | 0.195431 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.033333 | false | 0 | 0.233333 | 0 | 0.933333 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 7 |
ce05781cd24ff36e48a58573a0bca49ef626df24 | 221 | py | Python | bitmovin_api_sdk/encoding/encodings/sidecars/webvtt/__init__.py | jaythecaesarean/bitmovin-api-sdk-python | 48166511fcb9082041c552ace55a9b66cc59b794 | [
"MIT"
] | 11 | 2019-07-03T10:41:16.000Z | 2022-02-25T21:48:06.000Z | bitmovin_api_sdk/encoding/encodings/sidecars/webvtt/__init__.py | jaythecaesarean/bitmovin-api-sdk-python | 48166511fcb9082041c552ace55a9b66cc59b794 | [
"MIT"
] | 8 | 2019-11-23T00:01:25.000Z | 2021-04-29T12:30:31.000Z | bitmovin_api_sdk/encoding/encodings/sidecars/webvtt/__init__.py | jaythecaesarean/bitmovin-api-sdk-python | 48166511fcb9082041c552ace55a9b66cc59b794 | [
"MIT"
] | 13 | 2020-01-02T14:58:18.000Z | 2022-03-26T12:10:30.000Z | from bitmovin_api_sdk.encoding.encodings.sidecars.webvtt.webvtt_api import WebvttApi
from bitmovin_api_sdk.encoding.encodings.sidecars.webvtt.web_vtt_sidecar_file_list_query_params import WebVttSidecarFileListQueryParams
| 73.666667 | 135 | 0.918552 | 29 | 221 | 6.62069 | 0.62069 | 0.125 | 0.15625 | 0.1875 | 0.510417 | 0.510417 | 0.510417 | 0.510417 | 0 | 0 | 0 | 0 | 0.036199 | 221 | 2 | 136 | 110.5 | 0.901408 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
ce347d3b37d125628cb93d8e479a894844ffb2d6 | 64,344 | py | Python | schedules/tests.py | NadavK/djhome | 4f0d936dc475c91e0590bd22deae818cf2650840 | [
"MIT"
] | null | null | null | schedules/tests.py | NadavK/djhome | 4f0d936dc475c91e0590bd22deae818cf2650840 | [
"MIT"
] | 8 | 2020-02-11T23:59:46.000Z | 2022-03-03T21:49:33.000Z | schedules/tests.py | NadavK/djhome | 4f0d936dc475c91e0590bd22deae818cf2650840 | [
"MIT"
] | null | null | null | from django.contrib.auth.models import User
from django.test import TestCase
from dateutil.tz import tzlocal
from rest_framework.test import APIRequestFactory
from rest_framework.test import force_authenticate
from schedules.views import OnetimeScheduleViewSet
from .models import Schedule, OnetimeSchedule
from ios.models import Output
import datetime
class SchedulesTestCase(TestCase):
    def setUp(self):
        """Create a superuser and two Output fixtures for the schedule tests.

        NOTE(review): everything below the bare ``return`` is intentionally
        unreachable — a disabled set of Schedule fixtures kept for the
        (also disabled) ``x_test_*`` methods. Remove the ``return`` to
        re-enable them.
        """
        self.su = User.objects.create_superuser(username='admin', password='test', email='admin@mail.com')
        #self.user1 = User.objects.create_user(username='user1', password='test')
        #self.user2 = User.objects.create_user(username='user2', password='test')
        #self.user3 = User.objects.create_user(username='user3', password='test')
        #self.sched0 = Schedule.objects.create(ph_sn=1, index=0, input_type=Input.INPUT_TYPE_MAGNET, deleted=False, description='in 0')
        #self.obj0.tags.add('Door')
        #self.obj0.save()
        #Schedule.objects.create(sun=True, mon=False, tue=False, wed=False, thu=False, fri=False, but_only_secular_fri=False, sha=False, but_not_sukkot=False, time=datetime.time(1, 1, 0)).save()
        # Two outputs on different physical serial numbers (ph_sn) so overlap
        # tests can distinguish per-output schedules.
        self.output = Output.objects.create(ph_sn=1, index=3, output_type=Output.OUTPUT_TYPE_BLIND_DOWN, deleted=False, description='out 3')
        self.output.save()
        self.output2 = Output.objects.create(ph_sn=2, index=3, output_type=Output.OUTPUT_TYPE_BLIND_DOWN, deleted=False, description='out 2')
        self.output2.save()
        # Deliberate early exit: the weekly-schedule fixtures below are disabled.
        return
        self.sun = self.create_schedule(sun=True, mon=False, tue=False, wed=False, thu=False, fri=False, but_only_secular_fri=False, sha=False, but_not_sukkot=False, time='1:01', turn_on=True, output=self.output)
        self.mon = self.create_schedule(sun=False, mon=True, tue=False, wed=False, thu=False, fri=False, but_only_secular_fri=False, sha=False, but_not_sukkot=False, time='1:02', turn_on=True, output=self.output)
        self.tue = self.create_schedule(sun=False, mon=False, tue=True, wed=False, thu=False, fri=False, but_only_secular_fri=False, sha=False, but_not_sukkot=False, time='1:03', turn_on=True, output=self.output)
        self.wed = self.create_schedule(sun=False, mon=False, tue=False, wed=True, thu=False, fri=False, but_only_secular_fri=False, sha=False, but_not_sukkot=False, time='1:04', turn_on=True, output=self.output)
        self.thu = self.create_schedule(sun=False, mon=False, tue=False, wed=False, thu=True, fri=False, but_only_secular_fri=False, sha=False, but_not_sukkot=False, time='1:05', turn_on=True, output=self.output)
        self.fri = self.create_schedule(sun=False, mon=False, tue=False, wed=False, thu=False, fri=True, but_only_secular_fri=False, sha=False, but_not_sukkot=False, time='1:06', turn_on=True, output=self.output)
        self.fri_no_sukkot = self.create_schedule(sun=False, mon=False, tue=False, wed=False, thu=False, fri=True, but_only_secular_fri=False, sha=False, but_not_sukkot=True, time='1:07', turn_on=True, output=self.output)
        self.sec_fri = self.create_schedule(sun=False, mon=False, tue=False, wed=False, thu=False, fri=True, but_only_secular_fri=True, sha=False, but_not_sukkot=False, time='1:08', turn_on=True, output=self.output)
        self.sha = self.create_schedule(sun=False, mon=False, tue=False, wed=False, thu=False, fri=False, but_only_secular_fri=False, sha=True, but_not_sukkot=False, time='1:09', turn_on=True, output=self.output)
        self.sha_no_sukkot = self.create_schedule(sun=False, mon=False, tue=False, wed=False, thu=False, fri=False, but_only_secular_fri=False, sha=True, but_not_sukkot=True, time='1:10', turn_on=True, output=self.output)
def x_test_none(self):
none = self.create_schedule(sun=False, mon=False, tue=False, wed=False, thu=False, fri=False, but_only_secular_fri=False, sha=False, but_not_sukkot=False, time='1:11', turn_on=True, output=self.output)
self.assertEqual(none.next_datetime(), None)
none = self.create_schedule(sun=False, mon=False, tue=False, wed=False, thu=False, fri=False, but_only_secular_fri=True, sha=False, but_not_sukkot=False, time='1:12', turn_on=True, output=self.output)
self.assertEqual(none.next_datetime(), None)
none = self.create_schedule(sun=False, mon=False, tue=False, wed=False, thu=False, fri=False, but_only_secular_fri=False, sha=False, but_not_sukkot=True, time='1:13', turn_on=True, output=self.output)
self.assertEqual(none.next_datetime(), None)
none = self.create_schedule(sun=False, mon=False, tue=False, wed=False, thu=False, fri=False, but_only_secular_fri=True, sha=False, but_not_sukkot=True, time='1:14', turn_on=True, output=self.output)
self.assertEqual(none.next_datetime(), None)
def create_schedule(self, *args, **kwargs):
obj = Schedule(*args, **kwargs)
obj.save(info_only=True)
return obj
def test_onetime_schedule_simple(self):
one = OnetimeSchedule(date=datetime.date(2017, 9, 19), start=datetime.time(0, 0, tzinfo=tzlocal()), end=datetime.time(23, 59, tzinfo=tzlocal()), segments='111111111111111111111111111111111111100001111111111111111111111111111111111111111111111111111111', output=self.output, active=True, deleted=False)
print('===========================================================================================================')
print('2017-09-18 0:00')
self.assertEqual(one._prepare_next(now=datetime.datetime(2017, 9, 18, 0, 0, tzinfo=tzlocal()), for_next_time=False, info_only=True), (datetime.datetime(2017, 9, 19, 0, 0, tzinfo=tzlocal()), True))
print('2017-09-18 0:00 next')
self.assertEqual(one._prepare_next(now=datetime.datetime(2017, 9, 18, 0, 0, tzinfo=tzlocal()), for_next_time=True, info_only=True), (datetime.datetime(2017, 9, 19, 0, 0, tzinfo=tzlocal()), True))
print('2017-09-19 0:00')
self.assertEqual(one._prepare_next(now=datetime.datetime(2017, 9, 19, 0, 0, tzinfo=tzlocal()), for_next_time=False, info_only=True), (datetime.datetime(2017, 9, 19, 0, 0, tzinfo=tzlocal()), True))
print('2017-09-19 0:00 next')
self.assertEqual(one._prepare_next(now=datetime.datetime(2017, 9, 19, 0, 0, tzinfo=tzlocal()), for_next_time=True, info_only=True), (datetime.datetime(2017, 9, 19, 9, 15, tzinfo=tzlocal()), False))
print('2017-09-19 9:15 next')
self.assertEqual(one._prepare_next(now=datetime.datetime(2017, 9, 19, 9, 15, tzinfo=tzlocal()), for_next_time=True, info_only=True), (datetime.datetime(2017, 9, 19, 10, 15, tzinfo=tzlocal()), True))
print('2017-09-19 10:15 next')
self.assertEqual(one._prepare_next(now=datetime.datetime(2017, 9, 19, 10, 15, tzinfo=tzlocal()), for_next_time=True, info_only=True), None)
one = OnetimeSchedule(date=datetime.date(2017, 9, 19), start=datetime.time(0, 0, tzinfo=tzlocal()), end=datetime.time(23, 59, tzinfo=tzlocal()), segments='000000000000000000000000000000000000011110000000000000000000000000000000000000000000000000000000', output=self.output, active=True, deleted=False)
print('===========================================================================================================')
print('2017-09-18 0:00')
self.assertEqual(one._prepare_next(now=datetime.datetime(2017, 9, 18, 0, 0, tzinfo=tzlocal()), for_next_time=False, info_only=True), (datetime.datetime(2017, 9, 19, 0, 0, tzinfo=tzlocal()), False))
print('2017-09-18 0:00 next')
self.assertEqual(one._prepare_next(now=datetime.datetime(2017, 9, 18, 0, 0, tzinfo=tzlocal()), for_next_time=True, info_only=True), (datetime.datetime(2017, 9, 19, 0, 0, tzinfo=tzlocal()), False))
print('2017-09-19 0:00')
self.assertEqual(one._prepare_next(now=datetime.datetime(2017, 9, 19, 0, 0, tzinfo=tzlocal()), for_next_time=False, info_only=True), (datetime.datetime(2017, 9, 19, 0, 0, tzinfo=tzlocal()), False))
print('2017-09-19 0:00 next')
self.assertEqual(one._prepare_next(now=datetime.datetime(2017, 9, 19, 0, 0, tzinfo=tzlocal()), for_next_time=True, info_only=True), (datetime.datetime(2017, 9, 19, 9, 15, tzinfo=tzlocal()), True))
print('2017-09-19 9:15 next')
self.assertEqual(one._prepare_next(now=datetime.datetime(2017, 9, 19, 9, 15, tzinfo=tzlocal()), for_next_time=True, info_only=True), (datetime.datetime(2017, 9, 19, 10, 15, tzinfo=tzlocal()), False))
print('2017-09-19 10:15 next')
self.assertEqual(one._prepare_next(now=datetime.datetime(2017, 9, 19, 10, 15, tzinfo=tzlocal()), for_next_time=True, info_only=True), None)
    def x_test_onetime_schedule_startend_times(self):
        """Check _prepare_next() honours the schedule's start/end time window
        (2:14-20:44) when the segment mask is empty (disabled: x_ prefix).

        NOTE(review): here the expected values are bare datetimes, while
        test_onetime_schedule_simple expects (datetime, state) tuples — confirm
        which return shape _prepare_next() actually has.
        """
        one = OnetimeSchedule(date=datetime.date(2017, 9, 19), start=datetime.time(2, 14, tzinfo=tzlocal()), end=datetime.time(20, 44, tzinfo=tzlocal()), segments='', output=self.output, active=True, deleted=False)
        print('===========================================================================================================')
        # Before the scheduled day: both current and next resolve to the window start.
        print('2017-09-18 0:00')
        self.assertEqual(one._prepare_next(now=datetime.datetime(2017, 9, 18, 0, 0, tzinfo=tzlocal()), for_next_time=False, info_only=True), datetime.datetime(2017, 9, 19, 2, 14, tzinfo=tzlocal()))
        print('2017-09-18 0:00 next')
        self.assertEqual(one._prepare_next(now=datetime.datetime(2017, 9, 18, 0, 0, tzinfo=tzlocal()), for_next_time=True, info_only=True), datetime.datetime(2017, 9, 19, 2, 14, tzinfo=tzlocal()))
        print('2017-09-19 0:00 next')
        self.assertEqual(one._prepare_next(now=datetime.datetime(2017, 9, 19, 0, 0, tzinfo=tzlocal()), for_next_time=True, info_only=True), datetime.datetime(2017, 9, 19, 2, 14, tzinfo=tzlocal()))
        print('2017-09-19 2:00')
        self.assertEqual(one._prepare_next(now=datetime.datetime(2017, 9, 19, 2, 0, tzinfo=tzlocal()), for_next_time=False, info_only=True), datetime.datetime(2017, 9, 19, 2, 14, tzinfo=tzlocal()))
        # Once inside the window, "next" advances along 15-minute segment boundaries.
        print('2017-09-19 2:00 next')
        self.assertEqual(one._prepare_next(now=datetime.datetime(2017, 9, 19, 2, 0, tzinfo=tzlocal()), for_next_time=True, info_only=True), datetime.datetime(2017, 9, 19, 2, 15, tzinfo=tzlocal()))
        print('2017-09-19 2:15 next')
        self.assertEqual(one._prepare_next(now=datetime.datetime(2017, 9, 19, 2, 15, tzinfo=tzlocal()), for_next_time=True, info_only=True), datetime.datetime(2017, 9, 19, 2, 30, tzinfo=tzlocal()))
        # NOTE(review): this is the only call with info_only=False — presumably
        # deliberate, to exercise the side-effecting path once; confirm.
        print('2017-09-19 20:30')
        self.assertEqual(one._prepare_next(now=datetime.datetime(2017, 9, 19, 20, 30, tzinfo=tzlocal()), for_next_time=False, info_only=False), datetime.datetime(2017, 9, 19, 20, 30, tzinfo=tzlocal()))
        # Past the window end (20:44) there is no "next" occurrence.
        print('2017-09-19 20:30 next')
        self.assertEqual(one._prepare_next(now=datetime.datetime(2017, 9, 19, 20, 30, tzinfo=tzlocal()), for_next_time=True, info_only=True), None)
        print('2017-09-20 1:00')
        self.assertEqual(one._prepare_next(now=datetime.datetime(2017, 9, 20, 1, 0, tzinfo=tzlocal()), for_next_time=False, info_only=True), None)
        print('===========================================================================================================')
        #OneSched.save(info_only=True)
def X_test_onetimeschedule_and_yearlyschedule_overlap(self):
one = OnetimeSchedule(date=datetime.date(2017, 9, 19), start=datetime.time(0, 50, tzinfo=tzlocal()), end=datetime.time(0, 59, tzinfo=tzlocal()), segments='111111111011111111101111111110111111111011111111101111111110111111111011111111101111111110111110', output=self.output, active=True, deleted=False)
one.save(info_only=True)
two = OnetimeSchedule(date=datetime.date(2017, 9, 19), start=datetime.time(0, 55, tzinfo=tzlocal()), end=datetime.time(1, 1, tzinfo=tzlocal()), segments='111111111011111111101111111110111111111011111111101111111110111111111011111111101111111110111110', output=self.output2, active=True, deleted=False)
two.save(info_only=True)
self.assertFalse(OnetimeSchedule.objects.is_onetime_active_for_datetime(self.output, datetime.datetime(2017, 9, 19, 0, 49, tzinfo=tzlocal())))
self.assertTrue(OnetimeSchedule.objects.is_onetime_active_for_datetime(self.output, datetime.datetime(2017, 9, 19, 0, 50, tzinfo=tzlocal())))
self.assertTrue(OnetimeSchedule.objects.is_onetime_active_for_datetime(self.output, datetime.datetime(2017, 9, 19, 0, 51, tzinfo=tzlocal())))
self.assertFalse(OnetimeSchedule.objects.is_onetime_active_for_datetime(self.output, datetime.datetime(2017, 9, 19, 0, 59, tzinfo=tzlocal())))
self.assertFalse(OnetimeSchedule.objects.is_onetime_active_for_datetime(self.output, datetime.datetime(2017, 9, 19, 1, 0, tzinfo=tzlocal())))
self.assertFalse(OnetimeSchedule.objects.is_onetime_active_for_datetime(self.output2, datetime.datetime(2017, 9, 19, 0, 49, tzinfo=tzlocal())))
self.assertFalse(OnetimeSchedule.objects.is_onetime_active_for_datetime(self.output2, datetime.datetime(2017, 9, 19, 0, 50, tzinfo=tzlocal())))
self.assertFalse(OnetimeSchedule.objects.is_onetime_active_for_datetime(self.output2, datetime.datetime(2017, 9, 19, 0, 51, tzinfo=tzlocal())))
self.assertTrue(OnetimeSchedule.objects.is_onetime_active_for_datetime(self.output2, datetime.datetime(2017, 9, 19, 0, 59, tzinfo=tzlocal())))
self.assertTrue(OnetimeSchedule.objects.is_onetime_active_for_datetime(self.output2, datetime.datetime(2017, 9, 19, 1, 0, tzinfo=tzlocal())))
self.assertFalse(OnetimeSchedule.objects.is_onetime_active_for_datetime(self.output2, datetime.datetime(2017, 9, 19, 1, 1, tzinfo=tzlocal())))
self.assertFalse(OnetimeSchedule.objects.is_onetime_active_for_datetime(self.output2, datetime.datetime(2017, 9, 19, 1, 2, tzinfo=tzlocal())))
self.assertFalse(OnetimeSchedule.objects.is_onetime_active_for_datetime(self.output2, datetime.datetime(2017, 9, 20, 0, 49, tzinfo=tzlocal())))
self.assertFalse(OnetimeSchedule.objects.is_onetime_active_for_datetime(self.output2, datetime.datetime(2017, 9, 20, 0, 50, tzinfo=tzlocal())))
self.assertFalse(OnetimeSchedule.objects.is_onetime_active_for_datetime(self.output2, datetime.datetime(2017, 9, 20, 0, 51, tzinfo=tzlocal())))
self.assertFalse(OnetimeSchedule.objects.is_onetime_active_for_datetime(self.output2, datetime.datetime(2017, 9, 20, 0, 59, tzinfo=tzlocal())))
self.assertFalse(OnetimeSchedule.objects.is_onetime_active_for_datetime(self.output2, datetime.datetime(2017, 9, 20, 1, 0, tzinfo=tzlocal())))
self.assertFalse(OnetimeSchedule.objects.is_onetime_active_for_datetime(self.output2, datetime.datetime(2017, 9, 20, 1, 1, tzinfo=tzlocal())))
self.assertFalse(OnetimeSchedule.objects.is_onetime_active_for_datetime(self.output2, datetime.datetime(2017, 9, 20, 1, 2, tzinfo=tzlocal())))
def X_test_api(self):
one = OnetimeSchedule(start=datetime.datetime(2017, 9, 19, 0, 50, tzinfo=tzlocal()), end=datetime.datetime(2017, 9, 19, 0, 59, tzinfo=tzlocal()), segments='111111111011111111101111111110111111111011111111101111111110111111111011111111101111111110111110', output=self.output, active=True, deleted=False)
one.save(info_only=True)
# Using the standard RequestFactory API to create a form POST request
factory = APIRequestFactory()
view = OnetimeScheduleViewSet.as_view({'get': 'list',
#'get': 'retrieve',
'post': 'create',
'put': 'update',
'patch': 'partial_update',
'delete': 'destroy'})
#request = factory.get('/api/onetimeschedules/?output='+str(self.output.pk))
request = factory.get('onetimeschedules', {'output': self.output.pk})
force_authenticate(request, user=self.su)
response = view(request)
response.render()
print(response)
import json
json = json.loads(response.content)
json[0]['segments'] = '011111111001111111101111111110111111111011111111101111111110111111111011111111101111111110111101'
print(json)
print(json[0])
print(json[0]['start'])
#request = factory.post('onetimeschedules', data=json[0])
#from django.test.client import encode_multipart
#content = encode_multipart('BoUnDaRyStRiNg', json[0])
#content_type = 'multipart/form-data; boundary=BoUnDaRyStRiNg'
#request = factory.put('/notes/547/', content, content_type=content_type)
#request = factory.put('onetimeschedules/' + str(json[0]['pk']) + '/', content, content_type=content_type)
#request = factory.put('onetimeschedules', content, content_type=content_type)
request = factory.put('onetimeschedules', data=json[0])
force_authenticate(request, user=self.su)
response = view(request, pk=json[0]['pk'])
response.render()
print(response)
request = factory.get('onetimeschedules', {'output': self.output.pk})
force_authenticate(request, user=self.su)
response = view(request)
response.render()
print(response)
def x_test_schedules(self):
    """Validate ``Schedule.next_datetime`` across the 2017/2018 holiday calendar.

    Disabled (lowercase ``x`` prefix). Each ``validate_schedule`` call takes a
    "now" moment and a mapping from every schedule fixture (``self.sun`` ...
    ``self.sha_no_sukkot``) to the datetime that schedule is expected to fire
    next. The minute component (1..10) encodes which schedule the expected
    value belongs to, making mismatched assertions easy to read in output.
    Pairs of calls at 1:00 vs 2:00 on the same day check behaviour just
    before and just after a schedule's own firing time.

    NOTE(review): expected values around Rosh Hashana / Sukkot / Pesach /
    Shavuot appear hand-derived from the Hebrew calendar — confirm against
    the holiday logic if these fixtures are ever regenerated.
    """
    self.validate_schedule(datetime.datetime (2017, 9, 19, 1, 0, tzinfo=tzlocal()), { #Tue, Day before Erev Rosh Hashana
        self.sun: datetime.datetime (2017, 9, 24, 1, 1, tzinfo=tzlocal()),
        self.mon: datetime.datetime (2017, 9, 25, 1, 2, tzinfo=tzlocal()),
        self.tue: datetime.datetime (2017, 9, 19, 1, 3, tzinfo=tzlocal()),
        self.wed: datetime.datetime (2017, 9, 27, 1, 4, tzinfo=tzlocal()),
        self.thu: datetime.datetime (2017, 9, 28, 1, 5, tzinfo=tzlocal()),
        self.fri: datetime.datetime (2017, 9, 20, 1, 6, tzinfo=tzlocal()),
        self.fri_no_sukkot: datetime.datetime(2017, 9, 20, 1, 7, tzinfo=tzlocal()),
        self.sec_fri: datetime.datetime (2017, 9, 20, 1, 8, tzinfo=tzlocal()),
        self.sha: datetime.datetime (2017, 9, 21, 1, 9, tzinfo=tzlocal()),
        self.sha_no_sukkot: datetime.datetime(2017, 9, 21, 1, 10, tzinfo=tzlocal())})
    self.validate_schedule(datetime.datetime (2017, 9, 20, 1, 0, tzinfo=tzlocal()), { #Erev Rosh Hashana
        self.sun: datetime.datetime (2017, 9, 24, 1, 1, tzinfo=tzlocal()),
        self.mon: datetime.datetime (2017, 9, 25, 1, 2, tzinfo=tzlocal()),
        self.tue: datetime.datetime (2017, 9, 26, 1, 3, tzinfo=tzlocal()),
        self.wed: datetime.datetime (2017, 9, 27, 1, 4, tzinfo=tzlocal()),
        self.thu: datetime.datetime (2017, 9, 28, 1, 5, tzinfo=tzlocal()),
        self.fri: datetime.datetime (2017, 9, 20, 1, 6, tzinfo=tzlocal()),
        self.fri_no_sukkot: datetime.datetime(2017, 9, 20, 1, 7, tzinfo=tzlocal()),
        self.sec_fri: datetime.datetime (2017, 9, 20, 1, 8, tzinfo=tzlocal()),
        self.sha: datetime.datetime (2017, 9, 21, 1, 9, tzinfo=tzlocal()),
        self.sha_no_sukkot: datetime.datetime(2017, 9, 21, 1, 10, tzinfo=tzlocal())})
    self.validate_schedule(datetime.datetime (2017, 9, 21, 1, 0, tzinfo=tzlocal()), { #Rosh Hashana 1 - 1:00
        self.sun: datetime.datetime (2017, 9, 24, 1, 1, tzinfo=tzlocal()),
        self.mon: datetime.datetime (2017, 9, 25, 1, 2, tzinfo=tzlocal()),
        self.tue: datetime.datetime (2017, 9, 26, 1, 3, tzinfo=tzlocal()),
        self.wed: datetime.datetime (2017, 9, 27, 1, 4, tzinfo=tzlocal()),
        self.thu: datetime.datetime (2017, 9, 28, 1, 5, tzinfo=tzlocal()),
        self.fri: datetime.datetime (2017, 9, 21, 1, 6, tzinfo=tzlocal()),
        self.fri_no_sukkot: datetime.datetime(2017, 9, 21, 1, 7, tzinfo=tzlocal()),
        self.sec_fri: datetime.datetime (2017, 9, 29, 1, 8, tzinfo=tzlocal()),
        self.sha: datetime.datetime (2017, 9, 21, 1, 9, tzinfo=tzlocal()),
        self.sha_no_sukkot: datetime.datetime(2017, 9, 21, 1, 10, tzinfo=tzlocal())})
    self.validate_schedule(datetime.datetime (2017, 9, 21, 2, 0, tzinfo=tzlocal()), { #Rosh Hashana 1 - 2:00
        self.sun: datetime.datetime (2017, 9, 24, 1, 1, tzinfo=tzlocal()),
        self.mon: datetime.datetime (2017, 9, 25, 1, 2, tzinfo=tzlocal()),
        self.tue: datetime.datetime (2017, 9, 26, 1, 3, tzinfo=tzlocal()),
        self.wed: datetime.datetime (2017, 9, 27, 1, 4, tzinfo=tzlocal()),
        self.thu: datetime.datetime (2017, 9, 28, 1, 5, tzinfo=tzlocal()),
        self.fri: datetime.datetime (2017, 9, 22, 1, 6, tzinfo=tzlocal()),
        self.fri_no_sukkot: datetime.datetime(2017, 9, 22, 1, 7, tzinfo=tzlocal()),
        self.sec_fri: datetime.datetime (2017, 9, 29, 1, 8, tzinfo=tzlocal()),
        self.sha: datetime.datetime (2017, 9, 22, 1, 9, tzinfo=tzlocal()),
        self.sha_no_sukkot: datetime.datetime(2017, 9, 22, 1, 10, tzinfo=tzlocal())})
    self.validate_schedule(datetime.datetime (2017, 9, 22, 1, 0, tzinfo=tzlocal()), { #Rosh Hashana 2 - 1:00
        self.sun: datetime.datetime (2017, 9, 24, 1, 1, tzinfo=tzlocal()),
        self.mon: datetime.datetime (2017, 9, 25, 1, 2, tzinfo=tzlocal()),
        self.tue: datetime.datetime (2017, 9, 26, 1, 3, tzinfo=tzlocal()),
        self.wed: datetime.datetime (2017, 9, 27, 1, 4, tzinfo=tzlocal()),
        self.thu: datetime.datetime (2017, 9, 28, 1, 5, tzinfo=tzlocal()),
        self.fri: datetime.datetime (2017, 9, 22, 1, 6, tzinfo=tzlocal()),
        self.fri_no_sukkot: datetime.datetime(2017, 9, 22, 1, 7, tzinfo=tzlocal()),
        self.sec_fri: datetime.datetime (2017, 9, 29, 1, 8, tzinfo=tzlocal()),
        self.sha: datetime.datetime (2017, 9, 22, 1, 9, tzinfo=tzlocal()),
        self.sha_no_sukkot: datetime.datetime(2017, 9, 22, 1, 10, tzinfo=tzlocal())})
    self.validate_schedule(datetime.datetime (2017, 9, 22, 2, 0, tzinfo=tzlocal()), { #Rosh Hashana 2 - 2:00
        self.sun: datetime.datetime (2017, 9, 24, 1, 1, tzinfo=tzlocal()),
        self.mon: datetime.datetime (2017, 9, 25, 1, 2, tzinfo=tzlocal()),
        self.tue: datetime.datetime (2017, 9, 26, 1, 3, tzinfo=tzlocal()),
        self.wed: datetime.datetime (2017, 9, 27, 1, 4, tzinfo=tzlocal()),
        self.thu: datetime.datetime (2017, 9, 28, 1, 5, tzinfo=tzlocal()),
        self.fri: datetime.datetime (2017, 9, 29, 1, 6, tzinfo=tzlocal()),
        self.fri_no_sukkot: datetime.datetime(2017, 9, 29, 1, 7, tzinfo=tzlocal()),
        self.sec_fri: datetime.datetime (2017, 9, 29, 1, 8, tzinfo=tzlocal()),
        self.sha: datetime.datetime (2017, 9, 23, 1, 9, tzinfo=tzlocal()),
        self.sha_no_sukkot: datetime.datetime(2017, 9, 23, 1, 10, tzinfo=tzlocal())})
    self.validate_schedule(datetime.datetime (2017, 9, 23, 1, 0, tzinfo=tzlocal()), { #Sha - Day after Rosh Hashana 2 - 1:00
        self.sun: datetime.datetime (2017, 9, 24, 1, 1, tzinfo=tzlocal()),
        self.mon: datetime.datetime (2017, 9, 25, 1, 2, tzinfo=tzlocal()),
        self.tue: datetime.datetime (2017, 9, 26, 1, 3, tzinfo=tzlocal()),
        self.wed: datetime.datetime (2017, 9, 27, 1, 4, tzinfo=tzlocal()),
        self.thu: datetime.datetime (2017, 9, 28, 1, 5, tzinfo=tzlocal()),
        self.fri: datetime.datetime (2017, 9, 29, 1, 6, tzinfo=tzlocal()),
        self.fri_no_sukkot: datetime.datetime(2017, 9, 29, 1, 7, tzinfo=tzlocal()),
        self.sec_fri: datetime.datetime (2017, 9, 29, 1, 8, tzinfo=tzlocal()),
        self.sha: datetime.datetime (2017, 9, 23, 1, 9, tzinfo=tzlocal()),
        self.sha_no_sukkot: datetime.datetime(2017, 9, 23, 1, 10, tzinfo=tzlocal())})
    self.validate_schedule(datetime.datetime (2017, 9, 23, 2, 0, tzinfo=tzlocal()), { #Sha - Day after Rosh Hashana 2 - 2:00
        self.sun: datetime.datetime (2017, 9, 24, 1, 1, tzinfo=tzlocal()),
        self.mon: datetime.datetime (2017, 9, 25, 1, 2, tzinfo=tzlocal()),
        self.tue: datetime.datetime (2017, 9, 26, 1, 3, tzinfo=tzlocal()),
        self.wed: datetime.datetime (2017, 9, 27, 1, 4, tzinfo=tzlocal()),
        self.thu: datetime.datetime (2017, 9, 28, 1, 5, tzinfo=tzlocal()),
        self.fri: datetime.datetime (2017, 9, 29, 1, 6, tzinfo=tzlocal()),
        self.fri_no_sukkot: datetime.datetime(2017, 9, 29, 1, 7, tzinfo=tzlocal()),
        self.sec_fri: datetime.datetime (2017, 9, 29, 1, 8, tzinfo=tzlocal()),
        self.sha: datetime.datetime (2017, 9, 30, 1, 9, tzinfo=tzlocal()),
        self.sha_no_sukkot: datetime.datetime(2017, 9, 30, 1, 10, tzinfo=tzlocal())})
    self.validate_schedule(datetime.datetime (2017, 9, 24, 1, 0, tzinfo=tzlocal()), { #Sun - 1:00
        self.sun: datetime.datetime (2017, 9, 24, 1, 1, tzinfo=tzlocal()),
        self.mon: datetime.datetime (2017, 9, 25, 1, 2, tzinfo=tzlocal()),
        self.tue: datetime.datetime (2017, 9, 26, 1, 3, tzinfo=tzlocal()),
        self.wed: datetime.datetime (2017, 9, 27, 1, 4, tzinfo=tzlocal()),
        self.thu: datetime.datetime (2017, 9, 28, 1, 5, tzinfo=tzlocal()),
        self.fri: datetime.datetime (2017, 9, 29, 1, 6, tzinfo=tzlocal()),
        self.fri_no_sukkot: datetime.datetime(2017, 9, 29, 1, 7, tzinfo=tzlocal()),
        self.sec_fri: datetime.datetime (2017, 9, 29, 1, 8, tzinfo=tzlocal()),
        self.sha: datetime.datetime (2017, 9, 30, 1, 9, tzinfo=tzlocal()),
        self.sha_no_sukkot: datetime.datetime(2017, 9, 30, 1, 10, tzinfo=tzlocal())})
    self.validate_schedule(datetime.datetime (2017, 9, 24, 2, 0, tzinfo=tzlocal()), { #Sun - 2:00
        self.sun: datetime.datetime (2017, 10, 1, 1, 1, tzinfo=tzlocal()),
        self.mon: datetime.datetime (2017, 9, 25, 1, 2, tzinfo=tzlocal()),
        self.tue: datetime.datetime (2017, 9, 26, 1, 3, tzinfo=tzlocal()),
        self.wed: datetime.datetime (2017, 9, 27, 1, 4, tzinfo=tzlocal()),
        self.thu: datetime.datetime (2017, 9, 28, 1, 5, tzinfo=tzlocal()),
        self.fri: datetime.datetime (2017, 9, 29, 1, 6, tzinfo=tzlocal()),
        self.fri_no_sukkot: datetime.datetime(2017, 9, 29, 1, 7, tzinfo=tzlocal()),
        self.sec_fri: datetime.datetime (2017, 9, 29, 1, 8, tzinfo=tzlocal()),
        self.sha: datetime.datetime (2017, 9, 30, 1, 9, tzinfo=tzlocal()),
        self.sha_no_sukkot: datetime.datetime(2017, 9, 30, 1, 10, tzinfo=tzlocal())})
    self.validate_schedule(datetime.datetime (2017, 9, 28, 1, 0, tzinfo=tzlocal()), { #Thur - Day before Erev Sha/YomKippur - 1:00
        self.sun: datetime.datetime (2017, 10, 1, 1, 1, tzinfo=tzlocal()),
        self.mon: datetime.datetime (2017, 10, 2, 1, 2, tzinfo=tzlocal()),
        self.tue: datetime.datetime (2017, 10, 3, 1, 3, tzinfo=tzlocal()),
        self.wed: datetime.datetime (2017, 10, 18, 1, 4, tzinfo=tzlocal()), #delayed due to Erev Sukkot
        self.thu: datetime.datetime (2017, 9, 28, 1, 5, tzinfo=tzlocal()),
        self.fri: datetime.datetime (2017, 9, 29, 1, 6, tzinfo=tzlocal()),
        self.fri_no_sukkot: datetime.datetime(2017, 9, 29, 1, 7, tzinfo=tzlocal()),
        self.sec_fri: datetime.datetime (2017, 9, 29, 1, 8, tzinfo=tzlocal()),
        self.sha: datetime.datetime (2017, 9, 30, 1, 9, tzinfo=tzlocal()),
        self.sha_no_sukkot: datetime.datetime(2017, 9, 30, 1, 10, tzinfo=tzlocal())})
    self.validate_schedule(datetime.datetime (2017, 9, 28, 2, 0, tzinfo=tzlocal()), { #Thur - Day before Erev Sha/YomKippur - 2:00
        self.sun: datetime.datetime (2017, 10, 1, 1, 1, tzinfo=tzlocal()),
        self.mon: datetime.datetime (2017, 10, 2, 1, 2, tzinfo=tzlocal()),
        self.tue: datetime.datetime (2017, 10, 3, 1, 3, tzinfo=tzlocal()),
        self.wed: datetime.datetime (2017, 10, 18, 1, 4, tzinfo=tzlocal()), #delayed due to Erev Sukkot
        self.thu: datetime.datetime (2017, 10, 19, 1, 5, tzinfo=tzlocal()), #delayed due to Sukkot
        self.fri: datetime.datetime (2017, 9, 29, 1, 6, tzinfo=tzlocal()),
        self.fri_no_sukkot: datetime.datetime(2017, 9, 29, 1, 7, tzinfo=tzlocal()),
        self.sec_fri: datetime.datetime (2017, 9, 29, 1, 8, tzinfo=tzlocal()),
        self.sha: datetime.datetime (2017, 9, 30, 1, 9, tzinfo=tzlocal()),
        self.sha_no_sukkot: datetime.datetime(2017, 9, 30, 1, 10, tzinfo=tzlocal())})
    self.validate_schedule(datetime.datetime (2017, 9, 29, 1, 0, tzinfo=tzlocal()), { #Fri - Erev Sha/YomKippur - 1:00
        self.sun: datetime.datetime (2017, 10, 1, 1, 1, tzinfo=tzlocal()),
        self.mon: datetime.datetime (2017, 10, 2, 1, 2, tzinfo=tzlocal()),
        self.tue: datetime.datetime (2017, 10, 3, 1, 3, tzinfo=tzlocal()),
        self.wed: datetime.datetime (2017, 10, 18, 1, 4, tzinfo=tzlocal()), #delayed due to Erev Sukkot
        self.thu: datetime.datetime (2017, 10, 19, 1, 5, tzinfo=tzlocal()),
        self.fri: datetime.datetime (2017, 9, 29, 1, 6, tzinfo=tzlocal()),
        self.fri_no_sukkot: datetime.datetime(2017, 9, 29, 1, 7, tzinfo=tzlocal()),
        self.sec_fri: datetime.datetime (2017, 9, 29, 1, 8, tzinfo=tzlocal()),
        self.sha: datetime.datetime (2017, 9, 30, 1, 9, tzinfo=tzlocal()),
        self.sha_no_sukkot: datetime.datetime(2017, 9, 30, 1, 10, tzinfo=tzlocal())})
    self.validate_schedule(datetime.datetime (2017, 9, 29, 2, 0, tzinfo=tzlocal()), { #Fri - Erev Sha/YomKippur - 2:00
        self.sun: datetime.datetime (2017, 10, 1, 1, 1, tzinfo=tzlocal()),
        self.mon: datetime.datetime (2017, 10, 2, 1, 2, tzinfo=tzlocal()),
        self.tue: datetime.datetime (2017, 10, 3, 1, 3, tzinfo=tzlocal()),
        self.wed: datetime.datetime (2017, 10, 18, 1, 4, tzinfo=tzlocal()), #delayed due to Erev Sukkot
        self.thu: datetime.datetime (2017, 10, 19, 1, 5, tzinfo=tzlocal()),
        self.fri: datetime.datetime (2017, 10, 4, 1, 6, tzinfo=tzlocal()),
        self.fri_no_sukkot: datetime.datetime(2017, 10, 11, 1, 7, tzinfo=tzlocal()),
        self.sec_fri: datetime.datetime (2017, 10, 4, 1, 8, tzinfo=tzlocal()),
        self.sha: datetime.datetime (2017, 9, 30, 1, 9, tzinfo=tzlocal()),
        self.sha_no_sukkot: datetime.datetime(2017, 9, 30, 1, 10, tzinfo=tzlocal())})
    self.validate_schedule(datetime.datetime (2017, 9, 30, 1, 0, tzinfo=tzlocal()), { #Sha/YomKippur - 1:00
        self.sun: datetime.datetime (2017, 10, 1, 1, 1, tzinfo=tzlocal()),
        self.mon: datetime.datetime (2017, 10, 2, 1, 2, tzinfo=tzlocal()),
        self.tue: datetime.datetime (2017, 10, 3, 1, 3, tzinfo=tzlocal()),
        self.wed: datetime.datetime (2017, 10, 18, 1, 4, tzinfo=tzlocal()), #delayed due to Erev Sukkot
        self.thu: datetime.datetime (2017, 10, 19, 1, 5, tzinfo=tzlocal()),
        self.fri: datetime.datetime (2017, 10, 4, 1, 6, tzinfo=tzlocal()),
        self.fri_no_sukkot: datetime.datetime(2017, 10, 11, 1, 7, tzinfo=tzlocal()),
        self.sec_fri: datetime.datetime (2017, 10, 4, 1, 8, tzinfo=tzlocal()),
        self.sha: datetime.datetime (2017, 9, 30, 1, 9, tzinfo=tzlocal()),
        self.sha_no_sukkot: datetime.datetime(2017, 9, 30, 1, 10, tzinfo=tzlocal())})
    self.validate_schedule(datetime.datetime (2017, 9, 30, 2, 0, tzinfo=tzlocal()), { #Sha/YomKippur - 2:00
        self.sun: datetime.datetime (2017, 10, 1, 1, 1, tzinfo=tzlocal()),
        self.mon: datetime.datetime (2017, 10, 2, 1, 2, tzinfo=tzlocal()),
        self.tue: datetime.datetime (2017, 10, 3, 1, 3, tzinfo=tzlocal()),
        self.wed: datetime.datetime (2017, 10, 18, 1, 4, tzinfo=tzlocal()), #delayed due to Erev Sukkot
        self.thu: datetime.datetime (2017, 10, 19, 1, 5, tzinfo=tzlocal()),
        self.fri: datetime.datetime (2017, 10, 4, 1, 6, tzinfo=tzlocal()),
        self.fri_no_sukkot: datetime.datetime(2017, 10, 11, 1, 7, tzinfo=tzlocal()),
        self.sec_fri: datetime.datetime (2017, 10, 4, 1, 8, tzinfo=tzlocal()),
        self.sha: datetime.datetime (2017, 10, 5, 1, 9, tzinfo=tzlocal()),
        self.sha_no_sukkot: datetime.datetime(2017, 10, 12, 1, 10, tzinfo=tzlocal())})
    self.validate_schedule(datetime.datetime (2017, 10, 1, 1, 0, tzinfo=tzlocal()), { #Sun - 1:00
        self.sun: datetime.datetime (2017, 10, 1, 1, 1, tzinfo=tzlocal()),
        self.mon: datetime.datetime (2017, 10, 2, 1, 2, tzinfo=tzlocal()),
        self.tue: datetime.datetime (2017, 10, 3, 1, 3, tzinfo=tzlocal()),
        self.wed: datetime.datetime (2017, 10, 18, 1, 4, tzinfo=tzlocal()), #delayed due to Erev Sukkot
        self.thu: datetime.datetime (2017, 10, 19, 1, 5, tzinfo=tzlocal()),
        self.fri: datetime.datetime (2017, 10, 4, 1, 6, tzinfo=tzlocal()),
        self.fri_no_sukkot: datetime.datetime(2017, 10, 11, 1, 7, tzinfo=tzlocal()),
        self.sec_fri: datetime.datetime (2017, 10, 4, 1, 8, tzinfo=tzlocal()),
        self.sha: datetime.datetime (2017, 10, 5, 1, 9, tzinfo=tzlocal()),
        self.sha_no_sukkot: datetime.datetime(2017, 10, 12, 1, 10, tzinfo=tzlocal())})
    self.validate_schedule(datetime.datetime (2017, 10, 1, 2, 0, tzinfo=tzlocal()), { #Sun - 2:00
        self.sun: datetime.datetime (2017, 10, 8, 1, 1, tzinfo=tzlocal()),
        self.mon: datetime.datetime (2017, 10, 2, 1, 2, tzinfo=tzlocal()),
        self.tue: datetime.datetime (2017, 10, 3, 1, 3, tzinfo=tzlocal()),
        self.wed: datetime.datetime (2017, 10, 18, 1, 4, tzinfo=tzlocal()), #delayed due to Erev Sukkot
        self.thu: datetime.datetime (2017, 10, 19, 1, 5, tzinfo=tzlocal()),
        self.fri: datetime.datetime (2017, 10, 4, 1, 6, tzinfo=tzlocal()),
        self.fri_no_sukkot: datetime.datetime(2017, 10, 11, 1, 7, tzinfo=tzlocal()),
        self.sec_fri: datetime.datetime (2017, 10, 4, 1, 8, tzinfo=tzlocal()),
        self.sha: datetime.datetime (2017, 10, 5, 1, 9, tzinfo=tzlocal()),
        self.sha_no_sukkot: datetime.datetime(2017, 10, 12, 1, 10, tzinfo=tzlocal())})
    self.validate_schedule(datetime.datetime (2017, 10, 3, 1, 0, tzinfo=tzlocal()), { #Tue - Day before Erev Sukkot - 1:00
        self.sun: datetime.datetime (2017, 10, 8, 1, 1, tzinfo=tzlocal()),
        self.mon: datetime.datetime (2017, 10, 9, 1, 2, tzinfo=tzlocal()),
        self.tue: datetime.datetime (2017, 10, 3, 1, 3, tzinfo=tzlocal()),
        self.wed: datetime.datetime (2017, 10, 18, 1, 4, tzinfo=tzlocal()), #delayed due to Erev Sukkot
        self.thu: datetime.datetime (2017, 10, 19, 1, 5, tzinfo=tzlocal()),
        self.fri: datetime.datetime (2017, 10, 4, 1, 6, tzinfo=tzlocal()),
        self.fri_no_sukkot: datetime.datetime(2017, 10, 11, 1, 7, tzinfo=tzlocal()),
        self.sec_fri: datetime.datetime (2017, 10, 4, 1, 8, tzinfo=tzlocal()),
        self.sha: datetime.datetime (2017, 10, 5, 1, 9, tzinfo=tzlocal()),
        self.sha_no_sukkot: datetime.datetime(2017, 10, 12, 1, 10, tzinfo=tzlocal())})
    self.validate_schedule(datetime.datetime (2017, 10, 3, 2, 0, tzinfo=tzlocal()), { #Tue - Day before Erev Sukkot - 2:00
        self.sun: datetime.datetime (2017, 10, 8, 1, 1, tzinfo=tzlocal()),
        self.mon: datetime.datetime (2017, 10, 9, 1, 2, tzinfo=tzlocal()),
        self.tue: datetime.datetime (2017, 10, 10, 1, 3, tzinfo=tzlocal()),
        self.wed: datetime.datetime (2017, 10, 18, 1, 4, tzinfo=tzlocal()), #delayed due to Erev Sukkot
        self.thu: datetime.datetime (2017, 10, 19, 1, 5, tzinfo=tzlocal()),
        self.fri: datetime.datetime (2017, 10, 4, 1, 6, tzinfo=tzlocal()),
        self.fri_no_sukkot: datetime.datetime(2017, 10, 11, 1, 7, tzinfo=tzlocal()),
        self.sec_fri: datetime.datetime (2017, 10, 4, 1, 8, tzinfo=tzlocal()),
        self.sha: datetime.datetime (2017, 10, 5, 1, 9, tzinfo=tzlocal()),
        self.sha_no_sukkot: datetime.datetime(2017, 10, 12, 1, 10, tzinfo=tzlocal())})
    self.validate_schedule(datetime.datetime (2017, 10, 4, 1, 0, tzinfo=tzlocal()), { #Wed - Erev Sukkot - 1:00
        self.sun: datetime.datetime (2017, 10, 8, 1, 1, tzinfo=tzlocal()),
        self.mon: datetime.datetime (2017, 10, 9, 1, 2, tzinfo=tzlocal()),
        self.tue: datetime.datetime (2017, 10, 10, 1, 3, tzinfo=tzlocal()),
        self.wed: datetime.datetime (2017, 10, 18, 1, 4, tzinfo=tzlocal()), #delayed due to Erev Sukkot
        self.thu: datetime.datetime (2017, 10, 19, 1, 5, tzinfo=tzlocal()),
        self.fri: datetime.datetime (2017, 10, 4, 1, 6, tzinfo=tzlocal()),
        self.fri_no_sukkot: datetime.datetime(2017, 10, 11, 1, 7, tzinfo=tzlocal()),
        self.sec_fri: datetime.datetime (2017, 10, 4, 1, 8, tzinfo=tzlocal()),
        self.sha: datetime.datetime (2017, 10, 5, 1, 9, tzinfo=tzlocal()),
        self.sha_no_sukkot: datetime.datetime(2017, 10, 12, 1, 10, tzinfo=tzlocal())})
    self.validate_schedule(datetime.datetime (2017, 10, 4, 2, 0, tzinfo=tzlocal()), { #Wed - Erev Sukkot - 2:00
        self.sun: datetime.datetime (2017, 10, 8, 1, 1, tzinfo=tzlocal()),
        self.mon: datetime.datetime (2017, 10, 9, 1, 2, tzinfo=tzlocal()),
        self.tue: datetime.datetime (2017, 10, 10, 1, 3, tzinfo=tzlocal()),
        self.wed: datetime.datetime (2017, 10, 18, 1, 4, tzinfo=tzlocal()), #delayed due to Erev Sukkot
        self.thu: datetime.datetime (2017, 10, 19, 1, 5, tzinfo=tzlocal()),
        self.fri: datetime.datetime (2017, 10, 6, 1, 6, tzinfo=tzlocal()),
        self.fri_no_sukkot: datetime.datetime(2017, 10, 11, 1, 7, tzinfo=tzlocal()),
        self.sec_fri: datetime.datetime (2017, 10, 6, 1, 8, tzinfo=tzlocal()),
        self.sha: datetime.datetime (2017, 10, 5, 1, 9, tzinfo=tzlocal()),
        self.sha_no_sukkot: datetime.datetime(2017, 10, 12, 1, 10, tzinfo=tzlocal())})
    self.validate_schedule(datetime.datetime (2017, 10, 5, 1, 0, tzinfo=tzlocal()), { #Thur - Sukkot - 1:00
        self.sun: datetime.datetime (2017, 10, 8, 1, 1, tzinfo=tzlocal()),
        self.mon: datetime.datetime (2017, 10, 9, 1, 2, tzinfo=tzlocal()),
        self.tue: datetime.datetime (2017, 10, 10, 1, 3, tzinfo=tzlocal()),
        self.wed: datetime.datetime (2017, 10, 18, 1, 4, tzinfo=tzlocal()), #delayed due to Erev Sukkot
        self.thu: datetime.datetime (2017, 10, 19, 1, 5, tzinfo=tzlocal()),
        self.fri: datetime.datetime (2017, 10, 6, 1, 6, tzinfo=tzlocal()),
        self.fri_no_sukkot: datetime.datetime(2017, 10, 11, 1, 7, tzinfo=tzlocal()),
        self.sec_fri: datetime.datetime (2017, 10, 6, 1, 8, tzinfo=tzlocal()),
        self.sha: datetime.datetime (2017, 10, 5, 1, 9, tzinfo=tzlocal()),
        self.sha_no_sukkot: datetime.datetime(2017, 10, 12, 1, 10, tzinfo=tzlocal())})
    self.validate_schedule(datetime.datetime (2017, 10, 5, 2, 0, tzinfo=tzlocal()), { #Thur - Sukkot - 2:00
        self.sun: datetime.datetime (2017, 10, 8, 1, 1, tzinfo=tzlocal()),
        self.mon: datetime.datetime (2017, 10, 9, 1, 2, tzinfo=tzlocal()),
        self.tue: datetime.datetime (2017, 10, 10, 1, 3, tzinfo=tzlocal()),
        self.wed: datetime.datetime (2017, 10, 18, 1, 4, tzinfo=tzlocal()), #delayed due to Erev Sukkot
        self.thu: datetime.datetime (2017, 10, 19, 1, 5, tzinfo=tzlocal()),
        self.fri: datetime.datetime (2017, 10, 6, 1, 6, tzinfo=tzlocal()),
        self.fri_no_sukkot: datetime.datetime(2017, 10, 11, 1, 7, tzinfo=tzlocal()),
        self.sec_fri: datetime.datetime (2017, 10, 6, 1, 8, tzinfo=tzlocal()),
        self.sha: datetime.datetime (2017, 10, 7, 1, 9, tzinfo=tzlocal()),
        self.sha_no_sukkot: datetime.datetime(2017, 10, 12, 1, 10, tzinfo=tzlocal())})
    self.validate_schedule(datetime.datetime (2017, 10, 6, 1, 0, tzinfo=tzlocal()), { #Fri - Sukkot - 1:00
        self.sun: datetime.datetime (2017, 10, 8, 1, 1, tzinfo=tzlocal()),
        self.mon: datetime.datetime (2017, 10, 9, 1, 2, tzinfo=tzlocal()),
        self.tue: datetime.datetime (2017, 10, 10, 1, 3, tzinfo=tzlocal()),
        self.wed: datetime.datetime (2017, 10, 18, 1, 4, tzinfo=tzlocal()), #delayed due to Erev Sukkot
        self.thu: datetime.datetime (2017, 10, 19, 1, 5, tzinfo=tzlocal()),
        self.fri: datetime.datetime (2017, 10, 6, 1, 6, tzinfo=tzlocal()),
        self.fri_no_sukkot: datetime.datetime(2017, 10, 11, 1, 7, tzinfo=tzlocal()),
        self.sec_fri: datetime.datetime (2017, 10, 6, 1, 8, tzinfo=tzlocal()),
        self.sha: datetime.datetime (2017, 10, 7, 1, 9, tzinfo=tzlocal()),
        self.sha_no_sukkot: datetime.datetime(2017, 10, 12, 1, 10, tzinfo=tzlocal())})
    self.validate_schedule(datetime.datetime (2017, 10, 6, 2, 0, tzinfo=tzlocal()), { #Fri - Sukkot - 2:00
        self.sun: datetime.datetime (2017, 10, 8, 1, 1, tzinfo=tzlocal()),
        self.mon: datetime.datetime (2017, 10, 9, 1, 2, tzinfo=tzlocal()),
        self.tue: datetime.datetime (2017, 10, 10, 1, 3, tzinfo=tzlocal()),
        self.wed: datetime.datetime (2017, 10, 18, 1, 4, tzinfo=tzlocal()), #delayed due to Erev Sukkot
        self.thu: datetime.datetime (2017, 10, 19, 1, 5, tzinfo=tzlocal()),
        self.fri: datetime.datetime (2017, 10, 11, 1, 6, tzinfo=tzlocal()),
        self.fri_no_sukkot: datetime.datetime(2017, 10, 11, 1, 7, tzinfo=tzlocal()),
        self.sec_fri: datetime.datetime (2017, 10, 11, 1, 8, tzinfo=tzlocal()),
        self.sha: datetime.datetime (2017, 10, 7, 1, 9, tzinfo=tzlocal()),
        self.sha_no_sukkot: datetime.datetime(2017, 10, 12, 1, 10, tzinfo=tzlocal())})
    self.validate_schedule(datetime.datetime (2017, 10, 7, 1, 0, tzinfo=tzlocal()), { #Sha- Sukkot - 1:00
        self.sun: datetime.datetime (2017, 10, 8, 1, 1, tzinfo=tzlocal()),
        self.mon: datetime.datetime (2017, 10, 9, 1, 2, tzinfo=tzlocal()),
        self.tue: datetime.datetime (2017, 10, 10, 1, 3, tzinfo=tzlocal()),
        self.wed: datetime.datetime (2017, 10, 18, 1, 4, tzinfo=tzlocal()), #delayed due to Erev Sukkot
        self.thu: datetime.datetime (2017, 10, 19, 1, 5, tzinfo=tzlocal()),
        self.fri: datetime.datetime (2017, 10, 11, 1, 6, tzinfo=tzlocal()),
        self.fri_no_sukkot: datetime.datetime(2017, 10, 11, 1, 7, tzinfo=tzlocal()),
        self.sec_fri: datetime.datetime (2017, 10, 11, 1, 8, tzinfo=tzlocal()),
        self.sha: datetime.datetime (2017, 10, 7, 1, 9, tzinfo=tzlocal()),
        self.sha_no_sukkot: datetime.datetime(2017, 10, 12, 1, 10, tzinfo=tzlocal())})
    self.validate_schedule(datetime.datetime (2017, 10, 7, 2, 0, tzinfo=tzlocal()), { #Sha- Sukkot - 2:00
        self.sun: datetime.datetime (2017, 10, 8, 1, 1, tzinfo=tzlocal()),
        self.mon: datetime.datetime (2017, 10, 9, 1, 2, tzinfo=tzlocal()),
        self.tue: datetime.datetime (2017, 10, 10, 1, 3, tzinfo=tzlocal()),
        self.wed: datetime.datetime (2017, 10, 18, 1, 4, tzinfo=tzlocal()), #delayed due to Erev Sukkot
        self.thu: datetime.datetime (2017, 10, 19, 1, 5, tzinfo=tzlocal()),
        self.fri: datetime.datetime (2017, 10, 11, 1, 6, tzinfo=tzlocal()),
        self.fri_no_sukkot: datetime.datetime(2017, 10, 11, 1, 7, tzinfo=tzlocal()),
        self.sec_fri: datetime.datetime (2017, 10, 11, 1, 8, tzinfo=tzlocal()),
        self.sha: datetime.datetime (2017, 10, 12, 1, 9, tzinfo=tzlocal()),
        self.sha_no_sukkot: datetime.datetime(2017, 10, 12, 1, 10, tzinfo=tzlocal())})
    self.validate_schedule(datetime.datetime (2017, 10, 11, 1, 0, tzinfo=tzlocal()), { #Wed- Erev Simchat Torah - 1:00
        self.sun: datetime.datetime (2017, 10, 15, 1, 1, tzinfo=tzlocal()),
        self.mon: datetime.datetime (2017, 10, 16, 1, 2, tzinfo=tzlocal()),
        self.tue: datetime.datetime (2017, 10, 17, 1, 3, tzinfo=tzlocal()),
        self.wed: datetime.datetime (2017, 10, 18, 1, 4, tzinfo=tzlocal()),
        self.thu: datetime.datetime (2017, 10, 19, 1, 5, tzinfo=tzlocal()),
        self.fri: datetime.datetime (2017, 10, 11, 1, 6, tzinfo=tzlocal()),
        self.fri_no_sukkot: datetime.datetime(2017, 10, 11, 1, 7, tzinfo=tzlocal()),
        self.sec_fri: datetime.datetime (2017, 10, 11, 1, 8, tzinfo=tzlocal()),
        self.sha: datetime.datetime (2017, 10, 12, 1, 9, tzinfo=tzlocal()),
        self.sha_no_sukkot: datetime.datetime(2017, 10, 12, 1, 10, tzinfo=tzlocal())})
    self.validate_schedule(datetime.datetime (2017, 10, 11, 2, 0, tzinfo=tzlocal()), { #Wed- Erev Simchat Torah - 2:00
        self.sun: datetime.datetime (2017, 10, 15, 1, 1, tzinfo=tzlocal()),
        self.mon: datetime.datetime (2017, 10, 16, 1, 2, tzinfo=tzlocal()),
        self.tue: datetime.datetime (2017, 10, 17, 1, 3, tzinfo=tzlocal()),
        self.wed: datetime.datetime (2017, 10, 18, 1, 4, tzinfo=tzlocal()),
        self.thu: datetime.datetime (2017, 10, 19, 1, 5, tzinfo=tzlocal()),
        self.fri: datetime.datetime (2017, 10, 13, 1, 6, tzinfo=tzlocal()),
        self.fri_no_sukkot: datetime.datetime(2017, 10, 13, 1, 7, tzinfo=tzlocal()),
        self.sec_fri: datetime.datetime (2017, 10, 13, 1, 8, tzinfo=tzlocal()),
        self.sha: datetime.datetime (2017, 10, 12, 1, 9, tzinfo=tzlocal()),
        self.sha_no_sukkot: datetime.datetime(2017, 10, 12, 1, 10, tzinfo=tzlocal())})
    self.validate_schedule(datetime.datetime (2017, 10, 12, 1, 0, tzinfo=tzlocal()), { #Thu- Simchat Torah - 1:00
        self.sun: datetime.datetime (2017, 10, 15, 1, 1, tzinfo=tzlocal()),
        self.mon: datetime.datetime (2017, 10, 16, 1, 2, tzinfo=tzlocal()),
        self.tue: datetime.datetime (2017, 10, 17, 1, 3, tzinfo=tzlocal()),
        self.wed: datetime.datetime (2017, 10, 18, 1, 4, tzinfo=tzlocal()),
        self.thu: datetime.datetime (2017, 10, 19, 1, 5, tzinfo=tzlocal()),
        self.fri: datetime.datetime (2017, 10, 13, 1, 6, tzinfo=tzlocal()),
        self.fri_no_sukkot: datetime.datetime(2017, 10, 13, 1, 7, tzinfo=tzlocal()),
        self.sec_fri: datetime.datetime (2017, 10, 13, 1, 8, tzinfo=tzlocal()),
        self.sha: datetime.datetime (2017, 10, 12, 1, 9, tzinfo=tzlocal()),
        self.sha_no_sukkot: datetime.datetime(2017, 10, 12, 1, 10, tzinfo=tzlocal())})
    self.validate_schedule(datetime.datetime (2017, 10, 12, 2, 0, tzinfo=tzlocal()), { #Thu- Simchat Torah - 2:00
        self.sun: datetime.datetime (2017, 10, 15, 1, 1, tzinfo=tzlocal()),
        self.mon: datetime.datetime (2017, 10, 16, 1, 2, tzinfo=tzlocal()),
        self.tue: datetime.datetime (2017, 10, 17, 1, 3, tzinfo=tzlocal()),
        self.wed: datetime.datetime (2017, 10, 18, 1, 4, tzinfo=tzlocal()),
        self.thu: datetime.datetime (2017, 10, 19, 1, 5, tzinfo=tzlocal()),
        self.fri: datetime.datetime (2017, 10, 13, 1, 6, tzinfo=tzlocal()),
        self.fri_no_sukkot: datetime.datetime(2017, 10, 13, 1, 7, tzinfo=tzlocal()),
        self.sec_fri: datetime.datetime (2017, 10, 13, 1, 8, tzinfo=tzlocal()),
        self.sha: datetime.datetime (2017, 10, 14, 1, 9, tzinfo=tzlocal()),
        self.sha_no_sukkot: datetime.datetime(2017, 10, 14, 1, 10, tzinfo=tzlocal())})
    self.validate_schedule(datetime.datetime (2018, 3, 29, 1, 0, tzinfo=tzlocal()), { #Thu- Day before Erev Pesach - 1:00
        self.sun: datetime.datetime (2018, 4, 1, 1, 1, tzinfo=tzlocal()),
        self.mon: datetime.datetime (2018, 4, 2, 1, 2, tzinfo=tzlocal()),
        self.tue: datetime.datetime (2018, 4, 3, 1, 3, tzinfo=tzlocal()),
        self.wed: datetime.datetime (2018, 4, 4, 1, 4, tzinfo=tzlocal()),
        self.thu: datetime.datetime (2018, 3, 29, 1, 5, tzinfo=tzlocal()),
        self.fri: datetime.datetime (2018, 3, 30, 1, 6, tzinfo=tzlocal()),
        self.fri_no_sukkot: datetime.datetime(2018, 3, 30, 1, 7, tzinfo=tzlocal()),
        self.sec_fri: datetime.datetime (2018, 3, 30, 1, 8, tzinfo=tzlocal()),
        self.sha: datetime.datetime (2018, 3, 31, 1, 9, tzinfo=tzlocal()),
        self.sha_no_sukkot: datetime.datetime(2018, 3, 31, 1, 10, tzinfo=tzlocal())})
    self.validate_schedule(datetime.datetime (2018, 4, 4, 1, 0, tzinfo=tzlocal()), { #Wed- Day before Erev Pesach II - 1:00
        self.sun: datetime.datetime (2018, 4, 8, 1, 1, tzinfo=tzlocal()),
        self.mon: datetime.datetime (2018, 4, 9, 1, 2, tzinfo=tzlocal()),
        self.tue: datetime.datetime (2018, 4, 10, 1, 3, tzinfo=tzlocal()),
        self.wed: datetime.datetime (2018, 4, 4, 1, 4, tzinfo=tzlocal()),
        self.thu: datetime.datetime (2018, 4, 12, 1, 5, tzinfo=tzlocal()),
        self.fri: datetime.datetime (2018, 4, 5, 1, 6, tzinfo=tzlocal()),
        self.fri_no_sukkot: datetime.datetime(2018, 4, 5, 1, 7, tzinfo=tzlocal()),
        self.sec_fri: datetime.datetime (2018, 4, 5, 1, 8, tzinfo=tzlocal()),
        self.sha: datetime.datetime (2018, 4, 6, 1, 9, tzinfo=tzlocal()),
        self.sha_no_sukkot: datetime.datetime(2018, 4, 6, 1, 10, tzinfo=tzlocal())})
    self.validate_schedule(datetime.datetime (2018, 4, 6, 1, 0, tzinfo=tzlocal()), { #Fri- Pesach II - 1:00
        self.sun: datetime.datetime (2018, 4, 8, 1, 1, tzinfo=tzlocal()),
        self.mon: datetime.datetime (2018, 4, 9, 1, 2, tzinfo=tzlocal()),
        self.tue: datetime.datetime (2018, 4, 10, 1, 3, tzinfo=tzlocal()),
        self.wed: datetime.datetime (2018, 4, 11, 1, 4, tzinfo=tzlocal()),
        self.thu: datetime.datetime (2018, 4, 12, 1, 5, tzinfo=tzlocal()),
        self.fri: datetime.datetime (2018, 4, 6, 1, 6, tzinfo=tzlocal()),
        self.fri_no_sukkot: datetime.datetime(2018, 4, 6, 1, 7, tzinfo=tzlocal()),
        self.sec_fri: datetime.datetime (2018, 4, 13, 1, 8, tzinfo=tzlocal()),
        self.sha: datetime.datetime (2018, 4, 6, 1, 9, tzinfo=tzlocal()),
        self.sha_no_sukkot: datetime.datetime(2018, 4, 6, 1, 10, tzinfo=tzlocal())})
    self.validate_schedule(datetime.datetime (2018, 5, 19, 1, 0, tzinfo=tzlocal()), { #Sha- Erev Shavuot - 1:00
        self.sun: datetime.datetime (2018, 5, 27, 1, 1, tzinfo=tzlocal()),
        self.mon: datetime.datetime (2018, 5, 21, 1, 2, tzinfo=tzlocal()),
        self.tue: datetime.datetime (2018, 5, 22, 1, 3, tzinfo=tzlocal()),
        self.wed: datetime.datetime (2018, 5, 23, 1, 4, tzinfo=tzlocal()),
        self.thu: datetime.datetime (2018, 5, 24, 1, 5, tzinfo=tzlocal()),
        self.fri: datetime.datetime (2018, 5, 19, 1, 6, tzinfo=tzlocal()),
        self.fri_no_sukkot: datetime.datetime(2018, 5, 19, 1, 7, tzinfo=tzlocal()),
        self.sec_fri: datetime.datetime (2018, 5, 25, 1, 8, tzinfo=tzlocal()),
        self.sha: datetime.datetime (2018, 5, 19, 1, 9, tzinfo=tzlocal()),
        self.sha_no_sukkot: datetime.datetime(2018, 5, 19, 1, 10, tzinfo=tzlocal())})
    self.validate_schedule(datetime.datetime (2018, 5, 19, 2, 0, tzinfo=tzlocal()), { #Sha- Erev Shavuot - 2:00
        self.sun: datetime.datetime (2018, 5, 27, 1, 1, tzinfo=tzlocal()),
        self.mon: datetime.datetime (2018, 5, 21, 1, 2, tzinfo=tzlocal()),
        self.tue: datetime.datetime (2018, 5, 22, 1, 3, tzinfo=tzlocal()),
        self.wed: datetime.datetime (2018, 5, 23, 1, 4, tzinfo=tzlocal()),
        self.thu: datetime.datetime (2018, 5, 24, 1, 5, tzinfo=tzlocal()),
        self.fri: datetime.datetime (2018, 5, 25, 1, 6, tzinfo=tzlocal()),
        self.fri_no_sukkot: datetime.datetime(2018, 5, 25, 1, 7, tzinfo=tzlocal()),
        self.sec_fri: datetime.datetime (2018, 5, 25, 1, 8, tzinfo=tzlocal()),
        self.sha: datetime.datetime (2018, 5, 20, 1, 9, tzinfo=tzlocal()),
        self.sha_no_sukkot: datetime.datetime(2018, 5, 20, 1, 10, tzinfo=tzlocal())})
def validate_schedule(self, date, expected_schedule_dates):
    """Assert every Schedule's next firing after *date* matches expectations.

    ``expected_schedule_dates`` maps each Schedule instance to the datetime
    it should fire next. Also verifies the mapping covers exactly the set
    of schedules present in the database (no extras, none missing).
    """
    seen = 0
    for sched in Schedule.objects.all():
        seen += 1
        actual = sched.next_datetime(date)
        expected = expected_schedule_dates[sched]
        # Debug trace: makes a mismatch easy to locate in test output.
        print('schedule for date: %s, schedule: %s, schedule_time: %s, expected_date: %s' % (date, sched, actual, expected))
        self.assertEqual(actual, expected)
    # Every schedule in the DB must have an expectation, and vice versa.
    self.assertEqual(seen, len(expected_schedule_dates))
def Xtest_bitmask_char(self):
    """Round-trip sweep: chars_to_bitmask(bitmask_to_chars(b)) must equal b.

    Disabled (``X`` prefix): exhaustive, extremely long-running sweep over
    32-bit-padded binary strings.

    Fixes over the previous version: the loop now has the same upper bound
    as ``Xtest_bitmask_int`` (it previously ran forever on success), and a
    mismatch fails the test instead of merely printing and returning.
    """
    def test_chars_bitmasks():
        from common.utils import chars_to_bitmask, bitmask_to_chars
        i = 0
        while True:
            binary1 = bin(i)[2:]
            chars_of_4 = (len(binary1) // 32 + 1)  # number of 32-bit groups needed
            binary = binary1.zfill(chars_of_4 * 32)
            if i % 10000 == 0:
                print(i, chars_of_4, '"' + binary + '"')
            round_trip = chars_to_bitmask(bitmask_to_chars(binary))
            if round_trip != binary:
                print('ERROR')
                print('"' + bitmask_to_chars(binary) + '"')
                print('"' + round_trip + '"')
                print(binary, i)
                self.fail('bitmask char round-trip failed for i=%d' % i)
            i += 1
            # Terminate on success; mirrors the bound in Xtest_bitmask_int.
            if i > 79228162514264337593543950336+1000:
                print('END!!!')
                return
    test_chars_bitmasks()
def Xtest_bitmask_int(self):
    """Sweep check: bitmask_to_int(32-bit-padded bin string) round-trips i.

    Disabled (``X`` prefix): exhaustive, extremely long-running sweep.

    Fix: the ``bitmask_to_int`` import was commented out, so the inner
    function raised ``NameError`` on first use; the import is restored.
    """
    def test_chars_bitmasks():
        from common.utils import bitmask_to_int  # was commented out -> NameError
        i = 0
        while True:
            binary1 = bin(i)[2:]
            chars_of_4 = (len(binary1) // 32 + 1)  # number of 32-bit groups needed
            binary = binary1.zfill(chars_of_4 * 32)
            if i % 10000 == 0:
                print(i, chars_of_4, len(binary.lstrip('0')), '"' + binary + '"')
            if bitmask_to_int(binary) != i:
                print('ERROR')
                print('ERROR', binary, i)
                print('ERROR', bitmask_to_int(binary))
                return
            i += 1
            # Terminate the sweep after a fixed (huge) bound.
            if i > 79228162514264337593543950336+1000:
                print('END!!!')
                return
    test_chars_bitmasks()
# def old_test_schedule_runs(self):
# self.validate_schedule(datetime.date(2017, 9, 19), [self.tue]) #Day before Erev Rosh Hashana
# self.validate_schedule(datetime.date(2017, 9, 20), [self.sec_fri, self.fri, self.fri_no_sukkot]) #Erev Rosh Hashana
#
# self.validate_schedule(datetime.date(2017, 9, 21), [self.sha, self.sha_no_sukkot, self.fri, self.fri_no_sukkot]) #Rosh Hashana
# self.validate_schedule(datetime.date(2017, 9, 22), [self.sha, self.sha_no_sukkot, self.fri, self.fri_no_sukkot]) #Rosh Hashana
# self.validate_schedule(datetime.date(2017, 9, 23), [self.sha, self.sha_no_sukkot]) #Shabbat after Hag
# self.validate_schedule(datetime.date(2017, 9, 24), [self.sun]) #Sunday
#
# self.validate_schedule(datetime.date(2017, 9, 28), [self.thu]) #Thur
# self.validate_schedule(datetime.date(2017, 9, 29), [self.sec_fri, self.fri, self.fri_no_sukkot]) #Erev Shabbat/Kippur
# self.validate_schedule(datetime.date(2017, 9, 30), [self.sha, self.sha_no_sukkot]) #YomKippur & Shabbat
# self.validate_schedule(datetime.date(2017, 10, 1), [self.sun]) #Sunday
#
# self.validate_schedule(datetime.date(2017, 10, 3), [self.tue]) #Tue
# self.validate_schedule(datetime.date(2017, 10, 4), [self.sec_fri, self.fri]) #Erev Sukkot
# self.validate_schedule(datetime.date(2017, 10, 5), [self.sha]) #Sukkot
# self.validate_schedule(datetime.date(2017, 10, 6), [self.sec_fri, self.fri]) #Fri Sukkot
# self.validate_schedule(datetime.date(2017, 10, 7), [self.sha]) #Sha Sukkot
# self.validate_schedule(datetime.date(2017, 10, 8), [self.sun]) #Sun
# self.validate_schedule(datetime.date(2017, 10, 9), [self.mon]) #Mon
# self.validate_schedule(datetime.date(2017, 10, 10), [self.tue]) #Tue
# self.validate_schedule(datetime.date(2017, 10, 11), [self.sec_fri, self.fri, self.fri_no_sukkot]) #Erev SimchatTorah
# self.validate_schedule(datetime.date(2017, 10, 12), [self.sha, self.sha_no_sukkot]) #SimchatTorah
# self.validate_schedule(datetime.date(2017, 10, 13), [self.sec_fri, self.fri, self.fri_no_sukkot]) #Fri
# self.validate_schedule(datetime.date(2017, 10, 14), [self.sha, self.sha_no_sukkot]) #Sha
# self.validate_schedule(datetime.date(2017, 10, 15), [self.sun]) #Sun
#
# self.validate_schedule(datetime.date(2018, 3, 29), [self.thu]) #Thu
# self.validate_schedule(datetime.date(2018, 3, 30), [self.sec_fri, self.fri, self.fri_no_sukkot]) #Fri Erev Pesach
# self.validate_schedule(datetime.date(2018, 3, 31), [self.sha, self.sha_no_sukkot]) #Sha/Pesach
# self.validate_schedule(datetime.date(2018, 4, 1), [self.sun]) #Sun
# self.validate_schedule(datetime.date(2018, 4, 2), [self.mon]) #Mon
# self.validate_schedule(datetime.date(2018, 4, 3), [self.tue]) #Tue
# self.validate_schedule(datetime.date(2018, 4, 4), [self.wed]) #Wed
# self.validate_schedule(datetime.date(2018, 4, 5), [self.sec_fri, self.fri, self.fri_no_sukkot]) #Erev Hag
# self.validate_schedule(datetime.date(2018, 4, 6), [self.sha, self.sha_no_sukkot, self.fri, self.fri_no_sukkot]) #Hag Sheini
# self.validate_schedule(datetime.date(2018, 4, 7), [self.sha, self.sha_no_sukkot]) #Sha
# self.validate_schedule(datetime.date(2018, 4, 8), [self.sun]) #Sun
#
# self.validate_schedule(datetime.date(2018, 5, 17), [self.thu]) #Thu
# self.validate_schedule(datetime.date(2018, 5, 18), [self.sec_fri, self.fri, self.fri_no_sukkot]) #Fri
# self.validate_schedule(datetime.date(2018, 5, 19), [self.sha, self.sha_no_sukkot, self.fri, self.fri_no_sukkot]) #Sha
# self.validate_schedule(datetime.date(2018, 5, 20), [self.sha, self.sha_no_sukkot]) #Shavuot
# self.validate_schedule(datetime.date(2018, 5, 21), [self.mon]) #Mon
#
# self.validate_schedule(datetime.date(2018, 9, 6), [self.thu]) #Thu
# self.validate_schedule(datetime.date(2018, 9, 7), [self.sec_fri, self.fri, self.fri_no_sukkot]) #Fri
# self.validate_schedule(datetime.date(2018, 9, 8), [self.sha, self.sha_no_sukkot]) #Sha
# self.validate_schedule(datetime.date(2018, 9, 9), [self.sec_fri, self.fri, self.fri_no_sukkot]) #Erev RoshHashana
# self.validate_schedule(datetime.date(2018, 9, 10), [self.sha, self.sha_no_sukkot, self.fri, self.fri_no_sukkot]) #RoshHashana
# self.validate_schedule(datetime.date(2018, 9, 11), [self.sha, self.sha_no_sukkot]) #RoshHashana
# self.validate_schedule(datetime.date(2018, 9, 12), [self.wed]) #Wed
#
# self.validate_schedule(datetime.date(2018, 9, 17), [self.mon]) #Mon
# self.validate_schedule(datetime.date(2018, 9, 18), [self.sec_fri, self.fri, self.fri_no_sukkot]) #Erev YomKippur
# self.validate_schedule(datetime.date(2018, 9, 19), [self.sha, self.sha_no_sukkot]) #YomKippur
# self.validate_schedule(datetime.date(2018, 9, 20), [self.thu]) #Thur
# self.validate_schedule(datetime.date(2018, 9, 21), [self.sec_fri, self.fri, self.fri_no_sukkot]) #Fri
# self.validate_schedule(datetime.date(2018, 9, 22), [self.sha, self.sha_no_sukkot]) #Sat
# self.validate_schedule(datetime.date(2018, 9, 23), [self.sec_fri, self.fri]) #Erev Sukkot
# self.validate_schedule(datetime.date(2018, 9, 24), [self.sha]) #Sukkot
# self.validate_schedule(datetime.date(2018, 9, 25), [self.tue]) #Tue
# self.validate_schedule(datetime.date(2018, 9, 26), [self.wed]) #Wed
# self.validate_schedule(datetime.date(2018, 9, 27), [self.thu]) #Thu
# self.validate_schedule(datetime.date(2018, 9, 28), [self.sec_fri, self.fri]) #Fri
# self.validate_schedule(datetime.date(2018, 9, 29), [self.sha]) #Sha Sukkot
# self.validate_schedule(datetime.date(2018, 9, 30), [self.sec_fri, self.fri, self.fri_no_sukkot]) #Erev SimchatTorah
# self.validate_schedule(datetime.date(2018, 10, 1), [self.sha, self.sha_no_sukkot]) #SimchatTorah
# self.validate_schedule(datetime.date(2018, 10, 2), [self.tue]) #Tue
#
# def validate_schedule_runs_old(self, date, expected_schedules):
# ScheduleRun.objects.prepare_runs_for_date(date)
# actual_ids = []
# for run in ScheduleRun.objects.all():
# actual_ids = actual_ids + [run.schedule.id]
# print(run)
# if len(expected_schedules) == 1:
# expected_schedules = [expected_schedules[0],]
# self.assertEqual(sorted(actual_ids), sorted(list(map(lambda x: x.id, expected_schedules)))) #extract the expected ids and sort them
# ScheduleRun.objects.all().delete()
#
| 81.966879 | 310 | 0.599046 | 8,557 | 64,344 | 4.411242 | 0.031904 | 0.163934 | 0.217766 | 0.126474 | 0.916523 | 0.904946 | 0.886057 | 0.863115 | 0.840809 | 0.799931 | 0 | 0.123456 | 0.243675 | 64,344 | 784 | 311 | 82.071429 | 0.652194 | 0.140712 | 0 | 0.677258 | 0 | 0.001672 | 0.031199 | 0.018233 | 0 | 0 | 0 | 0 | 0.076923 | 1 | 0.021739 | false | 0.001672 | 0.018395 | 0 | 0.050167 | 0.070234 | 0 | 0 | 0 | null | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
ce408d69061d9a94e982a7a8e746f2aa1c4e1922 | 10,166 | py | Python | yandex_kassa/tests/test_notification.py | ZarinaAfl/django-yandex-kassa | da1f68c7d3fa5976aa9071052c2767fa676b1470 | [
"MIT"
] | 28 | 2015-12-28T12:49:11.000Z | 2020-12-16T02:38:58.000Z | yandex_kassa/tests/test_notification.py | ZarinaAfl/django-yandex-kassa | da1f68c7d3fa5976aa9071052c2767fa676b1470 | [
"MIT"
] | 11 | 2015-12-13T16:50:17.000Z | 2017-09-20T11:48:14.000Z | yandex_kassa/tests/test_notification.py | ZarinaAfl/django-yandex-kassa | da1f68c7d3fa5976aa9071052c2767fa676b1470 | [
"MIT"
] | 24 | 2016-04-08T12:42:27.000Z | 2021-03-26T07:03:19.000Z | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from random import randint
from app.models import Order
from .base_test import BaseTest
from django.test import override_settings
from django.core.urlresolvers import reverse
from yandex_kassa.forms import CheckOrderForm
from yandex_kassa.models import Payment
@override_settings(YANDEX_KASSA_SHOP_ID=123)
class TestCheckOrder(BaseTest):
    """Tests for the Yandex.Kassa ``checkOrder`` notification endpoint."""

    def setUp(self):
        self.url = reverse('kassa_check_order')
        # Sample checkOrder notification payload in the shape Yandex.Kassa sends.
        self.post_params = dict(
            requestDatetime='2011-05-04T20:38:00.000+04:00',
            action='checkOrder',
            shopId=123,
            shopArticleId=456,
            invoiceId=1234567,
            customerNumber=8123294469,
            orderCreatedDatetime='2011-05-04T20:38:00.000+04:00',
            orderSumAmount=87.10,
            orderSumCurrencyPaycash=643,
            orderSumBankPaycash=1001,
            shopSumAmount=86.23,
            shopSumCurrencyPaycash=643,
            shopSumBankPaycash=1001,
            paymentPayerCode='42007148320',
            paymentType='AC',
        )

    def _create_payment_and_order(self, params):
        """Create a Payment and a matching Order from *params*; return the Payment.

        Extracted from the three test methods, which previously repeated this
        fixture setup verbatim.
        """
        payment = Payment.objects.create(
            shop_id=params['shopId'],
            customer_number=params['customerNumber'],
            invoice_id=params['invoiceId'],
            order_amount=params['orderSumAmount'],
            order_currency=params['orderSumCurrencyPaycash'],
            payment_type=params['paymentType'])
        Order.objects.create(item=self.get_item(), count=randint(1, 5),
                             amount=int(params['orderSumAmount']), payment=payment)
        return payment

    def test_check_order(self):
        """A valid checkOrder request marks the payment PROCESSED."""
        params = self.post_params.copy()
        params['md5'] = CheckOrderForm.make_md5(params)
        payment = self._create_payment_and_order(params)
        res = self.app.post(self.url, params=params)
        self.assertEqual(res.status_code, 200, 'HTTP code is not 200')
        payment = Payment.objects.get(pk=payment.pk)
        self.assertEqual(payment.status, Payment.STATUS.PROCESSED, 'Status is not set to "PROCESSED"')
        self.assertEqual(float(payment.shop_amount), params['shopSumAmount'], 'Shop amount was not changed')
        self.assertIsNotNone(payment.performed_datetime, 'Performed time was not set')

    def test_bad_form(self):
        """Invalid form data -> code="200" body and payment marked FAIL."""
        params = self.post_params.copy()
        params['md5'] = CheckOrderForm.make_md5(params)
        payment = self._create_payment_and_order(params)
        del params['orderSumAmount']  # Make form data invalid
        res = self.app.post(self.url, params=params)
        self.assertEqual(res.status_code, 200, 'HTTP code is not 200')
        body = b'<?xml version="1.0" encoding="UTF-8"?>\n<checkOrderResponse code="200" />'
        self.assertEqual(body, res.body, 'Body is not contains code="200"')
        payment = Payment.objects.get(pk=payment.pk)
        self.assertEqual(payment.status, Payment.STATUS.FAIL, 'Status is not set to "FAIL"')
        self.assertIsNone(payment.shop_amount, 'Shop amount was set for wrong form data')
        self.assertIsNone(payment.performed_datetime, 'Performed time was set for wrong form data')

    def test_bad_md5(self):
        """Wrong MD5 hash -> code="1" body; payment is left untouched."""
        params = self.post_params.copy()
        params['md5'] = '4239da87569c3d29b7d712873561336c'  # Bad MD5 hash
        payment = self._create_payment_and_order(params)
        res = self.app.post(self.url, params=params)
        self.assertEqual(res.status_code, 200, 'HTTP code is not 200')
        body = b'<?xml version="1.0" encoding="UTF-8"?>\n<checkOrderResponse code="1" />'
        self.assertEqual(body, res.body, 'Body is not contains code="1"')
        payment = Payment.objects.get(pk=payment.pk)
        # Fixed assertion message: the expected status here is PROCESSED.
        self.assertEqual(payment.status, Payment.STATUS.PROCESSED, 'Status is not set to "PROCESSED"')
        self.assertIsNone(payment.shop_amount, 'Shop amount was set for wrong MD5 hash')
        self.assertIsNone(payment.performed_datetime, 'Performed time was set for wrong MD5 hash')
@override_settings(YANDEX_KASSA_SHOP_ID=123)
class PaymentAvisioTest(BaseTest):
    """Tests for the Yandex.Kassa ``paymentAviso`` notification endpoint."""

    def setUp(self):
        self.url = reverse('kassa_payment_aviso')
        # Sample paymentAviso notification payload in the shape Yandex.Kassa sends.
        self.post_params = dict(
            requestDatetime='2011-05-04T20:38:00.000+04:00',
            action='paymentAviso',
            shopId=123,
            shopArticleId=456,
            invoiceId=1234567,
            customerNumber=8123294469,
            orderCreatedDatetime='2011-05-04T20:38:00.000+04:00',
            orderSumAmount=87.10,
            orderSumCurrencyPaycash=643,
            orderSumBankPaycash=1001,
            shopSumAmount=86.23,
            shopSumCurrencyPaycash=643,
            shopSumBankPaycash=1001,
            paymentDatetime='2011-05-04T20:38:10.000+04:00',
            paymentPayerCode=42007148320,
            paymentType='AC',
            cps_user_country_code='RU',
            MyField='Добавленное магазином поле',
        )

    def _create_payment_and_order(self, params):
        """Create a Payment and a matching Order from *params*; return the Payment.

        Extracted from the three test methods, which previously repeated this
        fixture setup verbatim.
        """
        payment = Payment.objects.create(
            shop_id=params['shopId'],
            customer_number=params['customerNumber'],
            invoice_id=params['invoiceId'],
            order_amount=params['orderSumAmount'],
            order_currency=params['orderSumCurrencyPaycash'],
            payment_type=params['paymentType'])
        Order.objects.create(item=self.get_item(), count=randint(1, 5),
                             amount=int(params['orderSumAmount']), payment=payment)
        return payment

    def test_payment_aviso(self):
        """A valid paymentAviso request marks the payment SUCCESS."""
        params = self.post_params.copy()
        params['md5'] = CheckOrderForm.make_md5(params)
        payment = self._create_payment_and_order(params)
        res = self.app.post(self.url, params=params)
        self.assertEqual(res.status_code, 200, 'HTTP code is not 200')
        payment = Payment.objects.get(pk=payment.pk)
        self.assertEqual(payment.status, Payment.STATUS.SUCCESS, 'Status is not set to "SUCCESS"')

    def test_bad_md5(self):
        """Wrong MD5 hash -> code="1" body; payment status is left untouched."""
        params = self.post_params.copy()
        params['md5'] = '4239da87569c3d29b7d712873561336c'  # Bad MD5 hash
        payment = self._create_payment_and_order(params)
        res = self.app.post(self.url, params=params)
        self.assertEqual(res.status_code, 200, 'HTTP code is not 200')
        body = '<?xml version="1.0" encoding="UTF-8"?>\n<paymentAvisoResponse code="1" message="Ошибка при проверке MD5 платеж #8123294469" />'.encode('utf-8')
        self.assertEqual(body, res.body, 'Body is not contains code="1"')
        payment = Payment.objects.get(pk=payment.pk)
        # Fixed assertion message: the expected status here is PROCESSED.
        self.assertEqual(payment.status, Payment.STATUS.PROCESSED, 'Status is not set to "PROCESSED"')

    def test_bad_form(self):
        """Invalid form data -> code="200" body and payment marked FAIL."""
        params = self.post_params.copy()
        params['md5'] = CheckOrderForm.make_md5(params)
        payment = self._create_payment_and_order(params)
        del params['orderSumAmount']  # Make form data invalid
        res = self.app.post(self.url, params=params)
        self.assertEqual(res.status_code, 200, 'HTTP code is not 200')
        body = b'<?xml version="1.0" encoding="UTF-8"?>\n<paymentAvisoResponse code="200" />'
        self.assertEqual(body, res.body, 'Body is not contains code="200"')
        payment = Payment.objects.get(pk=payment.pk)
        self.assertEqual(payment.status, Payment.STATUS.FAIL, 'Status is not set to "FAIL"')
        self.assertIsNone(payment.shop_amount, 'Shop amount was set for wrong form data')
        self.assertIsNone(payment.performed_datetime, 'Performed time was set for wrong form data')
cbee7ebade224632f346f7046c2c61a77b19ae56 | 9,746 | py | Python | pyaz/appconfig/feature/filter/__init__.py | py-az-cli/py-az-cli | 9a7dc44e360c096a5a2f15595353e9dad88a9792 | [
"MIT"
] | null | null | null | pyaz/appconfig/feature/filter/__init__.py | py-az-cli/py-az-cli | 9a7dc44e360c096a5a2f15595353e9dad88a9792 | [
"MIT"
] | null | null | null | pyaz/appconfig/feature/filter/__init__.py | py-az-cli/py-az-cli | 9a7dc44e360c096a5a2f15595353e9dad88a9792 | [
"MIT"
] | 1 | 2022-02-03T09:12:01.000Z | 2022-02-03T09:12:01.000Z | '''
Manage filters associated with feature flags stored in an App Configuration.
'''
from .... pyaz_utils import _call_az
def add(filter_name, auth_mode=None, connection_string=None, endpoint=None, feature=None, filter_parameters=None, index=None, key=None, label=None, name=None, yes=None):
    '''
    Add a filter to a feature flag.

    Required Parameters:
    - filter_name -- Name of the filter to be added.

    Optional Parameters:
    - auth_mode -- Authorization mode: "key" (connection string or store name + account access keys) or "login" ("az login" credentials with store endpoint or name). See https://docs.microsoft.com/azure/azure-app-configuration/concept-enable-rbac
    - connection_string -- App Configuration connection string (access key + endpoint); obtainable via 'az appconfig credential list'.
    - endpoint -- Store endpoint URL, used when auth mode is "login".
    - feature -- Feature to add the filter to; use `key` instead if the flag key differs from the default.
    - filter_parameters -- Space-separated 'name[=value]' filter parameters; each value must be an escaped JSON string.
    - index -- Zero-based position at which to insert the filter; appended to the end when omitted or invalid.
    - key -- Feature flag key (must start with ".appconfig.featureflag/", no "%"); takes precedence over `feature`.
    - label -- Feature flag label; defaults to the null label.
    - name -- Name of the App Configuration store.
    - yes -- Do not prompt for confirmation.
    '''
    # locals() forwards every parameter by name to the CLI bridge, so no
    # extra local variables may be introduced before this call.
    return _call_az("az appconfig feature filter add", locals())
def delete(all=None, auth_mode=None, connection_string=None, endpoint=None, feature=None, filter_name=None, index=None, key=None, label=None, name=None, yes=None):
    '''
    Delete a filter from a feature flag.

    Optional Parameters:
    - all -- Delete all filters associated with the feature flag.
    - auth_mode -- Authorization mode: "key" (connection string or store name + account access keys) or "login" ("az login" credentials with store endpoint or name). See https://docs.microsoft.com/azure/azure-app-configuration/concept-enable-rbac
    - connection_string -- App Configuration connection string (access key + endpoint); obtainable via 'az appconfig credential list'.
    - endpoint -- Store endpoint URL, used when auth mode is "login".
    - feature -- Feature to delete the filter from; use `key` instead if the flag key differs from the default.
    - filter_name -- Name of the filter to be deleted.
    - index -- Zero-based index of the filter to delete when multiple instances share the same name.
    - key -- Feature flag key (must start with ".appconfig.featureflag/", no "%"); takes precedence over `feature`.
    - label -- Feature flag label; defaults to the null label.
    - name -- Name of the App Configuration store.
    - yes -- Do not prompt for confirmation.
    '''
    # locals() forwards every parameter by name to the CLI bridge, so no
    # extra local variables may be introduced before this call.
    return _call_az("az appconfig feature filter delete", locals())
def show(filter_name, auth_mode=None, connection_string=None, endpoint=None, feature=None, index=None, key=None, label=None, name=None):
    '''
    Show filters of a feature flag.

    Required Parameters:
    - filter_name -- Name of the filter to be displayed.

    Optional Parameters:
    - auth_mode -- Authorization mode: "key" (connection string or store name + account access keys) or "login" ("az login" credentials with store endpoint or name). See https://docs.microsoft.com/azure/azure-app-configuration/concept-enable-rbac
    - connection_string -- App Configuration connection string (access key + endpoint); obtainable via 'az appconfig credential list'.
    - endpoint -- Store endpoint URL, used when auth mode is "login".
    - feature -- Feature containing the filter; use `key` instead if the flag key differs from the default.
    - index -- Zero-based index of the filter to display when multiple instances share the same name.
    - key -- Feature flag key (must start with ".appconfig.featureflag/", no "%"); takes precedence over `feature`.
    - label -- Feature flag label; defaults to the null label.
    - name -- Name of the App Configuration store.
    '''
    # locals() forwards every parameter by name to the CLI bridge, so no
    # extra local variables may be introduced before this call.
    return _call_az("az appconfig feature filter show", locals())
def list(all=None, auth_mode=None, connection_string=None, endpoint=None, feature=None, key=None, label=None, name=None, top=None):
    '''
    List all filters for a feature flag.

    Optional Parameters:
    - all -- List all filters associated with the feature flag.
    - auth_mode -- Authorization mode: "key" (connection string or store name + account access keys) or "login" ("az login" credentials with store endpoint or name). See https://docs.microsoft.com/azure/azure-app-configuration/concept-enable-rbac
    - connection_string -- App Configuration connection string (access key + endpoint); obtainable via 'az appconfig credential list'.
    - endpoint -- Store endpoint URL, used when auth mode is "login".
    - feature -- Feature whose filters should be displayed; use `key` instead if the flag key differs from the default.
    - key -- Feature flag key (must start with ".appconfig.featureflag/", no "%"); takes precedence over `feature`.
    - label -- Feature flag label; defaults to the null label.
    - name -- Name of the App Configuration store.
    - top -- Maximum number of items to return (positive integer, default 100).
    '''
    # NOTE: this function intentionally shadows the builtin `list` to mirror
    # the CLI subcommand name. locals() forwards every parameter by name to
    # the CLI bridge, so no extra local variables may be introduced here.
    return _call_az("az appconfig feature filter list", locals())
| 114.658824 | 577 | 0.762774 | 1,459 | 9,746 | 5.038382 | 0.106237 | 0.034825 | 0.034825 | 0.052238 | 0.9094 | 0.9094 | 0.896069 | 0.883009 | 0.87988 | 0.866141 | 0 | 0.00037 | 0.16694 | 9,746 | 84 | 578 | 116.02381 | 0.905038 | 0.878309 | 0 | 0 | 0 | 0 | 0.138264 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.444444 | false | 0 | 0.111111 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
cbfaad74a38c8e25a30ae54332cbd7b0005b197e | 2,197 | py | Python | storyscript_mathparse/nodes.py | MayuSakurai/StoryScript | 237002caa78bd36b35b3819eb36df53755e532f5 | [
"MIT"
] | 17 | 2021-06-30T02:55:43.000Z | 2022-03-25T07:29:50.000Z | storyscript_mathparse/nodes.py | MayuSakurai/StoryScript | 237002caa78bd36b35b3819eb36df53755e532f5 | [
"MIT"
] | 19 | 2021-06-16T14:34:18.000Z | 2021-12-05T23:28:15.000Z | storyscript_mathparse/nodes.py | MayuSakurai/StoryScript | 237002caa78bd36b35b3819eb36df53755e532f5 | [
"MIT"
] | 5 | 2021-06-29T10:08:23.000Z | 2022-01-01T21:18:17.000Z | from dataclasses import dataclass
@dataclass
class NumberNode:
    """Leaf AST node holding a numeric literal; prints as the bare number."""
    value: float

    def __repr__(self):
        return "{}".format(self.value)
@dataclass
class StringNode:
    """Leaf AST node holding a string literal; prints wrapped in double quotes."""
    value: str

    def __repr__(self):
        return '"%s"' % (self.value,)
@dataclass
class AddNode:
    """Binary addition AST node; prints as ``(a+b)``."""
    node_a: any
    node_b: any

    def __repr__(self):
        return "({0}+{1})".format(self.node_a, self.node_b)
@dataclass
class SubtractNode:
    """Binary subtraction AST node; prints as ``(a-b)``."""
    node_a: any
    node_b: any

    def __repr__(self):
        return "({0}-{1})".format(self.node_a, self.node_b)
@dataclass
class MultiplyNode:
    """Binary multiplication AST node; prints as ``(a*b)``."""
    node_a: any
    node_b: any

    def __repr__(self):
        return "({0}*{1})".format(self.node_a, self.node_b)
@dataclass
class DivideNode:
    """Binary division AST node; prints as ``(a/b)``."""
    node_a: any
    node_b: any

    def __repr__(self):
        return "({0}/{1})".format(self.node_a, self.node_b)
@dataclass
class ModuloNode:
    """Binary modulo AST node; prints as ``(a%b)``."""
    node_a: any
    node_b: any

    def __repr__(self):
        return "({0}%{1})".format(self.node_a, self.node_b)
@dataclass
class PowerNode:
    """Binary exponentiation AST node; prints as ``(a**b)``."""
    node_a: any
    node_b: any

    def __repr__(self):
        return "({0}**{1})".format(self.node_a, self.node_b)
@dataclass
class PlusNode:
    """Unary plus AST node; prints as ``(+a)``."""
    node: any

    def __repr__(self):
        return "(+%s)" % (self.node,)
@dataclass
class MinusNode:
    """Unary negation AST node; prints as ``(-a)``."""
    node: any

    def __repr__(self):
        return "(-%s)" % (self.node,)
@dataclass
class BWLeftShiftNode:
    """Bitwise left-shift AST node; prints as ``(a << b)``."""
    node_a: any
    node_b: any

    def __repr__(self):
        return "({0} << {1})".format(self.node_a, self.node_b)
@dataclass
class BWRightShiftNode:
    """Bitwise right-shift AST node; prints as ``(a >> b)``."""
    node_a: any
    node_b: any

    def __repr__(self):
        return "({0} >> {1})".format(self.node_a, self.node_b)
@dataclass
class BWOrNode:
    """Bitwise OR AST node; prints as ``(a | b)``."""
    node_a: any
    node_b: any

    def __repr__(self):
        return "({0} | {1})".format(self.node_a, self.node_b)
@dataclass
class BWAndNode:
    """Bitwise AND AST node; prints as ``(a & b)``."""
    node_a: any
    node_b: any

    def __repr__(self):
        return "({0} & {1})".format(self.node_a, self.node_b)
@dataclass
class BWXorNode:
    """Bitwise XOR AST node; prints as ``(a ^ b)``."""
    node_a: any
    node_b: any

    def __repr__(self):
        return "({0} ^ {1})".format(self.node_a, self.node_b)
@dataclass
class BWNotNode:
    """Bitwise NOT (unary) AST node; prints as ``(~a)``."""
    node: any

    def __repr__(self):
        return "(~%s)" % (self.node,)
| 15.58156 | 51 | 0.621757 | 304 | 2,197 | 4.138158 | 0.134868 | 0.158983 | 0.139905 | 0.216216 | 0.719396 | 0.719396 | 0.701908 | 0.701908 | 0.701908 | 0.675676 | 0 | 0 | 0.254893 | 2,197 | 140 | 52 | 15.692857 | 0.768479 | 0.052344 | 0 | 0.619565 | 0 | 0 | 0.186988 | 0.084337 | 0 | 0 | 0 | 0 | 0 | 1 | 0.173913 | false | 0 | 0.01087 | 0.173913 | 0.826087 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 8 |
cbfc7452f297ff3d4562741e5a19c9bf83b8e7e8 | 33,277 | py | Python | sdk/python/pulumi_openstack/vpnaas/ike_policy.py | pulumi/pulumi-openstack | 945eed22a82784e9f0b3aa56168b2397c2f503e8 | [
"ECL-2.0",
"Apache-2.0"
] | 34 | 2018-09-12T12:37:51.000Z | 2022-02-04T19:32:13.000Z | sdk/python/pulumi_openstack/vpnaas/ike_policy.py | pulumi/pulumi-openstack | 945eed22a82784e9f0b3aa56168b2397c2f503e8 | [
"ECL-2.0",
"Apache-2.0"
] | 72 | 2018-08-15T13:04:57.000Z | 2022-03-31T15:39:49.000Z | sdk/python/pulumi_openstack/vpnaas/ike_policy.py | pulumi/pulumi-openstack | 945eed22a82784e9f0b3aa56168b2397c2f503e8 | [
"ECL-2.0",
"Apache-2.0"
] | 7 | 2019-03-14T08:28:49.000Z | 2021-12-29T04:23:55.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['IkePolicyArgs', 'IkePolicy']
@pulumi.input_type
class IkePolicyArgs:
def __init__(__self__, *,
auth_algorithm: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
encryption_algorithm: Optional[pulumi.Input[str]] = None,
ike_version: Optional[pulumi.Input[str]] = None,
lifetimes: Optional[pulumi.Input[Sequence[pulumi.Input['IkePolicyLifetimeArgs']]]] = None,
name: Optional[pulumi.Input[str]] = None,
pfs: Optional[pulumi.Input[str]] = None,
phase1_negotiation_mode: Optional[pulumi.Input[str]] = None,
region: Optional[pulumi.Input[str]] = None,
tenant_id: Optional[pulumi.Input[str]] = None,
value_specs: Optional[pulumi.Input[Mapping[str, Any]]] = None):
"""
The set of arguments for constructing a IkePolicy resource.
:param pulumi.Input[str] auth_algorithm: The authentication hash algorithm. Valid values are sha1, sha256, sha384, sha512.
Default is sha1. Changing this updates the algorithm of the existing policy.
:param pulumi.Input[str] description: The human-readable description for the policy.
Changing this updates the description of the existing policy.
:param pulumi.Input[str] encryption_algorithm: The encryption algorithm. Valid values are 3des, aes-128, aes-192 and so on.
The default value is aes-128. Changing this updates the existing policy.
:param pulumi.Input[str] ike_version: The IKE mode. A valid value is v1 or v2. Default is v1.
Changing this updates the existing policy.
:param pulumi.Input[Sequence[pulumi.Input['IkePolicyLifetimeArgs']]] lifetimes: The lifetime of the security association. Consists of Unit and Value.
:param pulumi.Input[str] name: The name of the policy. Changing this updates the name of
the existing policy.
:param pulumi.Input[str] pfs: The perfect forward secrecy mode. Valid values are Group2, Group5 and Group14. Default is Group5.
Changing this updates the existing policy.
:param pulumi.Input[str] phase1_negotiation_mode: The IKE mode. A valid value is main, which is the default.
Changing this updates the existing policy.
:param pulumi.Input[str] region: The region in which to obtain the V2 Networking client.
A Networking client is needed to create a VPN service. If omitted, the
`region` argument of the provider is used. Changing this creates a new
service.
:param pulumi.Input[str] tenant_id: The owner of the policy. Required if admin wants to
create a service for another policy. Changing this creates a new policy.
:param pulumi.Input[Mapping[str, Any]] value_specs: Map of additional options.
"""
if auth_algorithm is not None:
pulumi.set(__self__, "auth_algorithm", auth_algorithm)
if description is not None:
pulumi.set(__self__, "description", description)
if encryption_algorithm is not None:
pulumi.set(__self__, "encryption_algorithm", encryption_algorithm)
if ike_version is not None:
pulumi.set(__self__, "ike_version", ike_version)
if lifetimes is not None:
pulumi.set(__self__, "lifetimes", lifetimes)
if name is not None:
pulumi.set(__self__, "name", name)
if pfs is not None:
pulumi.set(__self__, "pfs", pfs)
if phase1_negotiation_mode is not None:
pulumi.set(__self__, "phase1_negotiation_mode", phase1_negotiation_mode)
if region is not None:
pulumi.set(__self__, "region", region)
if tenant_id is not None:
pulumi.set(__self__, "tenant_id", tenant_id)
if value_specs is not None:
pulumi.set(__self__, "value_specs", value_specs)
@property
@pulumi.getter(name="authAlgorithm")
def auth_algorithm(self) -> Optional[pulumi.Input[str]]:
"""
The authentication hash algorithm. Valid values are sha1, sha256, sha384, sha512.
Default is sha1. Changing this updates the algorithm of the existing policy.
"""
return pulumi.get(self, "auth_algorithm")
@auth_algorithm.setter
def auth_algorithm(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "auth_algorithm", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
The human-readable description for the policy.
Changing this updates the description of the existing policy.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter(name="encryptionAlgorithm")
def encryption_algorithm(self) -> Optional[pulumi.Input[str]]:
"""
The encryption algorithm. Valid values are 3des, aes-128, aes-192 and so on.
The default value is aes-128. Changing this updates the existing policy.
"""
return pulumi.get(self, "encryption_algorithm")
@encryption_algorithm.setter
def encryption_algorithm(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "encryption_algorithm", value)
@property
@pulumi.getter(name="ikeVersion")
def ike_version(self) -> Optional[pulumi.Input[str]]:
"""
The IKE mode. A valid value is v1 or v2. Default is v1.
Changing this updates the existing policy.
"""
return pulumi.get(self, "ike_version")
@ike_version.setter
def ike_version(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "ike_version", value)
@property
@pulumi.getter
def lifetimes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['IkePolicyLifetimeArgs']]]]:
"""
The lifetime of the security association. Consists of Unit and Value.
"""
return pulumi.get(self, "lifetimes")
@lifetimes.setter
def lifetimes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['IkePolicyLifetimeArgs']]]]):
pulumi.set(self, "lifetimes", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the policy. Changing this updates the name of
the existing policy.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def pfs(self) -> Optional[pulumi.Input[str]]:
"""
The perfect forward secrecy mode. Valid values are Group2, Group5 and Group14. Default is Group5.
Changing this updates the existing policy.
"""
return pulumi.get(self, "pfs")
@pfs.setter
def pfs(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "pfs", value)
@property
@pulumi.getter(name="phase1NegotiationMode")
def phase1_negotiation_mode(self) -> Optional[pulumi.Input[str]]:
"""
The IKE mode. A valid value is main, which is the default.
Changing this updates the existing policy.
"""
return pulumi.get(self, "phase1_negotiation_mode")
@phase1_negotiation_mode.setter
def phase1_negotiation_mode(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "phase1_negotiation_mode", value)
@property
@pulumi.getter
def region(self) -> Optional[pulumi.Input[str]]:
"""
The region in which to obtain the V2 Networking client.
A Networking client is needed to create a VPN service. If omitted, the
`region` argument of the provider is used. Changing this creates a new
service.
"""
return pulumi.get(self, "region")
@region.setter
def region(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "region", value)
@property
@pulumi.getter(name="tenantId")
def tenant_id(self) -> Optional[pulumi.Input[str]]:
"""
The owner of the policy. Required if admin wants to
create a service for another policy. Changing this creates a new policy.
"""
return pulumi.get(self, "tenant_id")
@tenant_id.setter
def tenant_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "tenant_id", value)
@property
@pulumi.getter(name="valueSpecs")
def value_specs(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
Map of additional options.
"""
return pulumi.get(self, "value_specs")
@value_specs.setter
def value_specs(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "value_specs", value)
@pulumi.input_type
class _IkePolicyState:
def __init__(__self__, *,
auth_algorithm: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
encryption_algorithm: Optional[pulumi.Input[str]] = None,
ike_version: Optional[pulumi.Input[str]] = None,
lifetimes: Optional[pulumi.Input[Sequence[pulumi.Input['IkePolicyLifetimeArgs']]]] = None,
name: Optional[pulumi.Input[str]] = None,
pfs: Optional[pulumi.Input[str]] = None,
phase1_negotiation_mode: Optional[pulumi.Input[str]] = None,
region: Optional[pulumi.Input[str]] = None,
tenant_id: Optional[pulumi.Input[str]] = None,
value_specs: Optional[pulumi.Input[Mapping[str, Any]]] = None):
"""
Input properties used for looking up and filtering IkePolicy resources.
:param pulumi.Input[str] auth_algorithm: The authentication hash algorithm. Valid values are sha1, sha256, sha384, sha512.
Default is sha1. Changing this updates the algorithm of the existing policy.
:param pulumi.Input[str] description: The human-readable description for the policy.
Changing this updates the description of the existing policy.
:param pulumi.Input[str] encryption_algorithm: The encryption algorithm. Valid values are 3des, aes-128, aes-192 and so on.
The default value is aes-128. Changing this updates the existing policy.
:param pulumi.Input[str] ike_version: The IKE mode. A valid value is v1 or v2. Default is v1.
Changing this updates the existing policy.
:param pulumi.Input[Sequence[pulumi.Input['IkePolicyLifetimeArgs']]] lifetimes: The lifetime of the security association. Consists of Unit and Value.
:param pulumi.Input[str] name: The name of the policy. Changing this updates the name of
the existing policy.
:param pulumi.Input[str] pfs: The perfect forward secrecy mode. Valid values are Group2, Group5 and Group14. Default is Group5.
Changing this updates the existing policy.
:param pulumi.Input[str] phase1_negotiation_mode: The IKE mode. A valid value is main, which is the default.
Changing this updates the existing policy.
:param pulumi.Input[str] region: The region in which to obtain the V2 Networking client.
A Networking client is needed to create a VPN service. If omitted, the
`region` argument of the provider is used. Changing this creates a new
service.
:param pulumi.Input[str] tenant_id: The owner of the policy. Required if admin wants to
create a service for another policy. Changing this creates a new policy.
:param pulumi.Input[Mapping[str, Any]] value_specs: Map of additional options.
"""
if auth_algorithm is not None:
pulumi.set(__self__, "auth_algorithm", auth_algorithm)
if description is not None:
pulumi.set(__self__, "description", description)
if encryption_algorithm is not None:
pulumi.set(__self__, "encryption_algorithm", encryption_algorithm)
if ike_version is not None:
pulumi.set(__self__, "ike_version", ike_version)
if lifetimes is not None:
pulumi.set(__self__, "lifetimes", lifetimes)
if name is not None:
pulumi.set(__self__, "name", name)
if pfs is not None:
pulumi.set(__self__, "pfs", pfs)
if phase1_negotiation_mode is not None:
pulumi.set(__self__, "phase1_negotiation_mode", phase1_negotiation_mode)
if region is not None:
pulumi.set(__self__, "region", region)
if tenant_id is not None:
pulumi.set(__self__, "tenant_id", tenant_id)
if value_specs is not None:
pulumi.set(__self__, "value_specs", value_specs)
@property
@pulumi.getter(name="authAlgorithm")
def auth_algorithm(self) -> Optional[pulumi.Input[str]]:
"""
The authentication hash algorithm. Valid values are sha1, sha256, sha384, sha512.
Default is sha1. Changing this updates the algorithm of the existing policy.
"""
return pulumi.get(self, "auth_algorithm")
@auth_algorithm.setter
def auth_algorithm(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "auth_algorithm", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
The human-readable description for the policy.
Changing this updates the description of the existing policy.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter(name="encryptionAlgorithm")
def encryption_algorithm(self) -> Optional[pulumi.Input[str]]:
"""
The encryption algorithm. Valid values are 3des, aes-128, aes-192 and so on.
The default value is aes-128. Changing this updates the existing policy.
"""
return pulumi.get(self, "encryption_algorithm")
@encryption_algorithm.setter
def encryption_algorithm(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "encryption_algorithm", value)
@property
@pulumi.getter(name="ikeVersion")
def ike_version(self) -> Optional[pulumi.Input[str]]:
"""
The IKE mode. A valid value is v1 or v2. Default is v1.
Changing this updates the existing policy.
"""
return pulumi.get(self, "ike_version")
@ike_version.setter
def ike_version(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "ike_version", value)
@property
@pulumi.getter
def lifetimes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['IkePolicyLifetimeArgs']]]]:
"""
The lifetime of the security association. Consists of Unit and Value.
"""
return pulumi.get(self, "lifetimes")
@lifetimes.setter
def lifetimes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['IkePolicyLifetimeArgs']]]]):
pulumi.set(self, "lifetimes", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the policy. Changing this updates the name of
the existing policy.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def pfs(self) -> Optional[pulumi.Input[str]]:
"""
The perfect forward secrecy mode. Valid values are Group2, Group5 and Group14. Default is Group5.
Changing this updates the existing policy.
"""
return pulumi.get(self, "pfs")
@pfs.setter
def pfs(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "pfs", value)
@property
@pulumi.getter(name="phase1NegotiationMode")
def phase1_negotiation_mode(self) -> Optional[pulumi.Input[str]]:
"""
The IKE mode. A valid value is main, which is the default.
Changing this updates the existing policy.
"""
return pulumi.get(self, "phase1_negotiation_mode")
@phase1_negotiation_mode.setter
def phase1_negotiation_mode(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "phase1_negotiation_mode", value)
@property
@pulumi.getter
def region(self) -> Optional[pulumi.Input[str]]:
"""
The region in which to obtain the V2 Networking client.
A Networking client is needed to create a VPN service. If omitted, the
`region` argument of the provider is used. Changing this creates a new
service.
"""
return pulumi.get(self, "region")
@region.setter
def region(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "region", value)
@property
@pulumi.getter(name="tenantId")
def tenant_id(self) -> Optional[pulumi.Input[str]]:
"""
The owner of the policy. Required if admin wants to
create a service for another policy. Changing this creates a new policy.
"""
return pulumi.get(self, "tenant_id")
@tenant_id.setter
def tenant_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "tenant_id", value)
@property
@pulumi.getter(name="valueSpecs")
def value_specs(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
Map of additional options.
"""
return pulumi.get(self, "value_specs")
@value_specs.setter
def value_specs(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "value_specs", value)
class IkePolicy(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
auth_algorithm: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
encryption_algorithm: Optional[pulumi.Input[str]] = None,
ike_version: Optional[pulumi.Input[str]] = None,
lifetimes: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['IkePolicyLifetimeArgs']]]]] = None,
name: Optional[pulumi.Input[str]] = None,
pfs: Optional[pulumi.Input[str]] = None,
phase1_negotiation_mode: Optional[pulumi.Input[str]] = None,
region: Optional[pulumi.Input[str]] = None,
tenant_id: Optional[pulumi.Input[str]] = None,
value_specs: Optional[pulumi.Input[Mapping[str, Any]]] = None,
__props__=None):
"""
Manages a V2 Neutron IKE policy resource within OpenStack.
## Example Usage
```python
import pulumi
import pulumi_openstack as openstack
policy1 = openstack.vpnaas.IkePolicy("policy1")
```
## Import
Services can be imported using the `id`, e.g.
```sh
$ pulumi import openstack:vpnaas/ikePolicy:IkePolicy policy_1 832cb7f3-59fe-40cf-8f64-8350ffc03272
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] auth_algorithm: The authentication hash algorithm. Valid values are sha1, sha256, sha384, sha512.
Default is sha1. Changing this updates the algorithm of the existing policy.
:param pulumi.Input[str] description: The human-readable description for the policy.
Changing this updates the description of the existing policy.
:param pulumi.Input[str] encryption_algorithm: The encryption algorithm. Valid values are 3des, aes-128, aes-192 and so on.
The default value is aes-128. Changing this updates the existing policy.
:param pulumi.Input[str] ike_version: The IKE mode. A valid value is v1 or v2. Default is v1.
Changing this updates the existing policy.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['IkePolicyLifetimeArgs']]]] lifetimes: The lifetime of the security association. Consists of Unit and Value.
:param pulumi.Input[str] name: The name of the policy. Changing this updates the name of
the existing policy.
:param pulumi.Input[str] pfs: The perfect forward secrecy mode. Valid values are Group2, Group5 and Group14. Default is Group5.
Changing this updates the existing policy.
:param pulumi.Input[str] phase1_negotiation_mode: The IKE mode. A valid value is main, which is the default.
Changing this updates the existing policy.
:param pulumi.Input[str] region: The region in which to obtain the V2 Networking client.
A Networking client is needed to create a VPN service. If omitted, the
`region` argument of the provider is used. Changing this creates a new
service.
:param pulumi.Input[str] tenant_id: The owner of the policy. Required if admin wants to
create a service for another policy. Changing this creates a new policy.
:param pulumi.Input[Mapping[str, Any]] value_specs: Map of additional options.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: Optional[IkePolicyArgs] = None,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Manages a V2 Neutron IKE policy resource within OpenStack.
## Example Usage
```python
import pulumi
import pulumi_openstack as openstack
policy1 = openstack.vpnaas.IkePolicy("policy1")
```
## Import
Services can be imported using the `id`, e.g.
```sh
$ pulumi import openstack:vpnaas/ikePolicy:IkePolicy policy_1 832cb7f3-59fe-40cf-8f64-8350ffc03272
```
:param str resource_name: The name of the resource.
:param IkePolicyArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(IkePolicyArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
auth_algorithm: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
encryption_algorithm: Optional[pulumi.Input[str]] = None,
ike_version: Optional[pulumi.Input[str]] = None,
lifetimes: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['IkePolicyLifetimeArgs']]]]] = None,
name: Optional[pulumi.Input[str]] = None,
pfs: Optional[pulumi.Input[str]] = None,
phase1_negotiation_mode: Optional[pulumi.Input[str]] = None,
region: Optional[pulumi.Input[str]] = None,
tenant_id: Optional[pulumi.Input[str]] = None,
value_specs: Optional[pulumi.Input[Mapping[str, Any]]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = IkePolicyArgs.__new__(IkePolicyArgs)
__props__.__dict__["auth_algorithm"] = auth_algorithm
__props__.__dict__["description"] = description
__props__.__dict__["encryption_algorithm"] = encryption_algorithm
__props__.__dict__["ike_version"] = ike_version
__props__.__dict__["lifetimes"] = lifetimes
__props__.__dict__["name"] = name
__props__.__dict__["pfs"] = pfs
__props__.__dict__["phase1_negotiation_mode"] = phase1_negotiation_mode
__props__.__dict__["region"] = region
__props__.__dict__["tenant_id"] = tenant_id
__props__.__dict__["value_specs"] = value_specs
super(IkePolicy, __self__).__init__(
'openstack:vpnaas/ikePolicy:IkePolicy',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
auth_algorithm: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
encryption_algorithm: Optional[pulumi.Input[str]] = None,
ike_version: Optional[pulumi.Input[str]] = None,
lifetimes: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['IkePolicyLifetimeArgs']]]]] = None,
name: Optional[pulumi.Input[str]] = None,
pfs: Optional[pulumi.Input[str]] = None,
phase1_negotiation_mode: Optional[pulumi.Input[str]] = None,
region: Optional[pulumi.Input[str]] = None,
tenant_id: Optional[pulumi.Input[str]] = None,
value_specs: Optional[pulumi.Input[Mapping[str, Any]]] = None) -> 'IkePolicy':
"""
Get an existing IkePolicy resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] auth_algorithm: The authentication hash algorithm. Valid values are sha1, sha256, sha384, sha512.
Default is sha1. Changing this updates the algorithm of the existing policy.
:param pulumi.Input[str] description: The human-readable description for the policy.
Changing this updates the description of the existing policy.
:param pulumi.Input[str] encryption_algorithm: The encryption algorithm. Valid values are 3des, aes-128, aes-192 and so on.
The default value is aes-128. Changing this updates the existing policy.
:param pulumi.Input[str] ike_version: The IKE mode. A valid value is v1 or v2. Default is v1.
Changing this updates the existing policy.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['IkePolicyLifetimeArgs']]]] lifetimes: The lifetime of the security association. Consists of Unit and Value.
:param pulumi.Input[str] name: The name of the policy. Changing this updates the name of
the existing policy.
:param pulumi.Input[str] pfs: The perfect forward secrecy mode. Valid values are Group2, Group5 and Group14. Default is Group5.
Changing this updates the existing policy.
:param pulumi.Input[str] phase1_negotiation_mode: The IKE mode. A valid value is main, which is the default.
Changing this updates the existing policy.
:param pulumi.Input[str] region: The region in which to obtain the V2 Networking client.
A Networking client is needed to create a VPN service. If omitted, the
`region` argument of the provider is used. Changing this creates a new
service.
:param pulumi.Input[str] tenant_id: The owner of the policy. Required if admin wants to
create a service for another policy. Changing this creates a new policy.
:param pulumi.Input[Mapping[str, Any]] value_specs: Map of additional options.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _IkePolicyState.__new__(_IkePolicyState)
__props__.__dict__["auth_algorithm"] = auth_algorithm
__props__.__dict__["description"] = description
__props__.__dict__["encryption_algorithm"] = encryption_algorithm
__props__.__dict__["ike_version"] = ike_version
__props__.__dict__["lifetimes"] = lifetimes
__props__.__dict__["name"] = name
__props__.__dict__["pfs"] = pfs
__props__.__dict__["phase1_negotiation_mode"] = phase1_negotiation_mode
__props__.__dict__["region"] = region
__props__.__dict__["tenant_id"] = tenant_id
__props__.__dict__["value_specs"] = value_specs
return IkePolicy(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="authAlgorithm")
def auth_algorithm(self) -> pulumi.Output[Optional[str]]:
"""
The authentication hash algorithm. Valid values are sha1, sha256, sha384, sha512.
Default is sha1. Changing this updates the algorithm of the existing policy.
"""
return pulumi.get(self, "auth_algorithm")
@property
@pulumi.getter
def description(self) -> pulumi.Output[Optional[str]]:
"""
The human-readable description for the policy.
Changing this updates the description of the existing policy.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter(name="encryptionAlgorithm")
def encryption_algorithm(self) -> pulumi.Output[Optional[str]]:
"""
The encryption algorithm. Valid values are 3des, aes-128, aes-192 and so on.
The default value is aes-128. Changing this updates the existing policy.
"""
return pulumi.get(self, "encryption_algorithm")
@property
@pulumi.getter(name="ikeVersion")
def ike_version(self) -> pulumi.Output[Optional[str]]:
"""
The IKE mode. A valid value is v1 or v2. Default is v1.
Changing this updates the existing policy.
"""
return pulumi.get(self, "ike_version")
@property
@pulumi.getter
def lifetimes(self) -> pulumi.Output[Sequence['outputs.IkePolicyLifetime']]:
"""
The lifetime of the security association. Consists of Unit and Value.
"""
return pulumi.get(self, "lifetimes")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name of the policy. Changing this updates the name of
the existing policy.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def pfs(self) -> pulumi.Output[Optional[str]]:
"""
The perfect forward secrecy mode. Valid values are Group2, Group5 and Group14. Default is Group5.
Changing this updates the existing policy.
"""
return pulumi.get(self, "pfs")
@property
@pulumi.getter(name="phase1NegotiationMode")
def phase1_negotiation_mode(self) -> pulumi.Output[Optional[str]]:
"""
The IKE mode. A valid value is main, which is the default.
Changing this updates the existing policy.
"""
return pulumi.get(self, "phase1_negotiation_mode")
@property
@pulumi.getter
def region(self) -> pulumi.Output[str]:
"""
The region in which to obtain the V2 Networking client.
A Networking client is needed to create a VPN service. If omitted, the
`region` argument of the provider is used. Changing this creates a new
service.
"""
return pulumi.get(self, "region")
@property
@pulumi.getter(name="tenantId")
def tenant_id(self) -> pulumi.Output[str]:
"""
The owner of the policy. Required if admin wants to
create a service for another policy. Changing this creates a new policy.
"""
return pulumi.get(self, "tenant_id")
@property
@pulumi.getter(name="valueSpecs")
def value_specs(self) -> pulumi.Output[Optional[Mapping[str, Any]]]:
"""
Map of additional options.
"""
return pulumi.get(self, "value_specs")
| 45.522572 | 175 | 0.648045 | 3,979 | 33,277 | 5.261121 | 0.054788 | 0.084074 | 0.079583 | 0.085125 | 0.922996 | 0.914636 | 0.9047 | 0.899016 | 0.89658 | 0.883634 | 0 | 0.011934 | 0.257145 | 33,277 | 730 | 176 | 45.584932 | 0.834911 | 0.384981 | 0 | 0.859788 | 1 | 0 | 0.096342 | 0.028304 | 0 | 0 | 0 | 0 | 0 | 1 | 0.164021 | false | 0.002646 | 0.018519 | 0 | 0.280423 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
0220c72db1964e462d1e13ae7c3e0aae4c94ede9 | 2,885 | py | Python | crispy_bulma/bulma.py | ckrybus/django-crispy-bulma | 8f7f6fb47b5bc896646c678f9f05c3bf75fe3434 | [
"MIT"
] | null | null | null | crispy_bulma/bulma.py | ckrybus/django-crispy-bulma | 8f7f6fb47b5bc896646c678f9f05c3bf75fe3434 | [
"MIT"
] | null | null | null | crispy_bulma/bulma.py | ckrybus/django-crispy-bulma | 8f7f6fb47b5bc896646c678f9f05c3bf75fe3434 | [
"MIT"
] | null | null | null | from crispy_forms.layout import Field
class InlineRadios(Field):
"""
Layout object for rendering radiobuttons inline.
Attributes
----------
template : str
The default template which this Layout Object will be rendered
with.
attrs : dict
Attributes to be applied to the field. These are converted into html
attributes. e.g. ``data_id: 'test'`` in the attrs dict will become
``data-id='test'`` on the field's ``<input>``.
Parameters
----------
*fields : str
Usually a single field, but can be any number of fields, to be rendered
with the same attributes applied.
css_class : str, optional
CSS classes to be applied to the field. These are added to any classes
included in the ``attrs`` dict. By default ``None``.
wrapper_class: str, optional
CSS classes to be used when rendering the Field. This class is usually
applied to the ``<div>`` which wraps the Field's ``<label>`` and
``<input>`` tags. By default ``None``.
template : str, optional
Overrides the default template, if provided. By default ``None``.
**kwargs : dict, optional
Additional attributes are converted into key="value", pairs. These
attributes are added to the ``<div>``.
Examples
--------
Example::
InlineRadios('field_name')
"""
template = "%s/layout/radioselect_inline.html"
class InlineCheckboxes(Field):
"""
Layout object for rendering checkboxes inline.
Attributes
----------
template : str
The default template which this Layout Object will be rendered
with.
attrs : dict
Attributes to be applied to the field. These are converted into html
attributes. e.g. ``data_id: 'test'`` in the attrs dict will become
``data-id='test'`` on the field's ``<input>``.
Parameters
----------
*fields : str
Usually a single field, but can be any number of fields, to be rendered
with the same attributes applied.
css_class : str, optional
CSS classes to be applied to the field. These are added to any classes
included in the ``attrs`` dict. By default ``None``.
wrapper_class: str, optional
CSS classes to be used when rendering the Field. This class is usually
applied to the ``<div>`` which wraps the Field's ``<label>`` and
``<input>`` tags. By default ``None``.
template : str, optional
Overrides the default template, if provided. By default ``None``.
**kwargs : dict, optional
Additional attributes are converted into key="value", pairs. These
attributes are added to the ``<div>``.
Examples
--------
Example::
InlineCheckboxes('field_name')
"""
template = "%s/layout/checkboxselectmultiple_inline.html"
| 32.784091 | 79 | 0.627383 | 364 | 2,885 | 4.942308 | 0.233516 | 0.044469 | 0.040022 | 0.028905 | 0.903835 | 0.844914 | 0.844914 | 0.844914 | 0.844914 | 0.844914 | 0 | 0 | 0.266898 | 2,885 | 87 | 80 | 33.16092 | 0.850591 | 0.828076 | 0 | 0 | 0 | 0 | 0.342222 | 0.342222 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.2 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 8 |
0246b593c11d8e4ca1e4d7e2a25aad281b0d9e4b | 114 | py | Python | utils.py | joosthooz/arrow-benchmarks-ci | 102c1b64a31f37c5037d72b017064349e948ed8a | [
"MIT"
] | null | null | null | utils.py | joosthooz/arrow-benchmarks-ci | 102c1b64a31f37c5037d72b017064349e948ed8a | [
"MIT"
] | 6 | 2021-11-30T22:25:32.000Z | 2022-03-29T00:03:02.000Z | utils.py | joosthooz/arrow-benchmarks-ci | 102c1b64a31f37c5037d72b017064349e948ed8a | [
"MIT"
] | 4 | 2022-01-21T09:51:01.000Z | 2022-03-21T18:00:50.000Z | import uuid
def generate_uuid():
return uuid.uuid4().hex
class UnauthorizedException(Exception):
    """Raised when the caller is not authorized to perform the request."""
| 11.4 | 39 | 0.72807 | 13 | 114 | 6.307692 | 0.846154 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.010753 | 0.184211 | 114 | 9 | 40 | 12.666667 | 0.870968 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.2 | true | 0.2 | 0.2 | 0.2 | 0.8 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 1 | 0 | 0 | 7 |
025f93782e545c881d1190d4d6cec5a9bdaae871 | 12,057 | py | Python | src/pycoils/pycoils/tests/trees/test_bst_traversals.py | harisankar-krishna-swamy/coils | 2fb9606ee2df9c49db4ab67ee650ff8edc285a7e | [
"Apache-2.0"
] | 2 | 2020-12-29T18:37:07.000Z | 2021-05-11T12:48:04.000Z | src/pycoils/pycoils/tests/trees/test_bst_traversals.py | harisankar-krishna-swamy/coils | 2fb9606ee2df9c49db4ab67ee650ff8edc285a7e | [
"Apache-2.0"
] | null | null | null | src/pycoils/pycoils/tests/trees/test_bst_traversals.py | harisankar-krishna-swamy/coils | 2fb9606ee2df9c49db4ab67ee650ff8edc285a7e | [
"Apache-2.0"
] | null | null | null | '''
Created on Oct 19, 2015
@author: hari
'''
from __future__ import print_function
from unittest import TestCase
from pycoils.trees.bst import BinarySearchTree
import unittest
#
# Preorder traversal
#
class BST_Test_Preorder_Traversal_with_empty_Tree(TestCase):
    """Pre-order traversal over an empty tree must visit nothing."""
    def setUp(self):
        self._bst = BinarySearchTree()
    def test_preorder_traversal_with_empty_tree(self):
        visited = [item for _, item in self._bst.traversal(
            want_pre_order=True, want_in_order=False, want_post_order=False)]
        self.assertEqual(0, len(visited),
                         'Pre order traversal on empty tree must yield no elements')
    def tearDown(self):
        self._bst = None
class BST_Test_Preorder_Traversal_with_Single_Node_Tree(TestCase):
    """Pre-order traversal over a one-node tree yields exactly that node."""
    def setUp(self):
        self._bst = BinarySearchTree()
        self._bst.insert(key=10, obj=10)
    def test_preorder_traversal_with_single_node_tree(self):
        visited = [item for _, item in self._bst.traversal(
            want_pre_order=True, want_in_order=False, want_post_order=False)]
        self.assertEqual(1, len(visited),
                         'Pre order traversal on single node tree must yield no elements')
        self.assertEqual([10], visited,
                         'Pre order traversal did not yield expected elements in order')
    def tearDown(self):
        self._bst = None
class BST_Test_Preorder_Traversal_with_10_Node_Tree(TestCase):
    """Pre-order traversal over a fixed ten-node tree is root, left, right."""
    def setUp(self):
        self._bst = BinarySearchTree()
        # Fixed insertion order gives the tree a known, reproducible shape.
        for key in (5, 8, 7, 9, 10, 2, 1, 3, 4, 6):
            self._bst.insert(key=key, obj=key)
    def test_preorder_traversal_with_10_node_tree(self):
        visited = [item for _, item in self._bst.traversal(
            want_pre_order=True, want_in_order=False, want_post_order=False)]
        self.assertEqual(10, len(visited),
                         'Pre order traversal on 10 node tree must yield 10 elements')
        self.assertEqual([5, 2, 1, 3, 4, 8, 7, 6, 9, 10], visited,
                         'Pre order traversal did not yield expected elements in order')
    def tearDown(self):
        self._bst = None
#
# Postorder traversal
#
class BST_Test_Postorder_Traversal_with_empty_Tree(TestCase):
    """Post-order traversal over an empty tree must visit nothing."""
    def setUp(self):
        self._bst = BinarySearchTree()
    def test_postorder_traversal_with_empty_tree(self):
        visited = [item for _, item in self._bst.traversal(
            want_pre_order=False, want_in_order=False, want_post_order=True)]
        self.assertEqual(0, len(visited),
                         'Post order traversal on empty tree must yield no elements')
    def tearDown(self):
        self._bst = None
class BST_Test_Postorder_Traversal_with_Single_Node_Tree(TestCase):
    """Post-order traversal over a one-node tree yields exactly that node."""
    def setUp(self):
        self._bst = BinarySearchTree()
        self._bst.insert(key=10, obj=10)
    def test_postorder_traversal_with_single_node_tree(self):
        visited = [item for _, item in self._bst.traversal(
            want_pre_order=False, want_in_order=False, want_post_order=True)]
        self.assertEqual(1, len(visited),
                         'Post order traversal on single node tree must yield one elements')
        self.assertEqual([10], visited,
                         'Post order traversal did not yield expected elements in order')
    def tearDown(self):
        self._bst = None
class BST_Test_Postorder_Traversal_with_10_Node_Tree(TestCase):
    """Post-order traversal over a fixed ten-node tree is left, right, root."""
    def setUp(self):
        self._bst = BinarySearchTree()
        # Fixed insertion order gives the tree a known, reproducible shape.
        for key in (5, 8, 7, 9, 10, 2, 1, 3, 4, 6):
            self._bst.insert(key=key, obj=key)
    def test_postorder_traversal_with_10_node_tree(self):
        visited = [item for _, item in self._bst.traversal(
            want_pre_order=False, want_in_order=False, want_post_order=True)]
        self.assertEqual(10, len(visited),
                         'Post order traversal on 10 node tree must yield 10 elements')
        self.assertEqual([1, 4, 3, 2, 6, 7, 10, 9, 8, 5], visited,
                         'Post order traversal did not yield expected elements in order')
    def tearDown(self):
        self._bst = None
#
# Inorder traversal
#
class BST_Test_Inorder_Traversal_with_empty_Tree(TestCase):
    """In-order traversal over an empty tree must visit nothing."""
    def setUp(self):
        self._bst = BinarySearchTree()
    def test_inorder_traversal_with_empty_tree(self):
        visited = [item for _, item in self._bst.traversal(
            want_pre_order=False, want_in_order=True, want_post_order=False)]
        self.assertEqual(0, len(visited),
                         'In order traversal on empty tree must yield no elements')
    def tearDown(self):
        self._bst = None
class BST_Test_Inorder_Traversal_with_Single_Node_Tree(TestCase):
    """In-order traversal over a one-node tree yields exactly that node."""
    def setUp(self):
        self._bst = BinarySearchTree()
        self._bst.insert(key=10, obj=10)
    def test_inorder_traversal_with_single_node_tree(self):
        visited = [item for _, item in self._bst.traversal(
            want_pre_order=False, want_in_order=True, want_post_order=False)]
        self.assertEqual(1, len(visited),
                         'In order traversal on single node tree must yield one elements')
        self.assertEqual([10], visited,
                         'In order traversal did not yield expected elements in order')
    def tearDown(self):
        self._bst = None
class BST_Test_Inorder_Traversal_with_10_Node_Tree(TestCase):
    """In-order traversal over a fixed ten-node tree yields sorted keys."""
    def setUp(self):
        self._bst = BinarySearchTree()
        # Fixed insertion order gives the tree a known, reproducible shape.
        for key in (5, 8, 7, 9, 10, 2, 1, 3, 4, 6):
            self._bst.insert(key=key, obj=key)
    def test_inorder_traversal_with_10_node_tree(self):
        visited = [item for _, item in self._bst.traversal(
            want_pre_order=False, want_in_order=True, want_post_order=False)]
        self.assertEqual(10, len(visited),
                         'In order traversal on 10 node tree must yield 10 elements')
        self.assertEqual([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], visited,
                         'In order traversal did not yield expected elements in order')
    def tearDown(self):
        self._bst = None
class BST_Test_All_Traversals_Consecutively_For_Tree_Integirty_With_10_Node_Tree(TestCase):
    """Running all three traversals back to back must not corrupt the tree."""
    def setUp(self):
        self._bst = BinarySearchTree()
        # Fixed insertion order gives the tree a known, reproducible shape.
        for key in (5, 8, 7, 9, 10, 2, 1, 3, 4, 6):
            self._bst.insert(key=key, obj=key)
    def test_traversal_consecutively_with_10_node_tree(self):
        # In-order first.
        visited = [item for _, item in self._bst.traversal(
            want_pre_order=False, want_in_order=True, want_post_order=False)]
        self.assertEqual(10, len(visited),
                         'In order traversal on 10 node tree must yield 10 elements')
        self.assertEqual([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], visited,
                         'In order traversal did not yield expected elements in order')
        # Then post-order over the same tree.
        visited = [item for _, item in self._bst.traversal(
            want_pre_order=False, want_in_order=False, want_post_order=True)]
        self.assertEqual(10, len(visited),
                         'Post order traversal on 10 node tree must yield 10 elements')
        self.assertEqual([1, 4, 3, 2, 6, 7, 10, 9, 8, 5], visited,
                         'Post order traversal did not yield expected elements in order')
        # Finally pre-order.
        visited = [item for _, item in self._bst.traversal(
            want_pre_order=True, want_in_order=False, want_post_order=False)]
        self.assertEqual(10, len(visited),
                         'Pre order traversal on 10 node tree must yield 10 elements')
        self.assertEqual([5, 2, 1, 3, 4, 8, 7, 6, 9, 10], visited,
                         'Pre order traversal did not yield expected elements in order')
    def tearDown(self):
        self._bst = None
#
# Inorder traversal with stack
#
class BST_Test_Inorder_Traversal_Using_Stack_with_empty_Tree(TestCase):
    """The stack-based in-order traversal over an empty tree visits nothing."""
    def setUp(self):
        self._bst = BinarySearchTree()
    def test_inorder_traversal_with_empty_tree(self):
        visited = [item for _, item in self._bst.inorder_traversal_with_stack()]
        self.assertEqual(0, len(visited),
                         'In order traversal on empty tree must yield no elements')
    def tearDown(self):
        self._bst = None
class BST_Test_Inorder_Traversal_with_Stack_On_Single_Node_Tree(TestCase):
    """The stack-based in-order traversal yields the single node."""
    def setUp(self):
        self._bst = BinarySearchTree()
        self._bst.insert(key=10, obj=10)
    def test_inorder_traversal_with_single_node_tree(self):
        visited = [item for _, item in self._bst.inorder_traversal_with_stack()]
        self.assertEqual(1, len(visited),
                         'In order traversal on single node tree must yield one elements')
        self.assertEqual([10], visited,
                         'In order traversal did not yield expected elements in order')
    def tearDown(self):
        self._bst = None
class BST_Test_Inorder_Traversal_with_Stack_on_10_Node_Tree(TestCase):
    """The stack-based in-order traversal yields the keys in sorted order."""
    def setUp(self):
        self._bst = BinarySearchTree()
        # Fixed insertion order gives the tree a known, reproducible shape.
        for key in (5, 8, 7, 9, 10, 2, 1, 3, 4, 6):
            self._bst.insert(key=key, obj=key)
    def test_inorder_traversal_with_10_node_tree(self):
        visited = [item for _, item in self._bst.inorder_traversal_with_stack()]
        self.assertEqual(10, len(visited),
                         'In order traversal on 10 node tree must yield 10 elements')
        self.assertEqual([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], visited,
                         'In order traversal did not yield expected elements in order')
    def tearDown(self):
        self._bst = None
#Allows running as python run.
if __name__ == '__main__':
print('BST Traversal test')
unittest.main() | 44.821561 | 143 | 0.665257 | 1,716 | 12,057 | 4.399184 | 0.047786 | 0.088091 | 0.092992 | 0.114452 | 0.954828 | 0.94847 | 0.926083 | 0.912571 | 0.894688 | 0.894688 | 0 | 0.031165 | 0.236211 | 12,057 | 269 | 144 | 44.821561 | 0.788576 | 0.014017 | 0 | 0.842857 | 0 | 0 | 0.131626 | 0 | 0 | 0 | 0 | 0 | 0.12381 | 1 | 0.185714 | false | 0 | 0.019048 | 0 | 0.266667 | 0.009524 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
02768aa338c8856e6b917377c2431404d5357ca3 | 149 | py | Python | comment/signals/__init__.py | learnICT/Comment | a910b0595afe8f91b2055184b064653bc6842661 | [
"MIT"
] | 75 | 2018-09-08T14:29:35.000Z | 2022-03-25T16:17:06.000Z | comment/signals/__init__.py | p0-oya/Comment | 39f6fb6c40314d97391d36fc25112d6420c96991 | [
"MIT"
] | 165 | 2018-10-07T21:55:31.000Z | 2022-02-27T14:44:32.000Z | comment/signals/__init__.py | p0-oya/Comment | 39f6fb6c40314d97391d36fc25112d6420c96991 | [
"MIT"
] | 37 | 2019-12-01T19:44:23.000Z | 2022-02-13T16:46:14.000Z | from comment.signals.post_delete import * # noqa
from comment.signals.post_migrate import * # noqa
from comment.signals.post_save import * # noqa
| 37.25 | 50 | 0.778523 | 21 | 149 | 5.380952 | 0.428571 | 0.292035 | 0.477876 | 0.584071 | 0.566372 | 0.566372 | 0 | 0 | 0 | 0 | 0 | 0 | 0.14094 | 149 | 3 | 51 | 49.666667 | 0.882813 | 0.09396 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
5a896d7726f3c770b22723e48d5196385742bcd3 | 11,547 | py | Python | Packs/ParseHTMLTables/Scripts/ParseHTMLTables/ParseHTMLTables_test.py | satyakidroid/content | b5342c522d44aec8f31f4ee0fc8ad269ac970903 | [
"MIT"
] | null | null | null | Packs/ParseHTMLTables/Scripts/ParseHTMLTables/ParseHTMLTables_test.py | satyakidroid/content | b5342c522d44aec8f31f4ee0fc8ad269ac970903 | [
"MIT"
] | 51 | 2022-02-25T22:28:40.000Z | 2022-03-31T22:34:58.000Z | Packs/ParseHTMLTables/Scripts/ParseHTMLTables/ParseHTMLTables_test.py | satyakidroid/content | b5342c522d44aec8f31f4ee0fc8ad269ac970903 | [
"MIT"
] | 1 | 2021-11-27T09:12:29.000Z | 2021-11-27T09:12:29.000Z | import demistomock as demisto
import json
def test_main(mocker):
    """Exercise ParseHTMLTables' ``main`` against several HTML layouts.

    Each case feeds one HTML document through the script (via a mocked
    ``demisto.args``) and checks the parsed table structure reported
    through ``demisto.results``:

    * a table titled by an ``<h1>`` heading
    * a table titled by a loose text line (with and without inline markup)
    * a heading containing nested ``<strong>`` markup
    * a table with no preceding title (reported as ``"No Title"``)
    * a title containing a space

    The original version duplicated the same expected-results structure
    five times and near-identical table markup six times; the fixtures are
    now built by small helpers so each scenario states only what differs.
    """
    from ParseHTMLTables import main

    def expected_rows(prefix):
        # Every fixture table holds two rows of two cells, all cell texts
        # derived from the table's numeric prefix.
        head1, head2 = '%s.head1' % prefix, '%s.head2' % prefix
        return [
            {head1: '%s.item-1-1' % prefix, head2: '%s.item-1-2' % prefix},
            {head1: '%s.item-2-1' % prefix, head2: '%s.item-2-2' % prefix},
        ]

    def table_html(prefix):
        # Markup whose parsed form matches expected_rows(prefix):
        # one header row followed by two data rows.
        return '\n'.join([
            '<table>',
            '<tr>',
            '<th>%s.head1</th>' % prefix,
            '<th>%s.head2</th>' % prefix,
            '</tr>',
            '<tr>',
            '<td>%s.item-1-1</td>' % prefix,
            '<td>%s.item-1-2</td>' % prefix,
            '</tr>',
            '<tr>',
            '<td>%s.item-2-1</td>' % prefix,
            '<td>%s.item-2-2</td>' % prefix,
            '</tr>',
            '</table>',
        ])

    def document(*parts):
        # Wrap the given fragments in a minimal <html> document.
        return '\n'.join(['<html>'] + list(parts) + ['</html>'])

    two_titled_tables = [{'table1': expected_rows('1')},
                         {'table2': expected_rows('2')}]

    test_data = [
        # Second table titled by a bare text line.
        {
            'value': document('<h1>table1</h1>', table_html('1'),
                              'table2', table_html('2')),
            'results': two_titled_tables,
        },
        # Both tables titled by <h1> headings.
        {
            'value': document('<h1>table1</h1>', table_html('1'),
                              '<h1>table2</h1>', table_html('2')),
            'results': two_titled_tables,
        },
        # Heading title containing nested inline markup.
        {
            'value': document('<h1>table1</h1>', table_html('1'),
                              '<h1>tab<strong>l</strong>e2</h1>',
                              table_html('2')),
            'results': two_titled_tables,
        },
        # Bare-text title containing nested inline markup.
        {
            'value': document('<h1>table1</h1>', table_html('1'),
                              'tab<strong>l</strong>e2', table_html('2')),
            'results': two_titled_tables,
        },
        # Second table has no title at all.
        {
            'value': document('<h1>table1</h1>', table_html('1'),
                              table_html('2')),
            'results': [{'table1': expected_rows('1')},
                        {'No Title': expected_rows('2')}],
        },
        # Single table whose title contains a space.
        {
            'value': document('<h1>table 1</h1>', table_html('1')),
            'results': [{'table 1': expected_rows('1')}],
        },
    ]

    for case in test_data:
        mocker.patch.object(demisto, 'args', return_value={'value': case['value']})
        mocker.patch.object(demisto, 'results')
        main()
        # main() must report exactly one entry whose structure matches.
        assert demisto.results.call_count == 1
        results = demisto.results.call_args[0][0]
        assert json.dumps(results) == json.dumps(case['results'])
| 31.377717 | 62 | 0.183251 | 834 | 11,547 | 2.529976 | 0.053957 | 0.113744 | 0.079621 | 0.03981 | 0.848341 | 0.83128 | 0.83128 | 0.83128 | 0.826066 | 0.826066 | 0 | 0.120199 | 0.686585 | 11,547 | 367 | 63 | 31.463215 | 0.462835 | 0 | 0 | 0.679666 | 0 | 0 | 0.605265 | 0.004763 | 0 | 0 | 0 | 0 | 0.005571 | 1 | 0.002786 | false | 0 | 0.008357 | 0 | 0.011142 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
ce52cdf18b245b4876b962304f183a7f359f9e48 | 6,326 | py | Python | markovgenerator/test_markovmodeller.py | louism33/chatbot | 2c17d80823016a56e3dd8d36539d58928569a357 | [
"MIT"
] | null | null | null | markovgenerator/test_markovmodeller.py | louism33/chatbot | 2c17d80823016a56e3dd8d36539d58928569a357 | [
"MIT"
] | null | null | null | markovgenerator/test_markovmodeller.py | louism33/chatbot | 2c17d80823016a56e3dd8d36539d58928569a357 | [
"MIT"
] | null | null | null | from unittest import TestCase
from chatbot.markovgenerator import markovmodeller
from chatbot.markovgenerator.mmodel.markovmodel import MarkovModel
class Test_Markov_Basics(TestCase):
    """Model construction and walks for single-sentence inputs."""
    basic_text = "Hello I am a robot"
    basic_text_repetition = "Hello I am a robot robot robot"
    basic_text_repetition_normalise = "Hello I am a Robot robot rObOt"
    basic_text_multiple = "Hello I am a robot I kill"
    basic_text_multiple_2 = "you eat you shoot you leave"
    basic_text_full_stop = "Hello I am a robot."
    def test_build_markov_model(self):
        mm = markovmodeller.build_markov_model_from_string(self.basic_text)
        self.assertIsInstance(mm, MarkovModel)
        self.assertEqual(5, len(mm))
        root = mm.get_start_node()
        self.assertEqual(1, len(root.connections))
        self.assertEqual(1, len(mm.find_node_by_name("i").connections))
        self.assertEqual(1, len(mm.find_node_by_name("am").connections))
    def test_build_markov_model_repetition(self):
        mm = markovmodeller.build_markov_model_from_string(self.basic_text_repetition)
        self.assertIsInstance(mm, MarkovModel)
        self.assertEqual(5, len(mm))
        # Both spellings are expected to resolve to the same node.
        self.assertEqual(1, len(mm.find_node_by_name("robot").connections))
        self.assertEqual(1, len(mm.find_node_by_name("roBOT").connections))
    def test_build_markov_model_repetition_normalise(self):
        mm = markovmodeller.build_markov_model_from_string(
            self.basic_text_repetition_normalise)
        self.assertIsInstance(mm, MarkovModel)
        self.assertEqual(5, len(mm))
    def test_build_markov_model_multiple(self):
        mm = markovmodeller.build_markov_model_from_string(self.basic_text_multiple)
        self.assertIsInstance(mm, MarkovModel)
        self.assertEqual(6, len(mm))
        root = mm.get_start_node()
        self.assertEqual(1, len(root.connections))
        # "i" is followed by both "am" and "kill".
        self.assertEqual(2, len(mm.find_node_by_name("i").connections))
        self.assertEqual(1, len(mm.find_node_by_name("am").connections))
    def test_build_markov_model_multiple_2(self):
        mm = markovmodeller.build_markov_model_from_string(self.basic_text_multiple_2)
        self.assertIsInstance(mm, MarkovModel)
        self.assertEqual(4, len(mm))
        root = mm.get_start_node()
        self.assertEqual(1, len(root.connections))
        self.assertEqual(3, len(mm.find_node_by_name("you").connections))
        self.assertEqual(1, len(mm.find_node_by_name("eat").connections))
        self.assertIsNone(mm.find_node_by_name("definitelyNotAWord"))
    def test_build_markov_model_full_stop(self):
        mm = markovmodeller.build_markov_model_from_string(self.basic_text_full_stop)
        self.assertIsInstance(mm, MarkovModel)
        # The trailing full stop is counted as its own node.
        self.assertEqual(6, len(mm))
        root = mm.get_start_node()
        self.assertEqual(1, len(root.connections))
        self.assertEqual(1, len(mm.find_node_by_name("i").connections))
        self.assertEqual(1, len(mm.find_node_by_name("am").connections))
    def test_basic_random_walk(self):
        mm = markovmodeller.build_markov_model_from_string(self.basic_text)
        self.assertEqual(5, len(mm))
        self.assertEqual("hello i am a robot", markovmodeller.get_max_walk(mm))
    def test_basic_random_walk_2(self):
        mm = markovmodeller.build_markov_model_from_string(self.basic_text_full_stop)
        self.assertEqual(6, len(mm))
        self.assertEqual("hello i am a robot .", markovmodeller.get_max_walk(mm))
class Test_Markov_Multiple(TestCase):
    """Model construction and walks for multi-sentence inputs."""
    basic_text = "Hello I am a robot. Hello I am a robot."
    text_punctuation = "Hello! I, am a: robot. How are you?"
    basic_text_cyborg = "Hello I am a robot. Hello I am a robot. heLlO i Am A hUmAn."
    basic_text_3_ways = "Mr Anderson. Madame Bovary. Lord Buckethead."
    def test_build_markov_model(self):
        mm = markovmodeller.build_markov_model_from_string(self.basic_text)
        self.assertIsInstance(mm, MarkovModel)
        self.assertEqual(6, len(mm))
        root = mm.get_start_node()
        self.assertEqual(1, len(root.connections))
        self.assertEqual(1, len(mm.find_node_by_name("i").connections))
        self.assertEqual(1, len(mm.find_node_by_name("am").connections))
    def test_build_markov_model_punctuation(self):
        mm = markovmodeller.build_markov_model_from_string(self.text_punctuation)
        self.assertEqual(13, len(mm))
        root = mm.get_start_node()
        # Three sentences, so three connections from the start node.
        self.assertEqual(3, len(root.connections))
        self.assertEqual(1, len(mm.find_node_by_name("i").connections))
        self.assertEqual(1, len(mm.find_node_by_name("am").connections))
    def test_build_markov_model_cyborg_max_walk(self):
        mm = markovmodeller.build_markov_model_from_string(self.basic_text_cyborg)
        self.assertEqual(7, len(mm))
        root = mm.get_start_node()
        # NOTE: the original asserted this connection count twice in a row;
        # the duplicate check was merged into a single assertion.
        self.assertEqual(1, len(root.connections))
        # "hello" opens all three sentences, so its edge carries weight 3.
        self.assertEqual(3, root.connections[0].weight)
        self.assertEqual(1, len(mm.find_node_by_name("i").connections))
        self.assertEqual(1, len(mm.find_node_by_name("am").connections))
        # "a" branches to both "robot" and "human".
        self.assertEqual(2, len(mm.find_node_by_name("a").connections))
        self.assertEqual("hello i am a robot .", markovmodeller.get_max_walk(mm))
    def test_build_markov_model_cyborg(self):
        mm = markovmodeller.build_markov_model_from_string(self.basic_text_cyborg)
        walk = markovmodeller.get_walk(mm)
        self.assertIn(walk, ("hello i am a robot .", "hello i am a human ."))
    def test_build_markov_model_3_ways(self):
        mm = markovmodeller.build_markov_model_from_string(self.basic_text_3_ways)
        walk = markovmodeller.get_walk(mm)
        self.assertIn(walk, ("mr anderson .", "madame bovary .", "lord buckethead ."))
| 48.661538 | 103 | 0.723522 | 850 | 6,326 | 5.071765 | 0.087059 | 0.135699 | 0.150313 | 0.149385 | 0.842264 | 0.796103 | 0.774298 | 0.774298 | 0.721642 | 0.655532 | 0 | 0.008838 | 0.177205 | 6,326 | 129 | 104 | 49.03876 | 0.819404 | 0 | 0 | 0.504762 | 0 | 0 | 0.082517 | 0 | 0 | 0 | 0 | 0 | 0.47619 | 1 | 0.12381 | false | 0 | 0.028571 | 0 | 0.266667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
ce85ce446b51b7676e88860d665d3f70055b72e7 | 9,174 | py | Python | binary_search_tree/binary_search_tree_tests.py | joaojunior/data_structure | 46857d369edc288c078744d158928873be1b723e | [
"MIT"
] | null | null | null | binary_search_tree/binary_search_tree_tests.py | joaojunior/data_structure | 46857d369edc288c078744d158928873be1b723e | [
"MIT"
] | 13 | 2018-02-14T23:36:39.000Z | 2018-02-20T00:41:11.000Z | binary_search_tree/binary_search_tree_tests.py | joaojunior/data_structure | 46857d369edc288c078744d158928873be1b723e | [
"MIT"
] | null | null | null | import unittest
from binary_search_tree import BinarySearchTree, Node
def create_node(id_, value):
    """Return a fresh ``Node`` keyed by ``id_`` carrying ``value``."""
    new_node = Node(id_, value)
    return new_node
class TestInsertInBinarySearchTree(unittest.TestCase):
    """Insertion must keep BST ordering: smaller keys left, larger right."""
    def setUp(self):
        self.bst = BinarySearchTree()
    def _insert_all(self, *nodes):
        # Insert the given nodes into the tree, in the given order.
        for node in nodes:
            self.bst.insert(node)
    def test_insert_two_nodes_in_ascendent_order(self):
        smaller, larger = create_node(1, 1), create_node(2, 2)
        self._insert_all(smaller, larger)
        self.assertEqual(2, self.bst.number_of_nodes)
        self.assertEqual(smaller, self.bst.root)
        self.assertEqual(None, self.bst.root.left)
        self.assertEqual(larger, self.bst.root.right)
    def test_insert_two_nodes_in_descendent_order(self):
        smaller, larger = create_node(1, 1), create_node(2, 2)
        self._insert_all(larger, smaller)
        self.assertEqual(2, self.bst.number_of_nodes)
        self.assertEqual(larger, self.bst.root)
        self.assertEqual(smaller, self.bst.root.left)
        self.assertEqual(None, self.bst.root.right)
    def test_insert_tree_nodes_in_ascendent_order(self):
        first, second, third = (create_node(k, k) for k in (1, 2, 3))
        self._insert_all(first, second, third)
        self.assertEqual(3, self.bst.number_of_nodes)
        self.assertEqual(first, self.bst.root)
        self.assertEqual(None, self.bst.root.left)
        self.assertEqual(second, self.bst.root.right)
        self.assertEqual(third, self.bst.root.right.right)
    def test_insert_tree_nodes_in_descendent_order(self):
        first, second, third = (create_node(k, k) for k in (1, 2, 3))
        self._insert_all(third, second, first)
        self.assertEqual(3, self.bst.number_of_nodes)
        self.assertEqual(third, self.bst.root)
        self.assertEqual(None, self.bst.root.right)
        self.assertEqual(second, self.bst.root.left)
        self.assertEqual(first, self.bst.root.left.left)
    def test_insert_tree_nodes(self):
        first, second, third = (create_node(k, k) for k in (1, 2, 3))
        self._insert_all(second, third, first)
        self.assertEqual(3, self.bst.number_of_nodes)
        self.assertEqual(second, self.bst.root)
        self.assertEqual(third, self.bst.root.right)
        self.assertEqual(first, self.bst.root.left)
    def test_insert_tree_equal_nodes(self):
        node = create_node(1, 1)
        self._insert_all(node, node, node)
        self.assertEqual(3, self.bst.number_of_nodes)
        self.assertEqual(node, self.bst.root)
        # Equal keys are chained down the right-hand side.
        self.assertEqual(node, self.bst.root.right)
        self.assertEqual(node, self.bst.root.right.right)
class TestRemoveInBinarySearchTree(unittest.TestCase):
    """Removal of leaves, one-child nodes, two-children nodes and the root."""
    def setUp(self):
        self.bst = BinarySearchTree()
    def _insert_all(self, *nodes):
        # Insert the given nodes into the tree, in the given order.
        for node in nodes:
            self.bst.insert(node)
    def test_remove_root_with_no_children(self):
        only = create_node(1, 1)
        self._insert_all(only)
        removed = self.bst.remove(1)
        self.assertEqual(0, self.bst.number_of_nodes)
        self.assertEqual(only, removed)
        self.assertEqual(None, self.bst.root)
    def test_remove_root_with_one_child_left(self):
        old_root, child = create_node(1, 1), create_node(0, 0)
        self._insert_all(old_root, child)
        removed = self.bst.remove(old_root.value)
        self.assertEqual(1, self.bst.number_of_nodes)
        self.assertEqual(old_root, removed)
        self.assertEqual(child, self.bst.root)
    def test_remove_root_with_one_child_right(self):
        old_root, child = create_node(1, 1), create_node(2, 2)
        self._insert_all(old_root, child)
        removed = self.bst.remove(old_root.value)
        self.assertEqual(1, self.bst.number_of_nodes)
        self.assertEqual(old_root, removed)
        self.assertEqual(child, self.bst.root)
    def test_remove_root_with_two_children(self):
        left, old_root, right = create_node(1, 1), create_node(2, 2), create_node(3, 3)
        self._insert_all(old_root, left, right)
        removed = self.bst.remove(old_root.value)
        self.assertEqual(2, self.bst.number_of_nodes)
        self.assertEqual(old_root, removed)
        # The right child takes the removed root's place.
        self.assertEqual(right, self.bst.root)
        self.assertEqual(left, self.bst.root.left)
        self.assertEqual(None, self.bst.root.right)
    def test_remove_left_leaf(self):
        left, root, right = create_node(1, 1), create_node(2, 2), create_node(3, 3)
        self._insert_all(root, left, right)
        removed = self.bst.remove(left.value)
        self.assertEqual(2, self.bst.number_of_nodes)
        self.assertEqual(left, removed)
        self.assertEqual(root, self.bst.root)
        self.assertEqual(None, self.bst.root.left)
        self.assertEqual(right, self.bst.root.right)
    def test_remove_right_leaf(self):
        left, root, right = create_node(1, 1), create_node(2, 2), create_node(3, 3)
        self._insert_all(root, left, right)
        removed = self.bst.remove(right.value)
        self.assertEqual(2, self.bst.number_of_nodes)
        self.assertEqual(right, removed)
        self.assertEqual(root, self.bst.root)
        self.assertEqual(left, self.bst.root.left)
        self.assertEqual(None, self.bst.root.right)
    def test_remove_node_with_one_right_child(self):
        n1, n2, n3, n4 = (create_node(k, k) for k in (1, 2, 3, 4))
        self._insert_all(n2, n1, n3, n4)
        removed = self.bst.remove(n3.value)
        self.assertEqual(3, self.bst.number_of_nodes)
        self.assertEqual(n3, removed)
        self.assertEqual(n2, self.bst.root)
        self.assertEqual(n1, self.bst.root.left)
        # n4 is promoted into the removed node's position.
        self.assertEqual(n4, self.bst.root.right)
    def test_remove_node_with_one_left_child(self):
        n1, n2, n3 = (create_node(k, k) for k in (1, 2, 3))
        n0 = create_node(0, 0)
        self._insert_all(n2, n1, n3, n0)
        removed = self.bst.remove(n1.value)
        self.assertEqual(3, self.bst.number_of_nodes)
        self.assertEqual(n1, removed)
        self.assertEqual(n2, self.bst.root)
        # n0 is promoted into the removed node's position.
        self.assertEqual(n0, self.bst.root.left)
        self.assertEqual(n3, self.bst.root.right)
    def test_remove_node_with_two_children(self):
        n1, n2, n3, n4 = (create_node(k, k) for k in (1, 2, 3, 4))
        n5 = create_node(5, 3.5)
        self._insert_all(n2, n1, n3, n4, n5)
        removed = self.bst.remove(n3.value)
        self.assertEqual(4, self.bst.number_of_nodes)
        self.assertEqual(n3, removed)
        self.assertEqual(n2, self.bst.root)
        self.assertEqual(n1, self.bst.root.left)
        # n5 (value 3.5) replaces n3 and keeps n4 as its right child.
        self.assertEqual(n5, self.bst.root.right)
        self.assertEqual(n4, self.bst.root.right.right)
class TestSearchInBinarySearchTree(unittest.TestCase):
    """Search for existing and missing values in a small three-node tree."""
    def setUp(self):
        self.bst = BinarySearchTree()
        self.node1 = create_node(1, 1)
        self.node2 = create_node(2, 2)
        self.node3 = create_node(3, 3)
        # Insertion order 2, 3, 1 makes node2 the root.
        for node in (self.node2, self.node3, self.node1):
            self.bst.insert(node)
    def test_search_root_node(self):
        self.assertEqual(self.node2, self.bst.search(self.node2.value))
    def test_search_right_leaf(self):
        # NOTE(review): given the insertion order, node1 is actually the
        # *left* leaf; this method name looks swapped with the one below.
        self.assertEqual(self.node1, self.bst.search(self.node1.value))
    def test_search_left_leaf(self):
        self.assertEqual(self.node3, self.bst.search(self.node3.value))
    def test_search_node_not_exist(self):
        self.assertEqual(None, self.bst.search(4))
class TestEmptyInBinarySearchTree(unittest.TestCase):
    """Behaviour of BinarySearchTree operations on an empty tree."""

    def setUp(self):
        self.bst = BinarySearchTree()

    def test_empty_binary_search_tree(self):
        # A fresh tree has no nodes and no root.
        self.assertEqual(0, self.bst.number_of_nodes)
        self.assertIsNone(self.bst.root)

    def test_insert_node(self):
        # The first inserted node becomes the root.
        node1 = create_node(1, 1)
        self.bst.insert(node1)
        self.assertEqual(1, self.bst.number_of_nodes)
        self.assertEqual(node1, self.bst.root)

    def test_search_node(self):
        # Searching an empty tree returns None.
        result = self.bst.search(1)
        self.assertIsNone(result)

    def test_remove_node(self):
        # Removing from an empty tree returns None.
        result = self.bst.remove(1)
        self.assertIsNone(result)
# Allow the suite to be run directly: `python <this file>`.
if __name__ == '__main__':
    unittest.main()
| 32.189474 | 58 | 0.646719 | 1,234 | 9,174 | 4.65316 | 0.051864 | 0.154824 | 0.106409 | 0.056426 | 0.88558 | 0.863636 | 0.830024 | 0.769767 | 0.722919 | 0.703762 | 0 | 0.037504 | 0.2385 | 9,174 | 284 | 59 | 32.302817 | 0.784426 | 0 | 0 | 0.723214 | 0 | 0 | 0.000872 | 0 | 0 | 0 | 0 | 0 | 0.339286 | 1 | 0.125 | false | 0 | 0.008929 | 0.004464 | 0.15625 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
ceae9e32c3858c7bd2a2095dcb68f57e1ccfc36a | 116 | py | Python | algorithms/__init__.py | liron14/Ad-Selector | bf413ca980921e35c2840aaf5a7e450f74755649 | [
"MIT"
] | null | null | null | algorithms/__init__.py | liron14/Ad-Selector | bf413ca980921e35c2840aaf5a7e450f74755649 | [
"MIT"
] | null | null | null | algorithms/__init__.py | liron14/Ad-Selector | bf413ca980921e35c2840aaf5a7e450f74755649 | [
"MIT"
] | null | null | null | """
Package-level re-exports of the ad ordering algorithms.
"""
from algorithms.ad_order import order_ads
from algorithms.ad_order_limited import order_ads_limited
| 19.333333 | 57 | 0.836207 | 17 | 116 | 5.352941 | 0.470588 | 0.307692 | 0.351648 | 0.461538 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.094828 | 116 | 5 | 58 | 23.2 | 0.866667 | 0.060345 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
ceb5b151a2450e3c084962f7492539d0232847db | 183 | py | Python | thornpy/__init__.py | bthornton191/thorpy | 3f5ffac31ef2d6d5763eb1a1abb3328c23ce5612 | [
"MIT"
] | null | null | null | thornpy/__init__.py | bthornton191/thorpy | 3f5ffac31ef2d6d5763eb1a1abb3328c23ce5612 | [
"MIT"
] | 8 | 2020-03-24T18:08:00.000Z | 2022-01-13T03:26:26.000Z | thornpy/__init__.py | bthornton191/thornpy | 45acce317d56f0f3573f354936f352ac576766f9 | [
"MIT"
] | null | null | null | """thornpy is a set of miscellaneous python tools created by Ben Thornton for Ben Thornton
"""
from thornpy import numtype
from thornpy import utilities
from thornpy import mechanics
| 30.5 | 90 | 0.814208 | 27 | 183 | 5.518519 | 0.666667 | 0.221477 | 0.342282 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.153005 | 183 | 5 | 91 | 36.6 | 0.96129 | 0.47541 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
cecd78a0601da50160a4b126ce91fc68143fb40a | 5,158 | py | Python | nalu.py | SthePasso/NALU2IM | 5cacab314cefd6ddb907086471bf757461777b97 | [
"MIT"
] | null | null | null | nalu.py | SthePasso/NALU2IM | 5cacab314cefd6ddb907086471bf757461777b97 | [
"MIT"
] | null | null | null | nalu.py | SthePasso/NALU2IM | 5cacab314cefd6ddb907086471bf757461777b97 | [
"MIT"
] | null | null | null | """The Model Implementation of Neural Arithmetic Logical Unit"""
import argparse
import numpy as np
import matplotlib.pyplot as plt
import mxnet as mx
from mxnet import gluon, autograd, nd
from mxnet.gluon import nn
class NAC(nn.Block):
    """Neural Accumulator: a linear layer whose effective weights are
    formed as tanh(W_hat) * sigmoid(M_hat), softly constraining each
    entry toward {-1, 0, 1}."""

    def __init__(self, in_units, units):
        super(NAC, self).__init__()
        # Two free parameter matrices; their elementwise combination
        # produces the effective weight matrix used in forward().
        self.W_hat = self.params.get('W_hat', shape=(in_units, units))
        self.M_hat = self.params.get('M_hat', shape=(in_units, units))

    def forward(self, x):
        # Effective weights, each entry bounded to (-1, 1).
        weights = nd.tanh(self.W_hat.data()) * nd.sigmoid(self.M_hat.data())
        return nd.dot(x, weights)
class NALU(nn.Block):
    """Neural Arithmetic Logic Unit (replicates the published architecture).

    Mixes an additive (NAC) path and a multiplicative (log-space) path with
    a learned gate: y = g * a + (1 - g) * m.  This variant shares one
    weight matrix between both paths and uses an input-dependent gate.
    """

    def __init__(self, in_units, units):
        super(NALU, self).__init__()
        self.W0_hat = self.params.get('W0_hat', shape=(in_units, units))
        self.M0_hat = self.params.get('M0_hat', shape=(in_units, units))
        # Whether the gate is a function of the input (True) or a free
        # per-unit parameter (False).
        self.dependent_G = True
        if self.dependent_G:
            self.G = self.params.get('G', shape=(in_units, units))
        else:
            self.G = self.params.get('G', shape=(units,))

    def forward(self, x):
        if self.dependent_G:
            g = nd.sigmoid(nd.dot(x, self.G.data()))
        else:
            g = nd.sigmoid(self.G.data())
        # Effective NAC weights, softly constrained toward {-1, 0, 1}.
        W0 = nd.tanh(self.W0_hat.data()) * nd.sigmoid(self.M0_hat.data())
        a = nd.dot(x, W0)  # additive path
        # Multiplicative path computed in log space; 1e-10 guards log(0).
        m = nd.exp(nd.dot(nd.log(nd.abs(x) + 1e-10), W0))
        y = g * a + (1 - g) * m
        return y
class NALU2M(nn.Block):
    """NALU variant with *separate* weight matrices for the additive and
    multiplicative paths, mixed by an input-dependent gate."""

    def __init__(self, in_units, units):
        super(NALU2M, self).__init__()
        shape = (in_units, units)
        self.W0_hat = self.params.get('W0_hat', shape=shape)
        self.M0_hat = self.params.get('M0_hat', shape=shape)
        self.W1_hat = self.params.get('W1_hat', shape=shape)
        self.M1_hat = self.params.get('M1_hat', shape=shape)
        self.dependent_G = True  # gate is a function of the input
        if self.dependent_G:
            self.G = self.params.get('G', shape=shape)
        else:
            self.G = self.params.get('G', shape=(units,))

    def forward(self, x):
        if self.dependent_G:
            gate = nd.sigmoid(nd.dot(x, self.G.data()))
        else:
            gate = nd.sigmoid(self.G.data())
        # Each path gets its own effective weight matrix.
        add_weights = nd.tanh(self.W0_hat.data()) * nd.sigmoid(self.M0_hat.data())
        mul_weights = nd.tanh(self.W1_hat.data()) * nd.sigmoid(self.M1_hat.data())
        add_path = nd.dot(x, add_weights)
        # Multiplication via log space; 1e-10 guards log(0).
        mul_path = nd.exp(nd.dot(nd.log(nd.abs(x) + 1e-10), mul_weights))
        return gate * add_path + (1 - gate) * mul_path
class NALUIG(nn.Block):
    """NALU variant with a single shared weight matrix and an
    input-*independent* gate (a free per-unit parameter)."""

    def __init__(self, in_units, units):
        super(NALUIG, self).__init__()
        self.W0_hat = self.params.get('W0_hat', shape=(in_units, units))
        self.M0_hat = self.params.get('M0_hat', shape=(in_units, units))
        # Gate is a free per-unit parameter, not a function of the input.
        self.dependent_G = False
        if self.dependent_G:
            self.G = self.params.get('G', shape=(in_units, units))
        else:
            self.G = self.params.get('G', shape=(units,))

    def forward(self, x):
        if self.dependent_G:
            g = nd.sigmoid(nd.dot(x, self.G.data()))
        else:
            g = nd.sigmoid(self.G.data())
        # Effective NAC weights, softly constrained toward {-1, 0, 1}.
        W0 = nd.tanh(self.W0_hat.data()) * nd.sigmoid(self.M0_hat.data())
        a = nd.dot(x, W0)  # additive path
        # Multiplicative path computed in log space; 1e-10 guards log(0).
        m = nd.exp(nd.dot(nd.log(nd.abs(x) + 1e-10), W0))
        y = g * a + (1 - g) * m
        return y
class NALU2MIG(nn.Block):
    """NALU variant combining separate additive/multiplicative weight
    matrices with an input-independent (free per-unit parameter) gate."""

    def __init__(self, in_units, units):
        super(NALU2MIG, self).__init__()
        shape = (in_units, units)
        self.W0_hat = self.params.get('W0_hat', shape=shape)
        self.M0_hat = self.params.get('M0_hat', shape=shape)
        self.W1_hat = self.params.get('W1_hat', shape=shape)
        self.M1_hat = self.params.get('M1_hat', shape=shape)
        self.dependent_G = False  # gate does not depend on the input
        if self.dependent_G:
            self.G = self.params.get('G', shape=shape)
        else:
            self.G = self.params.get('G', shape=(units,))

    def forward(self, x):
        if self.dependent_G:
            gate = nd.sigmoid(nd.dot(x, self.G.data()))
        else:
            gate = nd.sigmoid(self.G.data())
        # Each path gets its own effective weight matrix.
        add_weights = nd.tanh(self.W0_hat.data()) * nd.sigmoid(self.M0_hat.data())
        mul_weights = nd.tanh(self.W1_hat.data()) * nd.sigmoid(self.M1_hat.data())
        add_path = nd.dot(x, add_weights)
        # Multiplication via log space; 1e-10 guards log(0).
        mul_path = nd.exp(nd.dot(nd.log(nd.abs(x) + 1e-10), mul_weights))
        return gate * add_path + (1 - gate) * mul_path
ced498394068923660457dd3717ff13f6a065787 | 10,163 | py | Python | src/command_modules/azure-cli-sql/azure/cli/command_modules/sql/tests/test_sql_commands.py | 0cool321/azure-cli | fd8e6d46d5cee682aff51e262c06bc40c01636ba | [
"MIT"
] | 2 | 2020-07-22T18:53:05.000Z | 2021-09-11T05:52:33.000Z | src/command_modules/azure-cli-sql/azure/cli/command_modules/sql/tests/test_sql_commands.py | 0cool321/azure-cli | fd8e6d46d5cee682aff51e262c06bc40c01636ba | [
"MIT"
] | null | null | null | src/command_modules/azure-cli-sql/azure/cli/command_modules/sql/tests/test_sql_commands.py | 0cool321/azure-cli | fd8e6d46d5cee682aff51e262c06bc40c01636ba | [
"MIT"
] | null | null | null | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from azure.cli.core.test_utils.vcr_test_base import (
ResourceGroupVCRTestBase, JMESPathCheck, NoneCheck)
class SqlServerMgmtScenarioTest(ResourceGroupVCRTestBase):
def __init__(self, test_method):
super(SqlServerMgmtScenarioTest, self).__init__(__file__, test_method,
resource_group='cli-test-sql-mgmt')
self.sql_server_names = ['cliautomation01', 'cliautomation02']
self.location = "westus"
self.administrator_login = 'admin123'
self.administrator_login_password = 'SecretPassword123'
def test_sql_mgmt(self):
self.execute()
def body(self):
rg = self.resource_group
loc = self.location
user = self.administrator_login
password = self.administrator_login_password
# test create sql server with minimal required parameters
self.cmd('sql server create -g {} --server-name {} -l {} '
'--administrator-login {} --administrator-login-password {}'
.format(rg, self.sql_server_names[0], loc, user, password), checks=[
JMESPathCheck('name', self.sql_server_names[0]),
JMESPathCheck('resourceGroup', rg),
JMESPathCheck('administratorLogin', user)])
# test list sql server should be 1
self.cmd('sql server list -g {}'.format(rg), checks=[JMESPathCheck('length(@)', 1)])
# test create another sql server
self.cmd('sql server create -g {} --server-name {} -l {} '
'--administrator-login {} --administrator-login-password {}'
.format(rg, self.sql_server_names[1], loc, user, password), checks=[
JMESPathCheck('name', self.sql_server_names[1]),
JMESPathCheck('resourceGroup', rg),
JMESPathCheck('administratorLogin', user)])
# test list sql server should be 2
self.cmd('sql server list -g {}'.format(rg), checks=[JMESPathCheck('length(@)', 2)])
# test show sql server
self.cmd('sql server show -g {} --server-name {}'
.format(rg, self.sql_server_names[0]), checks=[
JMESPathCheck('name', self.sql_server_names[0]),
JMESPathCheck('resourceGroup', rg),
JMESPathCheck('administratorLogin', user)])
# test delete sql server
self.cmd('sql server delete -g {} --server-name {}'
.format(rg, self.sql_server_names[0]), checks=NoneCheck())
self.cmd('sql server delete -g {} --server-name {}'
.format(rg, self.sql_server_names[1]), checks=NoneCheck())
# test list sql server should be 0
self.cmd('sql server list -g {}'.format(rg), checks=[JMESPathCheck('length(@)', 0)])
class SqlServerFirewallMgmtScenarioTest(ResourceGroupVCRTestBase):
def __init__(self, test_method):
super(SqlServerFirewallMgmtScenarioTest, self).__init__(__file__, test_method,
resource_group='cli-test-sql-mgmt')
self.sql_server_name = 'cliautomation03'
self.location = "westus"
self.administrator_login = 'admin123'
self.administrator_login_password = 'SecretPassword123'
def test_sql_firewall_mgmt(self):
self.execute()
def body(self):
rg = self.resource_group
loc = self.location
user = self.administrator_login
password = self.administrator_login_password
firewall_rule_1 = 'rule1'
start_ip_address_1 = '0.0.0.0'
end_ip_address_1 = '255.255.255.255'
firewall_rule_2 = 'rule2'
start_ip_address_2 = '123.123.123.123'
end_ip_address_2 = '123.123.123.124'
# test create sql server with minimal required parameters
self.cmd('sql server create -g {} --server-name {} -l {} '
'--administrator-login {} --administrator-login-password {}'
.format(rg, self.sql_server_name, loc, user, password), checks=[
JMESPathCheck('name', self.sql_server_name),
JMESPathCheck('resourceGroup', rg),
JMESPathCheck('administratorLogin', user)])
# test sql server firewall create
self.cmd('sql server firewall create --firewall-rule-name {} -g {} --server-name {} '
'--start-ip-address {} --end-ip-address {}'
.format(firewall_rule_1, rg, self.sql_server_name,
start_ip_address_1, end_ip_address_1), checks=[
JMESPathCheck('name', firewall_rule_1),
JMESPathCheck('resourceGroup', rg),
JMESPathCheck('startIpAddress', start_ip_address_1),
JMESPathCheck('endIpAddress', end_ip_address_1)])
# test sql server firewall show
self.cmd('sql server firewall show --firewall-rule-name {} -g {} --server-name {}'
.format(firewall_rule_1, rg, self.sql_server_name), checks=[
JMESPathCheck('name', firewall_rule_1),
JMESPathCheck('resourceGroup', rg),
JMESPathCheck('startIpAddress', start_ip_address_1),
JMESPathCheck('endIpAddress', end_ip_address_1)])
# test sql server firewall update
self.cmd('sql server firewall update --firewall-rule-name {} -g {} --server-name {} '
'--start-ip-address {} --end-ip-address {}'
.format(firewall_rule_1, rg, self.sql_server_name,
start_ip_address_2, end_ip_address_2), checks=[
JMESPathCheck('name', firewall_rule_1),
JMESPathCheck('resourceGroup', rg),
JMESPathCheck('startIpAddress', start_ip_address_2),
JMESPathCheck('endIpAddress', end_ip_address_2)])
# test sql server firewall create another rule
self.cmd('sql server firewall create --firewall-rule-name {} -g {} --server-name {} '
'--start-ip-address {} --end-ip-address {}'
.format(firewall_rule_2, rg, self.sql_server_name,
start_ip_address_2, end_ip_address_2), checks=[
JMESPathCheck('name', firewall_rule_2),
JMESPathCheck('resourceGroup', rg),
JMESPathCheck('startIpAddress', start_ip_address_2),
JMESPathCheck('endIpAddress', end_ip_address_2)])
# test sql server firewall list
self.cmd('sql server firewall list -g {} --server-name {}'
.format(rg, self.sql_server_name), checks=[JMESPathCheck('length(@)', 2)])
# test sql server firewall delete
self.cmd('sql server firewall delete --firewall-rule-name {} -g {} --server-name {}'
.format(firewall_rule_1, rg, self.sql_server_name), checks=NoneCheck())
self.cmd('sql server firewall list -g {} --server-name {}'
.format(rg, self.sql_server_name), checks=[JMESPathCheck('length(@)', 1)])
self.cmd('sql server firewall delete --firewall-rule-name {} -g {} --server-name {}'
.format(firewall_rule_2, rg, self.sql_server_name), checks=NoneCheck())
self.cmd('sql server firewall list -g {} --server-name {}'
.format(rg, self.sql_server_name), checks=[JMESPathCheck('length(@)', 0)])
# test delete sql server
self.cmd('sql server delete -g {} --server-name {}'
.format(rg, self.sql_server_name), checks=NoneCheck())
class SqlServerServiceObjectiveMgmtScenarioTest(ResourceGroupVCRTestBase):
def __init__(self, test_method):
super(SqlServerServiceObjectiveMgmtScenarioTest, self).__init__(
__file__, test_method, resource_group='cli-test-sql-mgmt')
self.sql_server_name = 'cliautomation04'
self.location = "westus"
self.administrator_login = 'admin123'
self.administrator_login_password = 'SecretPassword123'
def test_sql_service_objective_mgmt(self):
self.execute()
def body(self):
rg = self.resource_group
loc = self.location
user = self.administrator_login
password = self.administrator_login_password
# test create sql server with minimal required parameters
self.cmd('sql server create -g {} --server-name {} -l {} '
'--administrator-login {} --administrator-login-password {}'
.format(rg, self.sql_server_name, loc, user, password), checks=[
JMESPathCheck('name', self.sql_server_name),
JMESPathCheck('resourceGroup', rg),
JMESPathCheck('administratorLogin', user)])
# test sql server service-objective list
service_objectives = self.cmd('sql server service-objective list -g {} --server-name {}'
.format(rg, self.sql_server_name), checks=[
JMESPathCheck('length(@)', 42)])
# test sql server service-objective show
self.cmd('sql server service-objective show -g {} --server-name {} '
'--service-objective-name {}'
.format(rg, self.sql_server_name, service_objectives[0]['name']), checks=[
JMESPathCheck('name', service_objectives[0]['name']),
JMESPathCheck('resourceGroup', rg)])
# test delete sql server
self.cmd('sql server delete -g {} --server-name {}'
.format(rg, self.sql_server_name), checks=NoneCheck())
| 50.815 | 99 | 0.579061 | 1,028 | 10,163 | 5.515564 | 0.101167 | 0.111111 | 0.064198 | 0.064903 | 0.859436 | 0.822928 | 0.792945 | 0.760847 | 0.760847 | 0.760494 | 0 | 0.016423 | 0.287022 | 10,163 | 199 | 100 | 51.070352 | 0.766078 | 0.098691 | 0 | 0.64539 | 0 | 0 | 0.243542 | 0.024956 | 0 | 0 | 0 | 0 | 0 | 1 | 0.06383 | false | 0.099291 | 0.007092 | 0 | 0.092199 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 7 |
0ccb531058747844d86969a980b48e7fcdd4c475 | 98 | py | Python | mpunet/errors/image_errors.py | alexsosn/MultiPlanarUNet | 2d1cecdee391be8e9f72da95e33077ed82a2183a | [
"MIT"
] | 156 | 2018-12-19T19:21:30.000Z | 2022-03-10T13:14:52.000Z | mpunet/errors/image_errors.py | alexsosn/MultiPlanarUNet | 2d1cecdee391be8e9f72da95e33077ed82a2183a | [
"MIT"
] | 25 | 2019-07-30T07:45:26.000Z | 2022-02-10T00:38:31.000Z | mpunet/errors/image_errors.py | alexsosn/MultiPlanarUNet | 2d1cecdee391be8e9f72da95e33077ed82a2183a | [
"MIT"
] | 33 | 2019-01-26T16:34:50.000Z | 2022-02-20T13:48:44.000Z |
class NoLabelFileError(AttributeError): pass
class ReadOnlyAttributeError(AttributeError): pass
| 19.6 | 50 | 0.857143 | 8 | 98 | 10.5 | 0.625 | 0.428571 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.081633 | 98 | 4 | 51 | 24.5 | 0.933333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 1 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 0 | 0 | 7 |
0cce4d3db76e85915194141c015f79c62466973f | 3,247 | py | Python | mlc_tools/module_php/generator_factory.py | mlc-tools/mlc-tools | 1ee8e82e438cda2cc1efd334d69773d1a29a0e0c | [
"MIT"
] | 1 | 2018-05-07T09:32:57.000Z | 2018-05-07T09:32:57.000Z | mlc_tools/module_php/generator_factory.py | mlc-tools/mlc-tools | 1ee8e82e438cda2cc1efd334d69773d1a29a0e0c | [
"MIT"
] | 4 | 2019-09-27T09:33:34.000Z | 2020-04-13T13:48:02.000Z | mlc_tools/module_php/generator_factory.py | mlc-tools/mlc-tools | 1ee8e82e438cda2cc1efd334d69773d1a29a0e0c | [
"MIT"
] | 1 | 2018-02-23T01:04:44.000Z | 2018-02-23T01:04:44.000Z |
from .writer import Writer
class GeneratorFactory(object):
def __init__(self):
pass
@staticmethod
def generate(model):
writer = Writer('')
writer.model = model
content = writer.prepare_file(FACTORY)
model.add_file(None, 'Factory.php', content)
FACTORY = '''<?php
class Factory
{
static function build($type)
{
require_once "$type.php";
return new $type;
}
{{format=xml}}
static function create_command_from_xml($payload)
{
$xml = simplexml_load_string($payload);
$class = $xml->getName();
require_once "$class.php";
$command = new $class;
$command->deserialize_xml($xml);
return $command;
}
static function serialize_command_to_xml($command)
{
$xml = simplexml_load_string('<'.$command->get_type().'/>');
$command->serialize_xml($xml);
return $xml->asXML();
}
static function clone_object($obj)
{
$payload = Factory::serialize_command_to_xml($obj);
$clone = Factory::create_command_from_xml($payload);
return $clone;
}
{{end_format=xml}}
{{format=json}}
static function create_command_from_json($payload)
{
$json = json_decode($payload);
$class = key($json);
require_once "$class.php";
$command = new $class;
$command->deserialize_json($json->$class);
return $command;
}
static function serialize_command_to_json($command)
{
$type = $command->get_type();
$json = json_decode('{"'.$type.'": {}}');
$command->serialize_json($json->$type);
return json_encode($json, JSON_PRETTY_PRINT);
}
static function clone_object($obj)
{
$payload = Factory::serialize_command_to_json($obj);
$clone = Factory::create_command_from_json($payload);
return $clone;
}
{{end_format=json}}
{{format=both}}
static function create_command_from_xml($payload)
{
$xml = simplexml_load_string($payload);
$class = $xml->getName();
require_once "$class.php";
$command = new $class;
$command->deserialize_xml($xml);
return $command;
}
static function serialize_command_to_xml($command)
{
$xml = simplexml_load_string('<'.$command->get_type().'/>');
$command->serialize_xml($xml);
return $xml->asXML();
}
static function create_command_from_json($payload)
{
$json = json_decode($payload);
$class = key($json);
require_once "$class.php";
$command = new $class;
$command->deserialize_json($json->$class);
return $command;
}
static function serialize_command_to_json($command)
{
$type = $command->get_type();
$json = json_decode('{"'.$type.'": {}}');
$command->serialize_json($json->$type);
return json_encode($json, JSON_PRETTY_PRINT);
}
static function clone_object($obj)
{
$payload = Factory::serialize_command_to_json($obj);
$clone = Factory::create_command_from_json($payload);
return $clone;
}
{{end_format=both}}
};
?>'''
| 26.185484 | 68 | 0.58454 | 333 | 3,247 | 5.432432 | 0.156156 | 0.092869 | 0.065782 | 0.059701 | 0.832504 | 0.81592 | 0.798231 | 0.798231 | 0.798231 | 0.798231 | 0 | 0 | 0.269788 | 3,247 | 123 | 69 | 26.398374 | 0.762969 | 0 | 0 | 0.528302 | 1 | 0 | 0.905422 | 0.344732 | 0 | 0 | 0 | 0 | 0 | 1 | 0.018868 | false | 0.009434 | 0.009434 | 0 | 0.150943 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
0b45977fad914e944d2f59c5c028323b8f3a9c7d | 3,576 | py | Python | tests/test3.py | pombreda/pygir-ctypes | 8ea2ad6f6cc792a3edb6ced9c0027a3ac2c52ecb | [
"BSD-3-Clause"
] | null | null | null | tests/test3.py | pombreda/pygir-ctypes | 8ea2ad6f6cc792a3edb6ced9c0027a3ac2c52ecb | [
"BSD-3-Clause"
] | null | null | null | tests/test3.py | pombreda/pygir-ctypes | 8ea2ad6f6cc792a3edb6ced9c0027a3ac2c52ecb | [
"BSD-3-Clause"
] | null | null | null | # C-level Gtk.Window example
import os
import sys
sys.path.append('..')
from gir._girepository import *
def tmp1():
    """Walk Gtk.Window's ancestor chain via GIRepository.

    Loads the GObject and Gtk typelibs, looks up the Gtk.Window object
    info, climbs three levels up its inheritance chain, dumps every
    method of that ancestor, then prints one more ancestor.  (Exact
    ancestor classes depend on the installed Gtk typelib.)
    """
    g_type_init()
    Gir = g_irepository_get_default()

    # Lazily load the typelibs (None = latest available version).
    GObject = g_irepository_require(
        Gir,
        gchar_p('GObject'),
        None,
        G_IREPOSITORY_LOAD_FLAG_LAZY,
        None
    )
    Gtk = g_irepository_require(
        Gir,
        gchar_p('Gtk'),
        None,
        G_IREPOSITORY_LOAD_FLAG_LAZY,
        None
    )

    Gtk_Window = g_irepository_find_by_name(Gir, gchar_p('Gtk'), gchar_p('Window'))
    info_print(Gtk_Window)

    # Climb the inheritance chain, printing each ancestor as we go.
    parent = g_object_info_get_parent(
        cast(Gtk_Window, POINTER(GIObjectInfo))
    )
    info_print(parent)

    parent = g_object_info_get_parent(
        cast(parent, POINTER(GIObjectInfo))
    )
    info_print(parent)

    parent = g_object_info_get_parent(
        cast(parent, POINTER(GIObjectInfo))
    )
    info_print(parent)

    # Dump every method defined on this (third) ancestor.
    n = g_object_info_get_n_methods(
        cast(parent, POINTER(GIObjectInfo))
    ).value
    for i in range(n):
        info = g_object_info_get_method(
            cast(parent, POINTER(GIObjectInfo)),
            gint(i)
        )
        info_print(info)
        print()

    # One more ancestor, printed without its methods.
    parent = g_object_info_get_parent(
        cast(parent, POINTER(GIObjectInfo))
    )
    info_print(parent)

    g_typelib_free(Gtk)
    g_typelib_free(GObject)
if __name__ == '__main__':
    # Initialise the GLib type system before any introspection calls.
    g_type_init()
    Gir = g_irepository_get_default()
    # Lazily load the GObject and Gtk typelibs (None = latest version).
    GObject = g_irepository_require(
        Gir,
        gchar_p('GObject'),
        None,
        G_IREPOSITORY_LOAD_FLAG_LAZY,
        None
    )
    Gtk = g_irepository_require(
        Gir,
        gchar_p('Gtk'),
        None,
        G_IREPOSITORY_LOAD_FLAG_LAZY,
        None
    )
    # Look up the Gtk.Window object info and print it.
    Gtk_Window = g_irepository_find_by_name(Gir, gchar_p('Gtk'), gchar_p('Window'))
    info_print(Gtk_Window)
    # Print every method defined directly on Gtk.Window.
    n = g_object_info_get_n_methods(
        cast(Gtk_Window, POINTER(GIObjectInfo))
    ).value
    for i in range(n):
        info = g_object_info_get_method(
            cast(Gtk_Window, POINTER(GIObjectInfo)),
            gint(i)
        )
        info_print(info)
        print()
    # Release the loaded typelibs.
    g_typelib_free(Gtk)
    g_typelib_free(GObject)
| 20.434286 | 90 | 0.686521 | 490 | 3,576 | 4.604082 | 0.130612 | 0.075798 | 0.087766 | 0.099291 | 0.830231 | 0.781472 | 0.781472 | 0.768174 | 0.722518 | 0.683511 | 0 | 0.001009 | 0.168904 | 3,576 | 174 | 91 | 20.551724 | 0.758075 | 0.417506 | 0 | 0.780488 | 0 | 0 | 0.023833 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.012195 | false | 0 | 0.036585 | 0 | 0.04878 | 0.121951 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
0b4ba0dfde3f34552c961b04c37bde66220d4a54 | 728 | py | Python | python/intermediate/valid-parenthesis_Zelbot.py | saumyasingh048/hacktoberithms | 6eb5472e846650b73e75e04f717bdf500d171052 | [
"MIT"
] | 16 | 2018-10-05T07:35:06.000Z | 2021-10-02T12:12:52.000Z | python/intermediate/valid-parenthesis_Zelbot.py | saumyasingh048/hacktoberithms | 6eb5472e846650b73e75e04f717bdf500d171052 | [
"MIT"
] | 50 | 2018-10-04T00:04:24.000Z | 2019-10-25T16:29:58.000Z | python/intermediate/valid-parenthesis_Zelbot.py | saumyasingh048/hacktoberithms | 6eb5472e846650b73e75e04f717bdf500d171052 | [
"MIT"
] | 115 | 2018-10-04T02:42:18.000Z | 2021-01-27T17:34:21.000Z | def valid_parens(string_of_parens):
for i in range(len(string_of_parens)):
if "()" in string_of_parens:
string_of_parens = string_of_parens.replace("()", "")
if "[]" in string_of_parens:
string_of_parens = string_of_parens.replace("[]", "")
if "{}" in string_of_parens:
string_of_parens = string_of_parens.replace("{}", "")
if len(string_of_parens) == 0:
return True
return False
print(valid_parens('')) # Expected True
print(valid_parens('()')) # Expected True
print(valid_parens('()[]{}')) # Expected True
print(valid_parens('(]')) # Expected False
print(valid_parens('([)]')) # Expected False
print(valid_parens('{[]}')) # Expected True
| 36.4 | 65 | 0.627747 | 90 | 728 | 4.733333 | 0.2 | 0.225352 | 0.394366 | 0.328639 | 0.788732 | 0.788732 | 0.767606 | 0.767606 | 0.767606 | 0.631455 | 0 | 0.00173 | 0.206044 | 728 | 19 | 66 | 38.315789 | 0.735294 | 0.116758 | 0 | 0 | 0 | 0 | 0.04717 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.058824 | false | 0 | 0 | 0 | 0.176471 | 0.352941 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
0b8dd1cf6064d46db785e02e60a618a41a930059 | 7,049 | py | Python | tests/fixtures/api-compensations.py | primitybio/cellengine-python-toolk | 1f9dd168f1f27e2beba69f02e340371190857b33 | [
"MIT"
] | 4 | 2021-01-12T17:03:37.000Z | 2021-12-16T13:23:57.000Z | tests/fixtures/api-compensations.py | primitybio/cellengine-python-toolk | 1f9dd168f1f27e2beba69f02e340371190857b33 | [
"MIT"
] | 61 | 2021-01-11T05:27:16.000Z | 2022-03-08T01:50:09.000Z | tests/fixtures/api-compensations.py | primitybio/cellengine-python-toolkit | 1f9dd168f1f27e2beba69f02e340371190857b33 | [
"MIT"
] | null | null | null | import pytest
@pytest.fixture(scope="session")
def compensations():
compensations = [
{
"__v": 0,
"_id": "5d64abe2ca9df61349ed8e95",
"channels": [
"Blue530-A",
"Blue695-A",
"Vio450-A",
"Vio525-A",
"Vio585-A",
"Vio605-A",
"Vio655-A",
"Vio710-A",
"UV450-A",
"UV530-A",
"Red670-A",
"Red730-A",
"Red780-A",
"YG582-A",
"YG610-A",
"YG670-A",
"YG710-A",
"YG780-A",
],
"experimentId": "5d64abe2ca9df61349ed8e78",
"name": "some compensation",
"spillMatrix": [
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
],
}
]
return compensations
| 19.526316 | 55 | 0.090226 | 382 | 7,049 | 1.657068 | 0.102094 | 0.913112 | 1.2891 | 1.611374 | 0.511848 | 0.511848 | 0.511848 | 0.511848 | 0.511848 | 0.511848 | 0 | 0.374653 | 0.846645 | 7,049 | 360 | 56 | 19.580556 | 0.210916 | 0 | 0 | 0.910615 | 0 | 0 | 0.03575 | 0.006809 | 0 | 0 | 0 | 0 | 0 | 1 | 0.002793 | false | 0 | 0.002793 | 0 | 0.00838 | 0 | 0 | 0 | 1 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
0bb075b44d70be813af3202a6deaaf059ec7db32 | 5,144 | py | Python | jdapi.py | didappear/TestProject | 014d0f45e57750bde75dd3002098c33d33ef5b76 | [
"Apache-2.0"
] | 1 | 2020-07-26T12:08:44.000Z | 2020-07-26T12:08:44.000Z | jdapi.py | didappear/TestProject | 014d0f45e57750bde75dd3002098c33d33ef5b76 | [
"Apache-2.0"
] | null | null | null | jdapi.py | didappear/TestProject | 014d0f45e57750bde75dd3002098c33d33ef5b76 | [
"Apache-2.0"
] | null | null | null | #! /usr/bin/env python
# -*- coding:utf-8 -*-
# __author__ = "LJ"
# Date: 2019/1/16
import requests,pprint
'''
'''
request_url = 'api.m.jd.com'
cookies = 'abtest=20171112132916936_40; shshshfpa=d2d66407-afd2-bc74-d1c4-b38fc566ae65-1528783597; shshshfpb=2b18de2272ae34917bd0806352de6fc3a5b2767d781c8306017fc0edc4; __jdu=1526114150885499639638; pinId=rZj5v8dLtuLR8YKtwflTDbV9-x-f3wj7; pin=jd_54397e682436b; unick=jd_130522759; _tp=cakA%2Bu6VpcS3vX5rBe7ukhOnk5sC77jFk%2F7M0ngoAaE%3D; _pst=jd_54397e682436b; user-key=47ce9435-dfaf-4c09-a4b6-96bd23397a3c; PCSYCityID=1; ipLocation=%u5c71%u4e1c; cn=62; jcap_dvzw_fp=41f83b9a1a056118808a424fcf314d6615476334359942051506482; whwswswws=; __jdv=122270672|kong|t_1000000936_0|tuiguang|1fd96477eb864f5c82b23f255a2157f4|1547633621684; _contrast=100002100080.100000109473.100000109455.4939815; areaId=1; ipLoc-djd=1-2901-4135-0.137923460; __jdc=122270672; autoOpenApp_downCloseDate_auto=1547696889895_21600000; downloadAppPlugIn_downCloseDate=1547696917370_86400000; _contrast_status=show; 3AB9D23F7A4B3C9B=CYAIYHMV2QZ5FKG6OIJZF2EFKHYPXITMRC73DTHJOTOYUI3ZWBRLJAGHMCELXJKIRMGXMTDEWGTSISE4XHJK4DOBM4; wlfstk_smdl=6bxwq1cpnbfjw3cwc8ajdm7ocvtyzqq6; TrackID=12nxvzBXPqrlBw-JqL0K5mCd4JqmIUMsJNZQp-DoXL-quv_Df_AOXWnjnz9eqcX070VIrxzMIm6dEVNrpLZtRv9IOHC55gr8uex9zylgss2w; ceshi3.com=201; unpl=V2_ZzNtbRIDQRYiDEYBfRtdAWJRFg1KVRYTIg0WBCwRDABjUEJaclRCFX0UR1RnGVoUZwsZXUtcRhdFCEdkexhdBGYCE1VAUHMURQl2V3spXARXAxZeRVdDEHUMR11yHVkNbwQQVUVVSiVFD3ZkK1ECXjkzEFpHUkQVcwFFZHopXTUsbRMQQlNAEnUIQ1R%2fGFUMYwYaVUVVSxJ3AXZVSxo%3d; CCC_SE=ADC_wHgst6dyQf2KbCTxRy%2f4eZfz6rL%2f1M0dcFsxpDXmyFLNNUnW4FzIjDAjq%2bUXf2iQ%2fKyALodlA2v1Vk%2fxU4lqLny2%2fSTFu9Dd%2bB49pqtc0ZN9e3BE7lAu3dHPfaze140jaX0%2b3sk%2fQlHd%2bpk86Ao54J5drl51tKwZfWNfK32q3HQvcU6a7uKG72ssU0HI6aXa7VGztPh49LCI0%2btcgKjeRuS2tzp2ZCyxJvK9nzl6KksxmmOmTttn3xcqxnZUFNAqgOmcpLTJ%2bbBZ37UwBBIjeC0eVQnwWDSY%2bmyHpdObdN7azZeIY8%2bRWsK2apcm3kKHnmV%2fyWZTQZq1P2laFibv0v2GWwcPiooTOooKRVTABvWEilOv8oKFo0JqTltAaJPyNJSMpezAxP1ossJfC2ZujzMizMunqa%2bVm%2b26W03sjLr9KIPT24CggqSwcCUJTnKf1vCLxmh6vadq5%2bWzc6xiroZmgvV4RtRkYyLcUMSZVmvveeuiJxGUQV%2bqfqWgeknt; mobilev=html5; 
sid=a7f73f92ebb3e1d8053375898d2356a7; USER_FLAG_CHECK=83d3a534c14bdd37ad3163a67a64bc60; TrackerID=bCJf7Sp3sZmISDM2GiEimINvu2lxMuFod3BAkAxyInqBCcSrprzLEdFsdzpReezEdB-lH47oR22ze7t8X-XFplHo7rKYTGsw9AquKt7WTlA; pt_key=AAFcQGSzADD4D08OUyBVkkiF-x8V6vo3J6T2N35Ypc5cB-u3lv87Oh-RbwC8TFHEaVUzkcRtlSY; pt_pin=jd_54397e682436b; pt_token=slz4grl9; pwdt_id=jd_54397e682436b; wxa_level=1; retina=0; cid=9; webp=1; visitkey=36737167522817205; wq_ufc=83d3a534c14bdd37ad3163a67a64bc60; __wga=1547723958411.1547723958411.1547723958411.1547723958411.1.1; PPRD_P=UUID.1526114150885499639638; sc_width=1920; wq_area=13_1112_0%7C2; shshshfp=903cac5b5554d91e9e698ad5cb9d520b; __jda=122270672.1526114150885499639638.1526114151.1547712588.1547723998.21; __jdb=122270672.1.1526114150885499639638|21.1547723998; shshshsID=4b52ff156a70a6db91fc4ca7fe7c78d0_3_1547724000889; thor=56401E19FB0AD7CDBE28BCDF98857F748C7579004B9AC1F9EE3953156081DE96759AD2CD612B7F4B09508DCEBC2AFF4B666D244F7F0EF28E28C57C8259D3C818E40683DDF250E98E3B6D9489402F816A38EE3955076DD71344B0DF0DC4240B4225D5A82A65A22CD7B7A21AB02CFCDD91DE5346885BE67AF7560B004616953978A6525E9EC5579E32E93EE51C5871428E8F6D0C1B243D0D542687703D3D2D7BE3'
path = '/client.action?functionId=newBabelAwardCollection&body=%7B%22activityId%22%3A%22gLmT9mwp1wCyKvL58hvpRHC3kMP%22%2C%22scene%22%3A%221%22%2C%22args%22%3A%22key%3Dd807fdd0f2774d3290b3c84b55a4fbef%2CroleId%3D17254396%22%2C%22eid%22%3A%22CYAIYHMV2QZ5FKG6OIJZF2EFKHYPXITMRC73DTHJOTOYUI3ZWBRLJAGHMCELXJKIRMGXMTDEWGTSISE4XHJK4DOBM4%22%2C%22fp%22%3A%2221a873dabf9f64ec63a8ec770ce5bedc%22%2C%22pageClick%22%3A%22Babel_Coupon%22%2C%22mitemAddrId%22%3A%22%22%2C%22geo%22%3A%7B%22lng%22%3A%22%22%2C%22lat%22%3A%22%22%7D%7D&screen=750*1334&client=wh5&clientVersion=1.0.0&sid=a7f73f92ebb3e1d8053375898d2356a7&uuid=&area=&loginType=3&callback=jsonp4'
functionId = 'newBabelAwardCollection'
body = '''
{"activityId":"gLmT9mwp1wCyKvL58hvpRHC3kMP","scene":"1","args":"key=d807fdd0f2774d3290b3c84b55a4fbef,roleId=17254396","eid":"CYAIYHMV2QZ5FKG6OIJZF2EFKHYPXITMRC73DTHJOTOYUI3ZWBRLJAGHMCELXJKIRMGXMTDEWGTSISE4XHJK4DOBM4","fp":"21a873dabf9f64ec63a8ec770ce5bedc","pageClick":"Babel_Coupon","mitemAddrId":"","geo":{"lng":"","lat":""}}
'''
user_agent = 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36'
params = {
'screen': '750*1334',
'client': 'wh5',
'clientVersion':'1.0.0',
'sid':'a7f73f92ebb3e1d8053375898d2356a7',
'loginType':'3',
'callback':'jsonp4',
'functionId' : 'newBabelAwardCollection',
'body' : '''
{"activityId":"gLmT9mwp1wCyKvL58hvpRHC3kMP","scene":"1","args":"key=d807fdd0f2774d3290b3c84b55a4fbef,roleId=17254396","eid":"CYAIYHMV2QZ5FKG6OIJZF2EFKHYPXITMRC73DTHJOTOYUI3ZWBRLJAGHMCELXJKIRMGXMTDEWGTSISE4XHJK4DOBM4","fp":"21a873dabf9f64ec63a8ec770ce5bedc","pageClick":"Babel_Coupon","mitemAddrId":"","geo":{"lng":"","lat":""}}'
'''
}
req = requests.get(request_url,params=params,cookies={'cookies':cookies})
# retjson = req.json()
print(req) | 131.897436 | 3,145 | 0.860226 | 427 | 5,144 | 10.215457 | 0.64637 | 0.00917 | 0.025447 | 0.005502 | 0.186612 | 0.182027 | 0.182027 | 0.182027 | 0.182027 | 0.182027 | 0 | 0.306361 | 0.034215 | 5,144 | 39 | 3,146 | 131.897436 | 0.571659 | 0.018857 | 0 | 0.086957 | 0 | 0.217391 | 0.938232 | 0.849652 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.043478 | 0 | 0.043478 | 0.086957 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
e7f3e4760313af5dd2e82901ebc2b18b688ae9dd | 2,534 | py | Python | apps/base/migrations/0049_auto_20190129_0643.py | KolevDarko/lifehq | 88d92f5fe76f2fb6511f2a892e096d95a69985d8 | [
"MIT"
] | null | null | null | apps/base/migrations/0049_auto_20190129_0643.py | KolevDarko/lifehq | 88d92f5fe76f2fb6511f2a892e096d95a69985d8 | [
"MIT"
] | null | null | null | apps/base/migrations/0049_auto_20190129_0643.py | KolevDarko/lifehq | 88d92f5fe76f2fb6511f2a892e096d95a69985d8 | [
"MIT"
] | null | null | null | # Generated by Django 2.0 on 2019-01-29 06:43
from django.db import migrations
import hashid_field.field
class Migration(migrations.Migration):
dependencies = [
('base', '0047_personal_todo_items_migrate_to_projects'),
]
operations = [
migrations.AlterField(
model_name='personaltodoitem',
name='id',
field=hashid_field.field.HashidAutoField(alphabet='abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890', min_length=7, primary_key=True, serialize=False),
),
migrations.AlterField(
model_name='personaltodolist',
name='id',
field=hashid_field.field.HashidAutoField(alphabet='abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890', min_length=7, primary_key=True, serialize=False),
),
migrations.AlterField(
model_name='projectevent',
name='id',
field=hashid_field.field.HashidAutoField(alphabet='abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890', min_length=7, primary_key=True, serialize=False),
),
migrations.AlterField(
model_name='projectlog',
name='id',
field=hashid_field.field.HashidAutoField(alphabet='abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890', min_length=7, primary_key=True, serialize=False),
),
migrations.AlterField(
model_name='projecttodoitem',
name='id',
field=hashid_field.field.HashidAutoField(alphabet='abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890', min_length=7, primary_key=True, serialize=False),
),
migrations.AlterField(
model_name='projecttodolist',
name='id',
field=hashid_field.field.HashidAutoField(alphabet='abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890', min_length=7, primary_key=True, serialize=False),
),
migrations.AlterField(
model_name='workcycle',
name='id',
field=hashid_field.field.HashidAutoField(alphabet='abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890', min_length=7, primary_key=True, serialize=False),
),
migrations.AlterField(
model_name='workcyclegroup',
name='id',
field=hashid_field.field.HashidAutoField(alphabet='abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890', min_length=7, primary_key=True, serialize=False),
),
]
| 46.072727 | 177 | 0.693765 | 210 | 2,534 | 8.185714 | 0.247619 | 0.057592 | 0.08377 | 0.134962 | 0.816172 | 0.816172 | 0.816172 | 0.816172 | 0.816172 | 0.816172 | 0 | 0.053027 | 0.211129 | 2,534 | 54 | 178 | 46.925926 | 0.806903 | 0.016969 | 0 | 0.666667 | 1 | 0 | 0.267979 | 0.216955 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.041667 | 0 | 0.104167 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
f06c266e2c712f296793164fb59a06e703d4f1a1 | 21,360 | py | Python | sdk/python/pulumi_cloudflare/access_identity_provider.py | pulumi/pulumi-cloudflare | d444af2fab6101b388a15cf2e3933e45e9935cc6 | [
"ECL-2.0",
"Apache-2.0"
] | 35 | 2019-03-14T21:29:29.000Z | 2022-03-30T00:00:59.000Z | sdk/python/pulumi_cloudflare/access_identity_provider.py | pulumi/pulumi-cloudflare | d444af2fab6101b388a15cf2e3933e45e9935cc6 | [
"ECL-2.0",
"Apache-2.0"
] | 128 | 2019-03-08T23:45:58.000Z | 2022-03-31T21:05:22.000Z | sdk/python/pulumi_cloudflare/access_identity_provider.py | pulumi/pulumi-cloudflare | d444af2fab6101b388a15cf2e3933e45e9935cc6 | [
"ECL-2.0",
"Apache-2.0"
] | 6 | 2019-05-10T12:52:56.000Z | 2020-03-24T15:02:14.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
from . import outputs
from ._inputs import *
__all__ = ['AccessIdentityProviderArgs', 'AccessIdentityProvider']
@pulumi.input_type
class AccessIdentityProviderArgs:
def __init__(__self__, *,
name: pulumi.Input[str],
type: pulumi.Input[str],
account_id: Optional[pulumi.Input[str]] = None,
configs: Optional[pulumi.Input[Sequence[pulumi.Input['AccessIdentityProviderConfigArgs']]]] = None,
zone_id: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a AccessIdentityProvider resource.
:param pulumi.Input[str] name: Friendly name of the Access Identity Provider configuration.
:param pulumi.Input[str] type: The provider type to use. Must be one of: `"centrify"`,
`"facebook"`, `"google-apps"`, `"oidc"`, `"github"`, `"google"`, `"saml"`,
`"linkedin"`, `"azureAD"`, `"okta"`, `"onetimepin"`, `"onelogin"`, `"yandex"`.
:param pulumi.Input[str] account_id: The account ID the provider should be associated with. Conflicts with `zone_id`.
:param pulumi.Input[Sequence[pulumi.Input['AccessIdentityProviderConfigArgs']]] configs: Provider configuration from the [developer documentation][access_identity_provider_guide].
:param pulumi.Input[str] zone_id: The zone ID the provider should be associated with. Conflicts with `account_id`.
"""
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "type", type)
if account_id is not None:
pulumi.set(__self__, "account_id", account_id)
if configs is not None:
pulumi.set(__self__, "configs", configs)
if zone_id is not None:
pulumi.set(__self__, "zone_id", zone_id)
@property
@pulumi.getter
def name(self) -> pulumi.Input[str]:
"""
Friendly name of the Access Identity Provider configuration.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: pulumi.Input[str]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def type(self) -> pulumi.Input[str]:
"""
The provider type to use. Must be one of: `"centrify"`,
`"facebook"`, `"google-apps"`, `"oidc"`, `"github"`, `"google"`, `"saml"`,
`"linkedin"`, `"azureAD"`, `"okta"`, `"onetimepin"`, `"onelogin"`, `"yandex"`.
"""
return pulumi.get(self, "type")
@type.setter
def type(self, value: pulumi.Input[str]):
pulumi.set(self, "type", value)
@property
@pulumi.getter(name="accountId")
def account_id(self) -> Optional[pulumi.Input[str]]:
"""
The account ID the provider should be associated with. Conflicts with `zone_id`.
"""
return pulumi.get(self, "account_id")
@account_id.setter
def account_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "account_id", value)
@property
@pulumi.getter
def configs(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['AccessIdentityProviderConfigArgs']]]]:
"""
Provider configuration from the [developer documentation][access_identity_provider_guide].
"""
return pulumi.get(self, "configs")
@configs.setter
def configs(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['AccessIdentityProviderConfigArgs']]]]):
pulumi.set(self, "configs", value)
@property
@pulumi.getter(name="zoneId")
def zone_id(self) -> Optional[pulumi.Input[str]]:
"""
The zone ID the provider should be associated with. Conflicts with `account_id`.
"""
return pulumi.get(self, "zone_id")
@zone_id.setter
def zone_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "zone_id", value)
@pulumi.input_type
class _AccessIdentityProviderState:
def __init__(__self__, *,
account_id: Optional[pulumi.Input[str]] = None,
configs: Optional[pulumi.Input[Sequence[pulumi.Input['AccessIdentityProviderConfigArgs']]]] = None,
name: Optional[pulumi.Input[str]] = None,
type: Optional[pulumi.Input[str]] = None,
zone_id: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering AccessIdentityProvider resources.
:param pulumi.Input[str] account_id: The account ID the provider should be associated with. Conflicts with `zone_id`.
:param pulumi.Input[Sequence[pulumi.Input['AccessIdentityProviderConfigArgs']]] configs: Provider configuration from the [developer documentation][access_identity_provider_guide].
:param pulumi.Input[str] name: Friendly name of the Access Identity Provider configuration.
:param pulumi.Input[str] type: The provider type to use. Must be one of: `"centrify"`,
`"facebook"`, `"google-apps"`, `"oidc"`, `"github"`, `"google"`, `"saml"`,
`"linkedin"`, `"azureAD"`, `"okta"`, `"onetimepin"`, `"onelogin"`, `"yandex"`.
:param pulumi.Input[str] zone_id: The zone ID the provider should be associated with. Conflicts with `account_id`.
"""
if account_id is not None:
pulumi.set(__self__, "account_id", account_id)
if configs is not None:
pulumi.set(__self__, "configs", configs)
if name is not None:
pulumi.set(__self__, "name", name)
if type is not None:
pulumi.set(__self__, "type", type)
if zone_id is not None:
pulumi.set(__self__, "zone_id", zone_id)
@property
@pulumi.getter(name="accountId")
def account_id(self) -> Optional[pulumi.Input[str]]:
"""
The account ID the provider should be associated with. Conflicts with `zone_id`.
"""
return pulumi.get(self, "account_id")
@account_id.setter
def account_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "account_id", value)
@property
@pulumi.getter
def configs(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['AccessIdentityProviderConfigArgs']]]]:
"""
Provider configuration from the [developer documentation][access_identity_provider_guide].
"""
return pulumi.get(self, "configs")
@configs.setter
def configs(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['AccessIdentityProviderConfigArgs']]]]):
pulumi.set(self, "configs", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Friendly name of the Access Identity Provider configuration.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def type(self) -> Optional[pulumi.Input[str]]:
"""
The provider type to use. Must be one of: `"centrify"`,
`"facebook"`, `"google-apps"`, `"oidc"`, `"github"`, `"google"`, `"saml"`,
`"linkedin"`, `"azureAD"`, `"okta"`, `"onetimepin"`, `"onelogin"`, `"yandex"`.
"""
return pulumi.get(self, "type")
@type.setter
def type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "type", value)
@property
@pulumi.getter(name="zoneId")
def zone_id(self) -> Optional[pulumi.Input[str]]:
"""
The zone ID the provider should be associated with. Conflicts with `account_id`.
"""
return pulumi.get(self, "zone_id")
@zone_id.setter
def zone_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "zone_id", value)
class AccessIdentityProvider(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
account_id: Optional[pulumi.Input[str]] = None,
configs: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AccessIdentityProviderConfigArgs']]]]] = None,
name: Optional[pulumi.Input[str]] = None,
type: Optional[pulumi.Input[str]] = None,
zone_id: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Provides a Cloudflare Access Identity Provider resource. Identity Providers are
used as an authentication or authorisation source within Access.
## Example Usage
```python
import pulumi
import pulumi_cloudflare as cloudflare
# one time pin
pin_login = cloudflare.AccessIdentityProvider("pinLogin",
account_id="1d5fdc9e88c8a8c4518b068cd94331fe",
name="PIN login",
type="onetimepin")
# oauth
github_oauth = cloudflare.AccessIdentityProvider("githubOauth",
account_id="1d5fdc9e88c8a8c4518b068cd94331fe",
configs=[cloudflare.AccessIdentityProviderConfigArgs(
client_id="example",
client_secret="secret_key",
)],
name="GitHub OAuth",
type="github")
# saml
jumpcloud_saml = cloudflare.AccessIdentityProvider("jumpcloudSaml",
account_id="1d5fdc9e88c8a8c4518b068cd94331fe",
configs=[cloudflare.AccessIdentityProviderConfigArgs(
attributes=[
"email",
"username",
],
idp_public_cert=\"\"\"MIIDpDCCAoygAwIBAgIGAV2ka+55MA0GCSqGSIb3DQEBCwUAMIGSMQswCQ...GF/Q2/MHadws97cZg
uTnQyuOqPuHbnN83d/2l1NSYKCbHt24o
\"\"\",
issuer_url="jumpcloud",
sign_request=False,
sso_target_url="https://sso.myexample.jumpcloud.com/saml2/cloudflareaccess",
)],
name="JumpCloud SAML",
type="saml")
# okta
okta = cloudflare.AccessIdentityProvider("okta",
account_id="1d5fdc9e88c8a8c4518b068cd94331fe",
configs=[cloudflare.AccessIdentityProviderConfigArgs(
api_token="okta_api_token",
client_id="example",
client_secret="secret_key",
)],
name="Okta",
type="okta")
```
Please refer to the [developers.cloudflare.com Access documentation][access_identity_provider_guide]
for full reference on what is available and how to configure your provider.
## Import
Access Identity Providers can be imported using a composite ID formed of account ID and Access Identity Provider ID.
```sh
$ pulumi import cloudflare:index/accessIdentityProvider:AccessIdentityProvider my_idp cb029e245cfdd66dc8d2e570d5dd3322/e00e1c13-e350-44fe-96c5-fb75c954871c
```
[access_identity_provider_guide]https://developers.cloudflare.com/access/configuring-identity-providers/
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] account_id: The account ID the provider should be associated with. Conflicts with `zone_id`.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AccessIdentityProviderConfigArgs']]]] configs: Provider configuration from the [developer documentation][access_identity_provider_guide].
:param pulumi.Input[str] name: Friendly name of the Access Identity Provider configuration.
:param pulumi.Input[str] type: The provider type to use. Must be one of: `"centrify"`,
`"facebook"`, `"google-apps"`, `"oidc"`, `"github"`, `"google"`, `"saml"`,
`"linkedin"`, `"azureAD"`, `"okta"`, `"onetimepin"`, `"onelogin"`, `"yandex"`.
:param pulumi.Input[str] zone_id: The zone ID the provider should be associated with. Conflicts with `account_id`.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: AccessIdentityProviderArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Provides a Cloudflare Access Identity Provider resource. Identity Providers are
used as an authentication or authorisation source within Access.
## Example Usage
```python
import pulumi
import pulumi_cloudflare as cloudflare
# one time pin
pin_login = cloudflare.AccessIdentityProvider("pinLogin",
account_id="1d5fdc9e88c8a8c4518b068cd94331fe",
name="PIN login",
type="onetimepin")
# oauth
github_oauth = cloudflare.AccessIdentityProvider("githubOauth",
account_id="1d5fdc9e88c8a8c4518b068cd94331fe",
configs=[cloudflare.AccessIdentityProviderConfigArgs(
client_id="example",
client_secret="secret_key",
)],
name="GitHub OAuth",
type="github")
# saml
jumpcloud_saml = cloudflare.AccessIdentityProvider("jumpcloudSaml",
account_id="1d5fdc9e88c8a8c4518b068cd94331fe",
configs=[cloudflare.AccessIdentityProviderConfigArgs(
attributes=[
"email",
"username",
],
idp_public_cert=\"\"\"MIIDpDCCAoygAwIBAgIGAV2ka+55MA0GCSqGSIb3DQEBCwUAMIGSMQswCQ...GF/Q2/MHadws97cZg
uTnQyuOqPuHbnN83d/2l1NSYKCbHt24o
\"\"\",
issuer_url="jumpcloud",
sign_request=False,
sso_target_url="https://sso.myexample.jumpcloud.com/saml2/cloudflareaccess",
)],
name="JumpCloud SAML",
type="saml")
# okta
okta = cloudflare.AccessIdentityProvider("okta",
account_id="1d5fdc9e88c8a8c4518b068cd94331fe",
configs=[cloudflare.AccessIdentityProviderConfigArgs(
api_token="okta_api_token",
client_id="example",
client_secret="secret_key",
)],
name="Okta",
type="okta")
```
Please refer to the [developers.cloudflare.com Access documentation][access_identity_provider_guide]
for full reference on what is available and how to configure your provider.
## Import
Access Identity Providers can be imported using a composite ID formed of account ID and Access Identity Provider ID.
```sh
$ pulumi import cloudflare:index/accessIdentityProvider:AccessIdentityProvider my_idp cb029e245cfdd66dc8d2e570d5dd3322/e00e1c13-e350-44fe-96c5-fb75c954871c
```
[access_identity_provider_guide]https://developers.cloudflare.com/access/configuring-identity-providers/
:param str resource_name: The name of the resource.
:param AccessIdentityProviderArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(AccessIdentityProviderArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
account_id: Optional[pulumi.Input[str]] = None,
configs: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AccessIdentityProviderConfigArgs']]]]] = None,
name: Optional[pulumi.Input[str]] = None,
type: Optional[pulumi.Input[str]] = None,
zone_id: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = AccessIdentityProviderArgs.__new__(AccessIdentityProviderArgs)
__props__.__dict__["account_id"] = account_id
__props__.__dict__["configs"] = configs
if name is None and not opts.urn:
raise TypeError("Missing required property 'name'")
__props__.__dict__["name"] = name
if type is None and not opts.urn:
raise TypeError("Missing required property 'type'")
__props__.__dict__["type"] = type
__props__.__dict__["zone_id"] = zone_id
super(AccessIdentityProvider, __self__).__init__(
'cloudflare:index/accessIdentityProvider:AccessIdentityProvider',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
account_id: Optional[pulumi.Input[str]] = None,
configs: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AccessIdentityProviderConfigArgs']]]]] = None,
name: Optional[pulumi.Input[str]] = None,
type: Optional[pulumi.Input[str]] = None,
zone_id: Optional[pulumi.Input[str]] = None) -> 'AccessIdentityProvider':
"""
Get an existing AccessIdentityProvider resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] account_id: The account ID the provider should be associated with. Conflicts with `zone_id`.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AccessIdentityProviderConfigArgs']]]] configs: Provider configuration from the [developer documentation][access_identity_provider_guide].
:param pulumi.Input[str] name: Friendly name of the Access Identity Provider configuration.
:param pulumi.Input[str] type: The provider type to use. Must be one of: `"centrify"`,
`"facebook"`, `"google-apps"`, `"oidc"`, `"github"`, `"google"`, `"saml"`,
`"linkedin"`, `"azureAD"`, `"okta"`, `"onetimepin"`, `"onelogin"`, `"yandex"`.
:param pulumi.Input[str] zone_id: The zone ID the provider should be associated with. Conflicts with `account_id`.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _AccessIdentityProviderState.__new__(_AccessIdentityProviderState)
__props__.__dict__["account_id"] = account_id
__props__.__dict__["configs"] = configs
__props__.__dict__["name"] = name
__props__.__dict__["type"] = type
__props__.__dict__["zone_id"] = zone_id
return AccessIdentityProvider(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="accountId")
def account_id(self) -> pulumi.Output[Optional[str]]:
"""
The account ID the provider should be associated with. Conflicts with `zone_id`.
"""
return pulumi.get(self, "account_id")
@property
@pulumi.getter
def configs(self) -> pulumi.Output[Optional[Sequence['outputs.AccessIdentityProviderConfig']]]:
"""
Provider configuration from the [developer documentation][access_identity_provider_guide].
"""
return pulumi.get(self, "configs")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Friendly name of the Access Identity Provider configuration.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
The provider type to use. Must be one of: `"centrify"`,
`"facebook"`, `"google-apps"`, `"oidc"`, `"github"`, `"google"`, `"saml"`,
`"linkedin"`, `"azureAD"`, `"okta"`, `"onetimepin"`, `"onelogin"`, `"yandex"`.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter(name="zoneId")
def zone_id(self) -> pulumi.Output[Optional[str]]:
"""
The zone ID the provider should be associated with. Conflicts with `account_id`.
"""
return pulumi.get(self, "zone_id")
| 44.5 | 205 | 0.632163 | 2,197 | 21,360 | 5.953573 | 0.106054 | 0.06896 | 0.057798 | 0.050459 | 0.850994 | 0.833563 | 0.82393 | 0.808333 | 0.801988 | 0.793731 | 0 | 0.016367 | 0.253418 | 21,360 | 479 | 206 | 44.592902 | 0.80385 | 0.470365 | 0 | 0.694175 | 1 | 0 | 0.105066 | 0.047436 | 0 | 0 | 0 | 0 | 0 | 1 | 0.15534 | false | 0.004854 | 0.033981 | 0 | 0.281553 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
f07d6a3fd30cd4efeb7b642150c72fccc2ecb7da | 222 | py | Python | codecademy_scripts/print_square.py | Faraaz54/python_training_problems | 24c7b42daaf54366759e1d7c4b42f9936316e94b | [
"MIT"
] | null | null | null | codecademy_scripts/print_square.py | Faraaz54/python_training_problems | 24c7b42daaf54366759e1d7c4b42f9936316e94b | [
"MIT"
] | null | null | null | codecademy_scripts/print_square.py | Faraaz54/python_training_problems | 24c7b42daaf54366759e1d7c4b42f9936316e94b | [
"MIT"
] | null | null | null | def print_square():
for x in range(0,11):
if x in (0,5,10):
print '+' , ' -' * 4 , ' +' , ' -' * 4 , '+'
else:
print '|' , ' ' * 4 , ' |' , ' ' * 4 , '|'
print_square()
| 27.75 | 57 | 0.306306 | 24 | 222 | 2.75 | 0.583333 | 0.333333 | 0.212121 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.089431 | 0.445946 | 222 | 7 | 58 | 31.714286 | 0.447154 | 0 | 0 | 0 | 0 | 0 | 0.074419 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0.571429 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 7 |
652b82158f325294a6b04117daf347cd8017637a | 1,739 | py | Python | RecoMuon/MuonIsolationProducers/python/trackExtractorBlocks_cff.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 852 | 2015-01-11T21:03:51.000Z | 2022-03-25T21:14:00.000Z | RecoMuon/MuonIsolationProducers/python/trackExtractorBlocks_cff.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 30,371 | 2015-01-02T00:14:40.000Z | 2022-03-31T23:26:05.000Z | RecoMuon/MuonIsolationProducers/python/trackExtractorBlocks_cff.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 3,240 | 2015-01-02T05:53:18.000Z | 2022-03-31T17:24:21.000Z | import FWCore.ParameterSet.Config as cms
# -*-TCL-*-
MIsoTrackExtractorCtfBlock = cms.PSet(
Diff_z = cms.double(0.2),
inputTrackCollection = cms.InputTag("generalTracks"),
BeamSpotLabel = cms.InputTag("offlineBeamSpot"),
ComponentName = cms.string('TrackExtractor'),
DR_Max = cms.double(0.5),
Diff_r = cms.double(0.1),
Chi2Prob_Min = cms.double(-1.0),
DR_Veto = cms.double(0.01),
NHits_Min = cms.uint32(0),
Chi2Ndof_Max = cms.double(1e+64),
Pt_Min = cms.double(-1.0),
DepositLabel = cms.untracked.string(''),
BeamlineOption = cms.string('BeamSpotFromEvent')
)
MIsoTrackExtractorBlock = cms.PSet(
Diff_z = cms.double(0.2),
inputTrackCollection = cms.InputTag("generalTracks"),
BeamSpotLabel = cms.InputTag("offlineBeamSpot"),
ComponentName = cms.string('TrackExtractor'),
DR_Max = cms.double(0.5),
Diff_r = cms.double(0.1),
Chi2Prob_Min = cms.double(-1.0),
DR_Veto = cms.double(0.01),
NHits_Min = cms.uint32(0),
Chi2Ndof_Max = cms.double(1e+64),
Pt_Min = cms.double(-1.0),
DepositLabel = cms.untracked.string(''),
BeamlineOption = cms.string('BeamSpotFromEvent')
)
MIsoTrackExtractorGsBlock = cms.PSet(
Diff_z = cms.double(0.2),
inputTrackCollection = cms.InputTag("ctfGSWithMaterialTracks"),
BeamSpotLabel = cms.InputTag("offlineBeamSpot"),
ComponentName = cms.string('TrackExtractor'),
DR_Max = cms.double(0.5),
Diff_r = cms.double(0.1),
Chi2Prob_Min = cms.double(-1.0),
DR_Veto = cms.double(0.01),
NHits_Min = cms.uint32(0),
Chi2Ndof_Max = cms.double(1e+64),
Pt_Min = cms.double(-1.0),
DepositLabel = cms.untracked.string(''),
BeamlineOption = cms.string('BeamSpotFromEvent')
)
| 34.098039 | 67 | 0.673951 | 217 | 1,739 | 5.290323 | 0.202765 | 0.164634 | 0.10453 | 0.067944 | 0.882404 | 0.882404 | 0.882404 | 0.882404 | 0.882404 | 0.882404 | 0 | 0.043599 | 0.169063 | 1,739 | 50 | 68 | 34.78 | 0.750865 | 0.005175 | 0 | 0.826087 | 0 | 0 | 0.108343 | 0.013326 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.021739 | 0 | 0.021739 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
33340009bb2a2b807cf289b1ef59751540906a32 | 688 | py | Python | test/testcases.py | cmorterud/flask-eztest | 3d0827c64e09c7787ec00f62ef8a94bb153ae9a5 | [
"MIT"
] | 1 | 2018-06-22T04:56:14.000Z | 2018-06-22T04:56:14.000Z | test/testcases.py | cmorterud/flask-eztest | 3d0827c64e09c7787ec00f62ef8a94bb153ae9a5 | [
"MIT"
] | null | null | null | test/testcases.py | cmorterud/flask-eztest | 3d0827c64e09c7787ec00f62ef8a94bb153ae9a5 | [
"MIT"
] | 2 | 2018-08-30T08:56:17.000Z | 2019-05-08T15:20:26.000Z |
from flaskeztest.eztestcase import EZTestCase
class TestCase1(EZTestCase):
    """Visit '/one' with the 'oneuser' fixture and check the User.name field.

    NOTE(review): assert_field_exists('User', 'name') is called twice in a
    row; compare TestCase2, whose second call asserts hidden-ness instead --
    the duplicate here may be unintentional. Confirm intent.
    """

    # eztest fixture loaded before this case runs
    FIXTURE = 'oneuser'

    def runTest(self):
        self.navigate('/one')
        self.assertTrue(self.page.has_current_path("/one"))
        self.assertTrue(self.page.has_text('Bob'))
        self.assert_field_exists('User', 'name')
        self.assert_field_exists('User', 'name')
class TestCase2(EZTestCase):
    """Visit '/one' with the 'oneuser' fixture; the User.name field must
    exist in the page markup but be hidden from display."""

    # eztest fixture loaded before this case runs
    FIXTURE = 'oneuser'

    def runTest(self):
        self.navigate('/one')
        self.assertTrue(self.page.has_current_path("/one"))
        self.assertTrue(self.page.has_text('Bob'))
        self.assert_field_exists('User', 'name')
        self.assert_field_is_hidden('User', 'name')
| 25.481481 | 59 | 0.65407 | 82 | 688 | 5.304878 | 0.341463 | 0.064368 | 0.156322 | 0.193103 | 0.804598 | 0.804598 | 0.772414 | 0.772414 | 0.772414 | 0.772414 | 0 | 0.00363 | 0.199128 | 688 | 26 | 60 | 26.461538 | 0.785844 | 0 | 0 | 0.764706 | 0 | 0 | 0.098981 | 0 | 0 | 0 | 0 | 0 | 0.470588 | 1 | 0.117647 | false | 0 | 0.058824 | 0 | 0.411765 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
335bbde29bd8c80d95d93f79dbc49c3d32005a96 | 217 | py | Python | moyasar/invoice.py | moyasar/moyasar-python | 8cf6de9e2ff5a98634b6751c1b1aa72ce0d48dd7 | [
"MIT"
] | 6 | 2019-03-03T19:02:35.000Z | 2019-09-29T04:49:40.000Z | moyasar/invoice.py | moyasar/moyasar-python | 8cf6de9e2ff5a98634b6751c1b1aa72ce0d48dd7 | [
"MIT"
] | 7 | 2019-03-04T14:50:59.000Z | 2021-06-01T23:25:29.000Z | moyasar/invoice.py | moyasar/moyasar-python | 8cf6de9e2ff5a98634b6751c1b1aa72ce0d48dd7 | [
"MIT"
] | 4 | 2019-03-03T19:03:28.000Z | 2021-07-15T03:10:55.000Z | from moyasar.actions.create import Create
from moyasar.resource import Resource
from moyasar.actions.cancel import Cancel
from moyasar.helpers import Format
class Invoice(Resource, Cancel, Create, Format):
    """Moyasar invoice API resource.

    All behavior comes from the mixins: ``Resource`` (base REST resource),
    ``Create`` and ``Cancel`` (API actions), and ``Format`` (response
    formatting).  No invoice-specific logic is needed.
    """
    pass
| 24.111111 | 48 | 0.815668 | 29 | 217 | 6.103448 | 0.413793 | 0.248588 | 0.20339 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.129032 | 217 | 8 | 49 | 27.125 | 0.936508 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.166667 | 0.666667 | 0 | 0.833333 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 8 |
3367e18e1be0578435c94c745b419ecd98e1552f | 10,527 | py | Python | tests/test_mapreduce.py | File5/simple-mapreduce | 3a37b880656f4f27010e77266be9f64ea5d181b5 | [
"MIT"
] | 9 | 2021-02-19T16:01:27.000Z | 2021-03-11T20:51:59.000Z | tests/test_mapreduce.py | File5/simple-mapreduce | 3a37b880656f4f27010e77266be9f64ea5d181b5 | [
"MIT"
] | null | null | null | tests/test_mapreduce.py | File5/simple-mapreduce | 3a37b880656f4f27010e77266be9f64ea5d181b5 | [
"MIT"
] | 2 | 2021-02-19T16:51:29.000Z | 2021-02-27T01:00:20.000Z | from mapreduce import MapReduceTask
def test_flat_map():
    """flat_map applies a generator function to each element and chains the results."""
    from mapreduce.mapreduce import flat_map

    def one_two(_):
        yield 1
        yield 2

    flattened = list(flat_map(one_two, [1, 2]))
    assert flattened == [1, 2, 1, 2]
# run pytest -s to see the output
def test_readme_example():
    """README example: count occurrences per value, then arg-max the counts."""
    t = MapReduceTask(verbose=True, lazy=False)

    # the order matters: stages run in declaration order
    @t.map
    def m1(k, v):
        # emit each value as a key with count 1
        yield v, 1

    @t.reduce
    def r1(k, v):
        # total occurrences per value
        yield k, sum(v)

    @t.map
    def m2(k, v):
        # funnel all (value, count) pairs under one key so the final
        # reduce sees them together
        yield 'all', (k, v)

    @t.reduce
    def r2(k, v):
        # pick the (key, count) pair with the highest count
        km, vm = None, None
        for ki, vi in v:
            if vm is None or vi > vm:
                km, vm = ki, vi
        yield 'max', (km, vm)

    x = [1,2,3,1,2,1,4,5,6]
    # print newline, so the output will be on the new line when run by pytest
    print('')
    assert list(t(x)) == [('max', (1, 3))]
def test_readme_example2():
    """README example 2: classic word count over a single sentence."""
    t = MapReduceTask(verbose=True, lazy=False)

    # the order matters: stages run in declaration order
    @t.map
    def m1(k, v):
        # tokenize on spaces; emit (word, 1) per occurrence
        for word in v.split(' '):
            yield word, 1

    @t.reduce
    def r1(k, v):
        # total occurrences per word
        yield k, sum(v)

    x = ["hello world word world of words"]
    # print newline, so the output will be on the new line when run by pytest
    print('')
    assert list(t(x)) == [('hello', 1), ('world', 2), ('word', 1), ('of', 1), ('words', 1)]
def test_readme_example_lazy():
    """With lazy=True nothing is evaluated, so the poisoned mapper never runs."""
    t = MapReduceTask(verbose=True, lazy=True)

    # the order matters: stages run in declaration order
    @t.map
    def m1(k, v):
        yield v, 1 / 0  # will raise ZeroDivisionError if evaluated

    @t.reduce
    def r1(k, v):
        yield k, sum(v)

    @t.map
    def m2(k, v):
        yield 'all', (k, v)

    @t.reduce
    def r2(k, v):
        # arg-max over the accumulated (key, count) pairs
        km, vm = None, None
        for ki, vi in v:
            if vm is None or vi > vm:
                km, vm = ki, vi
        yield 'max', (km, vm)

    x = [1,2,3,1,2,1,4,5,6]
    # print newline, so the output will be on the new line when run by pytest
    print('')
    try:
        t(x)
    except ZeroDivisionError:
        assert False, "should not be evaluated"
    else:
        assert True
def test_repeated_n_times():
    """BFS-style label propagation over a small graph, repeated a fixed 3 times."""
    t = MapReduceTask(verbose=True, lazy=False)

    @t.map
    def m1(k, v):
        # `start_node` closes over the enclosing test's variable, which is
        # assigned below (valid: it is only read when the task runs).
        nonlocal start_node
        n, neighbors = v
        state = 1 if n == start_node else 0  # 1 = frontier, 0 = unvisited
        yield n, (state, neighbors)

    with t.repeated(3) as repeated:
        @repeated.reduce
        def r1(n, l):
            # merge all records for node n: max state, concatenated neighbors
            state = 0
            neighbors = []
            for i in l:
                state = max(state, i[0])  # i.state
                neighbors += i[1]  # i.neighbors
            if state == 1:
                # expand the frontier, then mark this node visited (2)
                for o in neighbors:
                    yield o, (1, [])
                state = 2
            yield n, (state, neighbors)

    start_node = 'x'
    # adjacency list; every node is reachable from 'x' within 3 rounds
    x = {
        'x': ['a', 'b', 'c'],
        'a': ['e'],
        'b': ['d'],
        'c': ['d', 'x'],
    }
    assert list(t(x.items())) == [
        ('e', (2, [])),
        ('a', (2, ['e'])),
        ('d', (2, [])),
        ('b', (2, ['d'])),
        ('x', (2, ['a', 'b', 'c'])),
        ('c', (2, ['d', 'x']))
    ]
def test_repeated_inf_break():
    """Unbounded repetition stopped explicitly via repeated.stop() once no
    node is left in frontier state."""
    t = MapReduceTask(verbose=True, lazy=False)

    @t.map
    def m1(k, v):
        # `start_node` closes over the test variable assigned below
        nonlocal start_node
        n, neighbors = v
        state = 1 if n == start_node else 0  # 1 = frontier, 0 = unvisited
        yield n, (state, neighbors)

    with t.repeated() as repeated:
        @repeated.reduce
        def r1(n, l):
            # merge all records for node n: max state, concatenated neighbors
            state = 0
            neighbors = []
            for i in l:
                state = max(state, i[0])  # i.state
                neighbors += i[1]  # i.neighbors
            if state == 1:
                # expand the frontier, then mark this node visited (2)
                for o in neighbors:
                    yield o, (1, [])
                state = 2
            yield n, (state, neighbors)

        @repeated.map
        def m2_break(k, v):
            # gather everything under one key so the stop check sees all nodes
            yield 'all', (k, v)

        @repeated.reduce
        def break_reduce(k, v):
            # for/else: if no node is still in frontier state (1), stop looping
            for n, l in v:
                if l[0] == 1:
                    break
            else:
                repeated.stop()
            # pass the records through unchanged
            for ki, vi in v:
                yield ki, vi

    @t.reduce
    def r2_back(k, v):
        # unpack the 'all' grouping back into per-node records
        for ki, vi in v:
            yield ki, vi

    start_node = 'x'
    x = {
        'x': ['a', 'b', 'c'],
        'a': ['e'],
        'b': ['d'],
        'c': ['d', 'x'],
    }
    # print newline, so the output will be on the new line when run by pytest
    print('')
    assert list(t(x.items())) == [
        ('e', (2, [])),
        ('a', (2, ['e'])),
        ('d', (2, [])),
        ('b', (2, ['d'])),
        ('x', (2, ['a', 'b', 'c'])),
        ('c', (2, ['d', 'x']))
    ]
def test_repeated_n_times_lazy():
    """Lazy variant of test_repeated_n_times: the poisoned mapper must never run."""
    t = MapReduceTask(verbose=True, lazy=True)

    @t.map
    def m1(k, v):
        x = 1 / 0  # will raise ZeroDivisionError if evaluated
        nonlocal start_node
        n, neighbors = v
        state = 1 if n == start_node else 0
        yield n, (state, neighbors)

    with t.repeated(3) as repeated:
        @repeated.reduce
        def r1(n, l):
            state = 0
            neighbors = []
            for i in l:
                state = max(state, i[0])  # i.state
                neighbors += i[1]  # i.neighbors
            if state == 1:
                for o in neighbors:
                    yield o, (1, [])
                state = 2
            yield n, (state, neighbors)

    start_node = 'x'
    x = {
        'x': ['a', 'b', 'c'],
        'a': ['e'],
        'b': ['d'],
        'c': ['d', 'x'],
    }
    # print newline, so the output will be on the new line when run by pytest
    print('')
    # NOTE(review): passes the dict `x` (keys only), unlike the non-lazy
    # variant's x.items(); harmless here since nothing is evaluated, but
    # confirm it is intentional.
    try:
        t(x)
    except ZeroDivisionError:
        assert False, "should not be evaluated"
    else:
        assert True
def test_repeated_inf_break_lazy():
    """Lazy variant of test_repeated_inf_break: building the pipeline must not
    evaluate the poisoned mapper."""
    t = MapReduceTask(verbose=True, lazy=True)

    @t.map
    def m1(k, v):
        x = 1 / 0  # will raise ZeroDivisionError if evaluated
        nonlocal start_node
        n, neighbors = v
        state = 1 if n == start_node else 0
        yield n, (state, neighbors)

    with t.repeated() as repeated:
        @repeated.reduce
        def r1(n, l):
            state = 0
            neighbors = []
            for i in l:
                state = max(state, i[0])  # i.state
                neighbors += i[1]  # i.neighbors
            if state == 1:
                for o in neighbors:
                    yield o, (1, [])
                state = 2
            yield n, (state, neighbors)

        @repeated.map
        def m2_break(k, v):
            yield 'all', (k, v)

        @repeated.reduce
        def break_reduce(k, v):
            # for/else: stop repeating once no node is in frontier state (1)
            for n, l in v:
                if l[0] == 1:
                    break
            else:
                repeated.stop()
            for ki, vi in v:
                yield ki, vi

    @t.reduce
    def r2_back(k, v):
        # unpack the 'all' grouping back into per-node records
        for ki, vi in v:
            yield ki, vi

    start_node = 'x'
    x = {
        'x': ['a', 'b', 'c'],
        'a': ['e'],
        'b': ['d'],
        'c': ['d', 'x'],
    }
    # print newline, so the output will be on the new line when run by pytest
    print('')
    try:
        t(x)
    except ZeroDivisionError:
        assert False, "should not be evaluated"
    else:
        assert True
def test_repeated_inf_break_discarded():
    """Like test_repeated_inf_break, but the stop-check stages live in a
    `discarded` sub-context, so their output never reaches the result."""
    t = MapReduceTask(verbose=True, lazy=False)

    @t.map
    def m1(k, v):
        nonlocal start_node
        n, neighbors = v
        state = 1 if n == start_node else 0  # 1 = frontier, 0 = unvisited
        yield n, (state, neighbors)

    with t.repeated() as repeated:
        @repeated.reduce
        def r1(n, l):
            # merge all records for node n: max state, concatenated neighbors
            state = 0
            neighbors = []
            for i in l:
                state = max(state, i[0])  # i.state
                neighbors += i[1]  # i.neighbors
            if state == 1:
                for o in neighbors:
                    yield o, (1, [])
                state = 2
            yield n, (state, neighbors)

        # stages inside `discarded` can inspect the data and call stop(),
        # but what they yield is thrown away
        with repeated.discarded(verbose=True) as discarded:
            @discarded.map
            def m2_break(k, v):
                yield 'all', (k, v)

            @discarded.reduce
            def break_reduce(k, v):
                # for/else: stop once no node is in frontier state (1)
                for n, l in v:
                    if l[0] == 1:
                        break
                else:
                    repeated.stop()
                # should have yield statement for the function to be generator
                if False: yield 1
                yield None, None

    start_node = 'x'
    x = {
        'x': ['a', 'b', 'c'],
        'a': ['e'],
        'b': ['d'],
        'c': ['d', 'x'],
    }
    # print newline, so the output will be on the new line when run by pytest
    print('')
    assert list(t(x.items())) == [
        ('e', (2, [])),
        ('a', (2, ['e'])),
        ('d', (2, [])),
        ('b', (2, ['d'])),
        ('x', (2, ['a', 'b', 'c'])),
        ('c', (2, ['d', 'x']))
    ]
def test_repeated_inf_break_discarded_lazy():
    """Lazy variant of test_repeated_inf_break_discarded: the poisoned mapper
    must never run while the pipeline is merely constructed."""
    t = MapReduceTask(verbose=True, lazy=True)

    @t.map
    def m1(k, v):
        x = 1 / 0  # will raise ZeroDivisionError if evaluated
        nonlocal start_node
        n, neighbors = v
        state = 1 if n == start_node else 0
        yield n, (state, neighbors)

    with t.repeated() as repeated:
        @repeated.reduce
        def r1(n, l):
            state = 0
            neighbors = []
            for i in l:
                state = max(state, i[0])  # i.state
                neighbors += i[1]  # i.neighbors
            if state == 1:
                for o in neighbors:
                    yield o, (1, [])
                state = 2
            yield n, (state, neighbors)

        # stop-check stages whose own output is thrown away
        with repeated.discarded(verbose=True) as discarded:
            @discarded.map
            def m2_break(k, v):
                yield 'all', (k, v)

            @discarded.reduce
            def break_reduce(k, v):
                # for/else: stop once no node is in frontier state (1)
                for n, l in v:
                    if l[0] == 1:
                        break
                else:
                    repeated.stop()
                # should have yield statement for the function to be generator
                if False: yield 1
                # yield None, None # can yield anything since changes will be discarded

    start_node = 'x'
    x = {
        'x': ['a', 'b', 'c'],
        'a': ['e'],
        'b': ['d'],
        'c': ['d', 'x'],
    }
    # print newline, so the output will be on the new line when run by pytest
    print('')
    try:
        t(x)
    except ZeroDivisionError:
        assert False, "should not be evaluated"
    else:
        assert True
| 25.184211 | 91 | 0.442576 | 1,364 | 10,527 | 3.370235 | 0.082845 | 0.013922 | 0.028714 | 0.052208 | 0.927779 | 0.925169 | 0.921253 | 0.904938 | 0.904938 | 0.904938 | 0 | 0.024458 | 0.417403 | 10,527 | 417 | 92 | 25.244604 | 0.725094 | 0.108388 | 0 | 0.903226 | 0 | 0 | 0.030258 | 0 | 0 | 0 | 0 | 0 | 0.041056 | 1 | 0.1261 | false | 0 | 0.005865 | 0 | 0.131965 | 0.02346 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
682bdfaeb0a1c52bd7f187ecf0e9b4d7c6227121 | 9,730 | py | Python | ALE/test.py | waggle-sensor/machinelearning | d076c8a5b3d340ca2520151e86f7d70b98bdaf1f | [
"MIT"
] | null | null | null | ALE/test.py | waggle-sensor/machinelearning | d076c8a5b3d340ca2520151e86f7d70b98bdaf1f | [
"MIT"
] | null | null | null | ALE/test.py | waggle-sensor/machinelearning | d076c8a5b3d340ca2520151e86f7d70b98bdaf1f | [
"MIT"
] | null | null | null | import unittest
from Data.data import *
from AL import algos
from engine import Engine
from Zoo import zoo
import tensorflow as tf
##############################################
class TestData(unittest.TestCase):
    """Integration tests for the AL pipeline: data loaders, AL algorithms,
    model zoo and the Engine driver.

    NOTE(review): these tests touch the real filesystem
    (Data/DataSets/MNIST/* caches) and actually train models, so they are
    slow and sensitive to leftover cache files between runs.
    """

    @classmethod
    def setUp(cls) -> None:
        # no shared fixtures; each test manages its own caches
        pass

    @classmethod
    def tearDown(cls) -> None:
        pass

    def test_mnistLoader1(self):
        print("\n"+"Running test_mnistLoader1 test.")
        """ Test load mnist csv raw into train, val and cache with dataManager class """
        # remove any stale caches left by a previous run
        if os.path.isfile("Data/DataSets/MNIST/train_cache.csv"):
            os.remove("Data/DataSets/MNIST/train_cache.csv")
        if os.path.isfile("Data/DataSets/MNIST/val_cache.csv"):
            os.remove("Data/DataSets/MNIST/val_cache.csv")
        if os.path.isfile("Data/DataSets/MNIST/ul_cache.csv"):
            os.remove("Data/DataSets/MNIST/ul_cache.csv")
        if os.path.isdir("Data/DataSets/MNIST/ul_cache"):
            shutil.rmtree("Data/DataSets/MNIST/ul_cache")
        split = (.2, .2, .6)  # (train, val, unlabeled)
        bins = 3
        keep_bins = True
        # NOTE(review): uses ToyALoader although the test name says mnist --
        # confirm which loader was intended.
        dataClass = ToyALoader(bins, keep_bins)  # Declare data manager class
        dataClass.parseData(split, bins, keep_bins)
        dataClass.loadCaches()
        dataClass.deleteCache()

    def test_mnistLoader2(self):
        print("\n"+"Running test_mnistLoader2 test.")
        """ Test load mnist csv raw into train, val and cache with dataManager class """
        if os.path.isfile("Data/DataSets/MNIST/train_cache.csv"):
            os.remove("Data/DataSets/MNIST/train_cache.csv")
        if os.path.isfile("Data/DataSets/MNIST/val_cache.csv"):
            os.remove("Data/DataSets/MNIST/val_cache.csv")
        if os.path.isfile("Data/DataSets/MNIST/ul_cache.csv"):
            os.remove("Data/DataSets/MNIST/ul_cache.csv")
        # NOTE(review): `elif` means the ul_cache directory is not removed
        # when ul_cache.csv also exists (test_mnistLoader1 uses two `if`s) --
        # confirm which behavior is intended.
        elif os.path.isdir("Data/DataSets/MNIST/ul_cache"):
            shutil.rmtree("Data/DataSets/MNIST/ul_cache")
        split = (.2, .2, .6)  # (train, val, unlabeled)
        bins = 1
        keep_bins = True
        dataClass = mnistLoader(bins, keep_bins)  # Declare data manager class
        dataClass.parseData(split, bins, keep_bins)
        dataClass.loadCaches()
        dataClass.deleteCache()

    def test_mnistLoader3(self):
        print("\n"+"Running test_mnistLoader3 test.")
        """ Test load mnist csv raw into train, val and cache with dataManager class """
        if os.path.isfile("Data/DataSets/MNIST/train_cache.csv"):
            os.remove("Data/DataSets/MNIST/train_cache.csv")
        if os.path.isfile("Data/DataSets/MNIST/val_cache.csv"):
            os.remove("Data/DataSets/MNIST/val_cache.csv")
        if os.path.isfile("Data/DataSets/MNIST/ul_cache.csv"):
            os.remove("Data/DataSets/MNIST/ul_cache.csv")
        elif os.path.isdir("Data/DataSets/MNIST/ul_cache"):
            shutil.rmtree("Data/DataSets/MNIST/ul_cache")
        split = (.2, .2, .6)  # (train, val, unlabeled)
        bins = 3
        keep_bins = False
        dataClass = mnistLoader(bins, keep_bins)  # Declare data manager class
        dataClass.parseData(split, bins, keep_bins)
        dataClass.loadCaches()
        dataClass.deleteCache()

    def test_mnistLoader4(self):
        print("\n"+"Running test_mnistLoader4 test.")
        """ Test load mnist csv raw into train, val and cache with dataManager class """
        if os.path.isfile("Data/DataSets/MNIST/train_cache.csv"):
            os.remove("Data/DataSets/MNIST/train_cache.csv")
        if os.path.isfile("Data/DataSets/MNIST/val_cache.csv"):
            os.remove("Data/DataSets/MNIST/val_cache.csv")
        if os.path.isfile("Data/DataSets/MNIST/ul_cache.csv"):
            os.remove("Data/DataSets/MNIST/ul_cache.csv")
        elif os.path.isdir("Data/DataSets/MNIST/ul_cache"):
            shutil.rmtree("Data/DataSets/MNIST/ul_cache")
        split = (.2, .2, .6)  # (train, val, unlabeled)
        bins = 1
        keep_bins = False
        dataClass = mnistLoader(bins, keep_bins)  # Declare data manager class
        dataClass.parseData(split, bins, keep_bins)
        dataClass.loadCaches()
        dataClass.deleteCache()

    def test_run_ToyA(self):
        """End-to-end AL run (one round, one cycle) on the ToyA data set."""
        print("\n" + "Running test_run_ToyA test.")
        # | ----------------------------
        # | 1. Select data
        # | ---------------------------
        # DataManager parameters
        split = (.1, .1, .8)  # (train, val, unlabeled)
        bins = 1
        keep_bins = False
        dataClass = ToyALoader(bins, keep_bins)  # Declare data manager class
        dataClass.parseData(split, bins, keep_bins)
        dataClass.loadCaches()
        # | ----------------------------
        # | 2. Select Active Learning algorithm
        # | ----------------------------
        algo = algos.leastConfidence()  # uncertainty sampling: least confidence
        algo.reset()
        # | ----------------------------
        # | 3. Select model
        # | ----------------------------
        modelName = "ToyA_NN"  # Pick pre-made model
        metrics = [tf.keras.metrics.MeanSquaredError(), tf.keras.metrics.KLDivergence()]
        zk = zoo.zooKeeper(modelName, show_model=True, metrics=metrics)  # Load model and compile
        # | ----------------------------
        # | 4. Run algorithm and log results
        # | ----------------------------
        # Declare engine
        sample_size = 50
        engine = Engine(algo, dataClass, zk, sample_size)
        # Initial training of model on original training data
        engine.initialTrain(epochs=1, batch_size=32, val=True, plot=False)
        # Run active learning algo
        # rounds is how many times the active learning algo samples;
        # cycles is how many epochs the model is retrained after each round
        engine.run(rounds=1, cycles=1, batch_size=32, val=True, plot=False)
        dataClass.deleteCache()

    def test_run_MNIST(self):
        """End-to-end AL run (one round, one cycle) on MNIST."""
        print("\n" + "Running test_run_MNIST test.")
        # | ----------------------------
        # | 1. Select data
        # | ---------------------------
        # DataManager parameters
        split = (.1, .1, .8)  # (train, val, unlabeled)
        bins = 1
        keep_bins = False
        dataClass = mnistLoader(bins, keep_bins)  # Declare data manager class
        dataClass.parseData(split, bins, keep_bins)
        dataClass.loadCaches()
        # | ----------------------------
        # | 2. Select Active Learning algorithm
        # | ----------------------------
        algo = algos.leastConfidence()  # uncertainty sampling: least confidence
        algo.reset()
        # | ----------------------------
        # | 3. Select model
        # | ----------------------------
        modelName = "mnistCNN"  # Pick pre-made model
        metrics = [tf.keras.metrics.MeanSquaredError(), tf.keras.metrics.KLDivergence()]
        zk = zoo.zooKeeper(modelName, show_model=True, metrics=metrics)  # Load model and compile
        # | ----------------------------
        # | 4. Run algorithm and log results
        # | ----------------------------
        # Declare engine
        sample_size = 50
        engine = Engine(algo, dataClass, zk, sample_size)
        # Initial training of model on original training data
        engine.initialTrain(epochs=1, batch_size=32, val=True, plot=False)
        # Run active learning algo
        # rounds is how many times the active learning algo samples;
        # cycles is how many epochs the model is retrained after each round
        engine.run(rounds=1, cycles=1, batch_size=32, val=True, plot=False)
        dataClass.deleteCache()

    def test_customAlgo(self):
        """ Check to see if custom children class AL algo are created with accordance to inheritance """
        # Test passive learning
        algo = algos.uniformSample()
        self.assertTrue(algos.alAlgo.__subclasshook__(algo))
        # Test ratio confidence
        algo = algos.ratioConfidence()
        self.assertTrue(algos.alAlgo.__subclasshook__(algo))
        # Test least confidence
        algo = algos.leastConfidence()
        self.assertTrue(algos.alAlgo.__subclasshook__(algo))
        # Test margin confidence
        algo = algos.marginConfidence()
        self.assertTrue(algos.alAlgo.__subclasshook__(algo))
        # Test entropy
        algo = algos.entropy()
        self.assertTrue(algos.alAlgo.__subclasshook__(algo))

    def test_customModel(self):
        """ Check to see if custom children class zoo.customModel are created with accordance to inheritance """
        # Test mnsitCNN
        modelName = "mnistCNN"  # Pick pre-made model
        metrics = [tf.keras.metrics.Accuracy()]
        zk = zoo.zooKeeper(modelName, show_model=False, metrics=metrics)
        self.assertTrue(zoo.customModel.__subclasshook__(zk))
        # Test ToyA_NN
        modelName = "ToyA_NN"  # Pick pre-made model
        metrics = [tf.keras.metrics.Accuracy()]
        zk = zoo.zooKeeper(modelName, show_model=False, metrics=metrics)
        self.assertTrue(zoo.customModel.__subclasshook__(zk))
        # Test cifar10CNN
        modelName = "cifar10CNN"  # Pick pre-made model
        metrics = [tf.keras.metrics.Accuracy()]
        zk = zoo.zooKeeper(modelName, show_model=False, metrics=metrics)
        self.assertTrue(zoo.customModel.__subclasshook__(zk))

    def test_zooKeeperRaiseNoModel(self):
        """ This test checks if error is raised from zooKeeper.getmodel() for non existent model"""
        self.assertRaises(ImportError,zoo.zooKeeper,"dummy_model")
##############################################
# Allow running this module directly (in addition to pytest/unittest discovery).
if __name__ == "__main__":
    unittest.main()
6858a37c7f85098c8ff3e6ca3a59ab1f3805fb14 | 65 | py | Python | addons14/attachment_category/models/__init__.py | odoochain/addons_oca | 55d456d798aebe16e49b4a6070765f206a8885ca | [
"MIT"
] | 1 | 2021-06-10T14:59:13.000Z | 2021-06-10T14:59:13.000Z | addons14/attachment_category/models/__init__.py | odoochain/addons_oca | 55d456d798aebe16e49b4a6070765f206a8885ca | [
"MIT"
] | null | null | null | addons14/attachment_category/models/__init__.py | odoochain/addons_oca | 55d456d798aebe16e49b4a6070765f206a8885ca | [
"MIT"
] | 1 | 2021-04-09T09:44:44.000Z | 2021-04-09T09:44:44.000Z | from . import ir_attachment_category
from . import ir_attachment
| 21.666667 | 36 | 0.846154 | 9 | 65 | 5.777778 | 0.555556 | 0.384615 | 0.461538 | 0.846154 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.123077 | 65 | 2 | 37 | 32.5 | 0.912281 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
6886c700318c624f6fa80f78930f23b77ea95e79 | 4,698 | py | Python | tests/test_to_string.py | vivacebelles/qcdb | 5bbdcb5c833277647a36bb0a5982abb56bf29b20 | [
"BSD-3-Clause"
] | 1 | 2019-02-20T20:18:02.000Z | 2019-02-20T20:18:02.000Z | tests/test_to_string.py | vivacebelles/qcdb | 5bbdcb5c833277647a36bb0a5982abb56bf29b20 | [
"BSD-3-Clause"
] | null | null | null | tests/test_to_string.py | vivacebelles/qcdb | 5bbdcb5c833277647a36bb0a5982abb56bf29b20 | [
"BSD-3-Clause"
] | null | null | null | import sys
import pytest
from utils import *
from addons import *
# --- Fixtures -------------------------------------------------------------
# `subject*` are raw molecule input strings fed to qcdb/psi4 Molecule
# constructors; `ans*` are the expected .to_string(dtype='xyz') outputs.
# NOTE(review): whitespace inside these literals is significant for the
# string comparisons -- confirm column spacing against the test output.

subject1 = """
3 au
Co 0 0 0
H 2 0 0
h_OTher -2 0 0
"""

# expected XYZ rendering of subject1 in Bohr
ans1_au = """3 au
CoH2
Co 0.000000000000 0.000000000000 0.000000000000
H 2.000000000000 0.000000000000 0.000000000000
H -2.000000000000 -0.000000000000 0.000000000000"""

# expected XYZ rendering of subject1 in Angstrom
ans1_ang = """3
CoH2
Co 0.000000000000 0.000000000000 0.000000000000
H 1.058354417180 0.000000000000 0.000000000000
H -1.058354417180 -0.000000000000 0.000000000000"""

# expected XYZ with prec=8 and atom_format='{elea}{elem}{elbl}'
ans1c_ang = """3
CoH2
59Co 0.00000000 0.00000000 0.00000000
1H 1.05835442 0.00000000 0.00000000
1H_other -1.05835442 -0.00000000 0.00000000"""

# (older subject2 fixture kept for reference)
#subject2 = """
#Co 0 0 0
#units au
#no_reorient
#--
#@H 2 0 0
#h_OTher -2 0 0
#"""
#
#ans2_au = """3 au
#
#Co 0.000000000000 0.000000000000 0.000000000000
#@H 2.000000000000 0.000000000000 0.000000000000
#H -2.000000000000 0.000000000000 0.000000000000"""
#
#ans2_ang = """3
#
#Co 0.000000000000 0.000000000000 0.000000000000
#Gh(1) 1.058354417180 0.000000000000 0.000000000000
#H -1.058354417180 0.000000000000 0.000000000000"""
#
#ans2c_ang = """2
#
#Co 0.000000000000 0.000000000000 0.000000000000
#H -1.058354417180 0.000000000000 0.000000000000"""

# two-fragment molecule (ghost hydrogen in second fragment), in Angstrom
subject2 = """
Co 0 0 0
no_reorient
--
@H 1.05835441718 0 0
h_OTher -1.05835441718 0 0
"""

# expected XYZ of subject2 in Bohr; '@H' marks the ghost atom
ans2_au = """3 au
CoH2
Co 0.000000000000 0.000000000000 0.000000000000
@H 2.000000000000 0.000000000000 0.000000000000
H -2.000000000000 0.000000000000 0.000000000000"""

# expected XYZ in Angstrom with ghost_format='Gh({elez})'
ans2_ang = """3
CoH2
Co 0.000000000000 0.000000000000 0.000000000000
Gh(1) 1.058354417180 0.000000000000 0.000000000000
H -1.058354417180 0.000000000000 0.000000000000"""

# expected XYZ in Angstrom with ghost_format='' (ghost atom dropped)
ans2c_ang = """2
CoH2
Co 0.000000000000 0.000000000000 0.000000000000
H -1.058354417180 0.000000000000 0.000000000000"""
def test_toxyz_1a():
    """XYZ export of subject1 in Bohr matches the atomic-unit reference."""
    rendered = qcdb.Molecule(subject1).to_string(dtype='xyz', units='Bohr')
    assert compare_strings(ans1_au, rendered, sys._getframe().f_code.co_name)
def test_toxyz_1b():
    """XYZ export of subject1 converted to Angstrom matches the reference."""
    rendered = qcdb.Molecule(subject1).to_string(dtype='xyz', units='Angstrom')
    assert compare_strings(ans1_ang, rendered, sys._getframe().f_code.co_name)
def test_toxyz_1c():
    """XYZ export with custom precision and '{elea}{elem}{elbl}' atom labels."""
    rendered = qcdb.Molecule(subject1).to_string(
        dtype='xyz', prec=8, atom_format='{elea}{elem}{elbl}')
    print(rendered)
    assert compare_strings(ans1c_ang, rendered, sys._getframe().f_code.co_name)
#def test_toxyz_2a():
# subject = subject2
# mol = qcdb.Molecule(subject)
#
# xyz = mol.to_string(dtype='xyz', units='Bohr')
#
# assert compare_strings(ans2_au, xyz, sys._getframe().f_code.co_name)
#
#def test_toxyz_2b():
# subject = subject2
# mol = qcdb.Molecule(subject)
#
# xyz = mol.to_string(dtype='xyz', units='Angstrom', ghost_format='Gh({elez})')
#
# assert compare_strings(ans2_ang, xyz, sys._getframe().f_code.co_name)
#
#def test_toxyz_2c():
# subject = subject2
# mol = qcdb.Molecule(subject)
#
# xyz = mol.to_string(dtype='xyz', units='Angstrom', ghost_format='')
#
# assert compare_strings(ans2c_ang, xyz, sys._getframe().f_code.co_name)
def test_toxyz_2a():
    """XYZ export of the ghost-atom molecule in Bohr keeps the '@' marker."""
    rendered = qcdb.Molecule(subject2).to_string(dtype='xyz', units='Bohr')
    assert compare_strings(ans2_au, rendered, sys._getframe().f_code.co_name)
def test_toxyz_2b():
    """Ghost atoms render via the supplied 'Gh({elez})' format in Angstrom."""
    rendered = qcdb.Molecule(subject2).to_string(
        dtype='xyz', units='Angstrom', ghost_format='Gh({elez})')
    assert compare_strings(ans2_ang, rendered, sys._getframe().f_code.co_name)
def test_toxyz_2c():
    """An empty ghost_format drops ghost atoms from the XYZ output."""
    rendered = qcdb.Molecule(subject2).to_string(
        dtype='xyz', units='Angstrom', ghost_format='')
    assert compare_strings(ans2c_ang, rendered, sys._getframe().f_code.co_name)
@using_psi4_molrec
def test_toxyz_3a():
    """Same Bohr-unit export as 2a, but through psi4's native Molecule."""
    import psi4

    rendered = psi4.core.Molecule.from_string(subject2).to_string(dtype='xyz', units='Bohr')
    assert compare_strings(ans2_au, rendered, sys._getframe().f_code.co_name)
| 27.635294 | 82 | 0.60728 | 603 | 4,698 | 4.560531 | 0.12272 | 0.245818 | 0.327273 | 0.283636 | 0.870545 | 0.853091 | 0.828 | 0.828 | 0.828 | 0.818545 | 0 | 0.309859 | 0.274585 | 4,698 | 169 | 83 | 27.798817 | 0.497066 | 0.289698 | 0 | 0.416667 | 0 | 0 | 0.461071 | 0 | 0 | 0 | 0 | 0 | 0.083333 | 1 | 0.083333 | false | 0 | 0.059524 | 0 | 0.142857 | 0.011905 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 11 |
cc0efc93bee5ded0c68210245a40f6d6771b72a5 | 300 | py | Python | src/kgm/models/__init__.py | mberr/ea-active-learning | 0d4762b54b9ca98b7c76e2471bad8bc639eaf1ab | [
"MIT"
] | 4 | 2021-01-04T16:45:35.000Z | 2021-08-30T14:15:15.000Z | src/kgm/models/__init__.py | mberr/ea-active-learning | 0d4762b54b9ca98b7c76e2471bad8bc639eaf1ab | [
"MIT"
] | null | null | null | src/kgm/models/__init__.py | mberr/ea-active-learning | 0d4762b54b9ca98b7c76e2471bad8bc639eaf1ab | [
"MIT"
] | null | null | null | # coding=utf-8
from .matching import (
AbstractKGMatchingModel,
EdgeWeightsEnum,
GCNAlign,
KGMatchingModel,
get_matching_model_by_name,
)
__all__ = [
'AbstractKGMatchingModel',
'EdgeWeightsEnum',
'GCNAlign',
'KGMatchingModel',
'get_matching_model_by_name',
]
| 17.647059 | 33 | 0.696667 | 25 | 300 | 7.88 | 0.6 | 0.385787 | 0.467005 | 0.619289 | 0.84264 | 0.84264 | 0.84264 | 0.84264 | 0.84264 | 0 | 0 | 0.004202 | 0.206667 | 300 | 16 | 34 | 18.75 | 0.823529 | 0.04 | 0 | 0 | 0 | 0 | 0.304196 | 0.171329 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.071429 | 0 | 0.071429 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
cc2473c5e44e4b8fa4fd234c4b61c99495402bb9 | 2,598 | py | Python | projects/controllers/datasets.py | Matheus158257/projects | 26a6148046533476e625a872a2950c383aa975a8 | [
"Apache-2.0"
] | null | null | null | projects/controllers/datasets.py | Matheus158257/projects | 26a6148046533476e625a872a2950c383aa975a8 | [
"Apache-2.0"
] | null | null | null | projects/controllers/datasets.py | Matheus158257/projects | 26a6148046533476e625a872a2950c383aa975a8 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""Datasets controller."""
import platiagro
from werkzeug.exceptions import NotFound
from .utils import raise_if_experiment_does_not_exist, \
raise_if_project_does_not_exist, pagination_datasets
from ..models import Operator
def get_dataset(project_id, experiment_id, operator_id):
    """Retrieves a dataset as json.

    Args:
        project_id (str): the project uuid.
        experiment_id (str): the experiment uuid.
        operator_id (str): the operator uuid.

    Returns:
        dict: the dataset in pandas "split" orientation, with the "index"
        entry removed.

    Raises:
        NotFound: if the project, experiment or operator does not exist,
            if the operator has no ``dataset`` parameter, or if the dataset
            file is missing for the latest run.
    """
    raise_if_project_does_not_exist(project_id)
    raise_if_experiment_does_not_exist(experiment_id)

    operator = Operator.query.get(operator_id)

    if operator is None:
        raise NotFound("The specified operator does not exist")

    # get dataset name
    dataset = operator.parameters.get('dataset')

    if dataset is None:
        raise NotFound()

    try:
        metadata = platiagro.stat_dataset(name=dataset, operator_id=operator_id)
        # a dataset never materialized by a run carries no run_id
        if "run_id" not in metadata:
            raise FileNotFoundError()

        dataset = platiagro.load_dataset(name=dataset,
                                         run_id="latest",
                                         operator_id=operator_id)
        dataset = dataset.to_dict(orient="split")
        del dataset["index"]
    except FileNotFoundError as e:
        raise NotFound(str(e))

    return dataset
def get_dataset_pagination(project_id, experiment_id, operator_id, page, page_size):
    """Retrieves one page of a dataset as json.

    Args:
        project_id (str): the project uuid.
        experiment_id (str): the experiment uuid.
        operator_id (str): the operator uuid.
        page (int): page number to return.
        page_size (int): number of rows per page.

    Returns:
        dict: the paginated dataset content.

    Raises:
        NotFound: if the project, experiment, operator or dataset does not
            exist (propagated from ``get_dataset``).
    """
    # Delegate validation and loading to get_dataset instead of duplicating
    # its entire body; only the pagination of the result differs.
    dataset = get_dataset(project_id, experiment_id, operator_id)
    return pagination_datasets(page=page, page_size=page_size, elements=dataset)
| 31.301205 | 84 | 0.653195 | 310 | 2,598 | 5.235484 | 0.193548 | 0.08626 | 0.05915 | 0.046827 | 0.847197 | 0.847197 | 0.775108 | 0.775108 | 0.775108 | 0.775108 | 0 | 0.000524 | 0.265204 | 2,598 | 82 | 85 | 31.682927 | 0.84966 | 0.155889 | 0 | 0.808511 | 0 | 0 | 0.061885 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.042553 | false | 0 | 0.085106 | 0 | 0.170213 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
0bd0cc8dae997d58d8c8091334229ac7b3654396 | 168 | py | Python | results/out-picl/cpu2006/zeusmp/picl/sim.scripts.py | kleberkruger/donuts | 99a7c5885fcb6d252a47a4cb74ca714f8ba12ca6 | [
"MIT"
] | null | null | null | results/out-picl/cpu2006/zeusmp/picl/sim.scripts.py | kleberkruger/donuts | 99a7c5885fcb6d252a47a4cb74ca714f8ba12ca6 | [
"MIT"
] | null | null | null | results/out-picl/cpu2006/zeusmp/picl/sim.scripts.py | kleberkruger/donuts | 99a7c5885fcb6d252a47a4cb74ca714f8ba12ca6 | [
"MIT"
] | null | null | null | import sys
sys.argv = [ "/home/kleberkruger/PiCL/sniper/scripts/stop-by-icount.py", "1000000000" ]
execfile("/home/kleberkruger/PiCL/sniper/scripts/stop-by-icount.py")
| 42 | 87 | 0.761905 | 24 | 168 | 5.333333 | 0.583333 | 0.25 | 0.3125 | 0.40625 | 0.734375 | 0.734375 | 0.734375 | 0.734375 | 0.734375 | 0 | 0 | 0.062893 | 0.053571 | 168 | 3 | 88 | 56 | 0.742138 | 0 | 0 | 0 | 0 | 0 | 0.72619 | 0.666667 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.333333 | 0 | 0.333333 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 13 |
040a5a689d1d71e0f28a72b9eed6f440a4944205 | 13,174 | py | Python | code/boxplot.py | HurryZhao/boxplot | 50c42ce92cc8a487e6887cf42c66379011499182 | [
"MIT"
] | 4 | 2020-11-09T13:53:41.000Z | 2020-12-10T15:03:55.000Z | code/boxplot.py | HurryZhao/boxplot | 50c42ce92cc8a487e6887cf42c66379011499182 | [
"MIT"
] | null | null | null | code/boxplot.py | HurryZhao/boxplot | 50c42ce92cc8a487e6887cf42c66379011499182 | [
"MIT"
] | 1 | 2020-11-09T13:28:10.000Z | 2020-11-09T13:28:10.000Z | import matplotlib.pyplot as plt
import numpy as np
import matplotlib.patches as mpathes
import random
class boxplot:
    """Hand-rolled box-plot renderers drawn with matplotlib primitives.

    NOTE: the methods take the target axes as their first argument and are
    defined without ``self``; call them through the class, e.g.
    ``boxplot.boxplot(ax, data)``.
    """

    def boxplot(ax,
                Data,
                outlier=True,
                box_facecolor='white',
                box_edgecolor='k',
                outlier_facecolor='r',
                outlier_edgecolor='r',
                whisker_edgecolor='k',
                median_edgecolor='k',
                box_alpha=1.0,
                outlier_alpha=1.0):
        """Draw one classic box-and-whisker plot per inner list of ``Data``.

        Prints and returns an error string when ``Data`` is not a list of
        numerical lists; otherwise draws on ``ax`` and shows the figure.
        """
        try:
            h = max(max(row) for row in Data) + 0.1 * abs(max(max(row) for row in Data))
            l = min(min(row) for row in Data) + 0.1 * abs(min(min(row) for row in Data))
        except:
            print('Wrong data type, please input a list of numerical list')
            return 'Wrong data type, please input a list of numerical list'
        count = len(Data)
        a = (h - l) / 2000
        # x positions of the box centres, spread evenly over the padded range
        if outlier == True:
            center = [round(((h - l) / (count + 1)) * (k + 1), 8) for k in range(count)]
        else:
            center = [round(((h - l) / (count + 1)) * (k + 1), 8) / a for k in range(count)]
        ax.axis('equal')
        # quantile levels from the 25th to the 75th percentile in 5% steps
        quantile_levels = [0.25, 0.3, 0.35, 0.4, 0.45, 0.5, 0.55, 0.6, 0.65, 0.7, 0.75]
        for i, data in enumerate(Data):
            data = sorted(data)
            pen = [round((len(data) + 1) * q, 2) for q in quantile_levels]  # kept from original; unused
            d = [np.quantile(data, q) for q in quantile_levels]
            # Tukey fences (1.5 * IQR) for outlier detection; whiskers are
            # clipped to the actual data extremes
            iqr = d[-1] - d[0]
            fence_hi = d[-1] + 1.5 * iqr
            fence_lo = d[0] - 1.5 * iqr
            whisk_hi = min(fence_hi, data[-1])
            whisk_lo = max(fence_lo, data[0])
            outliers = [v for v in data if v > fence_hi or v < fence_lo]
            if outlier == True:
                for v in outliers:
                    dot = mpathes.Ellipse((center[i], v), 0.04 * center[-1], 0.04 * center[-1],
                                          ec=outlier_edgecolor, fc=outlier_facecolor, alpha=outlier_alpha)
                    ax.add_patch(dot)
            # whiskers: horizontal caps plus the vertical stems
            half_cap = 0.1 * center[0]
            ax.hlines(whisk_hi, center[i] - half_cap, center[i] + half_cap, whisker_edgecolor)
            ax.hlines(whisk_lo, center[i] - half_cap, center[i] + half_cap, whisker_edgecolor)
            ax.vlines(center[i], whisk_lo, d[0], whisker_edgecolor)
            ax.vlines(center[i], d[-1], whisk_hi, whisker_edgecolor)
            # median line (d[5] is the 0.5 quantile)
            half_box = 0.2 * center[0]
            ax.hlines(d[5], center[i] - half_box, center[i] + half_box, median_edgecolor, lw=3)
            # the box spans the 0.25..0.75 quantile range
            box = mpathes.Rectangle((center[i] - half_box, d[0]), 0.4 * center[0], d[-1] - d[0],
                                    ec=box_edgecolor, fc=box_facecolor, alpha=box_alpha)
            ax.add_patch(box)
        plt.show()
def info_boxplot(ax,
Data,
multiplebox=True,
outlier=True,
box_facecolor='white',
box_edgecolor='k',
outlier_facecolor='r',
outlier_edgecolor='r',
whisker_edgecolor='k',
median_edgecolor='k',
box_alpha=1.0,
outlier_alpha=1.0):
try:
h = max(max(p) for p in Data) + 0.1 * abs(max(max(p) for p in Data))
l = min(min(p) for p in Data) + 0.1 * abs(min(min(p) for p in Data))
except:
print('Wrong data type, please input a list of numerical list')
return 'Wrong data type, please input a list of numerical list'
count = len(Data)
a = (h - l) / 2000
if outlier == True:
center = [round(((h - l) / (count + 1)) * (x + 1), 8) for x in range(count)]
else:
center = [round(((h - l) / (count + 1)) * (x + 1), 8) / a for x in range(count)]
print(center)
ax.axis('equal')
i = 0
for data in Data:
# percentile
p = [0.25, 0.3, 0.35, 0.4, 0.45, 0.5, 0.55, 0.6, 0.65, 0.7, 0.75]
pen = [round((len(data) + 1) * x, 8) for x in p]
data = sorted(data)
d = [np.quantile(data, i) for i in p]
# outlier
IQR = d[-1] - d[0]
upper = d[-1] + 1.5 * IQR
lower = d[0] - 1.5 * IQR
Upper = min(upper, data[-1])
Lower = max(lower, data[0])
outliers = []
for p in data:
if p > upper or p < lower:
outliers.append(p)
if outlier == True:
for p in outliers:
rect = mpathes.Ellipse((center[i], p), 0.04 * center[-1], 0.04 * center[-1],
ec=outlier_edgecolor, fc=outlier_facecolor, alpha=outlier_alpha)
ax.add_patch(rect)
# whisker
ax.hlines(Upper, center[i] - 0.1 * center[0], center[i] + 0.1 * center[0], whisker_edgecolor)
ax.hlines(Lower, center[i] - 0.1 * center[0], center[i] + 0.1 * center[0], whisker_edgecolor)
ax.vlines(center[i], Lower, d[0], whisker_edgecolor)
ax.vlines(center[i], d[-1], Upper, whisker_edgecolor)
# median
ax.hlines(d[5], center[i] - 0.2 * center[0], center[i] + 0.2 * center[0], median_edgecolor, lw=3)
# multiplebox
if multiplebox == True:
for x in d:
ax.hlines(d, center[i] - 0.2 * center[0], center[i] + 0.2 * center[0], box_edgecolor, lw=1)
# box
rect = mpathes.Rectangle((center[i] - 0.2 * center[0], d[0]), 0.4 * center[0], d[-1] - d[0],
ec=box_edgecolor, fc=box_facecolor, alpha=box_alpha)
ax.add_patch(rect)
i += 1
plt.show()
    def hist_boxplot(ax,
                     Data,
                     n_bins=10,
                     outlier=True,
                     box_facecolor='white',
                     box_edgecolor='k',
                     outlier_facecolor='r',
                     outlier_edgecolor='r',
                     whisker_edgecolor='k',
                     median_edgecolor='k',
                     bin_facecolor='#CECECE',
                     bin_edgecolor='k',
                     box_alpha=1.0,
                     outlier_alpha=1.0,
                     hist_alpha=1.0):
        """Draw a half-width box plot with an ``n_bins``-bar histogram beside it.

        Prints and returns an error string when ``Data`` is not a list of
        numerical lists; otherwise draws on ``ax`` and shows the figure.
        """
        i = 0
        try:
            # padded layout range: data max/min extended by 10% of their magnitude
            h = max(max(p) for p in Data) + 0.1 * abs(max(max(p) for p in Data))
            l = min(min(p) for p in Data) + 0.1 * abs(min(min(p) for p in Data))
        except:
            print('Wrong data type, please input a list of numerical list')
            return 'Wrong data type, please input a list of numerical list'
        count = len(Data)
        a = (h - l) / 2000
        # x positions of the plots, evenly spread over the padded range
        if outlier == True:
            center = [round(((h - l) / (count + 1)) * (x + 1), 8) for x in range(count)]
        else:
            center = [round(((h - l) / (count + 1)) * (x + 1), 8) / a for x in range(count)]
        print(center)
        ax.axis('equal')
        for data in Data:
            # percentile levels 0.25..0.75 in 5% steps; ``pen`` is computed but unused
            p = [0.25, 0.3, 0.35, 0.4, 0.45, 0.5, 0.55, 0.6, 0.65, 0.7, 0.75]
            pen = [round((len(data) + 1) * x, 8) for x in p]
            data = sorted(data)
            d = [np.quantile(data, i) for i in p]
            # outlier fences (1.5 * IQR rule); whiskers clipped to the data extremes
            IQR = d[-1] - d[0]
            upper = d[-1] + 1.5 * IQR
            lower = d[0] - 1.5 * IQR
            Upper = min(upper, data[-1])
            Lower = max(lower, data[0])
            outliers = []
            for p in data:
                if p > upper or p < lower:
                    outliers.append(p)
            # bin width ``w`` covers the full data span when outliers are shown,
            # otherwise only the whisker span
            if outlier == True:
                w = (data[-1] - data[0]) / n_bins
                for p in outliers:
                    rect = mpathes.Ellipse((center[i], p), 0.04 * center[-1], 0.04 * center[-1],
                                           ec=outlier_edgecolor, fc=outlier_facecolor, alpha=outlier_alpha)
                    ax.add_patch(rect)
            else:
                w = (Upper - Lower) / n_bins
            # hist
            # NOTE(review): ``bins`` starts at 0 while the counting loop below
            # compares raw data values against it; for data that does not start
            # near 0 every count stays 0 and ``Mb = bar_width/M`` divides by
            # zero. The drawing offsets by ``Lower`` (``bins[c] + Lower``), so
            # the counting edges presumably need the same offset — confirm.
            bins = [w * i for i in range(n_bins + 1)]
            Bin = []
            for k in range(n_bins):
                s = 0
                for j in data:
                    if j >= bins[k] and j < bins[k + 1]:
                        s += 1
                Bin.append(s)
            M = max(Bin)
            # bar_width: horizontal scale for the histogram; falls back to M
            # when only one plot exists (center[1] raises IndexError)
            try:
                bar_width = (center[1]-center[0])*0.6
            except:
                bar_width=M
            Mb = bar_width/M
            for c in range(len(Bin)):
                rect = mpathes.Rectangle((center[i], bins[c] + Lower), Bin[c]*Mb , w,
                                         ec=bin_edgecolor, fc=bin_facecolor, alpha=hist_alpha)
                ax.add_patch(rect)
            # whisker (half-width: caps extend only to the left of centre)
            ax.hlines(Upper, center[i] - 0.1 * center[0], center[i], whisker_edgecolor)
            ax.hlines(Lower, center[i] - 0.1 * center[0], center[i], whisker_edgecolor)
            ax.vlines(center[i], Lower, d[0], whisker_edgecolor)
            ax.vlines(center[i], d[-1], Upper, whisker_edgecolor)
            # median (d[5] is the 0.5 quantile)
            ax.hlines(d[5], center[i] - 0.2 * center[0], center[i], median_edgecolor, lw=3)
            # box (half width, drawn to the left of centre)
            rect = mpathes.Rectangle((center[i] - 0.2 * center[0], d[0]), 0.2 * center[0], d[-1] - d[0],
                                     ec=box_edgecolor, fc=box_facecolor, alpha=box_alpha)
            ax.add_patch(rect)
            i += 1
        plt.show()
    def creative_boxplot(ax,
                         Data,
                         outlier=True,
                         box_facecolor='white',
                         box_edgecolor='k',
                         outlier_facecolor='b',
                         outlier_edgecolor=None,
                         whisker_edgecolor='k',
                         median_edgecolor='k',
                         box_alpha=1.0,
                         outlier_alpha=1.0,
                         point_alpha=0.3):
        """Draw a box plot with jittered data points and a line joining medians.

        Non-outlier values are scattered horizontally inside the box with
        ``point_alpha`` transparency; consecutive medians are connected by a
        green line. Prints and returns an error string when ``Data`` is not
        a list of numerical lists.
        """
        try:
            # padded layout range: data max/min extended by 10% of their magnitude
            h = max(max(p) for p in Data) + 0.1 * abs(max(max(p) for p in Data))
            l = min(min(p) for p in Data) + 0.1 * abs(min(min(p) for p in Data))
        except:
            print('Wrong data type, please input a list of numerical list')
            return 'Wrong data type, please input a list of numerical list'
        count = len(Data)
        a = (h - l) / 2000
        # x positions of the box centres, spread evenly over the padded range
        if outlier == True:
            center = [round(((h - l) / (count + 1)) * (x + 1), 8) for x in range(count)]
            # lw_l = 0.000001 * h
        else:
            center = [round(((h - l) / (count + 1)) * (x + 1), 8) / a for x in range(count)]
            # lw_l = 0.00005 * h
        print(center)
        ax.axis('equal')
        i = 0
        point = []
        for data in Data:
            data = sorted(data)
            # percentile levels 0.25..0.75 in 5% steps; ``pen`` is computed but unused
            p = [0.25, 0.3, 0.35, 0.4, 0.45, 0.5, 0.55, 0.6, 0.65, 0.7, 0.75]
            pen = [round((len(data) + 1) * x, 2) for x in p]
            d = [np.quantile(data, j) for j in p]
            # outlier fences (1.5 * IQR rule); whiskers clipped to the data extremes
            IQR = d[-1] - d[0]
            upper = d[-1] + 1.5 * IQR
            lower = d[0] - 1.5 * IQR
            Upper = min(upper, data[-1])
            Lower = max(lower, data[0])
            outliers = []
            for p in data:
                if p > upper or p < lower:
                    outliers.append(p)
            if outlier == True:
                for p in outliers:
                    rect = mpathes.Ellipse((center[i], p), 0.02 * center[-1], 0.02 * center[-1],
                                           ec=outlier_edgecolor, fc=outlier_facecolor, alpha=outlier_alpha)
                    rect.set_alpha(0.7)
                    ax.add_patch(rect)
            # box spanning the 0.25..0.75 quantile range
            rect = mpathes.Rectangle((center[i] - 0.2 * center[0], d[0]), 0.4 * center[0], d[-1] - d[0],
                                     ec=box_edgecolor, fc=box_facecolor, alpha=box_alpha)
            ax.add_patch(rect)
            # points: jitter each non-outlier value horizontally around the centre
            for p in data:
                if p not in outliers:
                    x = center[i] - 0.05 * center[0] + random.uniform(0, 0.1 * center[0])
                    rect = mpathes.Ellipse((x, p), 0.01 * center[0], 0.01 * center[0], ec=outlier_edgecolor,
                                           fc=outlier_facecolor)
                    rect.set_alpha(point_alpha)
                    ax.add_patch(rect)
            # median (d[5] is the 0.5 quantile)
            ax.hlines(d[5], center[i] - 0.2 * center[0], center[i] + 0.2 * center[0], median_edgecolor, lw=3)
            # line: remember each median point so they can be connected below
            point.append([center[i], d[5]])
            i += 1
        for i in range(len(point) - 1):
            x = point[i][0]
            y = point[i][1]
            arrow = mpathes.FancyArrowPatch((point[i][0], point[i][1]), (point[i + 1][0], point[i + 1][1]),
                                            arrowstyle='-', color='g')
            ax.add_patch(arrow)
plt.show() | 41.689873 | 111 | 0.438743 | 1,724 | 13,174 | 3.289443 | 0.077146 | 0.04814 | 0.02645 | 0.037031 | 0.845354 | 0.837066 | 0.828425 | 0.824193 | 0.822783 | 0.820138 | 0 | 0.057919 | 0.427281 | 13,174 | 316 | 112 | 41.689873 | 0.693704 | 0.016016 | 0 | 0.789474 | 0 | 0 | 0.038723 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.015038 | false | 0 | 0.015038 | 0 | 0.048872 | 0.026316 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
9bc858a2db2ea5afdb30222b3a6cea0904c5c567 | 90 | py | Python | src/h3/api/__init__.py | SaveTheRbtz/h3-py | f59a1eafed8915d4c35417bf60a6a2f169ea9f23 | [
"Apache-2.0"
] | 498 | 2018-07-30T22:45:48.000Z | 2022-03-30T13:59:45.000Z | src/h3/api/__init__.py | SaveTheRbtz/h3-py | f59a1eafed8915d4c35417bf60a6a2f169ea9f23 | [
"Apache-2.0"
] | 199 | 2018-07-31T16:55:23.000Z | 2022-03-29T16:45:44.000Z | src/h3/api/__init__.py | SaveTheRbtz/h3-py | f59a1eafed8915d4c35417bf60a6a2f169ea9f23 | [
"Apache-2.0"
] | 107 | 2018-07-30T21:09:24.000Z | 2022-01-30T22:58:44.000Z | # flake8: noqa
from . import basic_int
from . import basic_str
from . import memview_int
| 15 | 25 | 0.766667 | 14 | 90 | 4.714286 | 0.571429 | 0.454545 | 0.454545 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.013514 | 0.177778 | 90 | 5 | 26 | 18 | 0.878378 | 0.133333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
9bf94c691333250a122a2d8729c0d95dd9fc8bbd | 10,845 | py | Python | google/ads/google_ads/v6/proto/services/reach_plan_service_pb2_grpc.py | arammaliachi/google-ads-python | a4fe89567bd43eb784410523a6306b5d1dd9ee67 | [
"Apache-2.0"
] | null | null | null | google/ads/google_ads/v6/proto/services/reach_plan_service_pb2_grpc.py | arammaliachi/google-ads-python | a4fe89567bd43eb784410523a6306b5d1dd9ee67 | [
"Apache-2.0"
] | null | null | null | google/ads/google_ads/v6/proto/services/reach_plan_service_pb2_grpc.py | arammaliachi/google-ads-python | a4fe89567bd43eb784410523a6306b5d1dd9ee67 | [
"Apache-2.0"
] | null | null | null | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from google.ads.google_ads.v6.proto.services import reach_plan_service_pb2 as google_dot_ads_dot_googleads_dot_v6_dot_services_dot_reach__plan__service__pb2
class ReachPlanServiceStub(object):
    """Proto file describing the reach plan service.

    Reach Plan Service gives users information about audience size that can
    be reached through advertisement on YouTube. In particular,
    GenerateReachForecast provides estimated number of people of specified
    demographics that can be reached by an ad in a given market by a campaign of
    certain duration with a defined budget.

    NOTE: auto-generated by the gRPC Python plugin; regenerate from the
    .proto instead of editing by hand.
    """

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        # One unary-unary callable per ReachPlanService RPC, wired to the
        # protobuf request serializer / response deserializer.
        self.ListPlannableLocations = channel.unary_unary(
                '/google.ads.googleads.v6.services.ReachPlanService/ListPlannableLocations',
                request_serializer=google_dot_ads_dot_googleads_dot_v6_dot_services_dot_reach__plan__service__pb2.ListPlannableLocationsRequest.SerializeToString,
                response_deserializer=google_dot_ads_dot_googleads_dot_v6_dot_services_dot_reach__plan__service__pb2.ListPlannableLocationsResponse.FromString,
                )
        self.ListPlannableProducts = channel.unary_unary(
                '/google.ads.googleads.v6.services.ReachPlanService/ListPlannableProducts',
                request_serializer=google_dot_ads_dot_googleads_dot_v6_dot_services_dot_reach__plan__service__pb2.ListPlannableProductsRequest.SerializeToString,
                response_deserializer=google_dot_ads_dot_googleads_dot_v6_dot_services_dot_reach__plan__service__pb2.ListPlannableProductsResponse.FromString,
                )
        self.GenerateProductMixIdeas = channel.unary_unary(
                '/google.ads.googleads.v6.services.ReachPlanService/GenerateProductMixIdeas',
                request_serializer=google_dot_ads_dot_googleads_dot_v6_dot_services_dot_reach__plan__service__pb2.GenerateProductMixIdeasRequest.SerializeToString,
                response_deserializer=google_dot_ads_dot_googleads_dot_v6_dot_services_dot_reach__plan__service__pb2.GenerateProductMixIdeasResponse.FromString,
                )
        self.GenerateReachForecast = channel.unary_unary(
                '/google.ads.googleads.v6.services.ReachPlanService/GenerateReachForecast',
                request_serializer=google_dot_ads_dot_googleads_dot_v6_dot_services_dot_reach__plan__service__pb2.GenerateReachForecastRequest.SerializeToString,
                response_deserializer=google_dot_ads_dot_googleads_dot_v6_dot_services_dot_reach__plan__service__pb2.GenerateReachForecastResponse.FromString,
                )
class ReachPlanServiceServicer(object):
    """Proto file describing the reach plan service.

    Reach Plan Service gives users information about audience size that can
    be reached through advertisement on YouTube. In particular,
    GenerateReachForecast provides estimated number of people of specified
    demographics that can be reached by an ad in a given market by a campaign of
    certain duration with a defined budget.

    NOTE: auto-generated service base class; subclass it and override the
    methods below — every default implementation answers UNIMPLEMENTED.
    """

    def ListPlannableLocations(self, request, context):
        """Returns the list of plannable locations (for example, countries & DMAs).
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def ListPlannableProducts(self, request, context):
        """Returns the list of per-location plannable YouTube ad formats with allowed
        targeting.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def GenerateProductMixIdeas(self, request, context):
        """Generates a product mix ideas given a set of preferences. This method
        helps the advertiser to obtain a good mix of ad formats and budget
        allocations based on its preferences.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def GenerateReachForecast(self, request, context):
        """Generates a reach forecast for a given targeting / product mix.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
def add_ReachPlanServiceServicer_to_server(servicer, server):
    """Register ``servicer``'s RPC handlers on ``server``.

    Auto-generated registration glue: builds one unary-unary method handler
    per ReachPlanService RPC and attaches them as a generic handler.
    """
    rpc_method_handlers = {
            'ListPlannableLocations': grpc.unary_unary_rpc_method_handler(
                    servicer.ListPlannableLocations,
                    request_deserializer=google_dot_ads_dot_googleads_dot_v6_dot_services_dot_reach__plan__service__pb2.ListPlannableLocationsRequest.FromString,
                    response_serializer=google_dot_ads_dot_googleads_dot_v6_dot_services_dot_reach__plan__service__pb2.ListPlannableLocationsResponse.SerializeToString,
            ),
            'ListPlannableProducts': grpc.unary_unary_rpc_method_handler(
                    servicer.ListPlannableProducts,
                    request_deserializer=google_dot_ads_dot_googleads_dot_v6_dot_services_dot_reach__plan__service__pb2.ListPlannableProductsRequest.FromString,
                    response_serializer=google_dot_ads_dot_googleads_dot_v6_dot_services_dot_reach__plan__service__pb2.ListPlannableProductsResponse.SerializeToString,
            ),
            'GenerateProductMixIdeas': grpc.unary_unary_rpc_method_handler(
                    servicer.GenerateProductMixIdeas,
                    request_deserializer=google_dot_ads_dot_googleads_dot_v6_dot_services_dot_reach__plan__service__pb2.GenerateProductMixIdeasRequest.FromString,
                    response_serializer=google_dot_ads_dot_googleads_dot_v6_dot_services_dot_reach__plan__service__pb2.GenerateProductMixIdeasResponse.SerializeToString,
            ),
            'GenerateReachForecast': grpc.unary_unary_rpc_method_handler(
                    servicer.GenerateReachForecast,
                    request_deserializer=google_dot_ads_dot_googleads_dot_v6_dot_services_dot_reach__plan__service__pb2.GenerateReachForecastRequest.FromString,
                    response_serializer=google_dot_ads_dot_googleads_dot_v6_dot_services_dot_reach__plan__service__pb2.GenerateReachForecastResponse.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'google.ads.googleads.v6.services.ReachPlanService', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class ReachPlanService(object):
    """Proto file describing the reach plan service.

    Reach Plan Service gives users information about audience size that can
    be reached through advertisement on YouTube. In particular,
    GenerateReachForecast provides estimated number of people of specified
    demographics that can be reached by an ad in a given market by a campaign of
    certain duration with a defined budget.

    NOTE: auto-generated experimental convenience API — each static method
    performs a single unary-unary invocation over a fresh channel.
    """

    @staticmethod
    def ListPlannableLocations(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Invoke the ListPlannableLocations RPC once against ``target``."""
        return grpc.experimental.unary_unary(request, target, '/google.ads.googleads.v6.services.ReachPlanService/ListPlannableLocations',
            google_dot_ads_dot_googleads_dot_v6_dot_services_dot_reach__plan__service__pb2.ListPlannableLocationsRequest.SerializeToString,
            google_dot_ads_dot_googleads_dot_v6_dot_services_dot_reach__plan__service__pb2.ListPlannableLocationsResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def ListPlannableProducts(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Invoke the ListPlannableProducts RPC once against ``target``."""
        return grpc.experimental.unary_unary(request, target, '/google.ads.googleads.v6.services.ReachPlanService/ListPlannableProducts',
            google_dot_ads_dot_googleads_dot_v6_dot_services_dot_reach__plan__service__pb2.ListPlannableProductsRequest.SerializeToString,
            google_dot_ads_dot_googleads_dot_v6_dot_services_dot_reach__plan__service__pb2.ListPlannableProductsResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def GenerateProductMixIdeas(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Invoke the GenerateProductMixIdeas RPC once against ``target``."""
        return grpc.experimental.unary_unary(request, target, '/google.ads.googleads.v6.services.ReachPlanService/GenerateProductMixIdeas',
            google_dot_ads_dot_googleads_dot_v6_dot_services_dot_reach__plan__service__pb2.GenerateProductMixIdeasRequest.SerializeToString,
            google_dot_ads_dot_googleads_dot_v6_dot_services_dot_reach__plan__service__pb2.GenerateProductMixIdeasResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def GenerateReachForecast(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Invoke the GenerateReachForecast RPC once against ``target``."""
        return grpc.experimental.unary_unary(request, target, '/google.ads.googleads.v6.services.ReachPlanService/GenerateReachForecast',
            google_dot_ads_dot_googleads_dot_v6_dot_services_dot_reach__plan__service__pb2.GenerateReachForecastRequest.SerializeToString,
            google_dot_ads_dot_googleads_dot_v6_dot_services_dot_reach__plan__service__pb2.GenerateReachForecastResponse.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
| 55.902062 | 169 | 0.743753 | 1,129 | 10,845 | 6.698849 | 0.140833 | 0.03808 | 0.067698 | 0.065318 | 0.825202 | 0.817797 | 0.811979 | 0.759884 | 0.759884 | 0.727621 | 0 | 0.007052 | 0.202397 | 10,845 | 193 | 170 | 56.19171 | 0.867283 | 0.161088 | 0 | 0.484848 | 1 | 0 | 0.101246 | 0.080593 | 0 | 0 | 0 | 0 | 0 | 1 | 0.075758 | false | 0 | 0.015152 | 0.030303 | 0.143939 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
50572595bbdd27fdd124662d0157babedb250953 | 27,483 | py | Python | tests/test_windows/test_ui_colour_picker_dialog.py | lionel42/pygame_gui | 27b51f5b811b4569bc463566bc9f2d82ada119f6 | [
"MIT"
] | null | null | null | tests/test_windows/test_ui_colour_picker_dialog.py | lionel42/pygame_gui | 27b51f5b811b4569bc463566bc9f2d82ada119f6 | [
"MIT"
] | null | null | null | tests/test_windows/test_ui_colour_picker_dialog.py | lionel42/pygame_gui | 27b51f5b811b4569bc463566bc9f2d82ada119f6 | [
"MIT"
] | null | null | null | import pygame
import pytest
import pygame_gui
from tests.shared_comparators import compare_surfaces
from pygame_gui.core.ui_container import UIContainer
from pygame_gui.windows import UIColourPickerDialog
from pygame_gui.windows.ui_colour_picker_dialog import UIColourChannelEditor
from pygame_gui.core.utility import restore_premul_col
# pygame 1.9.3 and older do not define the mouse-button constants; if any
# of them is missing, define all three with their standard values.
if not all(hasattr(pygame, name) for name in ('BUTTON_LEFT', 'BUTTON_MIDDLE', 'BUTTON_RIGHT')):
    pygame.BUTTON_LEFT = 1
    pygame.BUTTON_MIDDLE = 2
    pygame.BUTTON_RIGHT = 3
class TestUIColourChannelEditor:
    """Tests for the single colour-channel editor (label + entry + slider)."""

    def test_creation(self, _init_pygame, default_ui_manager, _display_surface_return_none):
        # construction alone should not raise
        UIColourChannelEditor(relative_rect=pygame.Rect(0, 0, 150, 29),
                              manager=default_ui_manager,
                              name='H:',
                              channel_index=0,
                              initial_value=0,
                              value_range=(0, 360))

    def test_text_entry_finished(self, _init_pygame, default_ui_manager,
                                 _display_surface_return_none):
        # confirming a typed value with RETURN should move the slider to it
        channel_editor = UIColourChannelEditor(relative_rect=pygame.Rect(0, 0, 150, 29),
                                               manager=default_ui_manager,
                                               name='H:',
                                               channel_index=0,
                                               initial_value=0,
                                               value_range=(0, 360))
        channel_editor.entry.set_text('50')
        default_ui_manager.process_events(pygame.event.Event(pygame.MOUSEBUTTONDOWN,
                                                             {'button': pygame.BUTTON_LEFT,
                                                              'pos': channel_editor.entry.rect.center}
                                                             ))
        default_ui_manager.process_events(pygame.event.Event(pygame.MOUSEBUTTONUP,
                                                             {'button': pygame.BUTTON_LEFT,
                                                              'pos': channel_editor.entry.rect.center}
                                                             ))
        default_ui_manager.process_events(pygame.event.Event(pygame.KEYDOWN,
                                                             {'key': pygame.K_RETURN}
                                                             ))
        # drain the queue so generated UI events get processed too
        for event in pygame.event.get():
            default_ui_manager.process_events(event)
        assert channel_editor.slider.current_value == 50

    def test_slider_moved_finished(self, _init_pygame, default_ui_manager,
                                   _display_surface_return_none):
        # a slider-moved event should update the text entry
        channel_editor = UIColourChannelEditor(relative_rect=pygame.Rect(0, 0, 150, 29),
                                               manager=default_ui_manager,
                                               name='H:',
                                               channel_index=0,
                                               initial_value=0,
                                               value_range=(0, 360))
        channel_editor.slider.current_value = 100
        default_ui_manager.process_events(pygame.event.Event(pygame_gui.UI_HORIZONTAL_SLIDER_MOVED,
                                                             {'ui_element': channel_editor.slider}))
        assert channel_editor.entry.get_text() == '100'

    def test_set_value(self, _init_pygame, default_ui_manager,
                       _display_surface_return_none):
        # set_value should update entry text, slider and current_value together
        channel_editor = UIColourChannelEditor(relative_rect=pygame.Rect(0, 0, 150, 29),
                                               manager=default_ui_manager,
                                               name='H:',
                                               channel_index=0,
                                               initial_value=0,
                                               value_range=(0, 360))
        assert channel_editor.entry.get_text() == '0'
        assert channel_editor.slider.get_current_value() == 0
        channel_editor.set_value(200)
        assert channel_editor.entry.get_text() == '200'
        assert channel_editor.slider.get_current_value() == 200
        assert channel_editor.current_value == 200

    def test_set_position(self, _init_pygame, default_ui_manager,
                          _display_surface_return_none):
        # absolute positioning is expressed relative to the container
        test_container = UIContainer(relative_rect=pygame.Rect(100, 100, 300, 60),
                                     manager=default_ui_manager)
        channel_editor = UIColourChannelEditor(relative_rect=pygame.Rect(0, 0, 150, 29),
                                               manager=default_ui_manager,
                                               name='H:',
                                               channel_index=0,
                                               initial_value=0,
                                               value_range=(0, 360),
                                               container=test_container)
        channel_editor.set_position((150, 30))
        assert channel_editor.relative_rect.topleft == (50, -70)

    def test_set_relative_position(self, _init_pygame, default_ui_manager,
                                   _display_surface_return_none):
        # relative positioning maps back to container-offset screen coords
        test_container = UIContainer(relative_rect=pygame.Rect(100, 100, 300, 60),
                                     manager=default_ui_manager)
        channel_editor = UIColourChannelEditor(relative_rect=pygame.Rect(0, 0, 150, 29),
                                               manager=default_ui_manager,
                                               name='H:',
                                               channel_index=0,
                                               initial_value=0,
                                               value_range=(0, 360),
                                               container=test_container)
        channel_editor.set_relative_position((50, 50))
        assert channel_editor.rect.topleft == (150, 150)

    def test_set_dimensions(self, _init_pygame, default_ui_manager,
                            _display_surface_return_none):
        test_container = UIContainer(relative_rect=pygame.Rect(100, 100, 300, 60),
                                     manager=default_ui_manager)
        channel_editor = UIColourChannelEditor(relative_rect=pygame.Rect(0, 0, 150, 29),
                                               manager=default_ui_manager,
                                               name='H:',
                                               channel_index=0,
                                               initial_value=0,
                                               value_range=(0, 360),
                                               container=test_container)
        channel_editor.set_dimensions((200, 29))
        assert channel_editor.rect.size == (200, 29)

    def test_show(self, _init_pygame, default_ui_manager, _display_surface_return_none):
        # show() must propagate visibility to every sub-element
        test_container = UIContainer(relative_rect=pygame.Rect(100, 100, 300, 60),
                                     manager=default_ui_manager)
        channel_editor = UIColourChannelEditor(relative_rect=pygame.Rect(0, 0, 150, 29),
                                               manager=default_ui_manager,
                                               name='H:',
                                               channel_index=0,
                                               initial_value=0,
                                               value_range=(0, 360),
                                               container=test_container,
                                               visible=0)
        channel_editor.set_dimensions((200, 29))
        assert channel_editor.visible == 0
        assert channel_editor.element_container.visible == 0
        assert channel_editor.label.visible == 0
        assert channel_editor.entry.visible == 0
        assert channel_editor.slider.visible == 0
        channel_editor.show()
        assert channel_editor.visible == 1
        assert channel_editor.element_container.visible == 1
        assert channel_editor.label.visible == 1
        assert channel_editor.entry.visible == 1
        assert channel_editor.slider.visible == 1

    def test_hide(self, _init_pygame, default_ui_manager, _display_surface_return_none):
        # hide() must propagate invisibility to every sub-element
        test_container = UIContainer(relative_rect=pygame.Rect(100, 100, 300, 60),
                                     manager=default_ui_manager)
        channel_editor = UIColourChannelEditor(relative_rect=pygame.Rect(0, 0, 150, 29),
                                               manager=default_ui_manager,
                                               name='H:',
                                               channel_index=0,
                                               initial_value=0,
                                               value_range=(0, 360),
                                               container=test_container,
                                               visible=1)
        channel_editor.set_dimensions((200, 29))
        assert channel_editor.visible == 1
        assert channel_editor.element_container.visible == 1
        assert channel_editor.label.visible == 1
        assert channel_editor.entry.visible == 1
        assert channel_editor.slider.visible == 1
        channel_editor.hide()
        assert channel_editor.visible == 0
        assert channel_editor.element_container.visible == 0
        assert channel_editor.label.visible == 0
        assert channel_editor.entry.visible == 0
        assert channel_editor.slider.visible == 0

    def test_show_hide_rendering(self, _init_pygame, default_ui_manager, _display_surface_return_none):
        # rendering should change only while the editor is visible
        resolution = (400, 400)
        empty_surface = pygame.Surface(resolution)
        empty_surface.fill(pygame.Color(0, 0, 0))
        surface = empty_surface.copy()
        manager = pygame_gui.UIManager(resolution)
        test_container = UIContainer(relative_rect=pygame.Rect(100, 100, 300, 60),
                                     manager=manager)
        manager.draw_ui(empty_surface)
        channel_editor = UIColourChannelEditor(relative_rect=pygame.Rect(0, 0, 150, 29),
                                               manager=manager,
                                               name='H:',
                                               channel_index=0,
                                               initial_value=0,
                                               value_range=(0, 360),
                                               container=test_container,
                                               visible=0)
        channel_editor.set_dimensions((200, 29))
        manager.update(0.01)
        manager.draw_ui(surface)
        assert compare_surfaces(empty_surface, surface)
        surface.fill(pygame.Color(0, 0, 0))
        channel_editor.show()
        manager.update(0.01)
        manager.draw_ui(surface)
        assert not compare_surfaces(empty_surface, surface)
        surface.fill(pygame.Color(0, 0, 0))
        channel_editor.hide()
        manager.update(0.01)
        manager.draw_ui(surface)
        assert compare_surfaces(empty_surface, surface)
class TestUIColourPickerDialog:
    """Tests for the full colour picker dialog window."""

    def test_creation(self, _init_pygame, default_ui_manager,
                      _display_surface_return_none):
        # construction alone should not raise
        UIColourPickerDialog(rect=pygame.Rect(100, 100, 400, 400),
                             manager=default_ui_manager)

    def test_create_too_small(self, _init_pygame, default_ui_manager,
                              _display_surface_return_none):
        # undersized dialogs should warn about their initial size
        with pytest.warns(UserWarning, match="Initial size"):
            UIColourPickerDialog(rect=pygame.Rect(100, 100, 50, 50),
                                 manager=default_ui_manager)

    def test_press_cancel_button(self, _init_pygame, default_ui_manager,
                                 _display_surface_return_none):
        # clicking cancel should kill the dialog
        colour_picker = UIColourPickerDialog(rect=pygame.Rect(100, 100, 400, 400),
                                             manager=default_ui_manager)
        is_alive_pre_events = colour_picker.alive()
        default_ui_manager.process_events(pygame.event.Event(pygame.MOUSEBUTTONDOWN,
                                                             {'button': pygame.BUTTON_LEFT,
                                                              'pos': colour_picker.cancel_button.rect.center}))
        default_ui_manager.process_events(pygame.event.Event(pygame.MOUSEBUTTONUP,
                                                             {'button': pygame.BUTTON_LEFT,
                                                              'pos': colour_picker.cancel_button.rect.center}))
        for event in pygame.event.get():
            default_ui_manager.process_events(event)
        is_dead_post_events = not colour_picker.alive()
        assert is_alive_pre_events is True and is_dead_post_events is True
    def test_press_ok_button(self, _init_pygame, default_ui_manager, _display_surface_return_none):
        # clicking OK should fire UI_COLOUR_PICKER_COLOUR_PICKED with the
        # current colour and kill the dialog
        colour_picker = UIColourPickerDialog(rect=pygame.Rect(100, 100, 400, 400),
                                             manager=default_ui_manager,
                                             initial_colour=pygame.Color(200, 220, 50, 255))
        is_alive_pre_events = colour_picker.alive()
        default_ui_manager.process_events(pygame.event.Event(pygame.MOUSEBUTTONDOWN,
                                                             {'button': pygame.BUTTON_LEFT,
                                                              'pos': colour_picker.ok_button.rect.center}))
        default_ui_manager.process_events(pygame.event.Event(pygame.MOUSEBUTTONUP,
                                                             {'button': pygame.BUTTON_LEFT,
                                                              'pos': colour_picker.ok_button.rect.center}))
        for event in pygame.event.get():
            default_ui_manager.process_events(event)
        confirm_event_fired = False
        event_colour = None
        # second drain: look for the colour-picked event generated above
        for event in pygame.event.get():
            default_ui_manager.process_events(event)
            if (event.type == pygame_gui.UI_COLOUR_PICKER_COLOUR_PICKED and
                    event.ui_element == colour_picker):
                confirm_event_fired = True
                event_colour = event.colour
        is_dead_post_events = not colour_picker.alive()
        assert is_alive_pre_events
        assert is_dead_post_events
        assert confirm_event_fired
        assert event_colour == pygame.Color(200, 220, 50, 255)
    def test_click_in_saturation_value_square_button(self, _init_pygame,
                                                     default_ui_manager, _display_surface_return_none):
        # clicking the saturation/value square recomputes the current colour
        colour_picker = UIColourPickerDialog(rect=pygame.Rect(100, 100, 400, 400),
                                             manager=default_ui_manager,
                                             initial_colour=pygame.Color(200, 220, 50, 255))
        assert colour_picker.current_colour == pygame.Color(200, 220, 50, 255)
        default_ui_manager.process_events(pygame.event.Event(pygame.MOUSEBUTTONDOWN,
                                                             {'button': pygame.BUTTON_LEFT,
                                                              'pos': colour_picker.sat_value_square.rect.center}
                                                             ))
        assert colour_picker.current_colour == pygame.Color(120, 127, 63, 255)

    def test_mess_with_colour_channel_event(self, _init_pygame, default_ui_manager, _display_surface_return_none):
        # editing a channel entry and confirming with RETURN should fire
        # UI_COLOUR_PICKER_COLOUR_CHANNEL_CHANGED for that channel
        colour_picker = UIColourPickerDialog(rect=pygame.Rect(100, 100, 400, 400),
                                             manager=default_ui_manager,
                                             initial_colour=pygame.Color(200, 220, 50, 255))
        colour_picker.red_channel.entry.set_text('50')
        default_ui_manager.process_events(pygame.event.Event(pygame.MOUSEBUTTONDOWN,
                                                             {'button': pygame.BUTTON_LEFT,
                                                              'pos': colour_picker.red_channel.entry.rect.center}
                                                             ))
        default_ui_manager.process_events(pygame.event.Event(pygame.MOUSEBUTTONUP,
                                                             {'button': pygame.BUTTON_LEFT,
                                                              'pos': colour_picker.red_channel.entry.rect.center}
                                                             ))
        default_ui_manager.process_events(pygame.event.Event(pygame.KEYDOWN,
                                                             {'key': pygame.K_RETURN}
                                                             ))
        for event in pygame.event.get():
            default_ui_manager.process_events(event)
        confirm_event_fired = False
        for event in pygame.event.get():
            if (event.type == pygame_gui.UI_COLOUR_PICKER_COLOUR_CHANNEL_CHANGED and
                    event.ui_element == colour_picker.red_channel):
                confirm_event_fired = True
        assert confirm_event_fired
def test_update_saturation_value_square(self, _init_pygame, default_ui_manager, _display_surface_return_none):
    """Rebuilding the square for a new hue changes the colour picked at its centre."""
    colour_picker = UIColourPickerDialog(rect=pygame.Rect(100, 100, 400, 400),
                                         manager=default_ui_manager)

    def click_square_centre():
        # Simulate a left-click in the middle of the saturation/value square.
        default_ui_manager.process_events(
            pygame.event.Event(pygame.MOUSEBUTTONDOWN,
                               {'button': pygame.BUTTON_LEFT,
                                'pos': colour_picker.sat_value_square.rect.center}))

    click_square_centre()
    assert colour_picker.current_colour == pygame.Color(127, 63, 63, 255)

    # Change the hue and redraw; the same click now resolves to a different colour.
    colour_picker.hue_channel.current_value = 150
    colour_picker.update_saturation_value_square()
    click_square_centre()
    assert colour_picker.current_colour == pygame.Color(63, 127, 95, 255)
def test_update_current_colour_image(self, _init_pygame, default_ui_manager, _display_surface_return_none):
    """The preview image tracks current_colour after update_current_colour_image().

    The centre-pixel sampling and tolerance checks were duplicated verbatim
    for the two colours; they are factored into local helpers.
    """
    colour_picker = UIColourPickerDialog(rect=pygame.Rect(100, 100, 400, 400),
                                         manager=default_ui_manager,
                                         initial_colour=pygame.Color(200, 220, 50, 255))

    def centre_pixel_colour():
        # Sample the pixel at the centre of the preview image and undo the
        # premultiplied alpha -- this is going to be slightly inaccurate.
        image_element = colour_picker.current_colour_image
        centre = (int(image_element.rect.width / 2),
                  int(image_element.rect.height / 2))
        return restore_premul_col(image_element.image.get_at(centre))

    def assert_pixel_close(pixel_colour, red, green, blue):
        # Allow +/-1 per channel for the premultiplied-alpha round trip.
        assert red + 1 >= pixel_colour.r >= red - 1
        assert green + 1 >= pixel_colour.g >= green - 1
        assert blue + 1 >= pixel_colour.b >= blue - 1
        assert pixel_colour.a == 255

    assert_pixel_close(centre_pixel_colour(), 200, 220, 50)

    colour_picker.current_colour = pygame.Color(50, 180, 150, 255)
    colour_picker.update_current_colour_image()
    assert_pixel_close(centre_pixel_colour(), 50, 180, 150)
def test_changed_rgb_update_hsv(self, _init_pygame, default_ui_manager, _display_surface_return_none):
    """changed_rgb_update_hsv() refreshes the HSV sliders from the RGB sliders."""
    colour_picker = UIColourPickerDialog(rect=pygame.Rect(100, 100, 400, 400),
                                         manager=default_ui_manager,
                                         initial_colour=pygame.Color(200, 220, 50, 255))
    rgb_channels = (colour_picker.red_channel,
                    colour_picker.green_channel,
                    colour_picker.blue_channel)
    hsv_channels = (colour_picker.hue_channel,
                    colour_picker.sat_channel,
                    colour_picker.value_channel)
    assert tuple(channel.current_value for channel in rgb_channels) == (200, 220, 50)

    for channel, new_value in zip(rgb_channels, (100, 80, 190)):
        channel.set_value(new_value)
    colour_picker.current_colour = pygame.Color(colour_picker.red_channel.current_value,
                                                colour_picker.green_channel.current_value,
                                                colour_picker.blue_channel.current_value)

    # HSV sliders still show the old colour until explicitly refreshed.
    assert tuple(channel.current_value for channel in hsv_channels) == (67, 77, 86)
    colour_picker.changed_rgb_update_hsv()
    assert tuple(channel.current_value for channel in hsv_channels) == (250, 57, 74)
def test_changed_hsv_update_rgb(self, _init_pygame, default_ui_manager, _display_surface_return_none):
    """changed_hsv_update_rgb() refreshes the RGB sliders from the HSV sliders."""
    colour_picker = UIColourPickerDialog(rect=pygame.Rect(100, 100, 400, 400),
                                         manager=default_ui_manager,
                                         initial_colour=pygame.Color(200, 220, 50, 255))
    hsv_channels = (colour_picker.hue_channel,
                    colour_picker.sat_channel,
                    colour_picker.value_channel)
    rgb_channels = (colour_picker.red_channel,
                    colour_picker.green_channel,
                    colour_picker.blue_channel)
    assert tuple(channel.current_value for channel in hsv_channels) == (67, 77, 86)

    for channel, new_value in zip(hsv_channels, (250, 57, 74)):
        channel.set_value(new_value)
    colour_picker.current_colour.hsva = (colour_picker.hue_channel.current_value,
                                         colour_picker.sat_channel.current_value,
                                         colour_picker.value_channel.current_value,
                                         100)

    # RGB sliders still show the old colour until explicitly refreshed.
    assert tuple(channel.current_value for channel in rgb_channels) == (200, 220, 50)
    colour_picker.changed_hsv_update_rgb()
    assert tuple(channel.current_value for channel in rgb_channels) == (99, 81, 188)
def test_show(self, _init_pygame, default_ui_manager, _display_surface_return_none):
    """show() makes the dialog and every one of its sub-elements visible."""
    colour_picker = UIColourPickerDialog(rect=pygame.Rect(100, 100, 400, 400),
                                         manager=default_ui_manager,
                                         initial_colour=pygame.Color(200, 220, 50, 255),
                                         visible=0)
    sub_elements = (colour_picker.ok_button,
                    colour_picker.cancel_button,
                    colour_picker.current_colour_image,
                    colour_picker.sat_value_square,
                    colour_picker.hue_channel,
                    colour_picker.sat_channel,
                    colour_picker.value_channel,
                    colour_picker.red_channel,
                    colour_picker.green_channel,
                    colour_picker.blue_channel)

    assert colour_picker.visible == 0
    assert all(element.visible == 0 for element in sub_elements)

    colour_picker.show()

    assert colour_picker.visible == 1
    assert all(element.visible == 1 for element in sub_elements)
def test_hide(self, _init_pygame, default_ui_manager, _display_surface_return_none):
    """hide() makes the dialog and every one of its sub-elements invisible."""
    colour_picker = UIColourPickerDialog(rect=pygame.Rect(100, 100, 400, 400),
                                         manager=default_ui_manager,
                                         initial_colour=pygame.Color(200, 220, 50, 255),
                                         visible=1)
    sub_elements = (colour_picker.ok_button,
                    colour_picker.cancel_button,
                    colour_picker.current_colour_image,
                    colour_picker.sat_value_square,
                    colour_picker.hue_channel,
                    colour_picker.sat_channel,
                    colour_picker.value_channel,
                    colour_picker.red_channel,
                    colour_picker.green_channel,
                    colour_picker.blue_channel)

    assert colour_picker.visible == 1
    assert all(element.visible == 1 for element in sub_elements)

    colour_picker.hide()

    assert colour_picker.visible == 0
    assert all(element.visible == 0 for element in sub_elements)
def test_show_hide_rendering(self, _init_pygame, default_ui_manager, _display_surface_return_none):
    """A hidden dialog draws nothing; a shown one draws onto the surface."""
    resolution = (600, 600)
    empty_surface = pygame.Surface(resolution)
    empty_surface.fill(pygame.Color(0, 0, 0))
    surface = empty_surface.copy()

    manager = pygame_gui.UIManager(resolution)
    colour_picker = UIColourPickerDialog(rect=pygame.Rect(100, 100, 400, 400),
                                         manager=manager,
                                         initial_colour=pygame.Color(200, 220, 50, 255),
                                         visible=0)

    def draw_one_frame():
        # Clear to black, then run one update/draw cycle.
        surface.fill(pygame.Color(0, 0, 0))
        manager.update(0.01)
        manager.draw_ui(surface)

    draw_one_frame()
    assert compare_surfaces(empty_surface, surface)

    colour_picker.show()
    draw_one_frame()
    assert not compare_surfaces(empty_surface, surface)

    colour_picker.hide()
    draw_one_frame()
    assert compare_surfaces(empty_surface, surface)
# Allow running this test module directly (python this_file.py) instead of
# invoking pytest from the command line.
if __name__ == '__main__':
    pytest.console_main()
| 49.429856 | 114 | 0.555907 | 2,742 | 27,483 | 5.233771 | 0.072575 | 0.105359 | 0.075814 | 0.043272 | 0.859104 | 0.849 | 0.809491 | 0.782454 | 0.782454 | 0.770957 | 0 | 0.047141 | 0.373249 | 27,483 | 555 | 115 | 49.518919 | 0.786009 | 0.003275 | 0 | 0.748848 | 0 | 0 | 0.006061 | 0 | 0 | 0 | 0 | 0 | 0.267281 | 1 | 0.052995 | false | 0 | 0.018433 | 0 | 0.076037 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
505e3f2751922f26a48a05b1dfb74b1898e153a5 | 109 | py | Python | train_lib/docker_util/__init__.py | PHT-EU/train-container-library | b0c94c3a543fad48681b4c7f2f16f56f32054e71 | [
"MIT"
] | 1 | 2021-12-16T12:06:30.000Z | 2021-12-16T12:06:30.000Z | train_lib/docker_util/__init__.py | PHT-EU/train-container-library | b0c94c3a543fad48681b4c7f2f16f56f32054e71 | [
"MIT"
] | 35 | 2021-11-02T09:19:39.000Z | 2022-03-31T13:24:33.000Z | train_lib/docker_util/__init__.py | PHT-Medic/train-container-library | b0c94c3a543fad48681b4c7f2f16f56f32054e71 | [
"MIT"
] | null | null | null | from .docker_ops import files_from_archive, result_files_from_archive, extract_train_config, extract_archive
| 54.5 | 108 | 0.899083 | 16 | 109 | 5.5625 | 0.625 | 0.202247 | 0.359551 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.06422 | 109 | 1 | 109 | 109 | 0.872549 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
ac9ead13f7ddb64cf980f03a131b11b61ffa5f35 | 24,338 | py | Python | eval/down_stream_tasks/add_on_classification.py | ratschlab/ncl | 8ad4e779cdc6f2465cbb3cfbd449904d62e17154 | [
"MIT"
] | 7 | 2021-06-11T08:18:15.000Z | 2022-03-23T14:56:13.000Z | eval/down_stream_tasks/add_on_classification.py | ratschlab/ncl | 8ad4e779cdc6f2465cbb3cfbd449904d62e17154 | [
"MIT"
] | 2 | 2022-01-19T08:05:17.000Z | 2022-02-08T09:50:55.000Z | eval/down_stream_tasks/add_on_classification.py | ratschlab/ncl | 8ad4e779cdc6f2465cbb3cfbd449904d62e17154 | [
"MIT"
] | null | null | null | import gin
import numpy as np
import tensorflow as tf
import tensorflow_addons as tfa
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plt
@gin.configurable('addon_classification')
class AddOnClassification(tf.Module):
    """Supervised classifier trained on top of a pre-trained representation encoder.

    NOTE(review): the metric updates use ``predictions[:, 1]`` as the
    positive-class score, so AUC/AUPRC assume a binary task -- confirm with the
    classifier head configured via gin.
    """

    def __init__(self, representation_model=gin.REQUIRED, classifier=gin.REQUIRED, loss_fn=gin.REQUIRED,
                 optimizer=gin.REQUIRED, patience=10, end_to_end=False, stopping_metric='loss',
                 grad_clip=None, profile=False):
        """Wrapper around the classifier we build on top of the representation model.
        Args:
            representation_model: tf.Module corresponding to the representation model we train at previous step.
            classifier: tf.keras.Model to use on top of representation model.
            loss_fn: tf.keras loss function.
            optimizer: tf.keras.optimizer.
            patience: Integer setting the patience for the early stopping criterion.
            end_to_end: Boolean to decide whether or not to train the entire model (encoder + add_on classifier).
            stopping_metric : String telling which metric to track for the stopping criterion.
            grad_clip: Value to clip gradient if needed. None means no gradient clipping.
            profile: Boolean indicating whether or not we want to profile the training to evaluate efficiency.
        """
        super(AddOnClassification, self).__init__()
        self.representation_fn = representation_model.encoder
        self.classifier = classifier
        # Reduction is set to None for correct handling of distributed training.
        self.loss = loss_fn(reduction=tf.keras.losses.Reduction.NONE)
        self.optimizer = optimizer
        # Streaming metrics: accumulated over steps, reset around each validation.
        self.accuracy = tf.keras.metrics.SparseCategoricalAccuracy()
        self.auc = tf.keras.metrics.AUC(num_thresholds=1000)
        self.auprc = tf.keras.metrics.AUC(curve='PR', num_thresholds=1000)
        self.patience = patience
        self.stopping_metric = stopping_metric
        self.grad_clip = grad_clip
        self.profile = profile
        # Set later via set_tf_random_generator(); passed to augmentation functions.
        self.tf_random_generator = None
        if end_to_end:
            # Fine-tune the encoder jointly with the add-on classifier.
            self.representation_fn.trainable = True

    def set_tf_random_generator(self, generator):
        """Install the tf random generator used by input augmentation functions."""
        self.tf_random_generator = generator

    def step_fn(self, inputs, training, num_replica, loss_weights=[0.5, 0.5], augmentations=[]):
        """
        Step function encapsulated in model_fn.

        Runs one forward pass (and, when `training`, one backward pass and
        optimizer update) on a single replica, updating the streaming metrics.
        NOTE(review): the mutable default arguments ([0.5, 0.5] and []) are
        never mutated here, but tuples would be the safer idiom.
        """
        if len(inputs) == 2:
            sequences, labels = inputs
        else:
            # More than two tensors: the first two are model inputs, last is the label.
            sequences = inputs[:2]
            labels = inputs[-1]
        # Handling of unbalanced training set for the loss computation.
        # label == 1 -> loss_weights[1]; label == 0 -> loss_weights[0].
        sample_weight = tf.cast(labels, dtype=tf.float32) * loss_weights[1] - (
                tf.cast(labels, dtype=tf.float32) - 1) * loss_weights[0]
        if training:
            if augmentations:
                for aug_func in augmentations:
                    sequences = aug_func(sequences, self.tf_random_generator)
            with tf.GradientTape() as tape_supervised:
                embeddings = self.representation_fn(sequences, training=training)
                predictions = self.classifier(embeddings, training=training)
                # Scale by 1/num_replica: per-replica losses are SUM-reduced in model_fn.
                if isinstance(self.loss, tfa.losses.SigmoidFocalCrossEntropy):
                    # Focal loss is applied to the positive-class logit column only.
                    loss_supervised = tf.reduce_mean(
                        self.loss(tf.expand_dims(labels, axis=-1), predictions[:, 1:], sample_weight=sample_weight)) * (
                            1 / num_replica)
                else:
                    loss_supervised = tf.reduce_mean(self.loss(labels, predictions, sample_weight=sample_weight)) * (
                            1 / num_replica)
            if self.representation_fn.trainable == True:
                # End-to-end: every variable touched inside the tape (encoder + head).
                variables = tape_supervised.watched_variables()
            else:
                # Frozen encoder: only the classifier head is updated.
                variables = self.classifier.trainable_variables
            grads = tape_supervised.gradient(loss_supervised, variables)
            if self.grad_clip:
                grads, global_norm = tf.clip_by_global_norm(grads, self.grad_clip)
            self.optimizer.apply_gradients(zip(grads, variables))
            self.accuracy.update_state(labels, predictions)
            self.auc.update_state(labels, predictions[:, 1])
            self.auprc.update_state(labels, predictions[:, 1])
            return loss_supervised
        else:
            # Evaluation path: same loss computation, no gradient update.
            embeddings = self.representation_fn(sequences, training=training)
            predictions = self.classifier(embeddings, training=training)
            if isinstance(self.loss, tfa.losses.SigmoidFocalCrossEntropy):
                loss_supervised = tf.reduce_mean(
                    self.loss(tf.expand_dims(labels, axis=-1), predictions[:, 1:], sample_weight=sample_weight)) * (
                        1 / num_replica)
            else:
                loss_supervised = tf.reduce_mean(self.loss(labels, predictions, sample_weight=sample_weight)) * (
                        1 / num_replica)
            self.accuracy.update_state(labels, predictions)
            self.auc.update_state(labels, predictions[:, 1])
            self.auprc.update_state(labels, predictions[:, 1])
            return loss_supervised

    @tf.function
    def model_fn(self, inputs, training, strategy, loss_weights=(0.5, 0.5), augmentations=[]):
        """Model function for the downstream task training.
        Args:
            inputs: Inputs corresponding to (self.representation_fn.inputs, label).
            training: Boolean to decide whether to train model for a step.
            strategy: (Optional) tf.distribute.Strategy object in case of distributed training.
            loss_weights: (Optional) weights to apply to loss functions in the case of an imbalanced dataset.
            augmentations: (Optional) List of augmentations we want to apply to each input.
        Returns:
            loss: Tensor corresponding to the loss over a step.
        """
        if strategy is not None:
            # NOTE(review): experimental_run_v2 is the TF 2.0/2.1 API,
            # renamed to Strategy.run in TF >= 2.2.
            loss_per_sub_batch = strategy.experimental_run_v2(self.step_fn,
                                                              args=(inputs, training, strategy.num_replicas_in_sync,
                                                                    loss_weights, augmentations))
            # Per-replica losses are already scaled by 1/num_replica in step_fn.
            loss = strategy.reduce(
                tf.distribute.ReduceOp.SUM, loss_per_sub_batch, axis=None)
        else:
            loss = self.step_fn(inputs, training, num_replica=1, loss_weights=loss_weights, augmentations=augmentations)
        return loss

    def train(self, data_iterator, training_steps, loss_weights=(0.5, 0.5), strategy=None, summary_writer=None,
              checkpoint_manager=None, validation_config=None, augmentations=[]):
        """Custom train loop for the add on classifier.
        Args:
            data_iterator: An iterator yielding batch samples of the data.
            training_steps: Integer representing the number of iterations.
            loss_weights: weights to apply to loss functions in the case of an imbalanced dataset.
            strategy: (Optional) tf.distribute.Strategy object in case of distributed training.
            summary_writer: (Optional) tf.summary.Writer for tensorboard logging.
            checkpoint_manager: (Optional) tf.train.CheckpointManager for model saving.
            validation_config: (Optional) Dictionary with the necessary params for validation
                ('frequency' and 'data_iterator' keys).
            augmentations: List of augmentations functions to apply to input.
        """
        if checkpoint_manager is None:
            print('There is no model saving')
        # Setting up validation phase is a config is provided
        if validation_config is not None:
            validation_frequency = validation_config['frequency']
        else:
            # No validation config: pick a frequency that never triggers.
            validation_frequency = training_steps + 1
        best_val_metric = tf.cast(-np.inf, tf.float32)
        plateau = 0
        train_loss = 0.0
        # Tensorboard directory lives next to the checkpoint directory.
        # NOTE(review): this dereferences checkpoint_manager even when it is None.
        ts_dir = '/'.join(checkpoint_manager.directory.split('/')[:-1] + ['tensorboard'])
        for step in range(training_steps):
            # Profile a short window of steps [1000, 1005] when enabled.
            if step == 1000 and self.profile:
                print('Open profiling')
                tf.profiler.experimental.start(ts_dir)
            batch = data_iterator.next()
            with summary_writer.as_default():
                train_loss += self.model_fn(batch,
                                            training=True,
                                            strategy=strategy,
                                            loss_weights=tf.cast(loss_weights, dtype=tf.float32),
                                            augmentations=augmentations)
            if (step + 1) % validation_frequency == 0 and step != 0:
                # Log averaged training loss and metrics, then reset them
                # so validation metrics start from a clean state.
                if summary_writer:
                    with summary_writer.as_default():
                        tf.summary.scalar("train_loss", train_loss / validation_frequency,
                                          step=self.optimizer.iterations)
                        tf.summary.scalar("train_auc", self.auc.result(), step=self.optimizer.iterations)
                        tf.summary.scalar("train_auprc", self.auprc.result(), step=self.optimizer.iterations)
                        tf.summary.scalar("train_accuracy", self.accuracy.result(), step=self.optimizer.iterations)
                train_loss = 0.0
                self.auc.reset_states()
                self.auprc.reset_states()
                self.accuracy.reset_states()
                val_metrics = self.validate(validation_config['data_iterator'],
                                            loss_weights=loss_weights, strategy=strategy,
                                            summary_writer=summary_writer)
                # Save on improvement (metrics are higher-is-better; validate()
                # negates the loss so this comparison works for 'loss' too).
                # patience <= 0 disables early stopping and saves every time.
                if checkpoint_manager and (val_metrics[self.stopping_metric] > best_val_metric or self.patience <= 0):
                    best_val_metric = val_metrics[self.stopping_metric]
                    print('Saving model at step {}'.format(step))
                    checkpoint_manager.save()
                    plateau = 0
                else:
                    plateau += 1
                    if plateau >= self.patience and self.patience > 0:
                        print('Loss dit not improve for {} testing step'.format(self.patience))
                        break
            if step == 1005 and self.profile:
                print('Close profiling')
                tf.profiler.experimental.stop()

    def validate(self, data_iterator, loss_weights=(0.5, 0.5), strategy=None, summary_writer=None):
        """Run a full pass over the validation iterator.

        Returns a dict with keys 'loss' (negated, so that a larger value is
        always better for the early-stopping comparison in train()), 'auroc',
        'auprc' and 'acc'. Streaming metrics are reset on exit.
        """
        if summary_writer is None:
            print('There is no summary writer')
        loss = 0.0
        num_batch = 0.0
        for batch in data_iterator:
            loss += self.model_fn(batch, training=False, strategy=strategy,
                                  loss_weights=tf.cast(loss_weights, dtype=tf.float32))
            num_batch += 1.0
        metrics = {'loss': - loss / num_batch, 'auroc': self.auc.result(), 'auprc': self.auprc.result(),
                   'acc': self.accuracy.result()}
        if summary_writer:
            with summary_writer.as_default():
                # Re-negate the stored loss so tensorboard shows the true value.
                tf.summary.scalar("val_loss", - metrics['loss'], step=self.optimizer.iterations)
                tf.summary.scalar("val_auc", metrics['auroc'], step=self.optimizer.iterations)
                tf.summary.scalar("val_auprc", metrics['auprc'], step=self.optimizer.iterations)
                tf.summary.scalar("val_accuracy", metrics['acc'], step=self.optimizer.iterations)
        self.auc.reset_states()
        self.accuracy.reset_states()
        self.auprc.reset_states()
        return metrics

    def compute_metrics(self, data_iterator):
        """Evaluate accuracy/AUC/AUPRC over a dataset and build a PR-curve figure.

        Returns:
            (results, fig): dict of metrics plus the AUC confusion counts, and
            a matplotlib figure of the precision-recall curve.
        """
        self.auc.reset_states()
        self.accuracy.reset_states()
        self.auprc.reset_states()
        for batch in data_iterator:
            if len(batch) == 2:
                data, labels = batch
            else:
                data = batch[:2]
                labels = batch[-1]
            predictions = self.predict(data)
            self.accuracy.update_state(labels, predictions)
            self.auc.update_state(labels, predictions[:, 1])
            self.auprc.update_state(labels, predictions[:, 1])
        results = {'accuracy': self.accuracy.result(), 'auroc': self.auc.result(), 'auprc': self.auprc.result(),
                   'tp': self.auprc.true_positives, 'tn': self.auprc.true_negatives,
                   'fn': self.auprc.false_negatives, 'fp': self.auprc.false_positives}
        # Per-threshold confusion counts from the AUPRC metric give one
        # precision/recall point per threshold. (tn is unused below.)
        tn = self.auprc.true_negatives
        tp = self.auprc.true_positives
        fp = self.auprc.false_positives
        fn = self.auprc.false_negatives
        P = tp / (tp + fp)
        R = tp / (tp + fn)
        fig = plt.figure()
        plt.plot(R, P)
        plt.xlim([0.0, 1.0])
        plt.ylim([0.0, 1.05])
        plt.xlabel('Recall')
        plt.ylabel('Precision')
        plt.title('2-class Precision-Recall curve: AUPRC={0:0.2f}'.format(results['auprc']))
        return results, fig

    def predict(self, data):
        """Return classifier outputs for `data` with both models in inference mode."""
        embeddings = self.representation_fn(data, training=False)
        predictions = self.classifier(embeddings, training=False)
        return predictions
@gin.configurable('addon_binned_classification')
class AddOnBinnedClassification(tf.Module):
    """Classifier head over a representation encoder for a binned (discretized
    regression) target: continuous labels are bucketized with `bins` and the
    head predicts the bucket index."""

    def __init__(self, representation_model=gin.REQUIRED, classifier=gin.REQUIRED, loss_fn=gin.REQUIRED,
                 optimizer=gin.REQUIRED, patience=10, end_to_end=False, stopping_metric='loss',
                 grad_clip=None, profile=False, bins=None):
        """Wrapper around the classifier we build on top of the representation model for a binned classification.
        Args:
            representation_model: tf.Module corresponding to the representation model we train at previous step.
            classifier: tf.keras.Model to use on top of representation model.
            loss_fn: tf.keras loss function.
            optimizer: tf.keras.optimizer.
            patience: Integer setting the patience for the early stopping criterion.
            end_to_end: Boolean to decide whether or not to train the entire model (encoder + add_on classifier).
            stopping_metric : String telling which metric to track for the stopping criterion.
            grad_clip: Value to clip gradient if needed. None means no gradient clipping.
            profile: Boolean indicating whether or not we want to profile the training to evaluate efficiency.
            bins: List of bins separator to build the classes on.
        """
        super(AddOnBinnedClassification, self).__init__()
        representation_model.encoder.int_op = []
        self.representation_fn = representation_model.encoder
        self.classifier = classifier
        # Reduction is set to None for correct handling of distributed training.
        self.loss = loss_fn(reduction=tf.keras.losses.Reduction.NONE)
        self.optimizer = optimizer
        self.accuracy = tf.keras.metrics.SparseCategoricalAccuracy()
        self.metrics = {'acc': self.accuracy}
        if bins:
            self.bins = bins
            # NOTE(review): Bucketize with k boundaries yields k + 1 buckets;
            # n_bins uses len(bins) + 2 -- presumably reserving one extra
            # class for the kappa metric, confirm against the classifier head.
            self.n_bins = len(bins) + 2
            self.kappa = tfa.metrics.CohenKappa(num_classes=self.n_bins, sparse_labels=True, weightage='linear')
            self.metrics['kappa'] = self.kappa
        self.patience = patience
        self.stopping_metric = stopping_metric
        self.grad_clip = grad_clip
        self.profile = profile
        # Set later via set_tf_random_generator(); passed to augmentation functions.
        self.tf_random_generator = None
        if end_to_end:
            # Fine-tune the encoder jointly with the add-on classifier.
            self.representation_fn.trainable = True

    def set_tf_random_generator(self, generator):
        """Install the tf random generator used by input augmentation functions."""
        self.tf_random_generator = generator

    def step_fn(self, inputs, training, num_replica, augmentations=()):
        """
        Step function encapsulated in model_fn.

        Runs one forward pass (and, when `training`, one backward pass and
        optimizer update) on a single replica, updating the streaming metrics.
        """
        if len(inputs) == 2:
            sequences, labels = inputs
        else:
            # More than two tensors: the first two are model inputs, last is the label.
            sequences = inputs[:2]
            labels = inputs[-1]
        labels = tf.expand_dims(labels, axis=-1)
        # Discretize the continuous labels into bucket indices.
        binned_labels = tf.raw_ops.Bucketize(input=labels, boundaries=self.bins)
        if training:
            if augmentations:
                for aug_func in augmentations:
                    sequences = aug_func(sequences, self.tf_random_generator)
            with tf.GradientTape() as tape_supervised:
                embeddings = self.representation_fn(sequences, training=training)
                predictions = self.classifier(embeddings, training=training)
                # Scale by 1/num_replica: per-replica losses are SUM-reduced in model_fn.
                loss_supervised = tf.reduce_mean(self.loss(binned_labels, predictions)) * (
                        1 / num_replica)
            if self.representation_fn.trainable:
                # End-to-end: every variable touched inside the tape (encoder + head).
                variables = tape_supervised.watched_variables()
            else:
                # Frozen encoder: only the classifier head is updated.
                variables = self.classifier.trainable_variables
            grads = tape_supervised.gradient(loss_supervised, variables)
            if self.grad_clip:
                grads, _ = tf.clip_by_global_norm(grads, self.grad_clip)
            self.optimizer.apply_gradients(zip(grads, variables))
        else:
            # Evaluation path: same loss computation, no gradient update.
            embeddings = self.representation_fn(sequences, training=training)
            predictions = self.classifier(embeddings, training=training)
            loss_supervised = tf.reduce_mean(self.loss(binned_labels, predictions)) * (
                    1 / num_replica)
        binned_pred = tf.math.argmax(predictions, axis=-1)
        for name, metric in self.metrics.items():
            if name == 'kappa':
                # CohenKappa expects 1-D sparse label indices.
                metric.update_state(binned_labels[:, 0], binned_pred)
            else:
                metric.update_state(binned_labels, predictions)
        return loss_supervised

    @tf.function
    def model_fn(self, inputs, training, strategy, augmentations=()):
        """Model function for the downstream task training.
        Args:
            inputs: Inputs corresponding to (self.representation_fn.inputs, label).
            training: Boolean to decide whether to train model for a step.
            strategy: (Optional) tf.distribute.Strategy object in case of distributed training.
            augmentations: (Optional) List of augmentations we want to apply to each input.
        Returns:
            loss: Tensor corresponding to the loss over a step.
        """
        if strategy is not None:
            # NOTE(review): experimental_run_v2 is the TF 2.0/2.1 API,
            # renamed to Strategy.run in TF >= 2.2.
            loss_per_sub_batch = strategy.experimental_run_v2(self.step_fn,
                                                              args=(inputs, training, strategy.num_replicas_in_sync,
                                                                    augmentations))
            # Per-replica losses are already scaled by 1/num_replica in step_fn.
            loss = strategy.reduce(
                tf.distribute.ReduceOp.SUM, loss_per_sub_batch, axis=None)
        else:
            loss = self.step_fn(inputs, training, num_replica=1, augmentations=augmentations)
        return loss

    def train(self, data_iterator, training_steps, loss_weights=(0.5, 0.5), strategy=None, summary_writer=None,
              checkpoint_manager=None, validation_config=None, augmentations=()):
        """Custom train loop for the add on classifier.
        Args:
            data_iterator: An iterator yielding batch samples of the data.
            training_steps: Integer representing the number of iterations.
            loss_weights: Unused here; kept for signature compatibility with
                AddOnClassification.train.
            strategy: (Optional) tf.distribute.Strategy object in case of distributed training.
            summary_writer: (Optional) tf.summary.Writer for tensorboard logging.
            checkpoint_manager: (Optional) tf.train.CheckpointManager for model saving.
            validation_config: (Optional) Dictionary with the necessary params for validation
                ('frequency' and 'data_iterator' keys).
            augmentations: List of augmentations functions to apply to input.
        """
        if checkpoint_manager is None:
            print('There is no model saving')
        # Setting up validation phase if a config is provided
        if validation_config is not None:
            validation_frequency = validation_config['frequency']
        else:
            # No validation config: pick a frequency that never triggers.
            validation_frequency = training_steps + 1
        best_val_metric = tf.cast(-np.inf, tf.float32)
        plateau = 0
        train_loss = 0.0
        # Tensorboard directory lives next to the checkpoint directory.
        ts_dir = '/'.join(checkpoint_manager.directory.split('/')[:-1] + ['tensorboard'])
        for step in range(training_steps):
            # Profile a short window of steps [1000, 1005] when enabled.
            if step == 1000 and self.profile:
                print('Open profiling')
                tf.profiler.experimental.start(ts_dir)
            batch = data_iterator.next()
            with summary_writer.as_default():
                train_loss += self.model_fn(batch, training=True, strategy=strategy,
                                            augmentations=augmentations)
            if (step + 1) % validation_frequency == 0 and step != 0:
                # Log averaged training loss and metrics, then reset them
                # so validation metrics start from a clean state.
                if summary_writer:
                    with summary_writer.as_default():
                        tf.summary.scalar("train_loss", train_loss / validation_frequency,
                                          step=self.optimizer.iterations)
                        for name, metric in self.metrics.items():
                            tf.summary.scalar("train_" + name, metric.result(), step=self.optimizer.iterations)
                            metric.reset_states()
                train_loss = 0.0
                val_metrics = self.validate(validation_config['data_iterator'],
                                            strategy=strategy,
                                            summary_writer=summary_writer)
                # Save on improvement (validate() negates the loss so a larger
                # value is always better); patience <= 0 disables early stopping.
                if checkpoint_manager and (val_metrics[self.stopping_metric] > best_val_metric or self.patience <= 0):
                    best_val_metric = val_metrics[self.stopping_metric]
                    print('Saving model at step {}'.format(step))
                    checkpoint_manager.save()
                    plateau = 0
                else:
                    plateau += 1
                    if plateau >= self.patience and self.patience > 0:
                        print('Loss did not improve for {} testing step'.format(self.patience))
                        break
            if step == 1005 and self.profile:
                print('Close profiling')
                tf.profiler.experimental.stop()

    def validate(self, data_iterator, strategy=None, summary_writer=None):
        """Run a full pass over the validation iterator.

        Returns a dict with 'loss' (negated, so that larger is better for the
        early-stopping comparison in train()) plus one entry per configured
        metric. Streaming metrics are reset as they are read.
        """
        if summary_writer is None:
            print('There is no summary writer')
        loss = 0.0
        num_batch = 0.0
        for batch in data_iterator:
            loss += self.model_fn(batch, training=False, strategy=strategy)
            num_batch += 1.0
        metrics = {'loss': - loss / num_batch}
        for name, metric in self.metrics.items():
            metrics[name] = metric.result()
            metric.reset_states()
        if summary_writer:
            with summary_writer.as_default():
                # Re-negate the stored loss so tensorboard shows the true value.
                tf.summary.scalar("val_loss", - metrics['loss'], step=self.optimizer.iterations)
                for name, metric in metrics.items():
                    if name != 'loss':
                        tf.summary.scalar("val_" + name, metric, step=self.optimizer.iterations)
        return metrics

    def compute_metrics(self, data_iterator):
        """Evaluate all configured metrics over a full dataset pass.

        Returns:
            (results, fig): dict of metric name -> result, and None (no figure
            is produced for the binned task).
        """
        for name, metric in self.metrics.items():
            metric.reset_states()
        for batch in data_iterator:
            if len(batch) == 2:
                data, labels = batch
            else:
                data = batch[:2]
                labels = batch[-1]
            predictions = self.predict(data)
            # Bug fix: labels were previously expanded twice (before and after
            # predict), producing shape (N, 1, 1) and misaligning the kappa
            # update; expand exactly once, as in step_fn.
            labels = tf.expand_dims(labels, axis=-1)
            binned_labels = tf.raw_ops.Bucketize(input=labels, boundaries=self.bins)
            binned_pred = tf.math.argmax(predictions, axis=-1)
            for name, metric in self.metrics.items():
                if name == 'kappa':
                    metric.update_state(binned_labels[:, 0], binned_pred)
                else:
                    metric.update_state(binned_labels, predictions)
        results = {name: metric.result() for name, metric in self.metrics.items()}
        fig = None
        return results, fig

    def predict(self, data):
        """Return classifier outputs for `data` with both models in inference mode."""
        embeddings = self.representation_fn(data, training=False)
        predictions = self.classifier(embeddings, training=False)
        return predictions
| 47.442495 | 120 | 0.604651 | 2,687 | 24,338 | 5.324525 | 0.108671 | 0.023625 | 0.019571 | 0.022646 | 0.902495 | 0.885441 | 0.873419 | 0.862934 | 0.831132 | 0.81911 | 0 | 0.009281 | 0.309393 | 24,338 | 512 | 121 | 47.535156 | 0.841921 | 0.179472 | 0 | 0.788889 | 0 | 0 | 0.035254 | 0.001392 | 0 | 0 | 0 | 0 | 0 | 1 | 0.044444 | false | 0 | 0.016667 | 0 | 0.097222 | 0.033333 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
c5a0f9fbbca0cee97592bf52635086072ada87fd | 20,047 | py | Python | python/swagger_client/api/secretaries_api.py | alextselegidis/easyappointments-sdk | 8ba969dc1221ea614b70d4d52313f20fc85df1e1 | [
"CC-BY-3.0"
] | null | null | null | python/swagger_client/api/secretaries_api.py | alextselegidis/easyappointments-sdk | 8ba969dc1221ea614b70d4d52313f20fc85df1e1 | [
"CC-BY-3.0"
] | null | null | null | python/swagger_client/api/secretaries_api.py | alextselegidis/easyappointments-sdk | 8ba969dc1221ea614b70d4d52313f20fc85df1e1 | [
"CC-BY-3.0"
] | null | null | null | # coding: utf-8
"""
Easy!Appointments API
These are the OpenAPI specs that describe the REST API of Easy!Appointments. # noqa: E501
OpenAPI spec version: 1.0.0
Contact: info@easyappointments.org
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from swagger_client.api_client import ApiClient
class SecretariesApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """Store the given ApiClient, building a default one when none is supplied."""
    self.api_client = ApiClient() if api_client is None else api_client
def secretaries_get(self, **kwargs):  # noqa: E501
    """Get all secretaries  # noqa: E501

    Synchronous by default; pass async_req=True to get a request thread back
    instead of the result:
    >>> thread = api.secretaries_get(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int page:
    :param int length:
    :param str sort:
    :param str q:
    :param str fields:
    :param str _with:
    :return: SecretaryCollection
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous call: hand back the thread wrapper untouched.
        return self.secretaries_get_with_http_info(**kwargs)  # noqa: E501
    (data) = self.secretaries_get_with_http_info(**kwargs)  # noqa: E501
    return data
def secretaries_get_with_http_info(self, **kwargs):  # noqa: E501
    """Get all secretaries  # noqa: E501

    Synchronous by default; pass async_req=True to get a request thread back
    instead of the result:
    >>> thread = api.secretaries_get_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int page:
    :param int length:
    :param str sort:
    :param str q:
    :param str fields:
    :param str _with:
    :return: SecretaryCollection
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['page', 'length', 'sort', 'q', 'fields', '_with',
                  'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501

    params = locals()
    # Reject unknown keyword arguments, then flatten kwargs into params.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method secretaries_get" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}
    path_params = {}

    # Map python argument names to their query-string names ('_with' avoids
    # shadowing the python keyword and is sent as 'with'); order is preserved.
    query_params = [(query_name, params[param_name])  # noqa: E501
                    for param_name, query_name in (('page', 'page'),
                                                   ('length', 'length'),
                                                   ('sort', 'sort'),
                                                   ('q', 'q'),
                                                   ('fields', 'fields'),
                                                   ('_with', 'with'))
                    if param_name in params]

    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json'])  # noqa: E501
    }
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = ['BasicAuth', 'BearerToken']  # noqa: E501

    return self.api_client.call_api(
        '/secretaries', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SecretaryCollection',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def secretaries_post(self, body, **kwargs):  # noqa: E501
    """Create a secretary  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get the request
    thread back instead of the decoded response.
    >>> thread = api.secretaries_post(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param SecretaryPayload body: (required)
    :return: SecretaryRecord
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always request data-only responses; the *_with_http_info variant
    # does the real work and returns either the request thread (async)
    # or the decoded data (sync) — identical in both branches, so a
    # single delegation suffices.
    kwargs['_return_http_data_only'] = True
    return self.secretaries_post_with_http_info(body, **kwargs)  # noqa: E501
def secretaries_post_with_http_info(self, body, **kwargs):  # noqa: E501
    """Create a secretary  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.secretaries_post_with_http_info(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param SecretaryPayload body: (required)
    :return: SecretaryRecord
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: on an unexpected keyword argument
    :raises ValueError: if ``body`` is missing/None
    """
    all_params = ['body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Collect call parameters explicitly instead of mutating the dict
    # returned by locals(), and iterate kwargs with dict.items() rather
    # than six.iteritems (equivalent iteration on Python 2 and 3).
    params = {'body': body}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method secretaries_post" % key
            )
        params[key] = val

    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `secretaries_post`")  # noqa: E501

    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['BasicAuth', 'BearerToken']  # noqa: E501

    return self.api_client.call_api(
        '/secretaries', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SecretaryRecord',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def secretaries_secretary_id_delete(self, secretary_id, **kwargs):  # noqa: E501
    """Delete a secretary  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get the request
    thread back instead of the decoded response.
    >>> thread = api.secretaries_secretary_id_delete(secretary_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int secretary_id: (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Both the async and the sync path delegate to the *_with_http_info
    # variant and return its result unchanged.
    kwargs['_return_http_data_only'] = True
    return self.secretaries_secretary_id_delete_with_http_info(
        secretary_id, **kwargs)  # noqa: E501
def secretaries_secretary_id_delete_with_http_info(self, secretary_id, **kwargs):  # noqa: E501
    """Delete a secretary  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.secretaries_secretary_id_delete_with_http_info(secretary_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int secretary_id: (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: on an unexpected keyword argument
    :raises ValueError: if ``secretary_id`` is missing/None
    """
    all_params = ['secretary_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Collect call parameters explicitly instead of mutating the dict
    # returned by locals(), and iterate kwargs with dict.items() rather
    # than six.iteritems (equivalent iteration on Python 2 and 3).
    params = {'secretary_id': secretary_id}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method secretaries_secretary_id_delete" % key
            )
        params[key] = val

    # verify the required parameter 'secretary_id' is set
    if ('secretary_id' not in params or
            params['secretary_id'] is None):
        raise ValueError("Missing the required parameter `secretary_id` when calling `secretaries_secretary_id_delete`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'secretary_id' in params:
        path_params['secretaryId'] = params['secretary_id']  # noqa: E501

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = ['BasicAuth', 'BearerToken']  # noqa: E501

    return self.api_client.call_api(
        '/secretaries/{secretaryId}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def secretaries_secretary_id_get(self, secretary_id, **kwargs):  # noqa: E501
    """Get a secretary  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get the request
    thread back instead of the decoded response.
    >>> thread = api.secretaries_secretary_id_get(secretary_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int secretary_id: (required)
    :return: SecretaryRecord
             If the method is called asynchronously,
             returns the request thread.
    """
    # Both the async and the sync path delegate to the *_with_http_info
    # variant and return its result unchanged.
    kwargs['_return_http_data_only'] = True
    return self.secretaries_secretary_id_get_with_http_info(
        secretary_id, **kwargs)  # noqa: E501
def secretaries_secretary_id_get_with_http_info(self, secretary_id, **kwargs):  # noqa: E501
    """Get a secretary  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.secretaries_secretary_id_get_with_http_info(secretary_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int secretary_id: (required)
    :return: SecretaryRecord
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: on an unexpected keyword argument
    :raises ValueError: if ``secretary_id`` is missing/None
    """
    all_params = ['secretary_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Collect call parameters explicitly instead of mutating the dict
    # returned by locals(), and iterate kwargs with dict.items() rather
    # than six.iteritems (equivalent iteration on Python 2 and 3).
    params = {'secretary_id': secretary_id}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method secretaries_secretary_id_get" % key
            )
        params[key] = val

    # verify the required parameter 'secretary_id' is set
    if ('secretary_id' not in params or
            params['secretary_id'] is None):
        raise ValueError("Missing the required parameter `secretary_id` when calling `secretaries_secretary_id_get`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'secretary_id' in params:
        path_params['secretaryId'] = params['secretary_id']  # noqa: E501

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['BasicAuth', 'BearerToken']  # noqa: E501

    return self.api_client.call_api(
        '/secretaries/{secretaryId}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SecretaryRecord',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def secretaries_secretary_id_put(self, body, secretary_id, **kwargs):  # noqa: E501
    """Update a secretary  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get the request
    thread back instead of the decoded response.
    >>> thread = api.secretaries_secretary_id_put(body, secretary_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param SecretaryPayload body: (required)
    :param int secretary_id: (required)
    :return: SecretaryRecord
             If the method is called asynchronously,
             returns the request thread.
    """
    # Both the async and the sync path delegate to the *_with_http_info
    # variant and return its result unchanged.
    kwargs['_return_http_data_only'] = True
    return self.secretaries_secretary_id_put_with_http_info(
        body, secretary_id, **kwargs)  # noqa: E501
def secretaries_secretary_id_put_with_http_info(self, body, secretary_id, **kwargs):  # noqa: E501
    """Update a secretary  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.secretaries_secretary_id_put_with_http_info(body, secretary_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param SecretaryPayload body: (required)
    :param int secretary_id: (required)
    :return: SecretaryRecord
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: on an unexpected keyword argument
    :raises ValueError: if ``body`` or ``secretary_id`` is missing/None
    """
    all_params = ['body', 'secretary_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Collect call parameters explicitly instead of mutating the dict
    # returned by locals(), and iterate kwargs with dict.items() rather
    # than six.iteritems (equivalent iteration on Python 2 and 3).
    params = {'body': body, 'secretary_id': secretary_id}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method secretaries_secretary_id_put" % key
            )
        params[key] = val

    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `secretaries_secretary_id_put`")  # noqa: E501
    # verify the required parameter 'secretary_id' is set
    if ('secretary_id' not in params or
            params['secretary_id'] is None):
        raise ValueError("Missing the required parameter `secretary_id` when calling `secretaries_secretary_id_put`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'secretary_id' in params:
        path_params['secretaryId'] = params['secretary_id']  # noqa: E501

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['BasicAuth', 'BearerToken']  # noqa: E501

    return self.api_client.call_api(
        '/secretaries/{secretaryId}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SecretaryRecord',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
| 38.039848 | 138 | 0.609069 | 2,246 | 20,047 | 5.172306 | 0.075245 | 0.066282 | 0.047344 | 0.030989 | 0.933632 | 0.915985 | 0.908668 | 0.894551 | 0.8893 | 0.88095 | 0 | 0.015163 | 0.299297 | 20,047 | 526 | 139 | 38.112167 | 0.811846 | 0.303487 | 0 | 0.766784 | 0 | 0 | 0.187646 | 0.047959 | 0 | 0 | 0 | 0 | 0 | 1 | 0.038869 | false | 0 | 0.014134 | 0 | 0.109541 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
c5a2035516588b2a36780a85f74899b1f89fb73f | 1,016 | py | Python | tstudent/plot_mcmc_student.py | karlnapf/kernel_goodness_of_fit | b76ad54481475df5f061615b0922dec812d48eda | [
"BSD-3-Clause"
] | 21 | 2016-12-20T17:27:14.000Z | 2022-02-11T17:10:05.000Z | tstudent/plot_mcmc_student.py | zhushy/kernel_goodness_of_fit | b76ad54481475df5f061615b0922dec812d48eda | [
"BSD-3-Clause"
] | null | null | null | tstudent/plot_mcmc_student.py | zhushy/kernel_goodness_of_fit | b76ad54481475df5f061615b0922dec812d48eda | [
"BSD-3-Clause"
import numpy as np
import seaborn
from pandas import DataFrame

from tools.latex_plot_init import plt


def _plot_pvalue_boxplot(results_file, figure_file):
    """Plot p-values grouped by degrees of freedom and save the figure.

    Loads an (N, 2) array from *results_file* whose column 0 holds the
    degrees of freedom and column 1 the p-value of each run (column
    meaning inferred from the axis labels below — TODO confirm against
    the script that writes the .npy files), draws a boxplot of p-values
    per dof, and writes it to *figure_file*.
    """
    results = np.load(results_file)
    df = DataFrame(results)
    plt.figure()
    seaborn.set_style("whitegrid")
    seaborn.boxplot(x=0, y=1, data=df, palette="BuGn_d")
    plt.tight_layout()
    plt.ylabel('p values')
    plt.ylim([0, 1])
    plt.xlabel('degrees of freedom')
    plt.savefig(figure_file)


# The original script repeated the identical plotting code three times;
# the single helper above produces byte-identical figures for each pair.
_plot_pvalue_boxplot('results_good.npy', '../write_up/img/sgld_student.pdf')
_plot_pvalue_boxplot('results_bad.npy', '../write_up/img/sgld_student_bad.pdf')
_plot_pvalue_boxplot('results_thinning.npy', '../write_up/img/sgld_student_opt.pdf')
c5ab0c2fb56a47fe58aed63391554f97b876c74d | 68,639 | py | Python | benchmarks/SimResults/_bigLittle_hrrs_spec_tugberk_rr/cmp_perlbench/power.py | TugberkArkose/MLScheduler | e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061 | [
"Unlicense"
] | null | null | null | benchmarks/SimResults/_bigLittle_hrrs_spec_tugberk_rr/cmp_perlbench/power.py | TugberkArkose/MLScheduler | e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061 | [
"Unlicense"
] | null | null | null | benchmarks/SimResults/_bigLittle_hrrs_spec_tugberk_rr/cmp_perlbench/power.py | TugberkArkose/MLScheduler | e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061 | [
"Unlicense"
] | null | null | null | power = {'BUSES': {'Area': 1.33155,
'Bus/Area': 1.33155,
'Bus/Gate Leakage': 0.00662954,
'Bus/Peak Dynamic': 0.0,
'Bus/Runtime Dynamic': 0.0,
'Bus/Subthreshold Leakage': 0.0691322,
'Bus/Subthreshold Leakage with power gating': 0.0259246,
'Gate Leakage': 0.00662954,
'Peak Dynamic': 0.0,
'Runtime Dynamic': 0.0,
'Subthreshold Leakage': 0.0691322,
'Subthreshold Leakage with power gating': 0.0259246},
'Core': [{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 1.88938e-06,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.20269,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 1.26502e-05,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.563363,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.975541,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.5595,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 2.0984,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.55686,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 5.98507,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 2.38989e-06,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0204223,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.14768,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.151036,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.147683,
'Execution Unit/Register Files/Runtime Dynamic': 0.171458,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.356856,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.936955,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 3.81488,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.0056209,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.0056209,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00489046,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.00189026,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00216964,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.0183019,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0540834,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.145195,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.43323,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.575264,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.493146,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.96874,
'Instruction Fetch Unit/Runtime Dynamic': 1.28599,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0654535,
'L2/Runtime Dynamic': 0.0125114,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 5.79175,
'Load Store Unit/Data Cache/Runtime Dynamic': 2.19374,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.147353,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.147353,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 6.49042,
'Load Store Unit/Runtime Dynamic': 3.06779,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.363348,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.726695,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.128953,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.129516,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.399995,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0955512,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.781429,
'Memory Management Unit/Runtime Dynamic': 0.225067,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 26.8528,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 8.78917e-06,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0288074,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.293679,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 0.322495,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 8.72873,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 1.88938e-06,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.20269,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 1.01201e-05,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.185536,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.299263,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.151058,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.635857,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.212198,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.23814,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 1.91191e-06,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00778222,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.056276,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0575543,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.056278,
'Execution Unit/Register Files/Runtime Dynamic': 0.0653365,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.118558,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.311264,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.62052,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00239405,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00239405,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00214639,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000864364,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000826772,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00776125,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.020768,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0553284,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 3.51936,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.21801,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.18792,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 5.90868,
'Instruction Fetch Unit/Runtime Dynamic': 0.489788,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0320371,
'L2/Runtime Dynamic': 0.00602815,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.00256,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.851114,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0571162,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0571162,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.27228,
'Load Store Unit/Runtime Dynamic': 1.18991,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.140839,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.281678,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0499842,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0502636,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.218821,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0363371,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.460794,
'Memory Management Unit/Runtime Dynamic': 0.0866006,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 17.5014,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 4.87886e-06,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00837094,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0946763,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.103052,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 3.4959,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 9.4469e-07,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.20269,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 1.01201e-05,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.191605,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.309051,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.155999,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.656654,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.219138,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.25172,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 1.91191e-06,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00803676,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.058116,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0594367,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0581179,
'Execution Unit/Register Files/Runtime Dynamic': 0.0674735,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.122434,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.32144,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.65363,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.0024603,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.0024603,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00220401,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000886624,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000853813,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00797842,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0214063,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.057138,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 3.63447,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.225991,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.194067,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 6.02938,
'Instruction Fetch Unit/Runtime Dynamic': 0.50658,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0317878,
'L2/Runtime Dynamic': 0.00608883,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.0571,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.877218,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0588806,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0588806,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.33514,
'Load Store Unit/Runtime Dynamic': 1.22648,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.145189,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.290379,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0515282,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0517978,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.225978,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0376634,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.470603,
'Memory Management Unit/Runtime Dynamic': 0.0894612,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 17.7081,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 4.63626e-06,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00864473,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0977836,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.106433,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 3.58868,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 1.88938e-06,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.20269,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 1.01201e-05,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.183727,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.296346,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.149585,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.629658,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.210129,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.23408,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 1.91191e-06,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00770636,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0557273,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0569932,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0557293,
'Execution Unit/Register Files/Runtime Dynamic': 0.0646996,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.117402,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.308995,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.61142,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00236611,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00236611,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00212313,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000855944,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000818712,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00767406,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0204619,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.054789,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 3.48505,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.216985,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.186088,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 5.87271,
'Instruction Fetch Unit/Runtime Dynamic': 0.485998,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0345971,
'L2/Runtime Dynamic': 0.00670946,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.02013,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.859643,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0576845,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0576846,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.29253,
'Load Store Unit/Runtime Dynamic': 1.20181,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.14224,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.284481,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0504815,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0507801,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.216688,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0362258,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.459515,
'Memory Management Unit/Runtime Dynamic': 0.087006,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 17.4829,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 4.96271e-06,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00828934,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0937804,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.102075,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 3.49502,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328}],
'DRAM': {'Area': 0,
'Gate Leakage': 0,
'Peak Dynamic': 0.8232012239197717,
'Runtime Dynamic': 0.8232012239197717,
'Subthreshold Leakage': 4.252,
'Subthreshold Leakage with power gating': 4.252},
'L3': [{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.08898,
'Runtime Dynamic': 0.0388505,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364}],
'Processor': {'Area': 191.908,
'Gate Leakage': 1.53485,
'Peak Dynamic': 79.6342,
'Peak Power': 112.746,
'Runtime Dynamic': 19.3472,
'Subthreshold Leakage': 31.5774,
'Subthreshold Leakage with power gating': 13.9484,
'Total Cores/Area': 128.669,
'Total Cores/Gate Leakage': 1.4798,
'Total Cores/Peak Dynamic': 79.5452,
'Total Cores/Runtime Dynamic': 19.3083,
'Total Cores/Subthreshold Leakage': 24.7074,
'Total Cores/Subthreshold Leakage with power gating': 10.2429,
'Total L3s/Area': 61.9075,
'Total L3s/Gate Leakage': 0.0484137,
'Total L3s/Peak Dynamic': 0.08898,
'Total L3s/Runtime Dynamic': 0.0388505,
'Total L3s/Subthreshold Leakage': 6.80085,
'Total L3s/Subthreshold Leakage with power gating': 3.32364,
'Total Leakage': 33.1122,
'Total NoCs/Area': 1.33155,
'Total NoCs/Gate Leakage': 0.00662954,
'Total NoCs/Peak Dynamic': 0.0,
'Total NoCs/Runtime Dynamic': 0.0,
'Total NoCs/Subthreshold Leakage': 0.0691322,
'Total NoCs/Subthreshold Leakage with power gating': 0.0259246}} | 75.097374 | 124 | 0.681988 | 8,098 | 68,639 | 5.774636 | 0.06656 | 0.123516 | 0.11291 | 0.093407 | 0.941385 | 0.932789 | 0.920151 | 0.892415 | 0.867352 | 0.848705 | 0 | 0.131836 | 0.224231 | 68,639 | 914 | 125 | 75.097374 | 0.746375 | 0 | 0 | 0.656455 | 0 | 0 | 0.657124 | 0.048077 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
c5d4677aa041e13d7ac1a00bb8367854494d8db0 | 152 | py | Python | python/ray/sgd/__init__.py | linyiyue/ray | 90d2456ec70270a1f894ec3ef6f3004533859e03 | [
"Apache-2.0"
] | 1 | 2021-02-14T01:53:37.000Z | 2021-02-14T01:53:37.000Z | python/ray/sgd/__init__.py | linyiyue/ray | 90d2456ec70270a1f894ec3ef6f3004533859e03 | [
"Apache-2.0"
] | 77 | 2021-02-13T08:07:09.000Z | 2022-03-19T07:08:45.000Z | python/ray/sgd/__init__.py | linyiyue/ray | 90d2456ec70270a1f894ec3ef6f3004533859e03 | [
"Apache-2.0"
] | 1 | 2019-07-25T23:01:38.000Z | 2019-07-25T23:01:38.000Z | from ray.util.sgd.v2 import * # noqa: F401, F403
from ray.util.sgd.v2.callbacks import JsonLoggerCallback, TBXLoggerCallback # noqa: E501, F401, F403
| 50.666667 | 101 | 0.763158 | 22 | 152 | 5.272727 | 0.590909 | 0.12069 | 0.189655 | 0.241379 | 0.275862 | 0 | 0 | 0 | 0 | 0 | 0 | 0.128788 | 0.131579 | 152 | 2 | 102 | 76 | 0.75 | 0.256579 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
c5f0a06a82c89017c66b17c5268cadb8275e7b09 | 81 | py | Python | pyorama/asset/__init__.py | AnishN/Pyorama | e16389336e1c4969165967fe208b5b260188f57f | [
"MIT"
] | null | null | null | pyorama/asset/__init__.py | AnishN/Pyorama | e16389336e1c4969165967fe208b5b260188f57f | [
"MIT"
] | null | null | null | pyorama/asset/__init__.py | AnishN/Pyorama | e16389336e1c4969165967fe208b5b260188f57f | [
"MIT"
] | null | null | null | from pyorama.asset.asset_system import *
from pyorama.asset.asset_queue import * | 40.5 | 41 | 0.82716 | 12 | 81 | 5.416667 | 0.5 | 0.338462 | 0.492308 | 0.646154 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.098765 | 81 | 2 | 42 | 40.5 | 0.890411 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
c5f4c34e0d39d88817f8a3059dabba654037a403 | 140 | py | Python | dataservice/api/cavatica_task/__init__.py | ConnorBarnhill/kf-api-dataservice | 547df467a307788882469a25c947a14965a26336 | [
"Apache-2.0"
] | null | null | null | dataservice/api/cavatica_task/__init__.py | ConnorBarnhill/kf-api-dataservice | 547df467a307788882469a25c947a14965a26336 | [
"Apache-2.0"
] | null | null | null | dataservice/api/cavatica_task/__init__.py | ConnorBarnhill/kf-api-dataservice | 547df467a307788882469a25c947a14965a26336 | [
"Apache-2.0"
] | null | null | null | from dataservice.api.cavatica_task.resources import CavaticaTaskAPI
from dataservice.api.cavatica_task.resources import CavaticaTaskListAPI
| 46.666667 | 71 | 0.9 | 16 | 140 | 7.75 | 0.5625 | 0.241935 | 0.290323 | 0.419355 | 0.725806 | 0.725806 | 0.725806 | 0 | 0 | 0 | 0 | 0 | 0.057143 | 140 | 2 | 72 | 70 | 0.939394 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 10 |
a853a43db70131034a3cb4f4ab2cb08863a86e64 | 179,334 | py | Python | parser/fase2/team13/Graficar.py | Gabriel-15/tytus | fb00718bf3fcc5211a3604fba1a551f44bdc6deb | [
"MIT"
] | 35 | 2020-12-07T03:11:43.000Z | 2021-04-15T17:38:16.000Z | parser/fase2/team13/Graficar.py | Gabriel-15/tytus | fb00718bf3fcc5211a3604fba1a551f44bdc6deb | [
"MIT"
] | 47 | 2020-12-09T01:29:09.000Z | 2021-01-13T05:37:50.000Z | parser/fase2/team13/Graficar.py | Gabriel-15/tytus | fb00718bf3fcc5211a3604fba1a551f44bdc6deb | [
"MIT"
] | 556 | 2020-12-07T03:13:31.000Z | 2021-06-17T17:41:10.000Z |
class Node():
    """AST node for the parse tree built by the grammar actions.

    Attributes:
        Etiqueta: node label (grammar symbol or token name).
        Valor:    lexeme / literal value carried by the node.
        idNod:    unique id, assigned from the module-level counter `cont`.
        Fila:     token line number (0 for non-terminal nodes).
        Columna:  lexer position as passed by the actions (t.lexpos, not a
                  true column — see the p_* rules).
        hijos:    ordered list of child Node objects.
    """
    def __init__(self, Etiqueta="", Valor="", idNod=0, Fila=0, Columna=0):
        self.Etiqueta = Etiqueta
        self.Valor = Valor
        self.idNod = idNod
        self.Fila = Fila
        self.Columna = Columna
        self.hijos = []

    def AddHijos(self, son):
        """Append `son` as the rightmost child."""
        self.hijos.append(son)

    def getHijos(self):
        """Return the (mutable) list of children."""
        return self.hijos

    def __repr__(self):
        # Added for debuggability; no existing caller relies on repr().
        return "Node(%r, %r, id=%r)" % (self.Etiqueta, self.Valor, self.idNod)
#------------- ASCENDING (LALR) GRAMMAR ----------------------
# RESERVED-WORD TABLE: maps the lowercased lexeme (see t_id) to its token name.
reservadas = {
    # Numeric Types
    'smallint': 'tSmallint',
    'integer': 'tInteger',
    'bigint': 'tBigint',
    'decimal': 'tDecimal',
    'numeric': 'tNumeric',
    'real': 'tReal',
    'double': 'tDouble',
    'precision': 'tPrecision',
    'money': 'tMoney',
    # Character types
    'character': 'tCharacter',
    'varying': 'tVarying',
    'varchar': 'tVarchar',
    'char': 'tChar',
    'text': 'tText',
    # Date/Time Types
    'timestamp': 'tTimestamp',
    'date': 'tDate',
    'time': 'tTime',
    'interval': 'tInterval',
    # Interval Type
    'year': 'tYear',
    'month': 'tMonth',
    'day': 'tDay',
    'hour': 'tHour',
    'minute': 'tMinute',
    'second': 'tSecond',
    'to': 'tTo',
    # Boolean Type
    'boolean': 'tBoolean',
    'false': 'tFalse',
    'true': 'tTrue',
    'create': 'create',
    'database': 'database',
    'or': 'or',
    'replace': 'replace',
    'if': 'if',
    'not': 'not',
    'exists': 'exists',
    'databases': 'databases',
    'drop': 'drop',
    'owner': 'owner',
    'mode': 'mode',
    'alter': 'alter',
    'show': 'show',
    'like': 'like',
    'insert': 'insert',
    'values': 'values',
    'null': 'null',
    'primarykey': 'primarykey',
    'into': 'into',
    'from': 'from',
    'where': 'where',
    'as': 'as',
    'select': 'select',
    'update': 'tUpdate',
    'set': 'tSet',
    'delete': 'tDelete',
    'truncate': 'tTruncate',
    'table': 'table',
    'tables': 'tables',
    'between': 'tBetween',
    'rename': 'rename',
    # NOTE(review): t_id lowercases the lexeme before looking it up, so the
    # mixed-case keys 'isNull' / 'notNull' below can never match — confirm
    # whether they were meant to be 'isnull' / 'notnull'.
    'isNull': 'isNull',
    # Fixed: 'in' was defined twice in this literal ('tIn' here and 'in'
    # further down); the later duplicate silently won, so the effective
    # mapping has always been 'in' -> 'in'.  That effective value is kept
    # here and the duplicate entry was removed.
    'in': 'in',
    'iLike': 'tILike',
    'similar': 'tSimilar',
    'is': 'tIs',
    'notNull': 'notNull',
    'and': 'And',
    'current_user': 'currentuser',
    'session_user': 'sessionuser',
    'type': 'ttype',
    'enum': 'tenum',
    'yes': 'yes',
    'no': 'no',
    'on': 'on',
    'off': 'off',
    # >inicia fl
    'inherits': 'tInherits',
    'default': 'tDefault',
    'primary': 'tPrimary',
    'foreign': 'tForeign',
    'key': 'tKey',
    'references': 'tReferences',
    'check': 'tCheck',
    'constraint': 'tConstraint',
    'unique': 'tUnique',
    'column': 'tColumn',
    'add': 'add',
    # >termina fl
    # Fixed: a second, byte-identical set of 'no'/'yes'/'on'/'off' entries
    # stood here; duplicate keys in a dict literal are redundant, so they
    # were removed (resulting dict is unchanged).
    # QUERY TOKENS
    'distinct': 'distinct',
    'group': 'group',
    'by': 'by',
    'having': 'having',
    # aggregation
    'count': 'count',
    'avg': 'avg',
    'max': 'max',
    'min': 'min',
    'sum': 'sum',
    # mathematical
    'abs': 'abs',
    'cbrt': 'cbrt',
    'ceil': 'ceil',
    'ceiling': 'ceiling',
    'degrees': 'degrees',
    'div': 'div',
    'exp': 'exp',
    'factorial': 'factorial',
    'floor': 'floor',
    'gcd': 'gcd',
    'lcm': 'lcm',
    'ln': 'ln',
    'log': 'log',
    'log10': 'log10',
    'min_scale': 'min_scale',
    'mod': 'mod',
    'pi': 'pi',
    'power': 'power',
    'radians': 'radians',
    'round': 'round',
    'scale': 'scale',
    'sign': 'sign',
    'sqrt': 'sqrt',
    'trim_scale': 'trim_scale',
    'trunc': 'trunc',
    'width_bucket': 'width_bucket',
    'random': 'random',
    'setseed': 'setseed',
    # trigonometric
    'acos': 'acos',
    'acosd': 'acosd',
    'asin': 'asin',
    'asind': 'asind',
    'atan': 'atan',
    'atand': 'atand',
    'atan2': 'atan2',
    'atan2d': 'atan2d',
    'cos': 'cos',
    'cosd': 'cosd',
    'cot': 'cot',
    'cotd': 'cotd',
    'sin': 'sin',
    'sind': 'sind',
    'tan': 'tan',
    'tand': 'tand',
    'sinh': 'sinh',
    'cosh': 'cosh',
    'tanh': 'tanh',
    'asinh': 'asinh',
    'acosh': 'acosh',
    'atanh': 'atanh',
    # binary
    'length': 'length',
    'substring': 'substring',
    'trim': 'trim',
    'get_byte': 'get_byte',
    'md5': 'md5',
    'set_byte': 'set_byte',
    'sha256': 'sha256',
    'substr': 'substr',
    'convert': 'convert',
    'encode': 'encode',
    'decode': 'decode',
    # others
    'all': 'all',
    'any': 'any',
    'some': 'some',
    # EXPRESSIONS
    'case': 'case',
    'when': 'when',
    'then': 'then',
    'else': 'else',
    'end': 'end',
    'greatest': 'greatest',
    'least': 'least',
    'limit': 'limit',
    'offset': 'offset',
    'union': 'union',
    'except': 'except',
    'intersect': 'intersect',
    # others
    'date_part': 'date_part',
    'now': 'now',
    'current_date': 'current_date',
    'current_time': 'current_time',
    'extract': 'tExtract',
    # new (phase 10)
    'asc': 'asc',
    'desc': 'desc',
    'nulls': 'nulls',
    'first': 'first',
    'last': 'last',
    'order': 'order',
    'use': 'tuse',
    'unknown': 'unknown',
    'bytea': 'bytea',
    # new
    'return': 'treturn',
    'returns': 'returns',
    'declare': 'declare',
    'begin': 'begin',
    'function': 'function',
    'language': 'language',
    'for': 'tfor',
    'alias': 'talias',
    'loop': 'loop',
    'while': 'twhile',
    'do': 'do',
    'elsif': 'elsif',
    'continue': 'tcontinue',
    'exit': 'texit',
    'raise': 'raise',
    'notice': 'notice',
    'rowtype': 'rowtype',
    'procedure': 'procedure',
    'next': 'next',
    'out': 'out',
    'constant': 'constant',
    'query': 'tquery',
    'inout': 'inout',
    'state': 'state',
    'lower': 'lower',
    'using': 'using',
    'index': 'index',
    'hash': 'hash',
    'include': 'include',
    'execute': 'execute'
}
# TOKEN LIST (required by ply.lex): literal/operator token names, plus every
# token name produced by the reserved-word table above.
tokens = [
    'punto',
    'dosPts',
    'corcheI',
    'corcheD',
    'mas',
    'menos',
    'elevado',
    'multi',
    'divi',
    'modulo',
    'igual',
    'menor',
    'mayor',
    'menorIgual',
    'mayorIgual',
    'diferente',
    'id',
    'decimal',
    'entero',
    'cadena',
    'cadenaLike',
    'parAbre',
    'parCierra',
    'coma',
    'ptComa',
    # query-related operator tokens
    'barra',
    'barraDoble',
    'amp',
    'numeral',
    'virgulilla',
    'mayormayor',
    'menormenor',
    # TOKENS FOR DATE/TIME RECOGNITION
    'fecha',
    'hora',
    'fecha_hora',
    'intervaloc',
    'notEqual',
    'dobledolar',
    'val',
    'asig'
] + list(reservadas.values())
# TOKEN DEFINITIONS — simple literal regexes.  PLY sorts these string rules by
# decreasing regex length, so two-character operators ('<=', '>=', '<>', '!=',
# ':=', '||', '>>', '<<', '$$') take precedence over their one-character
# prefixes.
t_punto = r'\.'
t_dosPts = r':'
t_corcheI = r'\['
t_corcheD = r'\]'
t_mas = r'\+'
t_menos = r'-'
t_elevado = r'\^'
t_multi = r'\*'
t_divi = r'/'
t_modulo = r'%'
t_igual = r'='
t_menor = r'<'
t_mayor = r'>'
t_menorIgual = r'<='
t_mayorIgual = r'>='
t_diferente = r'<>'
t_parAbre = r'\('
t_parCierra = r'\)'
t_coma = r','
t_ptComa = r';'
# query operator tokens
t_barra = r'\|'
t_barraDoble = r'\|\|'
t_amp = r'&'
t_numeral = r'\?'
t_virgulilla = r'~'
t_mayormayor = r'>>'
t_menormenor = r'<<'
t_notEqual = r'!='
t_dobledolar = r'\$\$'
t_asig = r':='
# Lexer rule: floating-point literal (e.g. "3.14"); value converted to float.
def t_decimal(t):
    r'\d+\.\d+'
    try:
        t.value = float(t.value)
    except ValueError:
        # Fixed: the original message had a typo ("Floaat") and a %d
        # placeholder that was never substituted (the value was passed as a
        # separate print argument instead).
        print("Float value too large %s" % t.value)
        t.value = 0
    return t
# Lexer rule: integer literal; value converted to int.
def t_entero(t):
    r'\d+'
    try:
        t.value = int(t.value)
    except ValueError:
        # Fixed: the %d placeholder was never substituted (the value was
        # passed as a separate print argument instead of being formatted in).
        print("Integer value too large %s" % t.value)
        t.value = 0
    return t
# Lexer rule intended for quoted interval literals such as '5 years'.
def t_intervaloc(t):
    # NOTE(review): the (Year|Years|...) alternation is wrapped inside
    # [ ... ], which turns it into a CHARACTER CLASS — as written this
    # matches any quoted run of digits, whitespace, letters from those
    # words, '|', '(' and ')', not the intended "<number> <unit>" shape.
    # Left untouched because the grammar may depend on the loose match;
    # confirm the intended pattern before tightening.
    r'\'[\d+\s(Year|Years|Month|Months|day|days|hour|hours|minute|minutes|second|seconds)]+\''
    return t
# Lexer rule: quoted time-of-day literal 'HH:MM:SS' (quotes kept in t.value).
def t_hora(t):
    r'\'[0-2]?[0-9]:[0-5]?[0-9]:[0-5]?[0-9]\''
    return t
# Lexer rule: quoted date literal 'YYYY-MM-DD' (quotes kept in t.value).
def t_fecha(t):
    r'\'[0-9]{4}-[0-1]?[0-9]-[0-3]?[0-9]\''
    return t
# Lexer rule: quoted timestamp literal 'YYYY-MM-DD HH:MM:SS'.  Must be defined
# as a function (before the lexer is built) so it outranks t_fecha/t_hora.
def t_fecha_hora(t):
    r'\'([0-9]{4}-[0-1]?[0-9]-[0-3]?[0-9])(\s)([0-2]?[0-9]:[0-5]?[0-9]:[0-5]?[0-9])\''
    return t
# Lexer rule: LIKE pattern of the form '%...%' or "%...%"; the token value
# keeps only the text between the percent signs.
def t_cadenaLike(t):
    r'\'%.*?%\'|\"%.*?%\"'
    raw = t.value
    # Drop the leading quote+'%' and the trailing '%'+quote.
    t.value = raw[2:-2]
    return t
# Lexer rule: single- or double-quoted string; the token value keeps only the
# text between the quotes.
def t_cadena(t):
    r'\'.*?\'|\".*?\"'
    raw = t.value
    t.value = raw[1:-1]
    return t
# Lexer rule: positional parameter such as "$1" / "$25"; the token value is
# the number after the dollar sign.
def t_val(t):
    r'\$\d+'
    # Fixed: the original sliced t.value[1:-1] (copy-paste from t_cadena),
    # which dropped the LAST digit as well as the leading '$'
    # ("$12" -> "1", "$1" -> "").  Only the '$' prefix should be stripped.
    t.value = t.value[1:]
    return t
# Lexer rule: identifier.  The lowercased lexeme is checked against the
# reserved-word table so keywords get their own token type; anything else
# stays an 'id'.
def t_id(t):
    r'[a-zA-Z]([a-zA-Z]|[0-9]|_)*'
    # NOTE(review): because the lookup key is lowercased, mixed-case keys in
    # `reservadas` (e.g. 'isNull', 'notNull') can never match — confirm.
    t.type = reservadas.get(t.value.lower(), 'id')
    return t
# Multi-line comment /* ... */ — discarded (no return), but the line counter
# is kept in sync with the newlines the comment spans.
def t_COMENTARIO_MULTILINEA(t):
    r'/\*(.|\n)*?\*/'
    t.lexer.lineno += t.value.count('\n')
# Single-line comment "-- ..." — matched and discarded (no token returned).
def t_COMENTARIO_SIMPLE(t):
    r'--.*'
    #t.lexer.lineno += 1  # intentionally disabled: the line is discarded here
# Ignore "#"-style single-line comments (any t_ignore_* string rule is
# matched and silently discarded by ply.lex).
t_ignore_COMENTARIO_SIMPLE = r'\#.*'
# Characters skipped between tokens: spaces and tabs.
t_ignore = " \t"
def t_newline(t):
    r'\n+'
    # Track line numbers so error messages can report the right line.
    t.lexer.lineno += t.value.count("\n")
# Lexer error handler: skip the offending character and report it on stdout
# (message text is user-facing and kept verbatim).
def t_error(t):
    t.lexer.skip(1)
    print("Caracter inválido '%s'" % t.value[0], " Línea: '%s'" % str(t.lineno))
# Construyendo el analizador léxico
import ply.lex as lex
import re
# OPERATOR PRECEDENCE for ply.yacc — lowest precedence first; 'umenos' is the
# fictitious token for unary minus (used via %prec in the expression rules).
# --------- Modified: Edi ------
precedence = (
    ('right', 'not'),
    ('left', 'And'),
    ('left', 'or','barraDoble' ),
    ('left', 'diferente','notEqual', 'igual', 'mayor', 'menor', 'menorIgual', 'mayorIgual'),
    ('left', 'punto'),
    ('right', 'umenos'),
    ('left', 'mas', 'menos'),
    ('left', 'elevado'),
    ('left', 'multi', 'divi', 'modulo'),
    ('nonassoc', 'parAbre', 'parCierra')
)
# ---------Modificado Edi---------
# <<<<<<<<<<<<<<<<<<<<<<<<<<< INICIO DE LAS PRODUCCIONES <<<<<<<<<<<<<<<<<<<<<<<<<<<<
from sentencias import *
from graphviz import render
# Module-level parser state shared by all grammar actions:
cont = 0    # running counter used to give every AST Node a unique id
concat=""   # scratch string buffer (not referenced in this chunk — verify use)
lista=[]    # human-readable derivation log; one entry appended per reduction
def p_init(t):
    'inicio : sentencias'
    # Root production: wrap the statement list under an INICIO node and log it.
    global cont
    raiz = Node("INICIO","",cont,0,0)
    cont=cont+1
    raiz.AddHijos(t[1])
    t[0]= raiz
    lista.append("<INICIO> :: = <sentencias> \n")
    print("Lectura Finalizada")
def p_sentencias_lista(t):
    'sentencias : sentencias sentencia'
    # Left-recursive list: reuse the accumulated node and append the new child.
    t[0]=t[1]
    t[0].AddHijos(t[2])
    lista.append("<SENTENCIAS> :: = <SENTENCIAS> <SENTENCIA> \n")
def p_sentencias_sentencia(t):
    'sentencias : sentencia'
    # List base case: start a SENTENCIAS node holding the first statement.
    global cont
    t[0] = Node("SENTENCIAS","",cont,0,0)
    cont=cont+1
    t[0].AddHijos(t[1])
    lista.append("<SENTENCIAS> :: = <SENTENCIAS> \n")
def p_sentencia(t):
    '''sentencia : CrearBase
                | ShowBase
                | AlterBase
                | DropBase
                | EnumType
                | UpdateBase
                | DeleteBase
                | TruncateBase
                | CREATE_TABLE
                | SHOW_TABLES
                | ALTER_TABLE
                | DROP_TABLE
                | INSERT
                | QUERY ptComa
                | USEDB
                | CREATE_FUNCION
                | BLOCKDO
                | CREATE_INDEX
                '''
    # Pass the concrete statement node straight through and log which
    # alternative was taken (by its label).
    t[0] =t[1]
    lista.append("<SENTENCIAS> :: = < "+str(t[1].Etiqueta)+">\n")
# <<<<<<<<<<<<<<<<<<<<<<<<<<< Edi Yovani Tomas <<<<<<<<<<<<<<<<<<<<<<<<<<<<
def p_BLOCKDO(t):
    ''' BLOCKDO : dobledolar BLOQUE dobledolar ptComa
    '''
    # Anonymous $$ ... $$ block.  NOTE(review): nodo3 is labeled "id" although
    # it carries the closing '$$' token — confirm the label is intentional.
    global cont
    t[0] = Node("BLOCKDO","",cont,0,0)
    cont = cont+1
    nodo1 = Node("dobledolar", t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo3 = Node("id",t[3],cont,t.lineno(3) ,t.lexpos(3))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(t[2])
    t[0].AddHijos(nodo3)
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_CrearFunciones1(t):
    ''' CREATE_FUNCION : TIPOFUNCION id parAbre L_PARAMETROS parCierra returns TIPO as dobledolar BLOQUE dobledolar language id ptComa
    '''
    # CREATE FUNCTION with parameters and LANGUAGE clause.  NOTE(review):
    # nodo7's label is spelled "lenguage" (sic) — downstream code may match
    # that exact string, so it is left as-is.
    global cont
    t[0] = Node("CREATE_FUNCION","",cont,0,0)
    cont = cont+1
    nodo1 = Node("id", t[2],cont,t.lineno(2) ,t.lexpos(2))
    cont = cont+1
    nodo3 = Node("returns",t[6],cont,t.lineno(6) ,t.lexpos(6))
    cont = cont+1
    nodo4 = Node("as",t[8],cont,t.lineno(8) ,t.lexpos(8))
    cont = cont+1
    nodo5 = Node("dobledolar", t[9],cont,t.lineno(9) ,t.lexpos(9))
    cont = cont+1
    nodo6 = Node("dobledolar", t[11],cont,t.lineno(11) ,t.lexpos(11))
    cont = cont+1
    nodo7 = Node("lenguage", t[12],cont,t.lineno(12) ,t.lexpos(12))
    cont = cont+1
    nodo8 = Node("id", t[13],cont,t.lineno(13) ,t.lexpos(13))
    cont = cont+1
    t[0].AddHijos(t[1])
    t[0].AddHijos(nodo1)
    t[0].AddHijos(t[4])
    t[0].AddHijos(nodo3)
    t[0].AddHijos(t[7])
    t[0].AddHijos(nodo4)
    t[0].AddHijos(nodo5)
    t[0].AddHijos(t[10])
    t[0].AddHijos(nodo6)
    t[0].AddHijos(nodo7)
    t[0].AddHijos(nodo8)
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_CrearFunciones2(t):
    ''' CREATE_FUNCION : TIPOFUNCION id parAbre parCierra returns TIPO as dobledolar BLOQUE dobledolar language id ptComa
    '''
    # CREATE FUNCTION without parameters, with LANGUAGE clause.
    global cont
    t[0] = Node("CREATE_FUNCION","",cont,0,0)
    cont = cont+1
    nodo1 = Node("id", t[2],cont,t.lineno(2) ,t.lexpos(2))
    cont = cont+1
    nodo3 = Node("returns",t[5],cont,t.lineno(5) ,t.lexpos(5))
    cont = cont+1
    nodo4 = Node("as",t[7],cont,t.lineno(7) ,t.lexpos(7))
    cont = cont+1
    nodo5 = Node("dobledolar", t[8],cont,t.lineno(8) ,t.lexpos(8))
    cont = cont+1
    nodo6 = Node("dobledolar", t[10],cont,t.lineno(10) ,t.lexpos(10))
    cont = cont+1
    nodo7 = Node("lenguage", t[11],cont,t.lineno(11) ,t.lexpos(11))
    cont = cont+1
    nodo8 = Node("id", t[12],cont,t.lineno(12) ,t.lexpos(12))
    cont = cont+1
    t[0].AddHijos(t[1])
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo3)
    t[0].AddHijos(t[6])
    t[0].AddHijos(nodo4)
    t[0].AddHijos(nodo5)
    t[0].AddHijos(t[9])
    t[0].AddHijos(nodo6)
    t[0].AddHijos(nodo7)
    t[0].AddHijos(nodo8)
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_CrearFunciones3(t):
    ''' CREATE_FUNCION : TIPOFUNCION id parAbre L_PARAMETROS parCierra returns TIPO as dobledolar BLOQUE dobledolar ptComa
    '''
    # CREATE FUNCTION with parameters, no LANGUAGE clause.
    global cont
    t[0] = Node("CREATE_FUNCION","",cont,0,0)
    cont = cont+1
    nodo1 = Node("id", t[2],cont,t.lineno(2) ,t.lexpos(2))
    cont = cont+1
    nodo3 = Node("returns",t[6],cont,t.lineno(6) ,t.lexpos(6))
    cont = cont+1
    nodo4 = Node("as",t[8],cont,t.lineno(8) ,t.lexpos(8))
    cont = cont+1
    nodo5 = Node("dobledolar", t[9],cont,t.lineno(9) ,t.lexpos(9))
    cont = cont+1
    nodo6 = Node("dobledolar", t[11],cont,t.lineno(11) ,t.lexpos(11))
    cont = cont+1
    t[0].AddHijos(t[1])
    t[0].AddHijos(nodo1)
    t[0].AddHijos(t[4])
    t[0].AddHijos(nodo3)
    t[0].AddHijos(t[7])
    t[0].AddHijos(nodo4)
    t[0].AddHijos(nodo5)
    t[0].AddHijos(t[10])
    t[0].AddHijos(nodo6)
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_CrearFunciones4(t):
    ''' CREATE_FUNCION : TIPOFUNCION id parAbre parCierra returns TIPO as dobledolar BLOQUE dobledolar ptComa
    '''
    # CREATE FUNCTION without parameters and without LANGUAGE clause.
    global cont
    t[0] = Node("CREATE_FUNCION","",cont,0,0)
    cont = cont+1
    nodo1 = Node("id", t[2],cont,t.lineno(2) ,t.lexpos(2))
    cont = cont+1
    nodo3 = Node("returns",t[5],cont,t.lineno(5) ,t.lexpos(5))
    cont = cont+1
    nodo4 = Node("as",t[7],cont,t.lineno(7) ,t.lexpos(7))
    cont = cont+1
    nodo5 = Node("dobledolar", t[8],cont,t.lineno(8) ,t.lexpos(8))
    cont = cont+1
    nodo6 = Node("dobledolar", t[10],cont,t.lineno(10) ,t.lexpos(10))
    cont = cont+1
    t[0].AddHijos(t[1])
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo3)
    t[0].AddHijos(t[6])
    t[0].AddHijos(nodo4)
    t[0].AddHijos(nodo5)
    t[0].AddHijos(t[9])
    t[0].AddHijos(nodo6)
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_TIPOFUNCION1(t):
    ''' TIPOFUNCION : create function
    '''
    # CREATE FUNCTION header.
    global cont
    t[0] = Node("TIPOFUNCION","",cont,0,0)
    cont = cont+1
    nodo1 = Node("create", t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo3 = Node("function",t[2],cont,t.lineno(2) ,t.lexpos(2))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo3)
    lista.append(str(recorrerGramatica(t[0],0))+"\n")
def p_TIPOFUNCION2(t):
    ''' TIPOFUNCION : create procedure
    '''
    # CREATE PROCEDURE header.
    global cont
    t[0] = Node("TIPOFUNCION","",cont,0,0)
    cont = cont+1
    nodo1 = Node("create", t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo3 = Node("procedure",t[2],cont,t.lineno(2) ,t.lexpos(2))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo3)
    lista.append(str(recorrerGramatica(t[0],0))+"\n")
def p_TIPOFUNCION3(t):
    ''' TIPOFUNCION : create or replace function
    '''
    # CREATE OR REPLACE FUNCTION header.
    global cont
    t[0] = Node("TIPOFUNCION","",cont,0,0)
    cont = cont+1
    nodo1 = Node("create", t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo2 = Node("or",t[2],cont,t.lineno(2) ,t.lexpos(2))
    cont = cont+1
    nodo3 = Node("replace",t[3],cont,t.lineno(3) ,t.lexpos(3))
    cont = cont+1
    nodo4 = Node("function",t[4],cont,t.lineno(4) ,t.lexpos(4))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    t[0].AddHijos(nodo3)
    t[0].AddHijos(nodo4)
    lista.append(str(recorrerGramatica(t[0],0))+"\n")
def p_TIPOFUNCION4(t):
    ''' TIPOFUNCION : create or replace procedure
    '''
    # CREATE OR REPLACE PROCEDURE header.
    global cont
    t[0] = Node("TIPOFUNCION","",cont,0,0)
    cont = cont+1
    nodo1 = Node("create", t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo2 = Node("or",t[2],cont,t.lineno(2) ,t.lexpos(2))
    cont = cont+1
    nodo3 = Node("replace",t[3],cont,t.lineno(3) ,t.lexpos(3))
    cont = cont+1
    nodo4 = Node("procedure",t[4],cont,t.lineno(4) ,t.lexpos(4))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    t[0].AddHijos(nodo3)
    t[0].AddHijos(nodo4)
    lista.append(str(recorrerGramatica(t[0],0))+"\n")
def p_BLOQUE(t):
    ''' BLOQUE : DECLARE STATEMENT
    '''
    # Function body: DECLARE section followed by the executable section.
    global cont
    t[0] = Node("BLOQUE","",cont,0,0)
    cont = cont+1
    t[0].AddHijos(t[1])
    t[0].AddHijos(t[2])
    lista.append(str(recorrerGramatica(t[0],0))+"\n")
def p_BLOQUE1(t):
    ''' BLOQUE : DECLARE
              | STATEMENT
    '''
    # Function body with only one of the two sections present.
    global cont
    t[0] = Node("BLOQUE","",cont,0,0)
    cont = cont+1
    t[0].AddHijos(t[1])
    lista.append(str(recorrerGramatica(t[0],0))+"\n")
def p_DECLARE(t):
    ''' DECLARE : declare BODYDECLARE
    '''
    # DECLARE section: keyword plus the list of declarations.
    global cont
    t[0] = Node("DECLARE","",cont,0,0)
    cont = cont+1
    nodo1 = Node("declare", t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(t[2])
    lista.append(str(recorrerGramatica(t[0],0))+"\n")
def p_BODYDECLARE(t):
    ''' BODYDECLARE : BODYDECLARE DECLARATION
    '''
    # Declaration list.  NOTE(review): unlike p_sentencias_lista this creates
    # a NEW node per reduction (nested list) instead of reusing t[1] —
    # confirm whether the nesting is intended by the tree consumers.
    global cont
    t[0] = Node("BODYDECLARE","",cont,0,0)
    cont = cont+1
    t[0].AddHijos(t[1])
    t[0].AddHijos(t[2])
    lista.append(str(recorrerGramatica(t[0],0))+"\n")
def p_BODYDECLARE1(t):
    ''' BODYDECLARE : DECLARATION
    '''
    # Declaration list base case.
    global cont
    t[0] = Node("BODYDECLARE","",cont,0,0)
    cont = cont+1
    t[0].AddHijos(t[1])
    lista.append(str(recorrerGramatica(t[0],0))+"\n")
def p_DECLARATION1(t):
    ''' DECLARATION : NAME_CONSTANT TIPO ptComa
    '''
    # Plain variable declaration: name and type only.
    global cont
    t[0] = Node("DECLARATION","",cont,0,0)
    cont = cont+1
    t[0].AddHijos(t[1])
    t[0].AddHijos(t[2])
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_DECLARATION2(t):
    ''' DECLARATION : NAME_CONSTANT TIPO ASIGNAR E ptComa
    '''
    # Declaration with an initializer expression.
    global cont
    t[0] = Node("DECLARATION","",cont,0,0)
    cont = cont+1
    t[0].AddHijos(t[1])
    t[0].AddHijos(t[2])
    t[0].AddHijos(t[3])
    t[0].AddHijos(t[4])
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_DECLARATION4(t):
    ''' DECLARATION : NAME_CONSTANT TIPO not null ptComa
    '''
    # Declaration constrained NOT NULL, without initializer.
    global cont
    t[0] = Node("DECLARATION","",cont,0,0)
    cont = cont+1
    nodo1 = Node("not", t[3],cont,t.lineno(3) ,t.lexpos(3))
    cont = cont+1
    nodo2 = Node("null",t[4],cont,t.lineno(4) ,t.lexpos(4))
    cont = cont+1
    t[0].AddHijos(t[1])
    t[0].AddHijos(t[2])
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_DECLARATION5(t):
    ''' DECLARATION : NAME_CONSTANT TIPO not null ASIGNAR E ptComa
    '''
    # Declaration constrained NOT NULL, with initializer expression.
    global cont
    t[0] = Node("DECLARATION","",cont,0,0)
    cont = cont+1
    nodo1 = Node("not", t[3],cont,t.lineno(3) ,t.lexpos(3))
    cont = cont+1
    nodo2 = Node("null",t[4],cont,t.lineno(4) ,t.lexpos(4))
    cont = cont+1
    t[0].AddHijos(t[1])
    t[0].AddHijos(t[2])
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    t[0].AddHijos(t[5])
    t[0].AddHijos(t[6])
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_DECLARATION6(t):
    ''' DECLARATION : NAME_CONSTANT talias tfor E ptComa
    '''
    # ALIAS FOR declaration.
    global cont
    t[0] = Node("DECLARATION","",cont,0,0)
    cont = cont+1
    nodo1 = Node("talias", t[2],cont,t.lineno(2) ,t.lexpos(2))
    cont = cont+1
    nodo2 = Node("for",t[3],cont,t.lineno(3) ,t.lexpos(3))
    cont = cont+1
    t[0].AddHijos(t[1])
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    t[0].AddHijos(t[4])
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_DECLARATION7(t):
''' DECLARATION : NAME_CONSTANT ACCESO modulo ttype ptComa
'''
global cont
t[0] = Node("DECLARATION","",cont,0,0)
cont = cont+1
nodo1 = Node("modulo",t[3],cont,t.lineno(3) ,t.lexpos(3))
cont = cont+1
nodo2 = Node("ttype",t[4],cont,t.lineno(4) ,t.lexpos(4))
cont = cont+1
t[0].AddHijos(t[1])
t[0].AddHijos(t[2])
t[0].AddHijos(nodo1)
t[0].AddHijos(nodo2)
lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_DECLARATION8(t):
''' DECLARATION : NAME_CONSTANT id modulo rowtype ptComa
'''
global cont
t[0] = Node("DECLARATION","",cont,0,0)
cont = cont+1
nodo1 = Node("id",t[2],cont,t.lineno(2) ,t.lexpos(2))
cont = cont+1
nodo2 = Node("modulo",t[3],cont,t.lineno(3) ,t.lexpos(3))
cont = cont+1
nodo3 = Node("rowtype",t[4],cont,t.lineno(4) ,t.lexpos(4))
cont = cont+1
t[0].AddHijos(t[1])
t[0].AddHijos(nodo1)
t[0].AddHijos(nodo2)
t[0].AddHijos(nodo3)
lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
# NOTE: additional productions added below (edit: Tomas).
def p_ASIGNACIONES(t):
    ''' ASIGNACION : id asig parAbre QUERY parCierra ptComa
    | id igual parAbre QUERY parCierra ptComa
    '''
    # Assignment of a parenthesized QUERY result to an id; the assignment
    # operator node label is taken from the matched token text (asig or igual).
    global cont
    t[0] = Node("ASIGNACION","",cont,0,0)
    cont = cont+1
    nodo1 = Node("id",t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo2 = Node(str(t[2]),t[2],cont,t.lineno(2) ,t.lexpos(2))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    t[0].AddHijos(t[4])
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_ASIGNACIONES1(t):
    ''' ASIGNACION : id asig QUERY ptComa
    | id igual QUERY ptComa
    '''
    # Same as above without parentheses around the QUERY.
    global cont
    t[0] = Node("ASIGNACION","",cont,0,0)
    cont = cont+1
    nodo1 = Node("id",t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo2 = Node(str(t[2]),t[2],cont,t.lineno(2) ,t.lexpos(2))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    t[0].AddHijos(t[3])
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
# --- DECLARATION variants whose initializer is a QUERY -----------------------
def p_DECLARATIONQUERY1(t):
    ''' DECLARATION : NAME_CONSTANT TIPO ASIGNAR QUERY ptComa
    '''
    global cont
    t[0] = Node("DECLARATION","",cont,0,0)
    cont = cont+1
    t[0].AddHijos(t[1])
    t[0].AddHijos(t[2])
    t[0].AddHijos(t[3])
    t[0].AddHijos(t[4])
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_DECLARATIONQUERY2(t):
    ''' DECLARATION : NAME_CONSTANT ASIGNAR QUERY ptComa
    '''
    # Type omitted: name assigned directly from a QUERY.
    global cont
    t[0] = Node("DECLARATION","",cont,0,0)
    cont = cont+1
    t[0].AddHijos(t[1])
    t[0].AddHijos(t[2])
    t[0].AddHijos(t[3])
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_DECLARATIONQUERY3(t):
    ''' DECLARATION : NAME_CONSTANT TIPO not null ASIGNAR QUERY ptComa
    '''
    global cont
    t[0] = Node("DECLARATION","",cont,0,0)
    cont = cont+1
    nodo1 = Node("not",t[3],cont,t.lineno(3) ,t.lexpos(3))
    cont = cont+1
    nodo2 = Node("null",t[4],cont,t.lineno(4) ,t.lexpos(4))
    cont = cont+1
    t[0].AddHijos(t[1])
    t[0].AddHijos(t[2])
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    t[0].AddHijos(t[5])
    t[0].AddHijos(t[6])
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_DECLARATIONQUERY4(t):
    ''' DECLARATION : NAME_CONSTANT talias tfor QUERY ptComa
    '''
    global cont
    t[0] = Node("DECLARATION","",cont,0,0)
    cont = cont+1
    # NOTE(review): label "alias" here differs from p_DECLARATION6's "talias"
    # for the same token — confirm which label downstream consumers expect.
    nodo1 = Node("alias",t[2],cont,t.lineno(2) ,t.lexpos(2))
    cont = cont+1
    nodo2 = Node("for",t[3],cont,t.lineno(3) ,t.lexpos(3))
    cont = cont+1
    t[0].AddHijos(t[1])
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    t[0].AddHijos(t[4])
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_DECLARATIONQUERY5(t):
    ''' DECLARATION : NAME_CONSTANT not null ASIGNAR QUERY ptComa
    '''
    global cont
    t[0] = Node("DECLARATION","",cont,0,0)
    cont = cont+1
    nodo1 = Node("not",t[2],cont,t.lineno(2) ,t.lexpos(2))
    cont = cont+1
    nodo2 = Node("null",t[3],cont,t.lineno(3) ,t.lexpos(3))
    cont = cont+1
    t[0].AddHijos(t[1])
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    t[0].AddHijos(t[4])
    t[0].AddHijos(t[5])
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_DECLARATIONQUERY6(t):
    ''' DECLARATION : NAME_CONSTANT TIPO ASIGNAR parAbre QUERY parCierra ptComa
    '''
    # Parenthesized QUERY initializer; the parentheses themselves are not
    # represented in the AST.
    global cont
    t[0] = Node("DECLARATION","",cont,0,0)
    cont = cont+1
    t[0].AddHijos(t[1])
    t[0].AddHijos(t[2])
    t[0].AddHijos(t[3])
    t[0].AddHijos(t[5])
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_DECLARATIONQUERY7(t):
    ''' DECLARATION : NAME_CONSTANT ASIGNAR parAbre QUERY parCierra ptComa
    '''
    global cont
    t[0] = Node("DECLARATION","",cont,0,0)
    cont = cont+1
    t[0].AddHijos(t[1])
    t[0].AddHijos(t[2])
    t[0].AddHijos(t[4])
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_DECLARATIONQUERY8(t):
    ''' DECLARATION : NAME_CONSTANT TIPO not null ASIGNAR parAbre QUERY parCierra ptComa
    '''
    global cont
    t[0] = Node("DECLARATION","",cont,0,0)
    cont = cont+1
    nodo1 = Node("not",t[3],cont,t.lineno(3) ,t.lexpos(3))
    cont = cont+1
    nodo2 = Node("null",t[4],cont,t.lineno(4) ,t.lexpos(4))
    cont = cont+1
    t[0].AddHijos(t[1])
    t[0].AddHijos(t[2])
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    t[0].AddHijos(t[5])
    t[0].AddHijos(t[7])
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_DECLARATIONQUERY9(t):
    ''' DECLARATION : NAME_CONSTANT talias tfor parAbre QUERY parCierra ptComa
    '''
    global cont
    t[0] = Node("DECLARATION","",cont,0,0)
    cont = cont+1
    nodo1 = Node("alias",t[2],cont,t.lineno(2) ,t.lexpos(2))
    cont = cont+1
    nodo2 = Node("for",t[3],cont,t.lineno(3) ,t.lexpos(3))
    cont = cont+1
    t[0].AddHijos(t[1])
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    t[0].AddHijos(t[5])
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_DECLARATIONQUERY10(t):
    ''' DECLARATION : NAME_CONSTANT not null ASIGNAR parAbre QUERY parCierra ptComa
    '''
    global cont
    t[0] = Node("DECLARATION","",cont,0,0)
    cont = cont+1
    nodo1 = Node("not",t[2],cont,t.lineno(2) ,t.lexpos(2))
    cont = cont+1
    nodo2 = Node("null",t[3],cont,t.lineno(3) ,t.lexpos(3))
    cont = cont+1
    t[0].AddHijos(t[1])
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    t[0].AddHijos(t[4])
    t[0].AddHijos(t[6])
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
#*********************************************************************************************************************
def p_ACCESO(t):
    ''' ACCESO : ACCESO punto id
    '''
    # Left-recursive dotted access chain (a.b.c); a fresh ACCESO node is built
    # per step rather than reusing t[1] as the result.
    global cont
    t[0] = Node("ACCESO","",cont,0,0)
    cont = cont+1
    nodo1 = Node("punto",t[2],cont,t.lineno(2) ,t.lexpos(2))
    cont = cont+1
    nodo2 = Node("id",t[3],cont,t.lineno(3) ,t.lexpos(3))
    cont = cont+1
    t[0].AddHijos(t[1])
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_ACCESO1(t):
    ''' ACCESO : id
    '''
    # Base case of the access chain: a single identifier.
    global cont
    t[0] = Node("ACCESO","",cont,0,0)
    cont = cont+1
    nodo1 = Node("id",t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    t[0].AddHijos(nodo1)
    lista.append(str(recorrerGramatica(t[0],0))+"\n")
def p_DECLARATION9(t):
    ''' ASIGNAR : asig
    | igual
    | tDefault
    '''
    # Assignment-operator nonterminal; leaf label comes from the token text.
    global cont
    t[0] = Node("ASIGNAR","",cont,0,0)
    cont = cont+1
    nodo1 = Node(str(t[1]),t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    t[0].AddHijos(nodo1)
    lista.append(str(recorrerGramatica(t[0],0))+"\n")
def p_DECLARATION10(t):
    ''' NAME_CONSTANT : id
    '''
    global cont
    t[0] = Node("NAME_CONSTANT","",cont,0,0)
    cont = cont+1
    nodo1 = Node("id",t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    t[0].AddHijos(nodo1)
    lista.append(str(recorrerGramatica(t[0],0))+"\n")
def p_DECLARATION11(t):
    ''' NAME_CONSTANT : id constant
    '''
    # Name with CONSTANT qualifier.
    global cont
    t[0] = Node("NAME_CONSTANT","",cont,0,0)
    cont = cont+1
    nodo1 = Node("id",t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo2 = Node("constant",t[2],cont,t.lineno(2) ,t.lexpos(2))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    lista.append(str(recorrerGramatica(t[0],0))+"\n")
def p_STATEMENT(t):
    ''' STATEMENT : begin L_BLOCK end ptComa
    '''
    # BEGIN ... END; block with a body of statements.
    global cont
    t[0] = Node("STATEMENT","",cont,0,0)
    cont = cont+1
    nodo1 = Node("begin",t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo2 = Node("end",t[3],cont,t.lineno(3) ,t.lexpos(3))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(t[2])
    t[0].AddHijos(nodo2)
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_STATEMENT1(t):
    ''' STATEMENT : begin end ptComa
    '''
    # Empty BEGIN END; block.
    global cont
    t[0] = Node("STATEMENT","",cont,0,0)
    cont = cont+1
    nodo1 = Node("begin",t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo2 = Node("end",t[2],cont,t.lineno(2) ,t.lexpos(2))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_L_BLOCK(t):
    ''' L_BLOCK : L_BLOCK BLOCK
    '''
    # Left-recursive list: extend the already-built L_BLOCK node with the
    # newly parsed BLOCK, then log the updated derivation.
    acumulado = t[1]
    acumulado.AddHijos(t[2])
    t[0] = acumulado
    lista.append(str(recorrerGramatica(t[0], 0)) + "\n")
def p_L_BLOCK1(t):
    ''' L_BLOCK : BLOCK
    '''
    # Base case: wrap the first BLOCK in a fresh L_BLOCK node.
    global cont
    raiz = Node("L_BLOCK", "", cont, 0, 0)
    cont += 1
    raiz.AddHijos(t[1])
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "\n")
def p_BLOCK(t):
    ''' BLOCK : sentencias
    | ASIGNACION
    | RETORNO
    | CONTINUE
    | EXIT
    | SENTENCIAS_CONTROL
    | DECLARACION_RAICENOTE
    | STATEMENT
    | CALL ptComa
    '''
    # Pure pass-through: BLOCK adopts its single child's node and logs the
    # one-step derivation using the child's label.
    hijo = t[1]
    t[0] = hijo
    lista.append("<BLOCK> :: = < " + str(hijo.Etiqueta) + ">\n")
def p_CALL3(t):
    ''' CALL : execute id parAbre LISTA_EXP parCierra
    '''
    # EXECUTE-style call with arguments: execute f(args).
    global cont
    t[0] = Node("CALL","",cont,0,0)
    cont = cont+1
    nodo1 = Node("execute",t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo2 = Node("id",t[2],cont,t.lineno(2) ,t.lexpos(2))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    t[0].AddHijos(t[4])
    lista.append(str(recorrerGramatica(t[0],0))+"\n")
def p_CALL4(t):
    ''' CALL : execute id parAbre parCierra
    '''
    # EXECUTE-style call without arguments: execute f().
    global cont
    t[0] = Node("CALL","",cont,0,0)
    cont = cont+1
    nodo1 = Node("execute",t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo2 = Node("id",t[2],cont,t.lineno(2) ,t.lexpos(2))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    lista.append(str(recorrerGramatica(t[0],0))+"\n")
def p_CALL1(t):
    ''' CALL : id parAbre LISTA_EXP parCierra
    '''
    # Plain call with arguments: f(args).
    global cont
    t[0] = Node("CALL","",cont,0,0)
    cont = cont+1
    nodo1 = Node("id",t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(t[3])
    lista.append(str(recorrerGramatica(t[0],0))+"\n")
def p_CALL2(t):
    ''' CALL : id parAbre parCierra
    '''
    # Plain call without arguments: f().
    global cont
    t[0] = Node("CALL","",cont,0,0)
    cont = cont+1
    nodo1 = Node("id",t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    t[0].AddHijos(nodo1)
    lista.append(str(recorrerGramatica(t[0],0))+"\n")
def p_ASIGNACION(t):
    ''' ASIGNACION : id igual E ptComa
    '''
    # Assignment with '=' : id = expr ;
    global cont
    t[0] = Node("ASIGNACION","",cont,0,0)
    cont = cont+1
    nodo1 = Node("id",t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo2 = Node("igual",t[2],cont,t.lineno(2) ,t.lexpos(2))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    t[0].AddHijos(t[3])
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_ASIGNACION1(t):
    ''' ASIGNACION : id asig E ptComa
    '''
    # Assignment with ':=' : id := expr ;
    global cont
    t[0] = Node("ASIGNACION","",cont,0,0)
    cont = cont+1
    nodo1 = Node("id",t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo2 = Node("asig",t[2],cont,t.lineno(2) ,t.lexpos(2))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    t[0].AddHijos(t[3])
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_LISTA_PARAMETROS(t):
    ''' L_PARAMETROS : L_PARAMETROS coma PARAMETROS
    '''
    # Comma-separated parameter list: append the new PARAMETROS to the
    # accumulated L_PARAMETROS node (the comma token is not kept in the AST).
    acumulado = t[1]
    acumulado.AddHijos(t[3])
    t[0] = acumulado
    lista.append(str(recorrerGramatica(t[0], 0)) + "\n")
def p_LISTA_PARAMETROS1(t):
    ''' L_PARAMETROS : PARAMETROS
    '''
    # Base case of the parameter list.
    global cont
    t[0] = Node("L_PARAMETROS","",cont,0,0)
    cont = cont+1
    t[0].AddHijos(t[1])
    lista.append(str(recorrerGramatica(t[0],0))+"\n")
def p_PARAMETROS1(t):
    ''' PARAMETROS : id TIPO
    '''
    # Named parameter with type.
    global cont
    t[0] = Node("PARAMETROS","",cont,0,0)
    cont = cont+1
    nodo1 = Node("id",t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(t[2])
    lista.append(str(recorrerGramatica(t[0],0))+"\n")
def p_PARAMETROS2(t):
    ''' PARAMETROS : TIPO
    '''
    # Anonymous parameter (type only).
    global cont
    t[0] = Node("PARAMETROS","",cont,0,0)
    cont = cont+1
    t[0].AddHijos(t[1])
    lista.append(str(recorrerGramatica(t[0],0))+"\n")
def p_PARAMETROS3(t):
    ''' PARAMETROS : out id TIPO
    | inout id TIPO
    '''
    # Parameter with OUT/INOUT mode; mode leaf label comes from token text.
    global cont
    t[0] = Node("PARAMETROS","",cont,0,0)
    cont = cont+1
    nodo1 = Node(str(t[1]),t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo2 = Node("id",t[2],cont,t.lineno(2) ,t.lexpos(2))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    t[0].AddHijos(t[3])
    lista.append(str(recorrerGramatica(t[0],0))+"\n")
def p_RETORNO1(t):
    ''' RETORNO : treturn E ptComa
    '''
    # RETURN <expr> ;
    global cont
    t[0] = Node("RETORNO","",cont,0,0)
    cont = cont+1
    nodo1 = Node("treturn",t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(t[2])
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_RETORNO2(t):
    ''' RETORNO : treturn next E ptComa
    '''
    # RETURN NEXT <expr> ;
    global cont
    t[0] = Node("RETORNO","",cont,0,0)
    cont = cont+1
    nodo1 = Node("treturn",t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo2 = Node("next",t[2],cont,t.lineno(2) ,t.lexpos(2))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    t[0].AddHijos(t[3])
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_RETORNO3(t):
    ''' RETORNO : treturn QUERY ptComa
    '''
    # RETURN <query> ;
    global cont
    t[0] = Node("RETORNO","",cont,0,0)
    cont = cont+1
    nodo1 = Node("treturn",t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(t[2])
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_RETORNO4(t):
    ''' RETORNO : treturn QUERY tquery ptComa
    '''
    # RETURN <query> QUERY ; (trailing tquery keyword kept as a leaf)
    global cont
    t[0] = Node("RETORNO","",cont,0,0)
    cont = cont+1
    nodo1 = Node("treturn",t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo2 = Node("tquery",t[3],cont,t.lineno(3) ,t.lexpos(3))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(t[2])
    t[0].AddHijos(nodo2)
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_RETORNO5(t):
    ''' RETORNO : treturn ptComa
    '''
    # Bare RETURN ;
    global cont
    t[0] = Node("RETORNO","",cont,0,0)
    cont = cont+1
    nodo1 = Node("treturn",t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    t[0].AddHijos(nodo1)
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_CONTINUE(t):
    ''' CONTINUE : tcontinue EXPR_WHERE ptComa
    '''
    # CONTINUE WHEN <cond> ;
    global cont
    t[0] = Node("CONTINUE","",cont,0,0)
    cont = cont+1
    nodo1 = Node("continue",t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(t[2])
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_EXIT1(t):
    ''' EXIT : texit EXPR_WHERE ptComa
    '''
    # EXIT WHEN <cond> ;
    global cont
    t[0] = Node("EXIT","",cont,0,0)
    cont = cont+1
    nodo1 = Node("exit",t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(t[2])
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_EXIT2(t):
    ''' EXIT : texit ptComa
    '''
    # Bare EXIT ;
    global cont
    t[0] = Node("EXIT","",cont,0,0)
    cont = cont+1
    nodo1 = Node("exit",t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    t[0].AddHijos(nodo1)
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_EXIT3(t):
    ''' EXIT : texit id ptComa
    '''
    # EXIT <label> ;
    global cont
    t[0] = Node("EXIT","",cont,0,0)
    cont = cont+1
    nodo1 = Node("exit",t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo2 = Node("id",t[2],cont,t.lineno(2) ,t.lexpos(2))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_EXIT4(t):
    ''' EXIT : texit id EXPR_WHERE ptComa
    '''
    # EXIT <label> WHEN <cond> ;
    global cont
    t[0] = Node("EXIT","",cont,0,0)
    cont = cont+1
    nodo1 = Node("exit",t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo2 = Node("id",t[2],cont,t.lineno(2) ,t.lexpos(2))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    t[0].AddHijos(t[3])
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_OTROSTIPOS(t):
    ''' OTROSTIPOS : tNumeric parAbre entero parCierra
    '''
    # NUMERIC(<n>) type; only the keyword and the size literal are kept.
    global cont
    t[0] = Node("OTROSTIPOS","",cont,0,0)
    cont = cont+1
    nodo1 = Node("Numeric",t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo2 = Node("entero",t[3],cont,t.lineno(3) ,t.lexpos(3))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    lista.append(str(recorrerGramatica(t[0],0))+"\n")
def p_OTROSTIPOS1(t):
    ''' OTROSTIPOS : tVarchar
    | tChar
    '''
    # VARCHAR / CHAR without size; leaf label comes from the token text.
    global cont
    t[0] = Node("OTROSTIPOS","",cont,0,0)
    cont = cont+1
    nodo1 = Node(str(t[1]),t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    t[0].AddHijos(nodo1)
    lista.append(str(recorrerGramatica(t[0],0))+"\n")
def p_SENTENCIAS_CONTROL(t):
    ''' SENTENCIAS_CONTROL : IF
    | SEARCH_CASE
    '''
    # Pass-through: adopt the child's node and log the one-step derivation.
    hijo = t[1]
    t[0] = hijo
    lista.append("<SENTENCIAS_CONTROL> :: = < " + str(hijo.Etiqueta) + ">\n")
#----------------IF--------------------------------------
def p_IF(t):
    ''' IF : if E then L_BLOCK end if ptComa
    '''
    # Simple IF ... THEN ... END IF; with no else branch.
    global cont
    t[0] = Node("IF","",cont,0,0)
    cont = cont+1
    nodo1 = Node("if",t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo2 = Node("then",t[3],cont,t.lineno(3) ,t.lexpos(3))
    cont = cont+1
    nodo3 = Node("end",t[5],cont,t.lineno(5) ,t.lexpos(5))
    cont = cont+1
    nodo4 = Node("if",t[6],cont,t.lineno(6) ,t.lexpos(6))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(t[2])
    t[0].AddHijos(nodo2)
    t[0].AddHijos(t[4])
    t[0].AddHijos(nodo3)
    t[0].AddHijos(nodo4)
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_IF1(t):
    ''' IF : if E then L_BLOCK ELSE
    '''
    # IF with an ELSE branch; the ELSE nonterminal carries END IF;.
    global cont
    t[0] = Node("IF","",cont,0,0)
    cont = cont+1
    nodo1 = Node("if",t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo2 = Node("then",t[3],cont,t.lineno(3) ,t.lexpos(3))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(t[2])
    t[0].AddHijos(nodo2)
    t[0].AddHijos(t[4])
    t[0].AddHijos(t[5])
    lista.append(str(recorrerGramatica(t[0],0))+"\n")
def p_IF2(t):
    ''' IF : if E then L_BLOCK ELSEIF ELSE
    '''
    # IF with ELSIF chain followed by ELSE.
    global cont
    t[0] = Node("IF","",cont,0,0)
    cont = cont+1
    nodo1 = Node("if",t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo2 = Node("then",t[3],cont,t.lineno(3) ,t.lexpos(3))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(t[2])
    t[0].AddHijos(nodo2)
    t[0].AddHijos(t[4])
    t[0].AddHijos(t[5])
    t[0].AddHijos(t[6])
    lista.append(str(recorrerGramatica(t[0],0))+"\n")
def p_ELSE(t):
    ''' ELSE : else L_BLOCK end if ptComa
    '''
    # ELSE branch including the closing END IF;.
    global cont
    t[0] = Node("ELSE","",cont,0,0)
    cont = cont+1
    nodo1 = Node("else",t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo2 = Node("end",t[3],cont,t.lineno(3) ,t.lexpos(3))
    cont = cont+1
    nodo3 = Node("if",t[4],cont,t.lineno(4) ,t.lexpos(4))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(t[2])
    t[0].AddHijos(nodo2)
    t[0].AddHijos(nodo3)
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_ELSEIF(t):
    ''' ELSEIF : ELSEIF SINOSI
    '''
    # Left-recursive ELSIF chain: extend the accumulated ELSEIF node with
    # the newly parsed SINOSI clause.
    cadena = t[1]
    cadena.AddHijos(t[2])
    t[0] = cadena
    lista.append(str(recorrerGramatica(t[0], 0)) + "\n")
def p_ELSEIF1(t):
    ''' ELSEIF : SINOSI
    '''
    # Base case of the ELSIF chain.
    global cont
    t[0] = Node("ELSEIF","",cont,0,0)
    cont = cont+1
    t[0].AddHijos(t[1])
    lista.append(str(recorrerGramatica(t[0],0))+"\n")
def p_SINOSI(t):
    ''' SINOSI : elsif E then L_BLOCK
    '''
    # One ELSIF <cond> THEN <body> clause.
    global cont
    t[0] = Node("SINOSI","",cont,0,0)
    cont = cont+1
    nodo1 = Node("elsif",t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo2 = Node("then",t[3],cont,t.lineno(3) ,t.lexpos(3))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(t[2])
    t[0].AddHijos(nodo2)
    t[0].AddHijos(t[4])
    lista.append(str(recorrerGramatica(t[0],0))+"\n")
def p_SEARCH_CASE1(t):
    ''' SEARCH_CASE : case E L_CASE end case ptComa
    '''
    # CASE <expr> WHEN... END CASE; (no else).
    global cont
    t[0] = Node("SEARCH_CASE","",cont,0,0)
    cont = cont+1
    nodo1 = Node("case",t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo2 = Node("end",t[4],cont,t.lineno(4) ,t.lexpos(4))
    cont = cont+1
    nodo3 = Node("case",t[5],cont,t.lineno(5) ,t.lexpos(5))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(t[2])
    t[0].AddHijos(t[3])
    t[0].AddHijos(nodo2)
    t[0].AddHijos(nodo3)
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_SEARCH_CASE2(t):
    ''' SEARCH_CASE : case E L_CASE SINO end case ptComa
    '''
    # CASE <expr> WHEN... ELSE... END CASE;
    global cont
    t[0] = Node("SEARCH_CASE","",cont,0,0)
    cont = cont+1
    nodo1 = Node("case",t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo2 = Node("end",t[5],cont,t.lineno(5) ,t.lexpos(5))
    cont = cont+1
    nodo3 = Node("case",t[6],cont,t.lineno(6) ,t.lexpos(6))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(t[2])
    t[0].AddHijos(t[3])
    t[0].AddHijos(t[4])
    t[0].AddHijos(nodo2)
    t[0].AddHijos(nodo3)
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_SEARCH_CASE3(t):
    ''' SEARCH_CASE : case L_CASE SINO end case ptComa
    '''
    # Searched CASE (no selector expression) with ELSE.
    global cont
    t[0] = Node("SEARCH_CASE","",cont,0,0)
    cont = cont+1
    nodo1 = Node("case",t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo2 = Node("end",t[4],cont,t.lineno(4) ,t.lexpos(4))
    cont = cont+1
    nodo3 = Node("case",t[5],cont,t.lineno(5) ,t.lexpos(5))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(t[2])
    t[0].AddHijos(t[3])
    t[0].AddHijos(nodo2)
    t[0].AddHijos(nodo3)
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_SEARCH_CASE4(t):
    ''' SEARCH_CASE : case L_CASE end case ptComa
    '''
    # Searched CASE without selector or ELSE.
    global cont
    t[0] = Node("SEARCH_CASE","",cont,0,0)
    cont = cont+1
    nodo1 = Node("case",t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo2 = Node("end",t[3],cont,t.lineno(3) ,t.lexpos(3))
    cont = cont+1
    nodo3 = Node("case",t[4],cont,t.lineno(4) ,t.lexpos(4))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(t[2])
    t[0].AddHijos(nodo2)
    t[0].AddHijos(nodo3)
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_CUERPOCASE(t):
    ''' L_CASE : L_CASE CASE
    '''
    # Left-recursive WHEN-clause list: extend the accumulated L_CASE node.
    acumulado = t[1]
    acumulado.AddHijos(t[2])
    t[0] = acumulado
    lista.append(str(recorrerGramatica(t[0], 0)) + "\n")
def p_CUERPOCASE1(t):
    ''' L_CASE : CASE
    '''
    # Base case of the WHEN-clause list.
    global cont
    t[0] = Node("L_CASE","",cont,0,0)
    cont = cont+1
    t[0].AddHijos(t[1])
    lista.append(str(recorrerGramatica(t[0],0))+"\n")
def p_CASE(t):
    ''' CASE : when LISTA_EXP then L_BLOCK
    | when COND1 then L_BLOCK
    '''
    # WHEN <exprs|cond> THEN <body> clause.
    global cont
    t[0] = Node("CASE","",cont,0,0)
    cont = cont+1
    nodo1 = Node("when",t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo2 = Node("then",t[3],cont,t.lineno(3) ,t.lexpos(3))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(t[2])
    t[0].AddHijos(nodo2)
    t[0].AddHijos(t[4])
    lista.append(str(recorrerGramatica(t[0],0))+"\n")
def p_SINO(t):
    ''' SINO : else L_BLOCK
    '''
    # ELSE branch of a CASE.
    global cont
    t[0] = Node("SINO","",cont,0,0)
    cont = cont+1
    nodo1 = Node("else",t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(t[2])
    lista.append(str(recorrerGramatica(t[0],0))+"\n")
def p_Raice_Note(t):
    ' DECLARACION_RAICENOTE : raise notice LISTA_EXP ptComa'
    # RAISE NOTICE <exprs> ;
    global cont
    t[0] = Node("DECLARACION_RAICENOTE","",cont,0,0)
    cont = cont+1
    nodo1 = Node("raise",t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo2 = Node("notice",t[2],cont,t.lineno(2) ,t.lexpos(2))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    t[0].AddHijos(t[3])
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
#------------------------INDEX
def p_index1(t):
    ''' CREATE_INDEX : create index id on id OPCION_INDEX ptComa
    '''
    # CREATE INDEX <name> ON <table> <options> ;
    global cont
    t[0] = Node("CREATE_INDEX","",cont,0,0)
    cont = cont+1
    nodo1 = Node("create",t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo2 = Node("index",t[2],cont,t.lineno(2) ,t.lexpos(2))
    cont = cont+1
    nodo3 = Node("id",t[3],cont,t.lineno(3) ,t.lexpos(3))
    cont = cont+1
    nodo4 = Node("on",t[4],cont,t.lineno(4) ,t.lexpos(4))
    cont = cont+1
    nodo5 = Node("id",t[5],cont,t.lineno(5) ,t.lexpos(5))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    t[0].AddHijos(nodo3)
    t[0].AddHijos(nodo4)
    t[0].AddHijos(nodo5)
    t[0].AddHijos(t[6])
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_index2(t):
    ''' CREATE_INDEX : create index id on id OPCION_INDEX EXPR_WHERE ptComa
    '''
    # CREATE INDEX with a trailing WHERE predicate (partial index).
    global cont
    t[0] = Node("CREATE_INDEX","",cont,0,0)
    cont = cont+1
    nodo1 = Node("create",t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo2 = Node("index",t[2],cont,t.lineno(2) ,t.lexpos(2))
    cont = cont+1
    nodo3 = Node("id",t[3],cont,t.lineno(3) ,t.lexpos(3))
    cont = cont+1
    nodo4 = Node("on",t[4],cont,t.lineno(4) ,t.lexpos(4))
    cont = cont+1
    nodo5 = Node("id",t[5],cont,t.lineno(5) ,t.lexpos(5))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    t[0].AddHijos(nodo3)
    t[0].AddHijos(nodo4)
    t[0].AddHijos(nodo5)
    t[0].AddHijos(t[6])
    t[0].AddHijos(t[7])
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_index3(t):
    ''' CREATE_INDEX : create tUnique index id on id OPCION_INDEX ptComa
    '''
    # CREATE UNIQUE INDEX variant.
    global cont
    t[0] = Node("CREATE_INDEX","",cont,0,0)
    cont = cont+1
    nodo1 = Node("create",t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo2 = Node("tUnique",t[2],cont,t.lineno(2) ,t.lexpos(2))
    cont = cont+1
    nodo3 = Node("index",t[3],cont,t.lineno(3) ,t.lexpos(3))
    cont = cont+1
    nodo4 = Node("id",t[4],cont,t.lineno(4) ,t.lexpos(4))
    cont = cont+1
    nodo5 = Node("on",t[5],cont,t.lineno(5) ,t.lexpos(5))
    cont = cont+1
    nodo6 = Node("id",t[6],cont,t.lineno(6) ,t.lexpos(6))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    t[0].AddHijos(nodo3)
    t[0].AddHijos(nodo4)
    t[0].AddHijos(nodo5)
    t[0].AddHijos(nodo6)
    t[0].AddHijos(t[7])
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_index4(t):
    ''' CREATE_INDEX : create tUnique index id on id OPCION_INDEX EXPR_WHERE ptComa
    '''
    # CREATE UNIQUE INDEX with a WHERE predicate.
    global cont
    t[0] = Node("CREATE_INDEX","",cont,0,0)
    cont = cont+1
    nodo1 = Node("create",t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo2 = Node("tUnique",t[2],cont,t.lineno(2) ,t.lexpos(2))
    cont = cont+1
    nodo3 = Node("index",t[3],cont,t.lineno(3) ,t.lexpos(3))
    cont = cont+1
    nodo4 = Node("id",t[4],cont,t.lineno(4) ,t.lexpos(4))
    cont = cont+1
    nodo5 = Node("on",t[5],cont,t.lineno(5) ,t.lexpos(5))
    cont = cont+1
    nodo6 = Node("id",t[6],cont,t.lineno(6) ,t.lexpos(6))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    t[0].AddHijos(nodo3)
    t[0].AddHijos(nodo4)
    t[0].AddHijos(nodo5)
    t[0].AddHijos(nodo6)
    t[0].AddHijos(t[7])
    t[0].AddHijos(t[8])
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_createIndex1(t):
    ''' OPCION_INDEX : using hash parAbre id parCierra
    '''
    # USING HASH (<column>) index option.
    global cont
    t[0] = Node("OPCION_INDEX","",cont,0,0)
    cont = cont+1
    nodo1 = Node("using",t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo2 = Node("hash",t[2],cont,t.lineno(2) ,t.lexpos(2))
    cont = cont+1
    nodo4 = Node("id",t[4],cont,t.lineno(4) ,t.lexpos(4))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    t[0].AddHijos(nodo4)
    lista.append(str(recorrerGramatica(t[0],0))+"\n")
def p_createIndex2(t):
    ''' OPCION_INDEX : parAbre OPT_INDEX_PAR parCierra
    '''
    # Parenthesized column/option list; parentheses not kept in the AST.
    global cont
    t[0] = Node("OPCION_INDEX","",cont,0,0)
    cont = cont+1
    t[0].AddHijos(t[2])
    lista.append(str(recorrerGramatica(t[0],0))+"\n")
def p_createIndex3(t):
    ''' OPCION_INDEX : parAbre OPT_INDEX_PAR parCierra include parAbre OPT_INDEX_PAR parCierra
    '''
    # (<cols>) INCLUDE (<cols>) index option.
    # FIX: the original built the "include" leaf node twice (bumping the
    # global counter both times) and never attached it to the tree, so the
    # INCLUDE keyword was silently dropped from the AST; it also appended
    # "<tk_puntoComa>" although this production has no ptComa token (all
    # sibling OPCION_INDEX rules append a plain newline).
    global cont
    t[0] = Node("OPCION_INDEX","",cont,0,0)
    cont = cont+1
    nodo1 = Node("include",t[4],cont,t.lineno(4) ,t.lexpos(4))
    cont = cont+1
    t[0].AddHijos(t[2])
    t[0].AddHijos(nodo1)
    t[0].AddHijos(t[6])
    lista.append(str(recorrerGramatica(t[0],0))+"\n")
def p_createIndex4(t):
    ' OPT_INDEX_PAR : L_IDs'
    # Plain identifier list inside the index parentheses.
    global cont
    t[0] = Node("OPT_INDEX_PAR","",cont,0,0)
    cont = cont+1
    t[0].AddHijos(t[1])
    lista.append(str(recorrerGramatica(t[0],0))+"\n")
def p_createIndex2_1(t):
    ' OPT_INDEX_PAR : id nulls FIRST_LAST'
    # <col> NULLS FIRST|LAST ordering option.
    global cont
    t[0] = Node("OPT_INDEX_PAR","",cont,0,0)
    cont = cont+1
    nodo1 = Node("id",t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo2 = Node("nulls",t[2],cont,t.lineno(2) ,t.lexpos(2))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    t[0].AddHijos(t[3])
    lista.append(str(recorrerGramatica(t[0],0))+"\n")
def p_createIndex2_2(t):
    ' OPT_INDEX_PAR : id state '
    # <col> with a sort-state keyword (state token).
    global cont
    t[0] = Node("OPT_INDEX_PAR","",cont,0,0)
    cont = cont+1
    nodo1 = Node("id",t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo2 = Node("state",t[2],cont,t.lineno(2) ,t.lexpos(2))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    lista.append(str(recorrerGramatica(t[0],0))+"\n")
def p_createIndex2_3(t):
    ' OPT_INDEX_PAR : lower parAbre id parCierra '
    # Expression index on LOWER(<col>).
    global cont
    t[0] = Node("OPT_INDEX_PAR","",cont,0,0)
    cont = cont+1
    nodo1 = Node("lower",t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo2 = Node("id",t[3],cont,t.lineno(3) ,t.lexpos(3))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    lista.append(str(recorrerGramatica(t[0],0))+"\n")
def p_createIndex_5(t):
    ' OPT_INDEX_PAR : id parAbre id parCierra '
    # Expression index on a generic function call f(<col>).
    global cont
    t[0] = Node("OPT_INDEX_PAR","",cont,0,0)
    cont = cont+1
    nodo1 = Node("id",t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo2 = Node("id",t[3],cont,t.lineno(3) ,t.lexpos(3))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    lista.append(str(recorrerGramatica(t[0],0))+"\n")
def p_createIndex_6(t):
    ' OPT_INDEX_PAR : E '
    # Arbitrary expression as index content.
    global cont
    t[0] = Node("OPT_INDEX_PAR","",cont,0,0)
    cont = cont+1
    t[0].AddHijos(t[1])
    lista.append(str(recorrerGramatica(t[0],0))+"\n")
def p_createIndex_7(t):
    ' OPT_INDEX_PAR : L_PARAMETROS '
    # Parameter-style list as index content.
    global cont
    t[0] = Node("OPT_INDEX_PAR","",cont,0,0)
    cont = cont+1
    t[0].AddHijos(t[1])
    lista.append(str(recorrerGramatica(t[0],0))+"\n")
def p_first_last(t):
    ''' FIRST_LAST : first
    | last '''
    # FIRST or LAST keyword; leaf label comes from the token text.
    global cont
    t[0] = Node("FIRST_LAST","",cont,0,0)
    cont = cont+1
    nodo1 = Node(str(t[1]),t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    t[0].AddHijos(nodo1)
    lista.append(str(recorrerGramatica(t[0],0))+"\n")
def p_USEDB(t):
    ''' USEDB : tuse id ptComa'''
    # USE <database> ;
    global cont
    # NOTE(review): node label "USEDATABASE" differs from the nonterminal
    # name USEDB — confirm downstream consumers expect this label.
    t[0] = Node("USEDATABASE","",cont,0,0)
    cont = cont+1
    nodo1 = Node("tuse", t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo3 = Node("id",t[2],cont,t.lineno(2) ,t.lexpos(2))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo3)
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
    #lista.append("<USEDB> :: = <"+str(t[1].Etiqueta)+"> <"+str(t[2].Etiqueta)+"> <"+str(t[3].Etiqueta)+"> <tk_ptComa>" )
# ---- Database-creation productions below (author: Heidy) ----
def p_crearBase1(t):
'''CrearBase : create database E ptComa'''
global cont
t[0] = Node("CrearBase","",cont,0,0)
cont = cont+1
nodo1 = Node("create", t[1],cont,t.lineno(1) ,t.lexpos(1))
cont = cont+1
nodo2 = Node("database",t[2],cont,t.lineno(2) ,t.lexpos(2))
cont = cont+1
t[0].AddHijos(nodo1)
t[0].AddHijos(nodo2)
t[0].AddHijos(t[3])
lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_crearBase2(t):
'''CrearBase : create database E owner igual E ptComa'''
global cont
t[0] = Node("CrearBase","",cont,0,0)
cont = cont+1
nodo1 = Node("create", t[1],cont,t.lineno(1) ,t.lexpos(1))
cont = cont+1
nodo2 = Node("database",t[2],cont,t.lineno(2) ,t.lexpos(2))
cont = cont+1
nodo4 = Node("owner",t[4],cont,t.lineno(4) ,t.lexpos(4))
cont = cont+1
nodo5 = Node("igual",t[5],cont,t.lineno(5) ,t.lexpos(5))
cont = cont+1
t[0].AddHijos(nodo1)
t[0].AddHijos(nodo2)
t[0].AddHijos(t[3])
t[0].AddHijos(nodo4)
t[0].AddHijos(nodo5)
t[0].AddHijos(t[6])
lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_crearBase3(t):
'''CrearBase : create database E mode igual entero ptComa'''
global cont
t[0] = Node("CrearBase","",cont,0,0)
cont = cont+1
nodo1 = Node("create", t[1],cont,t.lineno(1) ,t.lexpos(1))
cont = cont+1
nodo2 = Node("database",t[2],cont,t.lineno(2) ,t.lexpos(2))
cont = cont+1
nodo4 = Node("mode",t[4],cont,t.lineno(4) ,t.lexpos(4))
cont = cont+1
nodo5 = Node("igual",t[5],cont,t.lineno(5) ,t.lexpos(5))
cont = cont+1
nodo6 = Node("entero",t[6],cont,t.lineno(6) ,t.lexpos(6))
cont = cont+1
t[0].AddHijos(nodo1)
t[0].AddHijos(nodo2)
t[0].AddHijos(t[3])
t[0].AddHijos(nodo4)
t[0].AddHijos(nodo5)
t[0].AddHijos(nodo6)
lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_crearBase4(t):
'''CrearBase : create database E owner igual E mode igual entero ptComa'''
global cont
t[0] = Node("CrearBase","",cont,0,0)
cont = cont+1
nodo1 = Node("create", t[1],cont,t.lineno(1) ,t.lexpos(1))
cont = cont+1
nodo2 = Node("database",t[2],cont,t.lineno(2) ,t.lexpos(2))
cont = cont+1
nodo4 = Node("owner",t[4],cont,t.lineno(4) ,t.lexpos(4))
cont = cont+1
nodo5 = Node("igual",t[5],cont,t.lineno(5) ,t.lexpos(5))
cont = cont+1
nodo7 = Node("mode",t[7],cont,t.lineno(7) ,t.lexpos(7))
cont = cont+1
nodo8 = Node("igual",t[8],cont,t.lineno(8) ,t.lexpos(8))
cont = cont+1
nodo9 = Node("entero",t[9],cont,t.lineno(9) ,t.lexpos(9))
cont = cont+1
t[0].AddHijos(nodo1)
t[0].AddHijos(nodo2)
t[0].AddHijos(t[3])
t[0].AddHijos(nodo4)
t[0].AddHijos(nodo5)
t[0].AddHijos(t[6])
t[0].AddHijos(nodo7)
t[0].AddHijos(nodo8)
t[0].AddHijos(nodo9)
lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_crearBase5(t):
    '''CrearBase : create or replace database E ptComa'''
    # CREATE OR REPLACE DATABASE <E>;
    global cont
    raiz = Node("CrearBase", "", cont, 0, 0)
    cont += 1
    for pos, etq in ((1, "create"), (2, "or"), (3, "replace"), (4, "database"), (5, None)):
        if etq is None:
            raiz.AddHijos(t[pos])  # non-terminal: subtree already built
        else:
            raiz.AddHijos(Node(etq, t[pos], cont, t.lineno(pos), t.lexpos(pos)))
            cont += 1
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "<tk_puntoComa>" + "\n")
def p_crearBase6(t):
    '''CrearBase : create or replace database E owner igual E ptComa'''
    # CREATE OR REPLACE DATABASE with an OWNER clause.
    global cont
    raiz = Node("CrearBase", "", cont, 0, 0)
    cont += 1
    for pos, etq in ((1, "create"), (2, "or"), (3, "replace"), (4, "database"),
                     (5, None), (6, "owner"), (7, "igual"), (8, None)):
        if etq is None:
            raiz.AddHijos(t[pos])  # non-terminal: subtree already built
        else:
            raiz.AddHijos(Node(etq, t[pos], cont, t.lineno(pos), t.lexpos(pos)))
            cont += 1
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "<tk_puntoComa>" + "\n")
def p_crearBase7(t):
    '''CrearBase : create or replace database E mode igual entero ptComa'''
    # CREATE OR REPLACE DATABASE with a MODE clause.
    # BUG FIX: the original built the root into a local `nodo` but then called
    # AddHijos on t[0], which PLY initializes to None -> AttributeError at parse
    # time. Assign the root to t[0] before attaching children.
    global cont
    t[0] = Node("CrearBase", "", cont, 0, 0)
    cont = cont + 1
    for pos, etq in ((1, "create"), (2, "or"), (3, "replace"), (4, "database"),
                     (5, None), (6, "mode"), (7, "igual"), (8, "entero")):
        if etq is None:
            t[0].AddHijos(t[pos])  # non-terminal: subtree already built
        else:
            t[0].AddHijos(Node(etq, t[pos], cont, t.lineno(pos), t.lexpos(pos)))
            cont = cont + 1
    lista.append(str(recorrerGramatica(t[0], 0)) + "<tk_puntoComa>" + "\n")
def p_crearBase8(t):
    '''CrearBase : create or replace database E owner igual E mode igual entero ptComa'''
    # CREATE OR REPLACE DATABASE with both OWNER and MODE clauses.
    global cont
    raiz = Node("CrearBase", "", cont, 0, 0)
    cont += 1
    for pos, etq in ((1, "create"), (2, "or"), (3, "replace"), (4, "database"),
                     (5, None), (6, "owner"), (7, "igual"), (8, None),
                     (9, "mode"), (10, "igual"), (11, "entero")):
        if etq is None:
            raiz.AddHijos(t[pos])  # non-terminal: subtree already built
        else:
            raiz.AddHijos(Node(etq, t[pos], cont, t.lineno(pos), t.lexpos(pos)))
            cont += 1
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "<tk_puntoComa>" + "\n")
def p_crearBase9(t):
    '''CrearBase : create database if not exists E ptComa'''
    # CREATE DATABASE IF NOT EXISTS <E>;
    # FIXES: (1) the leaf for t[5] was labeled "exit" instead of "exists";
    # (2) removed the stray hard-coded lista.append of the raw production text,
    # which no sibling rule emits and which duplicated the report produced below.
    global cont
    t[0] = Node("CrearBase", "", cont, 0, 0)
    cont = cont + 1
    for pos, etq in ((1, "create"), (2, "database"), (3, "if"), (4, "not"),
                     (5, "exists"), (6, None)):
        if etq is None:
            t[0].AddHijos(t[pos])  # non-terminal: subtree already built
        else:
            t[0].AddHijos(Node(etq, t[pos], cont, t.lineno(pos), t.lexpos(pos)))
            cont = cont + 1
    lista.append(str(recorrerGramatica(t[0], 0)) + "<tk_puntoComa>" + "\n")
def p_crearBase10(t):
    '''CrearBase : create database if not exists E owner igual E ptComa'''
    # CREATE DATABASE IF NOT EXISTS with an OWNER clause.
    # BUG FIXES: (1) the root was stored only in a local `nodo` while children
    # were attached to t[0] (None in PLY) -> AttributeError; (2) the leaf for
    # t[5] was labeled "exits" instead of "exists".
    global cont
    t[0] = Node("CrearBase", "", cont, 0, 0)
    cont = cont + 1
    for pos, etq in ((1, "create"), (2, "database"), (3, "if"), (4, "not"),
                     (5, "exists"), (6, None), (7, "owner"), (8, "igual"), (9, None)):
        if etq is None:
            t[0].AddHijos(t[pos])  # non-terminal: subtree already built
        else:
            t[0].AddHijos(Node(etq, t[pos], cont, t.lineno(pos), t.lexpos(pos)))
            cont = cont + 1
    lista.append(str(recorrerGramatica(t[0], 0)) + "<tk_puntoComa>" + "\n")
def p_crearBase11(t):
    '''CrearBase : create database if not exists E mode igual entero ptComa'''
    # CREATE DATABASE IF NOT EXISTS with a MODE clause.
    global cont
    raiz = Node("CrearBase", "", cont, 0, 0)
    cont += 1
    for pos, etq in ((1, "create"), (2, "database"), (3, "if"), (4, "not"),
                     (5, "exists"), (6, None), (7, "mode"), (8, "igual"), (9, "entero")):
        if etq is None:
            raiz.AddHijos(t[pos])  # non-terminal: subtree already built
        else:
            raiz.AddHijos(Node(etq, t[pos], cont, t.lineno(pos), t.lexpos(pos)))
            cont += 1
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "<tk_puntoComa>" + "\n")
def p_crearBase12(t):
    '''CrearBase : create database if not exists E owner igual E mode igual entero ptComa'''
    # CREATE DATABASE IF NOT EXISTS with both OWNER and MODE clauses.
    global cont
    raiz = Node("CrearBase", "", cont, 0, 0)
    cont += 1
    for pos, etq in ((1, "create"), (2, "database"), (3, "if"), (4, "not"),
                     (5, "exists"), (6, None), (7, "owner"), (8, "igual"),
                     (9, None), (10, "mode"), (11, "igual"), (12, "entero")):
        if etq is None:
            raiz.AddHijos(t[pos])  # non-terminal: subtree already built
        else:
            raiz.AddHijos(Node(etq, t[pos], cont, t.lineno(pos), t.lexpos(pos)))
            cont += 1
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "<tk_puntoComa>" + "\n")
def p_showBase1(t):
    '''ShowBase : show databases ptComa'''
    # SHOW DATABASES;
    global cont
    raiz = Node("ShowBase", "", cont, 0, 0)
    cont += 1
    for pos, etq in ((1, "show"), (2, "databases")):
        raiz.AddHijos(Node(etq, t[pos], cont, t.lineno(pos), t.lexpos(pos)))
        cont += 1
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "<tk_puntoComa>" + "\n")
def p_showBase2(t):
    '''ShowBase : show databases like cadenaLike ptComa'''
    # SHOW DATABASES LIKE 'pattern';
    global cont
    raiz = Node("ShowBase", "", cont, 0, 0)
    cont += 1
    for pos, etq in ((1, "show"), (2, "databases"), (3, "like"), (4, "cadenaLike")):
        raiz.AddHijos(Node(etq, t[pos], cont, t.lineno(pos), t.lexpos(pos)))
        cont += 1
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "<tk_puntoComa>" + "\n")
# ------ALTER-------
def p_AlterBase(t):
    ''' AlterBase : alter database E rename tTo id ptComa
    '''
    # ALTER DATABASE <E> RENAME TO <id>;
    # FIX: the leaf for t[4] was labeled "raname" (typo) -> "rename".
    global cont
    t[0] = Node("AlterBase", "", cont, 0, 0)
    cont = cont + 1
    for pos, etq in ((1, "alter"), (2, "database"), (3, None), (4, "rename"),
                     (5, "to"), (6, "id")):
        if etq is None:
            t[0].AddHijos(t[pos])  # non-terminal: subtree already built
        else:
            t[0].AddHijos(Node(etq, t[pos], cont, t.lineno(pos), t.lexpos(pos)))
            cont = cont + 1
    lista.append(str(recorrerGramatica(t[0], 0)) + "<tk_puntoComa>" + "\n")
def p_AlterBase1(t):
    '''AlterBase : alter database E owner tTo id ptComa
    '''
    # ALTER DATABASE <E> OWNER TO <id>;
    # FIX: the id leaf wrapped t[6] but recorded t.lineno(5)/t.lexpos(5); it now
    # records position 6, matching the token it wraps.
    global cont
    t[0] = Node("AlterBase", "", cont, 0, 0)
    cont = cont + 1
    for pos, etq in ((1, "alter"), (2, "database"), (3, None), (4, "owner"),
                     (5, "to"), (6, "id")):
        if etq is None:
            t[0].AddHijos(t[pos])  # non-terminal: subtree already built
        else:
            t[0].AddHijos(Node(etq, t[pos], cont, t.lineno(pos), t.lexpos(pos)))
            cont = cont + 1
    lista.append(str(recorrerGramatica(t[0], 0)) + "<tk_puntoComa>" + "\n")
def p_AlterBase2(t):
    '''AlterBase : alter database E owner tTo currentuser ptComa
    '''
    # ALTER DATABASE <E> OWNER TO CURRENT_USER;
    global cont
    raiz = Node("AlterBase", "", cont, 0, 0)
    cont += 1
    for pos, etq in ((1, "alter"), (2, "database"), (3, None), (4, "owner"),
                     (5, "to"), (6, "currentuser")):
        if etq is None:
            raiz.AddHijos(t[pos])  # non-terminal: subtree already built
        else:
            raiz.AddHijos(Node(etq, t[pos], cont, t.lineno(pos), t.lexpos(pos)))
            cont += 1
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "<tk_puntoComa>" + "\n")
def p_AlterBase3(t):
    '''AlterBase : alter database E owner tTo sessionuser ptComa
    '''
    # ALTER DATABASE <E> OWNER TO SESSION_USER;
    # FIX: removed the stray hard-coded lista.append of the raw production text;
    # no sibling AlterBase rule emits it and it duplicated the report below.
    global cont
    t[0] = Node("AlterBase", "", cont, 0, 0)
    cont = cont + 1
    for pos, etq in ((1, "alter"), (2, "database"), (3, None), (4, "owner"),
                     (5, "to"), (6, "sessionuser")):
        if etq is None:
            t[0].AddHijos(t[pos])  # non-terminal: subtree already built
        else:
            t[0].AddHijos(Node(etq, t[pos], cont, t.lineno(pos), t.lexpos(pos)))
            cont = cont + 1
    lista.append(str(recorrerGramatica(t[0], 0)) + "<tk_puntoComa>" + "\n")
def p_DropBase(t):
    '''DropBase : drop database E ptComa'''
    # DROP DATABASE <E>;
    global cont
    raiz = Node("DropBase", "", cont, 0, 0)
    cont += 1
    for pos, etq in ((1, "drop"), (2, "database"), (3, None)):
        if etq is None:
            raiz.AddHijos(t[pos])  # non-terminal: subtree already built
        else:
            raiz.AddHijos(Node(etq, t[pos], cont, t.lineno(pos), t.lexpos(pos)))
            cont += 1
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "<tk_puntoComa>" + "\n")
def p_DropBase1(t):
    '''DropBase : drop database if exists id ptComa'''
    # DROP DATABASE IF EXISTS <id>;
    global cont
    raiz = Node("DropBase", "", cont, 0, 0)
    cont += 1
    for pos, etq in ((1, "drop"), (2, "database"), (3, "if"), (4, "exists"), (5, "id")):
        raiz.AddHijos(Node(etq, t[pos], cont, t.lineno(pos), t.lexpos(pos)))
        cont += 1
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "<tk_puntoComa>" + "\n")
def p_EnumType2(t):
    'EnumType : create ttype id as tenum parAbre LISTA_EXP parCierra ptComa'
    # CREATE TYPE <id> AS ENUM ( <LISTA_EXP> ); — parentheses are not kept in the tree.
    global cont
    raiz = Node("EnumType", "", cont, 0, 0)
    cont += 1
    for pos, etq in ((1, "create"), (2, "ttype"), (3, "id"), (4, "as"),
                     (5, "tenum"), (7, None)):
        if etq is None:
            raiz.AddHijos(t[pos])  # non-terminal: subtree already built
        else:
            raiz.AddHijos(Node(etq, t[pos], cont, t.lineno(pos), t.lexpos(pos)))
            cont += 1
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "<tk_puntoComa>" + "\n")
# <<<<<<<<<<<<<<<<<<<<<<<<<<< HEIDY <<<<<<<<<<<<<<<<<<<<<<<<<<<<
# <<<<<<<<<<<<<<<<<<<<<<<<<<< ARIEL <<<<<<<<<<<<<<<<<<<<<<<<<<<<
# PRODUCCIÓN PARA HACER UN UPDATE
def p_produccion0(t):
    ''' UpdateBase : tUpdate id tSet L_ASIGN EXPR_WHERE ptComa '''
    # UPDATE <id> SET <assignments> <where>;
    global cont
    raiz = Node("UpdateBase", "", cont, 0, 0)
    cont += 1
    for pos, etq in ((1, "tUpdate"), (2, "id"), (3, "tSet"), (4, None), (5, None)):
        if etq is None:
            raiz.AddHijos(t[pos])  # non-terminal: subtree already built
        else:
            raiz.AddHijos(Node(etq, t[pos], cont, t.lineno(pos), t.lexpos(pos)))
            cont += 1
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "<tk_puntoComa>" + "\n")
# PRODUCCIÓN PARA HACER UN DELETE
def p_produccion0_1(t):
    ''' DeleteBase : tDelete from id EXPR_WHERE ptComa '''
    # DELETE FROM <id> <where>;
    global cont
    raiz = Node("DeleteBase", "", cont, 0, 0)
    cont += 1
    for pos, etq in ((1, "tDelete"), (2, "from"), (3, "id"), (4, None)):
        if etq is None:
            raiz.AddHijos(t[pos])  # non-terminal: subtree already built
        else:
            raiz.AddHijos(Node(etq, t[pos], cont, t.lineno(pos), t.lexpos(pos)))
            cont += 1
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "<tk_puntoComa>" + "\n")
# CONDICIÓN QUE PUEDE O NO VENIR DENTRO DE UN DELETE
# /////////////////////////////////MODIFIQUE LA GRAMATICA////////////////////////////
def p_produccion0_2(t):
    ''' DeleteBase : tDelete from id ptComa
    '''
    # DELETE FROM <id>; (no WHERE clause)
    global cont
    raiz = Node("DeleteBase", "", cont, 0, 0)
    cont += 1
    for pos, etq in ((1, "tDelete"), (2, "from"), (3, "id")):
        raiz.AddHijos(Node(etq, t[pos], cont, t.lineno(pos), t.lexpos(pos)))
        cont += 1
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "<tk_puntoComa>" + "\n")
# PRODUCCIÓN PARA HACER UN TRUNCATE
def p_produccion1_0(t):
    ''' TruncateBase : tTruncate L_IDs ptComa'''
    # TRUNCATE <id list>;
    global cont
    raiz = Node("TruncateBase", "", cont, 0, 0)
    cont += 1
    raiz.AddHijos(Node("tTruncate", t[1], cont, t.lineno(1), t.lexpos(1)))
    cont += 1
    raiz.AddHijos(t[2])  # already-built L_IDs subtree
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "<tk_puntoComa>" + "\n")
# PRODUCCIÓN PARA UNA LISTA DE IDENTIFICADORES
def p_produccion1_1(t):
    ''' L_IDs : L_IDs coma id
    '''
    # Left-recursive id list: append one more id leaf to the accumulated node.
    global cont
    acumulado = t[1]
    acumulado.AddHijos(Node("id", t[3], cont, t.lineno(3), t.lexpos(3)))
    cont += 1
    t[0] = acumulado
    lista.append(str(recorrerGramatica(t[0], 0)) + "\n")
def p_produccion1_2(t):
    ''' L_IDs : id '''
    # Base case of the id list: start a fresh L_IDs node with one id leaf.
    global cont
    raiz = Node("L_IDs", "", cont, 0, 0)
    cont += 1
    raiz.AddHijos(Node("id", t[1], cont, t.lineno(1), t.lexpos(1)))
    cont += 1
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "\n")
# PRODUCCIÓN PARA UNA LISTA DE ASIGNACIONES: id1 = 2, id2 = 3, id3, = 'Hola', etc...
def p_produccion1(t):
    ''' L_ASIGN : L_ASIGN coma id igual E
    '''
    # Left-recursive assignment list: append `id = E` to the accumulated node.
    # BUG FIX: the original created the id/igual leaves (consuming cont) but never
    # attached them, so the assignment target and '=' were dropped from the tree.
    global cont
    t[0] = t[1]
    nodo1 = Node("id", t[3], cont, t.lineno(3), t.lexpos(3))
    cont = cont + 1
    nodo2 = Node("igual", t[4], cont, t.lineno(4), t.lexpos(4))
    cont = cont + 1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    t[0].AddHijos(t[5])
    lista.append(str(recorrerGramatica(t[0], 0)) + "\n")
def p_produccion2(t):
    ''' L_ASIGN : id igual E '''
    # Base case of the assignment list: a single `id = E`.
    # BUG FIX: the original created the id/igual leaves (consuming cont) but never
    # attached them, so the assignment target and '=' were dropped from the tree.
    global cont
    t[0] = Node("L_ASIGN", "", cont, 0, 0)
    cont = cont + 1
    nodo1 = Node("id", t[1], cont, t.lineno(1), t.lexpos(1))
    cont = cont + 1
    nodo2 = Node("igual", t[2], cont, t.lineno(2), t.lexpos(2))
    cont = cont + 1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    t[0].AddHijos(t[3])
    lista.append(str(recorrerGramatica(t[0], 0)) + "\n")
# <<<<<<<<<<<<<<<<<<<<<<<<<<< ARIEL <<<<<<<<<<<<<<<<<<<<<<<<<<<<
# <<<<<<<<<<<<<<<<<<<<<<<<<<< FRANCISCO <<<<<<<<<<<<<<<<<<<<<<<<<<<<
def p_EXPR_CREATE_TABLE1(t):
    '''CREATE_TABLE : create table id parAbre COLUMNS parCierra ptComa '''
    # CREATE TABLE <id> ( <columns> ); — parentheses are not kept in the tree.
    global cont
    raiz = Node("CREATE_TABLE", "", cont, 0, 0)
    cont += 1
    for pos, etq in ((1, "create"), (2, "table"), (3, "id"), (5, None)):
        if etq is None:
            raiz.AddHijos(t[pos])  # non-terminal: subtree already built
        else:
            raiz.AddHijos(Node(etq, t[pos], cont, t.lineno(pos), t.lexpos(pos)))
            cont += 1
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "<tk_puntoComa>" + "\n")
def p_EXPR_CREATE_TABLE2(t):
    '''CREATE_TABLE : create table id parAbre COLUMNS parCierra tInherits parAbre id parCierra ptComa '''
    # CREATE TABLE ... INHERITS ( <id> );
    global cont
    raiz = Node("CREATE_TABLE", "", cont, 0, 0)
    cont += 1
    for pos, etq in ((1, "create"), (2, "table"), (3, "id"), (5, None),
                     (7, "tInherits"), (9, "id")):
        if etq is None:
            raiz.AddHijos(t[pos])  # non-terminal: subtree already built
        else:
            raiz.AddHijos(Node(etq, t[pos], cont, t.lineno(pos), t.lexpos(pos)))
            cont += 1
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "<tk_puntoComa>" + "\n")
def p_EXPR_COLUMNS(t):
    '''COLUMNS : COLUMNS coma ASSIGNS
    '''
    # Left-recursive column list: extend the accumulated COLUMNS node.
    global cont
    acumulado = t[1]
    acumulado.AddHijos(t[3])
    t[0] = acumulado
    lista.append(str(recorrerGramatica(t[0], 0)) + "\n")
def p_EXPR_COLUMNS1(t):
    '''COLUMNS : ASSIGNS
    '''
    # Base case of the column list: wrap the first ASSIGNS in a COLUMNS node.
    global cont
    raiz = Node("COLUMNS", "", cont, 0, 0)
    cont += 1
    raiz.AddHijos(t[1])
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "\n")
def p_EXPR_ASSIGNS(t):
    '''ASSIGNS : id TIPO'''
    # Column definition: <name> <type>.
    global cont
    raiz = Node("ASSIGNS", "", cont, 0, 0)
    cont += 1
    raiz.AddHijos(Node("id", t[1], cont, t.lineno(1), t.lexpos(1)))
    cont += 1
    raiz.AddHijos(t[2])  # TIPO subtree
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "\n")
def p_EXPR_ASSIGNS2(t):
    '''ASSIGNS : id TIPO OPCIONALES '''
    # Column definition with column options: <name> <type> <options>.
    global cont
    raiz = Node("ASSIGNS", "", cont, 0, 0)
    cont += 1
    raiz.AddHijos(Node("id", t[1], cont, t.lineno(1), t.lexpos(1)))
    cont += 1
    raiz.AddHijos(t[2])  # TIPO subtree
    raiz.AddHijos(t[3])  # OPCIONALES subtree
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "\n")
def p_EXPR_ASSIGN3(t):
    '''ASSIGNS : tCheck E'''
    # Table-level CHECK constraint.
    global cont
    raiz = Node("ASSIGNS", "", cont, 0, 0)
    cont += 1
    raiz.AddHijos(Node("tCheck", t[1], cont, t.lineno(1), t.lexpos(1)))
    cont += 1
    raiz.AddHijos(t[2])  # checked expression subtree
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "\n")
def p_EXPR_ASSIGNS4(t):
    '''ASSIGNS : tConstraint id tCheck E '''
    # Named table-level CHECK constraint: CONSTRAINT <id> CHECK <E>.
    global cont
    raiz = Node("ASSIGNS", "", cont, 0, 0)
    cont += 1
    for pos, etq in ((1, "tConstraint"), (2, "id"), (3, "tCheck"), (4, None)):
        if etq is None:
            raiz.AddHijos(t[pos])  # non-terminal: subtree already built
        else:
            raiz.AddHijos(Node(etq, t[pos], cont, t.lineno(pos), t.lexpos(pos)))
            cont += 1
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "\n")
def p_EXPR_ASSIGNS5(t):
    '''ASSIGNS : tUnique parAbre COLS parCierra'''
    # Table-level UNIQUE ( <cols> ); parentheses are not kept in the tree.
    global cont
    raiz = Node("ASSIGNS", "", cont, 0, 0)
    cont += 1
    raiz.AddHijos(Node("tUnique", t[1], cont, t.lineno(1), t.lexpos(1)))
    cont += 1
    raiz.AddHijos(t[3])  # COLS subtree
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "\n")
def p_EXPR_ASSIGNS6(t):
    '''ASSIGNS : tPrimary tKey parAbre COLS parCierra'''
    # Table-level PRIMARY KEY ( <cols> ).
    global cont
    raiz = Node("ASSIGNS", "", cont, 0, 0)
    cont += 1
    for pos, etq in ((1, "tPrimary"), (2, "tKey"), (4, None)):
        if etq is None:
            raiz.AddHijos(t[pos])  # non-terminal: subtree already built
        else:
            raiz.AddHijos(Node(etq, t[pos], cont, t.lineno(pos), t.lexpos(pos)))
            cont += 1
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "\n")
def p_EXPR_ASSIGNS7(t):
    '''ASSIGNS : tForeign tKey parAbre COLS parCierra tReferences id parAbre COLS parCierra'''
    # Table-level FOREIGN KEY ( <cols> ) REFERENCES <id> ( <cols> ).
    global cont
    raiz = Node("ASSIGNS", "", cont, 0, 0)
    cont += 1
    for pos, etq in ((1, "tForeign"), (2, "tKey"), (4, None),
                     (6, "tReferences"), (7, "id"), (9, None)):
        if etq is None:
            raiz.AddHijos(t[pos])  # non-terminal: subtree already built
        else:
            raiz.AddHijos(Node(etq, t[pos], cont, t.lineno(pos), t.lexpos(pos)))
            cont += 1
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "\n")
def p_EXPR_ASSIGNS8(t):
    '''ASSIGNS : tConstraint id tForeign tKey parAbre COLS parCierra tReferences id parAbre COLS parCierra '''
    # Named FOREIGN KEY constraint: CONSTRAINT <id> FOREIGN KEY (...) REFERENCES <id> (...).
    global cont
    raiz = Node("ASSIGNS", "", cont, 0, 0)
    cont += 1
    for pos, etq in ((1, "tConstraint"), (2, "id"), (3, "tForeign"), (4, "tKey"),
                     (6, None), (8, "tReferences"), (9, "id"), (11, None)):
        if etq is None:
            raiz.AddHijos(t[pos])  # non-terminal: subtree already built
        else:
            raiz.AddHijos(Node(etq, t[pos], cont, t.lineno(pos), t.lexpos(pos)))
            cont += 1
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "\n")
def p_EXPR_OPCIONALES(t):
    '''OPCIONALES : OPCIONALES OPCION
    '''
    # Left-recursive option list: extend the accumulated OPCIONALES node.
    global cont
    acumulado = t[1]
    acumulado.AddHijos(t[2])
    t[0] = acumulado
    lista.append(str(recorrerGramatica(t[0], 0)) + "\n")
def p_EXPR_OPCIONALES1(t):
    '''OPCIONALES : OPCION '''
    # Base case of the option list: wrap the first OPCION.
    global cont
    raiz = Node("OPCIONALES", "", cont, 0, 0)
    cont += 1
    raiz.AddHijos(t[1])
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "\n")
def p_EXPR_OPCION(t):
    '''OPCION : tDefault E'''
    # DEFAULT <expression> column option.
    global cont
    raiz = Node("OPCION", "", cont, 0, 0)
    cont += 1
    raiz.AddHijos(Node("tDefault", t[1], cont, t.lineno(1), t.lexpos(1)))
    cont += 1
    raiz.AddHijos(t[2])  # default-value expression subtree
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "\n")
def p_EXPR_OPCION1(t):
    '''OPCION : tPrimary tKey'''
    # Inline PRIMARY KEY column option.
    global cont
    raiz = Node("OPCION", "", cont, 0, 0)
    cont += 1
    for pos, etq in ((1, "tPrimary"), (2, "tKey")):
        raiz.AddHijos(Node(etq, t[pos], cont, t.lineno(pos), t.lexpos(pos)))
        cont += 1
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "\n")
def p_EXPR_OPCION2(t):
    '''OPCION : not null'''
    # NOT NULL column option.
    global cont
    raiz = Node("OPCION", "", cont, 0, 0)
    cont += 1
    for pos, etq in ((1, "not"), (2, "null")):
        raiz.AddHijos(Node(etq, t[pos], cont, t.lineno(pos), t.lexpos(pos)))
        cont += 1
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "\n")
def p_EXPR_OPCION3(t):
    '''OPCION : null'''
    # NULL column option.
    global cont
    raiz = Node("OPCION", "", cont, 0, 0)
    cont += 1
    raiz.AddHijos(Node("null", t[1], cont, t.lineno(1), t.lexpos(1)))
    cont += 1
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "\n")
def p_EXPR_OPCION4(t):
    '''OPCION : tUnique'''
    # Inline UNIQUE column option.
    global cont
    raiz = Node("OPCION", "", cont, 0, 0)
    cont += 1
    raiz.AddHijos(Node("tUnique", t[1], cont, t.lineno(1), t.lexpos(1)))
    cont += 1
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "\n")
def p_EXPR_OPCION5(t):
    '''OPCION : tCheck E'''
    # Inline CHECK <expression> column option.
    global cont
    raiz = Node("OPCION", "", cont, 0, 0)
    cont += 1
    raiz.AddHijos(Node("tCheck", t[1], cont, t.lineno(1), t.lexpos(1)))
    cont += 1
    raiz.AddHijos(t[2])  # checked expression subtree
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "\n")
def p_EXPR_OPCION6(t):
    ''' OPCION : tConstraint id tUnique '''
    # Named UNIQUE column option: CONSTRAINT <id> UNIQUE.
    global cont
    raiz = Node("OPCION", "", cont, 0, 0)
    cont += 1
    for pos, etq in ((1, "tConstraint"), (2, "id"), (3, "tUnique")):
        raiz.AddHijos(Node(etq, t[pos], cont, t.lineno(pos), t.lexpos(pos)))
        cont += 1
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "\n")
def p_EXPR_OPCION7(t):
    '''OPCION : tConstraint id tCheck E'''
    # Named CHECK column option: CONSTRAINT <id> CHECK <E>.
    global cont
    raiz = Node("OPCION", "", cont, 0, 0)
    cont += 1
    for pos, etq in ((1, "tConstraint"), (2, "id"), (3, "tCheck"), (4, None)):
        if etq is None:
            raiz.AddHijos(t[pos])  # non-terminal: subtree already built
        else:
            raiz.AddHijos(Node(etq, t[pos], cont, t.lineno(pos), t.lexpos(pos)))
            cont += 1
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "\n")
def p_EXPR_COLS(t):
    '''COLS : COLS coma E'''
    # Left-recursive column-reference list: extend the accumulated COLS node.
    global cont
    acumulado = t[1]
    acumulado.AddHijos(t[3])
    t[0] = acumulado
    lista.append(str(recorrerGramatica(t[0], 0)) + "\n")
def p_EXPR_COLS1(t):
    '''COLS : E '''
    # Base case of the column-reference list: wrap the first expression.
    global cont
    raiz = Node("COLS", "", cont, 0, 0)
    cont += 1
    raiz.AddHijos(t[1])
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "\n")
def p_EXPR_TIPO(t):
    '''TIPO : NUMERIC_TYPES
    | CHAR_TYPES
    | DATE_TYPES
    | BOOL_TYPES
    | E
    | OTROSTIPOS
    '''
    # Pass the already-built type subtree upward unchanged; only the derivation
    # line is logged here, using the subtree's Etiqueta attribute as its name.
    # NOTE(review): unlike sibling rules, this append has no trailing "\n" — confirm intended.
    t[0] = t[1]
    lista.append("<TIPO> ::= <"+str(t[1].Etiqueta)+">")
def p_EXPR_NUMERIC_TYPES(t):
    '''NUMERIC_TYPES : tSmallint
    | tInteger
    | tBigint
    | tDecimal
    | tNumeric
    | tReal
    | tDouble tPrecision
    | tMoney'''
    # Numeric type keyword; the leaf label is derived from the token text.
    # NOTE(review): for the `tDouble tPrecision` alternative only t[1] is
    # recorded, so "precision" is dropped — confirm intended.
    global cont
    raiz = Node("NUMERIC_TYPES", "", cont, 0, 0)
    cont += 1
    raiz.AddHijos(Node("t" + str(t[1]), t[1], cont, t.lineno(1), t.lexpos(1)))
    cont += 1
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "\n")
def p_EXPR_CHAR_TYPES1(t):
    '''CHAR_TYPES : tVarchar parAbre entero parCierra
    '''
    # VARCHAR(n) type; parentheses are not kept in the tree.
    # FIX: the report appended "<tk_puntoComa>" although this production has no
    # ptComa token (sibling CHAR_TYPES rules without ptComa append "\n" only).
    global cont
    t[0] = Node("CHAR_TYPES", "", cont, 0, 0)
    cont = cont + 1
    nodo1 = Node("tVarchar", t[1], cont, t.lineno(1), t.lexpos(1))
    cont = cont + 1
    entero = Node("entero", t[3], cont, t.lineno(3), t.lexpos(3))
    cont = cont + 1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(entero)
    lista.append(str(recorrerGramatica(t[0], 0)) + "\n")
def p_EXPR_CHAR_TYPES2(t):
    '''CHAR_TYPES : tCharacter tVarying parAbre entero parCierra
    '''
    # CHARACTER VARYING(n) type.
    # BUG FIX: the `entero` leaf wrapped t[3] (the opening parenthesis) instead
    # of t[4], the actual integer token; it now uses position 4. Also removed
    # the spurious "<tk_puntoComa>" from the report (no ptComa in this rule).
    global cont
    t[0] = Node("CHAR_TYPES", "", cont, 0, 0)
    cont = cont + 1
    nodo1 = Node("tCharacter", t[1], cont, t.lineno(1), t.lexpos(1))
    cont = cont + 1
    nodo2 = Node("tVarying", t[2], cont, t.lineno(2), t.lexpos(2))
    cont = cont + 1
    entero = Node("entero", t[4], cont, t.lineno(4), t.lexpos(4))
    cont = cont + 1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    t[0].AddHijos(entero)
    lista.append(str(recorrerGramatica(t[0], 0)) + "\n")
def p_EXPR_CHAR_TYPES3(t):
    '''CHAR_TYPES : tCharacter parAbre entero parCierra
    '''
    # CHARACTER(n) type.
    # FIX: removed the spurious "<tk_puntoComa>" from the report — this
    # production has no ptComa token.
    global cont
    t[0] = Node("CHAR_TYPES", "", cont, 0, 0)
    cont = cont + 1
    nodo1 = Node("tCharacter", t[1], cont, t.lineno(1), t.lexpos(1))
    cont = cont + 1
    entero = Node("entero", t[3], cont, t.lineno(3), t.lexpos(3))
    cont = cont + 1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(entero)
    lista.append(str(recorrerGramatica(t[0], 0)) + "\n")
def p_EXPR_CHAR_TYPES4(t):
    '''CHAR_TYPES : tChar parAbre entero parCierra
    '''
    # CHAR(n) type.
    # FIX: removed the spurious "<tk_puntoComa>" from the report — this
    # production has no ptComa token.
    global cont
    t[0] = Node("CHAR_TYPES", "", cont, 0, 0)
    cont = cont + 1
    nodo1 = Node("tChar", t[1], cont, t.lineno(1), t.lexpos(1))
    cont = cont + 1
    entero = Node("entero", t[3], cont, t.lineno(3), t.lexpos(3))
    cont = cont + 1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(entero)
    lista.append(str(recorrerGramatica(t[0], 0)) + "\n")
def p_EXPR_CHAR_TYPES5(t):
    '''CHAR_TYPES : tText'''
    # TEXT type (no length argument).
    global cont
    raiz = Node("CHAR_TYPES", "", cont, 0, 0)
    cont += 1
    raiz.AddHijos(Node("tText", t[1], cont, t.lineno(1), t.lexpos(1)))
    cont += 1
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "\n")
def p_EXPR_DATE_TYPES(t):
    '''DATE_TYPES : tDate
    | tTimestamp
    | tTime
    | tInterval
    '''
    # Date/time type keyword; the leaf label is derived from the token text.
    global cont
    raiz = Node("DATE_TYPES", "", cont, 0, 0)
    cont += 1
    raiz.AddHijos(Node("t" + str(t[1]), t[1], cont, t.lineno(1), t.lexpos(1)))
    cont += 1
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "\n")
def p_EXPR_DATE_TYPES1(t):
    '''DATE_TYPES : tInterval FIELDS
    '''
    # INTERVAL <field> type (e.g. INTERVAL YEAR).
    # FIX: the leaf label was "ttInterval" (doubled prefix typo) -> "tInterval",
    # consistent with the label scheme of the sibling type rules.
    global cont
    t[0] = Node("DATE_TYPES", "", cont, 0, 0)
    cont = cont + 1
    nodo1 = Node("tInterval", t[1], cont, t.lineno(1), t.lexpos(1))
    cont = cont + 1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(t[2])
    lista.append(str(recorrerGramatica(t[0], 0)) + "\n")
def p_EXPR_BOOL_TYPES(t):
    '''BOOL_TYPES : tBoolean'''
    # BOOLEAN type.
    global cont
    raiz = Node("BOOL_TYPES", "", cont, 0, 0)
    cont += 1
    raiz.AddHijos(Node("tBoolean", t[1], cont, t.lineno(1), t.lexpos(1)))
    cont += 1
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "\n")
def p_EXPR_FIELDS(t):
    '''FIELDS : tYear
    | tMonth
    | tDay
    | tHour
    | tMinute
    | tSecond'''
    # Interval field keyword; the leaf label is derived from the token text.
    global cont
    raiz = Node("FIELDS", "", cont, 0, 0)
    cont += 1
    raiz.AddHijos(Node("t" + str(t[1]), t[1], cont, t.lineno(1), t.lexpos(1)))
    cont += 1
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "\n")
def p_EXPR_SHOW_TABLE(t):
    '''SHOW_TABLES : show tables ptComa'''
    # SHOW TABLES;
    global cont
    raiz = Node("SHOW_TABLES", "", cont, 0, 0)
    cont += 1
    for pos, etq in ((1, "show"), (2, "tables")):
        raiz.AddHijos(Node(etq, t[pos], cont, t.lineno(pos), t.lexpos(pos)))
        cont += 1
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "<tk_puntoComa>" + "\n")
def p_EXPR_DROP_TABLE(t):
    '''DROP_TABLE : drop table id ptComa
    '''
    # DROP TABLE <id>;
    # FIX: the second leaf was labeled "tables" although the matched token is
    # `table`; it now uses "table", consistent with the CREATE_TABLE rules.
    global cont
    t[0] = Node("DROP_TABLE", "", cont, 0, 0)
    cont = cont + 1
    for pos, etq in ((1, "drop"), (2, "table"), (3, "id")):
        t[0].AddHijos(Node(etq, t[pos], cont, t.lineno(pos), t.lexpos(pos)))
        cont = cont + 1
    lista.append(str(recorrerGramatica(t[0], 0)) + "<tk_puntoComa>" + "\n")
def p_EXPR_ALTER_TABLE1(t):
    '''ALTER_TABLE : alter table id rename tColumn id tTo id ptComa
    '''
    # ALTER TABLE <id> RENAME COLUMN <id> TO <id>;
    global cont
    raiz = Node("ALTER_TABLE", "", cont, 0, 0)
    cont += 1
    for pos, etq in ((1, "alter"), (2, "table"), (3, "id"), (4, "rename"),
                     (5, "tColumn"), (6, "id"), (7, "to"), (8, "id")):
        raiz.AddHijos(Node(etq, t[pos], cont, t.lineno(pos), t.lexpos(pos)))
        cont += 1
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "\n")
def p_EXPR_ALTER_TABLE2(t):
    '''ALTER_TABLE : alter table id EXPR_ALTER
    '''
    # ALTER TABLE <id> followed by an already-parsed alteration clause.
    global cont
    raiz = Node("ALTER_TABLE", "", cont, 0, 0)
    cont += 1
    for pos, etq in ((1, "alter"), (2, "table"), (3, "id"), (4, None)):
        if etq is None:
            raiz.AddHijos(t[pos])  # non-terminal: subtree already built
        else:
            raiz.AddHijos(Node(etq, t[pos], cont, t.lineno(pos), t.lexpos(pos)))
            cont += 1
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "\n")
def p_EXPR_ALTER_TABLE3(t):
    '''ALTER_TABLE : alter table id LColumn ptComa'''
    # ALTER TABLE <id> with a column-operation list.
    global cont
    raiz = Node("ALTER_TABLE", "", cont, 0, 0)
    cont += 1
    for pos, etq in ((1, "alter"), (2, "table"), (3, "id"), (4, None)):
        if etq is None:
            raiz.AddHijos(t[pos])  # non-terminal: subtree already built
        else:
            raiz.AddHijos(Node(etq, t[pos], cont, t.lineno(pos), t.lexpos(pos)))
            cont += 1
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "<tk_puntoComa>" + "\n")
def p_EXPR_ALTER_TABLE4(t):
    '''ALTER_TABLE : alter table id add tCheck E ptComa
    '''
    # ALTER TABLE <id> ADD CHECK <E>;
    global cont
    raiz = Node("ALTER_TABLE", "", cont, 0, 0)
    cont += 1
    for pos, etq in ((1, "alter"), (2, "table"), (3, "id"), (4, "add"),
                     (5, "tCheck"), (6, None)):
        if etq is None:
            raiz.AddHijos(t[pos])  # non-terminal: subtree already built
        else:
            raiz.AddHijos(Node(etq, t[pos], cont, t.lineno(pos), t.lexpos(pos)))
            cont += 1
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "<tk_puntoComa>" + "\n")
def p_EXPR_ALTER_TABLE5(t):
    '''ALTER_TABLE : alter table id add tConstraint id tUnique parAbre id parCierra ptComa
    '''
    # ALTER TABLE <id> ADD CONSTRAINT <id> UNIQUE ( <id> ) ;
    # Builds the AST node and logs the walked grammar line into `lista`.
    global cont
    t[0] = Node("ALTER_TABLE","",cont,0,0)
    cont = cont+1
    nodo1 = Node("alter", t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo2 = Node("table", t[2],cont,t.lineno(2) ,t.lexpos(2))
    cont = cont+1
    nodo3 = Node("id", t[3],cont,t.lineno(3) ,t.lexpos(3))
    cont = cont+1
    nodo4 = Node("add", t[4],cont,t.lineno(4) ,t.lexpos(4))
    cont = cont+1
    nodo5 = Node("tConstraint", t[5],cont,t.lineno(5) ,t.lexpos(5))
    cont = cont+1
    nodo6 = Node("id", t[6],cont,t.lineno(6) ,t.lexpos(6))
    cont = cont+1
    nodo7 = Node("tUnique", t[7],cont,t.lineno(7) ,t.lexpos(7))
    cont = cont+1
    nodo8 = Node("id", t[9],cont,t.lineno(9) ,t.lexpos(9))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    t[0].AddHijos(nodo3)
    t[0].AddHijos(nodo4)
    t[0].AddHijos(nodo5)
    # BUG FIX: nodo6 (the constraint name) and nodo7 (the UNIQUE keyword) were
    # built and counted but never attached, so both were silently dropped from
    # the tree.  Every sibling production attaches all of its token nodes.
    t[0].AddHijos(nodo6)
    t[0].AddHijos(nodo7)
    t[0].AddHijos(nodo8)
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_EXPR_ALTER_TABLE6(t):
    '''ALTER_TABLE : alter table id add tForeign tKey parAbre COLS parCierra tReferences id parAbre COLS parCierra ptComa
    '''
    # ALTER TABLE <id> ADD FOREIGN KEY ( COLS ) REFERENCES <id> ( COLS ) ;
    global cont
    t[0] = Node("ALTER_TABLE","",cont,0,0)
    cont = cont+1
    nodo1 = Node("alter", t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo2 = Node("table", t[2],cont,t.lineno(2) ,t.lexpos(2))
    cont = cont+1
    nodo3 = Node("id", t[3],cont,t.lineno(3) ,t.lexpos(3))
    cont = cont+1
    nodo4 = Node("add", t[4],cont,t.lineno(4) ,t.lexpos(4))
    cont = cont+1
    # FIX: label was misspelled "tForeing"; "tForeign" matches the token name
    # and the label used by p_EXPR_ALTER_TABLE11.
    nodo5 = Node("tForeign", t[5],cont,t.lineno(5) ,t.lexpos(5))
    cont = cont+1
    # FIX: label was "tkey"; "tKey" matches the token name and TABLE11.
    nodo6 = Node("tKey", t[6],cont,t.lineno(6) ,t.lexpos(6))
    cont = cont+1
    # Dead code removed: a Node("id", t[8]) was created here but never attached
    # (t[8] is the COLS subtree, added directly below).  The counter bump is
    # kept so the ids assigned to the remaining nodes do not change.
    cont = cont+1
    nodo8 = Node("tReferences", t[10],cont,t.lineno(10) ,t.lexpos(10))
    cont = cont+1
    nodo9 = Node("id", t[11],cont,t.lineno(11) ,t.lexpos(11))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    t[0].AddHijos(nodo3)
    t[0].AddHijos(nodo4)
    t[0].AddHijos(nodo5)
    t[0].AddHijos(nodo6)
    t[0].AddHijos(t[8])
    t[0].AddHijos(nodo8)
    t[0].AddHijos(nodo9)
    t[0].AddHijos(t[13])
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_EXPR_ALTER_TABLE7(t):
    '''ALTER_TABLE : alter table id drop tConstraint id ptComa
    '''
    # ALTER TABLE <id> DROP CONSTRAINT <id> ;  Every token becomes a child of
    # the ALTER_TABLE node, in production order.
    global cont
    raiz = Node("ALTER_TABLE", "", cont, 0, 0)
    cont += 1
    etiquetas = ("alter", "table", "id", "drop", "tConstraint", "id")
    for idx, etiqueta in enumerate(etiquetas, start=1):
        hijo = Node(etiqueta, t[idx], cont, t.lineno(idx), t.lexpos(idx))
        cont += 1
        raiz.AddHijos(hijo)
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "<tk_puntoComa>" + "\n")
# *********************** new grammar section *********************
def p_EXPR_ALTER_TABLE8(t):
    '''ALTER_TABLE : alter table id LDColumn ptComa '''
    # ALTER TABLE <id> <DROP COLUMN list> ;  Same shape as TABLE3 but with the
    # LDColumn (drop-column) list subtree.
    global cont
    t[0] = Node("ALTER_TABLE","",cont,0,0)
    cont = cont+1
    nodo1 = Node("alter", t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo2 = Node("table", t[2],cont,t.lineno(2) ,t.lexpos(2))
    cont = cont+1
    nodo3 = Node("id", t[3],cont,t.lineno(3) ,t.lexpos(3))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    t[0].AddHijos(nodo3)
    t[0].AddHijos(t[4])
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_EXPR_ALTER_TABLE9(t):
    '''ALTER_TABLE : alter table id rename tTo id ptComa
    '''
    # ALTER TABLE <id> RENAME TO <id> ;
    # NOTE: the names nodo7/nodo8 are leftovers from a longer production
    # (RENAME COLUMN); they hold the TO keyword and the new name here.
    global cont
    t[0] = Node("ALTER_TABLE","",cont,0,0)
    cont = cont+1
    nodo1 = Node("alter", t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo2 = Node("table", t[2],cont,t.lineno(2) ,t.lexpos(2))
    cont = cont+1
    nodo3 = Node("id", t[3],cont,t.lineno(3) ,t.lexpos(3))
    cont = cont+1
    nodo4 = Node("rename", t[4],cont,t.lineno(4) ,t.lexpos(4))
    cont = cont+1
    nodo7 = Node("to", t[5],cont,t.lineno(5) ,t.lexpos(5))
    cont = cont+1
    nodo8 = Node("id", t[6],cont,t.lineno(6) ,t.lexpos(6))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    t[0].AddHijos(nodo3)
    t[0].AddHijos(nodo4)
    t[0].AddHijos(nodo7)
    t[0].AddHijos(nodo8)
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_EXPR_ALTER_TABLE10(t):
    '''ALTER_TABLE : alter table id add tConstraint id tCheck E ptComa
    '''
    # ALTER TABLE <id> ADD CONSTRAINT <id> CHECK <expr> ;  All token nodes plus
    # the CHECK expression subtree (t[8]) are attached in production order.
    global cont
    t[0] = Node("ALTER_TABLE","",cont,0,0)
    cont = cont+1
    nodo1 = Node("alter", t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo2 = Node("table", t[2],cont,t.lineno(2) ,t.lexpos(2))
    cont = cont+1
    nodo3 = Node("id", t[3],cont,t.lineno(3) ,t.lexpos(3))
    cont = cont+1
    nodo4 = Node("add", t[4],cont,t.lineno(4) ,t.lexpos(4))
    cont = cont+1
    nodo5 = Node("tConstraint", t[5],cont,t.lineno(5) ,t.lexpos(5))
    cont = cont+1
    nodo6 = Node("id", t[6],cont,t.lineno(6) ,t.lexpos(6))
    cont = cont+1
    nodo7 = Node("tCheck", t[7],cont,t.lineno(7) ,t.lexpos(7))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    t[0].AddHijos(nodo3)
    t[0].AddHijos(nodo4)
    t[0].AddHijos(nodo5)
    t[0].AddHijos(nodo6)
    t[0].AddHijos(nodo7)
    t[0].AddHijos(t[8])
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_EXPR_ALTER_TABLE11(t):
    '''ALTER_TABLE : alter table id add tConstraint id tForeign tKey parAbre COLS parCierra tReferences id parAbre COLS parCierra ptComa
    '''
    # ALTER TABLE <id> ADD CONSTRAINT <id> FOREIGN KEY ( COLS )
    #   REFERENCES <id> ( COLS ) ;
    global cont
    t[0] = Node("ALTER_TABLE","",cont,0,0)
    cont = cont+1
    nodo1 = Node("alter", t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo2 = Node("table", t[2],cont,t.lineno(2) ,t.lexpos(2))
    cont = cont+1
    nodo3 = Node("id", t[3],cont,t.lineno(3) ,t.lexpos(3))
    cont = cont+1
    nodo4 = Node("add", t[4],cont,t.lineno(4) ,t.lexpos(4))
    cont = cont+1
    nodo5 = Node("tConstraint", t[5],cont,t.lineno(5) ,t.lexpos(5))
    cont = cont+1
    nodo6 = Node("id", t[6],cont,t.lineno(6) ,t.lexpos(6))
    cont = cont+1
    nodo7 = Node("tForeign", t[7],cont,t.lineno(7) ,t.lexpos(7))
    cont = cont+1
    nodo8 = Node("tKey", t[8],cont,t.lineno(8) ,t.lexpos(8))
    cont = cont+1
    nodo12 = Node("tReferences", t[12],cont,t.lineno(12) ,t.lexpos(12))
    cont = cont+1
    nodo13 = Node("id", t[13],cont,t.lineno(13) ,t.lexpos(13))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    t[0].AddHijos(nodo3)
    t[0].AddHijos(nodo4)
    t[0].AddHijos(nodo5)
    # BUG FIX: nodo6 (the constraint name) and nodo7 (the FOREIGN keyword) were
    # built and counted but never attached, dropping them from the tree.
    t[0].AddHijos(nodo6)
    t[0].AddHijos(nodo7)
    t[0].AddHijos(nodo8)
    t[0].AddHijos(t[10])
    t[0].AddHijos(nodo12)
    t[0].AddHijos(nodo13)
    t[0].AddHijos(t[15])
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_LDropColumn(t):
    ''' LDColumn : LDColumn coma LDCol'''
    # Left-recursive list: append the next DROP COLUMN item to the list node.
    t[0] = t[1]
    t[0].AddHijos(t[3])
    lista.append(str(recorrerGramatica(t[0],0))+"\n")
def p_LDropColumn1(t):
    ''' LDColumn : LDCol'''
    # Base case: wrap the first DROP COLUMN item in a new LDColumn list node.
    global cont
    t[0] = Node("LDColumn","",cont,0,0)
    cont = cont+1
    t[0].AddHijos(t[1])
    lista.append(str(recorrerGramatica(t[0],0))+"\n")
def p_LDCol(t):
    ''' LDCol : drop tColumn id '''
    # One DROP COLUMN <id> item.
    global cont
    t[0] = Node("LDCol","",cont,0,0)
    cont = cont+1
    nodo1 = Node("drop", t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo2 = Node("tColumn", t[2],cont,t.lineno(2) ,t.lexpos(2))
    cont = cont+1
    nodo3 = Node("id", t[3],cont,t.lineno(3) ,t.lexpos(3))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    t[0].AddHijos(nodo3)
    lista.append(str(recorrerGramatica(t[0],0))+"\n")
def p_LAddColumn(t):
    ''' LColumn : LColumn coma LCol'''
    # Left-recursive list: append the next ADD COLUMN item to the list node.
    t[0] = t[1]
    t[0].AddHijos(t[3])
    lista.append(str(recorrerGramatica(t[0],0))+"\n")
def p_LAddColumn1(t):
    ''' LColumn : LCol '''
    # Base case: wrap the first ADD COLUMN item in a new LColumn list node.
    global cont
    t[0] = Node("LColumn","",cont,0,0)
    cont = cont+1
    t[0].AddHijos(t[1])
    lista.append(str(recorrerGramatica(t[0],0))+"\n")
def p_LCol(t):
    '''LCol : add tColumn id TIPO'''
    # One ADD COLUMN <id> <type> item.
    # NOTE(review): this appends "<tk_puntoComa>" although the production has
    # no ptComa token (unlike p_LDCol, which appends none) — confirm intended.
    global cont
    t[0] = Node("LCol","",cont,0,0)
    cont = cont+1
    nodo1 = Node("add", t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo2 = Node("tColumn", t[2],cont,t.lineno(2) ,t.lexpos(2))
    cont = cont+1
    nodo3 = Node("id", t[3],cont,t.lineno(3) ,t.lexpos(3))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    t[0].AddHijos(nodo3)
    t[0].AddHijos(t[4])
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
# ********************************************
# /////// NOTE: this adds one extra (possibly redundant) production
def p_EXPR_ALTER(t):
    '''EXPR_ALTER : EXPR_ALTER coma alter tColumn id tSet not null ptComa
    '''
    # Chained ALTER COLUMN <id> SET NOT NULL clause (left-recursive list).
    # NOTE(review): the root is labelled "ALTER_TABLE" though the nonterminal
    # is EXPR_ALTER — confirm whether the label is intentional.
    global cont
    t[0] = Node("ALTER_TABLE","",cont,0,0)
    cont = cont+1
    nodo1 = Node("alter", t[3],cont,t.lineno(3) ,t.lexpos(3))
    cont = cont+1
    nodo2 = Node("tColumna", t[4],cont,t.lineno(4) ,t.lexpos(4))
    cont = cont+1
    nodo3 = Node("id", t[5],cont,t.lineno(5) ,t.lexpos(5))
    cont = cont+1
    nodo4 = Node("tSet", t[6],cont,t.lineno(6) ,t.lexpos(6))
    cont = cont+1
    nodo5 = Node("not", t[7],cont,t.lineno(7) ,t.lexpos(7))
    cont = cont+1
    nodo6 = Node("null", t[8],cont,t.lineno(8) ,t.lexpos(8))
    cont = cont+1
    t[0].AddHijos(t[1])
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    t[0].AddHijos(nodo3)
    t[0].AddHijos(nodo4)
    t[0].AddHijos(nodo5)
    t[0].AddHijos(nodo6)
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_EXPR_ALTER1(t):
    '''EXPR_ALTER : EXPR_ALTER coma alter tColumn id ttype CHAR_TYPES ptComa
    '''
    # Chained ALTER COLUMN <id> TYPE <char-type> clause (left-recursive list);
    # the previous list (t[1]) and the CHAR_TYPES subtree (t[7]) are attached.
    global cont
    t[0] = Node("ALTER_TABLE","",cont,0,0)
    cont = cont+1
    nodo1 = Node("alter", t[3],cont,t.lineno(3) ,t.lexpos(3))
    cont = cont+1
    nodo2 = Node("tColumna", t[4],cont,t.lineno(4) ,t.lexpos(4))
    cont = cont+1
    nodo3 = Node("id", t[5],cont,t.lineno(5) ,t.lexpos(5))
    cont = cont+1
    nodo4 = Node("ttype", t[6],cont,t.lineno(6) ,t.lexpos(6))
    cont = cont+1
    t[0].AddHijos(t[1])
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    t[0].AddHijos(nodo3)
    t[0].AddHijos(nodo4)
    t[0].AddHijos(t[7])
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
def p_EXPR_ALTER2(t):
    '''EXPR_ALTER : alter tColumn id ttype CHAR_TYPES ptComa
    '''
    # Single ALTER COLUMN <id> TYPE <char-type> clause (list base case).
    # Root label kept as "ALTER_TABLE" to match the sibling productions.
    global cont
    raiz = Node("ALTER_TABLE", "", cont, 0, 0)
    cont += 1
    for idx, etiqueta in enumerate(("alter", "tColumna", "id", "ttype"), start=1):
        hijo = Node(etiqueta, t[idx], cont, t.lineno(idx), t.lexpos(idx))
        cont += 1
        raiz.AddHijos(hijo)
    raiz.AddHijos(t[5])  # CHAR_TYPES subtree
    t[0] = raiz
    lista.append(str(recorrerGramatica(t[0], 0)) + "<tk_puntoComa>" + "\n")
def p_EXPR_ALTER3(t):
    '''EXPR_ALTER : alter tColumn id tSet not null ptComa
    '''
    # Single ALTER COLUMN <id> SET NOT NULL clause (list base case).
    global cont
    t[0] = Node("ALTER_TABLE","",cont,0,0)
    cont = cont+1
    nodo1 = Node("alter", t[1],cont,t.lineno(1) ,t.lexpos(1))
    cont = cont+1
    nodo2 = Node("tColumna", t[2],cont,t.lineno(2) ,t.lexpos(2))
    cont = cont+1
    nodo3 = Node("id", t[3],cont,t.lineno(3) ,t.lexpos(3))
    cont = cont+1
    nodo4 = Node("tSet", t[4],cont,t.lineno(4) ,t.lexpos(4))
    cont = cont+1
    nodo5 = Node("not", t[5],cont,t.lineno(5) ,t.lexpos(5))
    cont = cont+1
    nodo6 = Node("null", t[6],cont,t.lineno(6) ,t.lexpos(6))
    cont = cont+1
    t[0].AddHijos(nodo1)
    t[0].AddHijos(nodo2)
    t[0].AddHijos(nodo3)
    t[0].AddHijos(nodo4)
    t[0].AddHijos(nodo5)
    t[0].AddHijos(nodo6)
    lista.append(str(recorrerGramatica(t[0],0))+"<tk_puntoComa>"+"\n")
# <<<<<<<<<<<<<<<<<<<<<<<<<<< FRANCISCO <<<<<<<<<<<<<<<<<<<<<<<<<<<<
# <<<<<<<<<<<<<<<<<<<<<<<<<<< EDI <<<<<<<<<<<<<<<<<<<<<<<<<<<<
def p_INSERT(p):
    ''' INSERT : insert into id values parAbre LISTA_EXP parCierra ptComa '''
    # INSERT INTO <id> VALUES ( <expr-list> ) ;
    global cont
    raiz = Node("INSERT", "", cont, 0, 0)
    cont += 1
    for idx, etiqueta in enumerate(("insert", "into", "id", "values"), start=1):
        hijo = Node(etiqueta, p[idx], cont, p.lineno(idx), p.lexpos(idx))
        cont += 1
        raiz.AddHijos(hijo)
    raiz.AddHijos(p[6])  # LISTA_EXP subtree
    p[0] = raiz
    lista.append(str(recorrerGramatica(p[0], 0)) + "<tk_puntoComa>" + "\n")
def p_INSERT1(p):
    ''' INSERT : insert into id parAbre LISTA_EXP parCierra values parAbre LISTA_EXP parCierra ptComa '''
    # INSERT INTO <id> ( <column-list> ) VALUES ( <expr-list> ) ;
    # Children: keywords, table id, column list, VALUES keyword, value list.
    global cont
    p[0] = Node("INSERT","",cont,0,0)
    cont = cont+1
    nodo1 = Node("insert", p[1],cont,p.lineno(1) ,p.lexpos(1))
    cont = cont+1
    nodo2 = Node("into",p[2],cont,p.lineno(2) ,p.lexpos(2))
    cont = cont+1
    nodo3 = Node("id",p[3],cont,p.lineno(3) ,p.lexpos(3))
    cont = cont+1
    nodo4 = Node("values",p[7],cont,p.lineno(7) ,p.lexpos(7))
    cont = cont+1
    p[0].AddHijos(nodo1)
    p[0].AddHijos(nodo2)
    p[0].AddHijos(nodo3)
    p[0].AddHijos(p[5])
    p[0].AddHijos(nodo4)
    p[0].AddHijos(p[9])
    lista.append(str(recorrerGramatica(p[0],0))+"<tk_puntoComa>"+"\n")
def p_LISTA_EXP1(p):
    ''' LISTA_EXP : LISTA_EXP coma E_FUNC
    '''
    # Left-recursive expression list: append the next E_FUNC to the list node.
    global cont
    p[0] = p[1]
    p[0].AddHijos(p[3])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_LISTA_EXP2(p):
    ''' LISTA_EXP : E_FUNC
    '''
    # Base case of the expression list.
    # NOTE(review): the root is labelled "E_FUNC" rather than "LISTA_EXP" —
    # confirm whether the label is intentional.
    global cont
    p[0] = Node("E_FUNC","",cont,0,0)
    cont = cont+1
    p[0].AddHijos(p[1])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_E(p):
    ''' E : E or E
    | E And E
    | E diferente E
    | E notEqual E
    | E igual E
    | E mayor E
    | E menor E
    | E mayorIgual E
    | E menorIgual E
    | E mas E
    | E menos E
    | E multi E
    | E divi E
    | E modulo E
    | E elevado E
    | E punto E
    | E barraDoble E
    '''
    # Binary expression: build an E node with (left, operator, right) children.
    # The original 17 if/elif branches were identical except for the node label
    # and the value escaping, so they are collapsed into one lookup table.
    global cont
    # operator lexeme -> node label.  "or"/"and" are matched case-insensitively
    # (keyword tokens).  NOTE: "||" was labelled "or" in the original code and
    # that label is preserved to keep the output byte-identical.
    etiquetas = {
        "or": "or", "and": "and",
        "<>": "diferente", "!=": "notEqual", "=": "igual",
        ">": "mayor", "<": "menor", ">=": "mayorIgual", "<=": "menorIgual",
        "+": "mas", "-": "menos", "*": "multi", "/": "divi",
        "%": "modulo", "**": "elevado", ".": "punto", "||": "or",
    }
    op = p[2]
    clave = op.lower() if op.lower() in ("or", "and") else op
    etiqueta = etiquetas.get(clave)
    if etiqueta is None:
        # Same as the original chain: an unmatched operator leaves p[0] unset.
        return
    # The four relational operators were stored with a leading backslash
    # (escaping for the tree renderer); all others keep the raw lexeme.
    valor = "\\" + str(op) if clave in (">", "<", ">=", "<=") else op
    p[0] = Node("E", "", cont, 0, 0)
    cont = cont + 1
    nodo1 = Node(etiqueta, valor, cont, p.lineno(2), p.lexpos(2))
    cont = cont + 1
    p[0].AddHijos(p[1])
    p[0].AddHijos(nodo1)
    p[0].AddHijos(p[3])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_OpNot(p):
    ''' E : not E '''
    # Logical negation: NOT <expr>.
    global cont
    p[0] = Node("E","",cont,0,0)
    cont = cont+1
    nodo1 = Node("not",p[1],cont,p.lineno(1) ,p.lexpos(1))
    cont = cont+1
    p[0].AddHijos(nodo1)
    p[0].AddHijos(p[2])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_OpNegativo(p):
    ''' E : menos E %prec umenos '''
    # Unary minus, resolved with the umenos precedence.
    global cont
    p[0] = Node("E","",cont,0,0)
    cont = cont+1
    nodo1 = Node("menos",p[1],cont,p.lineno(1) ,p.lexpos(1))
    cont = cont+1
    p[0].AddHijos(nodo1)
    p[0].AddHijos(p[2])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_OpParentesis(p):
    ''' E : parAbre E parCierra '''
    # Parenthesized expression: pass the inner subtree up unchanged.
    p[0] = p[2]
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
# Leaf productions: each wraps one literal/identifier token in an E node.
def p_entero(p):
    ''' E : entero
    '''
    # Integer literal.
    global cont
    p[0] = Node("E","",cont,0,0)
    cont = cont+1
    nodo1 = Node("entero",p[1],cont,p.lineno(1) ,p.lexpos(1))
    cont = cont+1
    p[0].AddHijos(nodo1)
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_decimal(p):
    ''' E : decimal
    '''
    # Decimal literal.
    global cont
    p[0] = Node("E","",cont,0,0)
    cont = cont+1
    nodo1 = Node("decimal",p[1],cont,p.lineno(1) ,p.lexpos(1))
    cont = cont+1
    p[0].AddHijos(nodo1)
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_cadena(p):
    ''' E : cadena
    '''
    # String literal (value coerced to str for the node).
    global cont
    p[0] = Node("E","",cont,0,0)
    cont = cont+1
    nodo1 = Node("cadena",str(p[1]),cont,p.lineno(1) ,p.lexpos(1))
    cont = cont+1
    p[0].AddHijos(nodo1)
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_id(p):
    ''' E : id
    '''
    # Identifier.
    global cont
    p[0] = Node("E","",cont,0,0)
    cont = cont+1
    nodo1 = Node("id",p[1],cont,p.lineno(1) ,p.lexpos(1))
    cont = cont+1
    p[0].AddHijos(nodo1)
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_fecha(p):
    ''' E : fecha
    '''
    # Date literal.
    global cont
    p[0] = Node("E","",cont,0,0)
    cont = cont+1
    nodo1 = Node("fecha",p[1],cont,p.lineno(1) ,p.lexpos(1))
    cont = cont+1
    p[0].AddHijos(nodo1)
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_hora(p):
    ''' E : hora
    '''
    # Time literal.
    global cont
    p[0] = Node("E","",cont,0,0)
    cont = cont+1
    nodo1 = Node("hora",p[1],cont,p.lineno(1) ,p.lexpos(1))
    cont = cont+1
    p[0].AddHijos(nodo1)
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_fecha_hora(p):
    ''' E : fecha_hora
    '''
    # Timestamp (date + time) literal.
    global cont
    p[0] = Node("E","",cont,0,0)
    cont = cont+1
    nodo1 = Node("fecha_hora",p[1],cont,p.lineno(1) ,p.lexpos(1))
    cont = cont+1
    p[0].AddHijos(nodo1)
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_booleano(p):
    '''E : yes
    | no
    | on
    | off
    | tTrue
    | tFalse
    '''
    # Boolean-like literal: the node label is taken from the lexeme itself.
    global cont
    p[0] = Node("E","",cont,0,0)
    cont = cont+1
    nodo1 = Node(str(p[1]),p[1],cont,p.lineno(1) ,p.lexpos(1))
    cont = cont+1
    p[0].AddHijos(nodo1)
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_Intervaloc(p):
    ''' E : intervaloc
    '''
    # Interval literal.
    global cont
    p[0] = Node("E","",cont,0,0)
    cont = cont+1
    nodo1 = Node("intervaloc",p[1],cont,p.lineno(1) ,p.lexpos(1))
    cont = cont+1
    p[0].AddHijos(nodo1)
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_val(p):
    ''' E : val
    '''
    # VAL token.
    global cont
    p[0] = Node("E","",cont,0,0)
    cont = cont+1
    nodo1 = Node("val",p[1],cont,p.lineno(1) ,p.lexpos(1))
    cont = cont+1
    p[0].AddHijos(nodo1)
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_CALL(p):
    ''' E : CALL
    '''
    # Function-call subtree used as an expression.
    global cont
    p[0] = Node("E","",cont,0,0)
    cont = cont+1
    p[0].AddHijos(p[1])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
# <<<<<<<<<<<<<<<<<<<<<<<<<<< EDI <<<<<<<<<<<<<<<<<<<<<<<<<<<<
######################################### QUERIES
def p_QUERY1(p):
    '''QUERY : EXPR_SELECT
    '''
    # Bare SELECT (no FROM).
    global cont
    p[0] = Node("QUERY","",cont,0,0)
    cont = cont+1
    p[0].AddHijos(p[1])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_QUERY2(p):
    '''QUERY : EXPR_SELECT EXPR_FROM
    '''
    # SELECT ... FROM ...
    global cont
    p[0] = Node("QUERY","",cont,0,0)
    cont = cont+1
    p[0].AddHijos(p[1])
    p[0].AddHijos(p[2])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_QUERY3(p):
    '''QUERY : EXPR_SELECT EXPR_FROM EXPR_WHERE EXPR_GROUPBY EXPR_HAVING EXPR_ORDERBY EXPR_LIMIT
    '''
    # Full query with all optional clauses present.
    global cont
    p[0] = Node("QUERY","",cont,0,0)
    cont = cont+1
    p[0].AddHijos(p[1])
    p[0].AddHijos(p[2])
    p[0].AddHijos(p[3])
    p[0].AddHijos(p[4])
    p[0].AddHijos(p[5])
    p[0].AddHijos(p[6])
    p[0].AddHijos(p[7])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
#LEN 4 #select, ffrom, where, groupby, having, orderby, limit
# QUERY productions with len(p) == 4: SELECT FROM plus exactly one optional
# clause.  Each simply wraps the subtrees under a QUERY node.
def p_QUERY_p4_1(p):
    '''QUERY : EXPR_SELECT EXPR_FROM EXPR_ORDERBY'''
    global cont
    p[0] = Node("QUERY","",cont,0,0)
    cont = cont+1
    p[0].AddHijos(p[1])
    p[0].AddHijos(p[2])
    p[0].AddHijos(p[3])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_QUERY_p4_2(p):
    '''QUERY : EXPR_SELECT EXPR_FROM EXPR_LIMIT'''
    global cont
    p[0] = Node("QUERY","",cont,0,0)
    cont = cont+1
    p[0].AddHijos(p[1])
    p[0].AddHijos(p[2])
    p[0].AddHijos(p[3])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_QUERY_p4_3(p):
    '''QUERY : EXPR_SELECT EXPR_FROM EXPR_WHERE'''
    global cont
    p[0] = Node("QUERY","",cont,0,0)
    cont = cont+1
    p[0].AddHijos(p[1])
    p[0].AddHijos(p[2])
    p[0].AddHijos(p[3])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_QUERY_p4_4(p):
    '''QUERY : EXPR_SELECT EXPR_FROM EXPR_HAVING'''
    global cont
    p[0] = Node("QUERY","",cont,0,0)
    cont = cont+1
    p[0].AddHijos(p[1])
    p[0].AddHijos(p[2])
    p[0].AddHijos(p[3])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_QUERY_p4_5(p):
    '''QUERY : EXPR_SELECT EXPR_FROM EXPR_GROUPBY'''
    global cont
    p[0] = Node("QUERY","",cont,0,0)
    cont = cont+1
    p[0].AddHijos(p[1])
    p[0].AddHijos(p[2])
    p[0].AddHijos(p[3])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
#LEN 5 #select, ffrom, where, groupby, having, orderby, limit
# QUERY productions with len(p) == 5: SELECT FROM plus two optional clauses.
def p_QUERY_p5_1(p):
    '''QUERY : EXPR_SELECT EXPR_FROM EXPR_ORDERBY EXPR_LIMIT'''
    global cont
    p[0] = Node("QUERY","",cont,0,0)
    cont = cont+1
    p[0].AddHijos(p[1])
    p[0].AddHijos(p[2])
    p[0].AddHijos(p[3])
    p[0].AddHijos(p[4])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_QUERY_p5_2(p):
    '''QUERY : EXPR_SELECT EXPR_FROM EXPR_WHERE EXPR_ORDERBY '''
    global cont
    p[0] = Node("QUERY","",cont,0,0)
    cont = cont+1
    p[0].AddHijos(p[1])
    p[0].AddHijos(p[2])
    p[0].AddHijos(p[3])
    p[0].AddHijos(p[4])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_QUERY_p5_3(p):
    '''QUERY : EXPR_SELECT EXPR_FROM EXPR_WHERE EXPR_LIMIT'''
    global cont
    p[0] = Node("QUERY","",cont,0,0)
    cont = cont+1
    p[0].AddHijos(p[1])
    p[0].AddHijos(p[2])
    p[0].AddHijos(p[3])
    p[0].AddHijos(p[4])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_QUERY_p5_4(p):
    '''QUERY : EXPR_SELECT EXPR_FROM EXPR_WHERE EXPR_GROUPBY'''
    global cont
    p[0] = Node("QUERY","",cont,0,0)
    cont = cont+1
    p[0].AddHijos(p[1])
    p[0].AddHijos(p[2])
    p[0].AddHijos(p[3])
    p[0].AddHijos(p[4])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_QUERY_p5_5(p):
    '''QUERY : EXPR_SELECT EXPR_FROM EXPR_GROUPBY EXPR_LIMIT'''
    global cont
    p[0] = Node("QUERY","",cont,0,0)
    cont = cont+1
    p[0].AddHijos(p[1])
    p[0].AddHijos(p[2])
    p[0].AddHijos(p[3])
    p[0].AddHijos(p[4])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_QUERY_p5_6(p):
    '''QUERY : EXPR_SELECT EXPR_FROM EXPR_GROUPBY EXPR_ORDERBY '''
    global cont
    p[0] = Node("QUERY","",cont,0,0)
    cont = cont+1
    p[0].AddHijos(p[1])
    p[0].AddHijos(p[2])
    p[0].AddHijos(p[3])
    p[0].AddHijos(p[4])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_QUERY_p5_7(p):
    '''QUERY : EXPR_SELECT EXPR_FROM EXPR_GROUPBY EXPR_HAVING'''
    global cont
    p[0] = Node("QUERY","",cont,0,0)
    cont = cont+1
    p[0].AddHijos(p[1])
    p[0].AddHijos(p[2])
    p[0].AddHijos(p[3])
    p[0].AddHijos(p[4])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_QUERY_p5_8(p):
    '''QUERY : EXPR_SELECT EXPR_FROM EXPR_HAVING EXPR_LIMIT'''
    global cont
    p[0] = Node("QUERY","",cont,0,0)
    cont = cont+1
    p[0].AddHijos(p[1])
    p[0].AddHijos(p[2])
    p[0].AddHijos(p[3])
    p[0].AddHijos(p[4])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_QUERY_p5_9(p):
    '''QUERY : EXPR_SELECT EXPR_FROM EXPR_HAVING EXPR_ORDERBY'''
    global cont
    p[0] = Node("QUERY","",cont,0,0)
    cont = cont+1
    p[0].AddHijos(p[1])
    p[0].AddHijos(p[2])
    p[0].AddHijos(p[3])
    p[0].AddHijos(p[4])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_QUERY_p5_10(p):
    '''QUERY : EXPR_SELECT EXPR_FROM EXPR_WHERE EXPR_HAVING'''
    global cont
    p[0] = Node("QUERY","",cont,0,0)
    cont = cont+1
    p[0].AddHijos(p[1])
    p[0].AddHijos(p[2])
    p[0].AddHijos(p[3])
    p[0].AddHijos(p[4])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
#LEN 6 #select, ffrom, where, groupby, having, orderby, limit
# QUERY productions with len(p) == 6: SELECT FROM plus three optional clauses.
def p_QUERY_p6_1(p):
    '''QUERY : EXPR_SELECT EXPR_FROM EXPR_WHERE EXPR_ORDERBY EXPR_LIMIT '''
    global cont
    p[0] = Node("QUERY","",cont,0,0)
    cont = cont+1
    p[0].AddHijos(p[1])
    p[0].AddHijos(p[2])
    p[0].AddHijos(p[3])
    p[0].AddHijos(p[4])
    p[0].AddHijos(p[5])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_QUERY_p6_2(p):
    '''QUERY : EXPR_SELECT EXPR_FROM EXPR_WHERE EXPR_GROUPBY EXPR_ORDERBY '''
    global cont
    p[0] = Node("QUERY","",cont,0,0)
    cont = cont+1
    p[0].AddHijos(p[1])
    p[0].AddHijos(p[2])
    p[0].AddHijos(p[3])
    p[0].AddHijos(p[4])
    p[0].AddHijos(p[5])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_QUERY_p6_3(p):
    '''QUERY : EXPR_SELECT EXPR_FROM EXPR_WHERE EXPR_GROUPBY EXPR_LIMIT '''
    global cont
    p[0] = Node("QUERY","",cont,0,0)
    cont = cont+1
    p[0].AddHijos(p[1])
    p[0].AddHijos(p[2])
    p[0].AddHijos(p[3])
    p[0].AddHijos(p[4])
    p[0].AddHijos(p[5])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_QUERY_p6_4(p):
    '''QUERY : EXPR_SELECT EXPR_FROM EXPR_WHERE EXPR_GROUPBY EXPR_HAVING '''
    global cont
    p[0] = Node("QUERY","",cont,0,0)
    cont = cont+1
    p[0].AddHijos(p[1])
    p[0].AddHijos(p[2])
    p[0].AddHijos(p[3])
    p[0].AddHijos(p[4])
    p[0].AddHijos(p[5])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_QUERY_p6_5(p):
    '''QUERY : EXPR_SELECT EXPR_FROM EXPR_GROUPBY EXPR_ORDERBY EXPR_LIMIT '''
    global cont
    p[0] = Node("QUERY","",cont,0,0)
    cont = cont+1
    p[0].AddHijos(p[1])
    p[0].AddHijos(p[2])
    p[0].AddHijos(p[3])
    p[0].AddHijos(p[4])
    p[0].AddHijos(p[5])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_QUERY_p6_6(p):
    '''QUERY : EXPR_SELECT EXPR_FROM EXPR_GROUPBY EXPR_HAVING EXPR_LIMIT '''
    global cont
    p[0] = Node("QUERY","",cont,0,0)
    cont = cont+1
    p[0].AddHijos(p[1])
    p[0].AddHijos(p[2])
    p[0].AddHijos(p[3])
    p[0].AddHijos(p[4])
    p[0].AddHijos(p[5])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_QUERY_p6_7(p):
    '''QUERY : EXPR_SELECT EXPR_FROM EXPR_GROUPBY EXPR_HAVING EXPR_ORDERBY '''
    global cont
    p[0] = Node("QUERY","",cont,0,0)
    cont = cont+1
    p[0].AddHijos(p[1])
    p[0].AddHijos(p[2])
    p[0].AddHijos(p[3])
    p[0].AddHijos(p[4])
    p[0].AddHijos(p[5])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_QUERY_p6_8(p):
    '''QUERY : EXPR_SELECT EXPR_FROM EXPR_HAVING EXPR_ORDERBY EXPR_LIMIT'''
    global cont
    p[0] = Node("QUERY","",cont,0,0)
    cont = cont+1
    p[0].AddHijos(p[1])
    p[0].AddHijos(p[2])
    p[0].AddHijos(p[3])
    p[0].AddHijos(p[4])
    p[0].AddHijos(p[5])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_QUERY_p6_9(p):
    '''QUERY : EXPR_SELECT EXPR_FROM EXPR_WHERE EXPR_HAVING EXPR_LIMIT'''
    global cont
    p[0] = Node("QUERY","",cont,0,0)
    cont = cont+1
    p[0].AddHijos(p[1])
    p[0].AddHijos(p[2])
    p[0].AddHijos(p[3])
    p[0].AddHijos(p[4])
    p[0].AddHijos(p[5])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_QUERY_p6_10(p):
    '''QUERY : EXPR_SELECT EXPR_FROM EXPR_WHERE EXPR_HAVING EXPR_ORDERBY'''
    global cont
    p[0] = Node("QUERY","",cont,0,0)
    cont = cont+1
    p[0].AddHijos(p[1])
    p[0].AddHijos(p[2])
    p[0].AddHijos(p[3])
    p[0].AddHijos(p[4])
    p[0].AddHijos(p[5])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
#LEN 7 #select, ffrom, where, groupby, having, orderby, limit
# QUERY productions with len(p) == 7: SELECT FROM plus four optional clauses.
def p_QUERY_p7_1(p):
    '''QUERY : EXPR_SELECT EXPR_FROM EXPR_WHERE EXPR_GROUPBY EXPR_ORDERBY EXPR_LIMIT'''
    global cont
    p[0] = Node("QUERY","",cont,0,0)
    cont = cont+1
    p[0].AddHijos(p[1])
    p[0].AddHijos(p[2])
    p[0].AddHijos(p[3])
    p[0].AddHijos(p[4])
    p[0].AddHijos(p[5])
    p[0].AddHijos(p[6])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_QUERY_p7_2(p):
    '''QUERY : EXPR_SELECT EXPR_FROM EXPR_WHERE EXPR_GROUPBY EXPR_HAVING EXPR_LIMIT'''
    global cont
    p[0] = Node("QUERY","",cont,0,0)
    cont = cont+1
    p[0].AddHijos(p[1])
    p[0].AddHijos(p[2])
    p[0].AddHijos(p[3])
    p[0].AddHijos(p[4])
    p[0].AddHijos(p[5])
    p[0].AddHijos(p[6])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_QUERY_p7_3(p):
    '''QUERY : EXPR_SELECT EXPR_FROM EXPR_WHERE EXPR_GROUPBY EXPR_HAVING EXPR_ORDERBY'''
    global cont
    p[0] = Node("QUERY","",cont,0,0)
    cont = cont+1
    p[0].AddHijos(p[1])
    p[0].AddHijos(p[2])
    p[0].AddHijos(p[3])
    p[0].AddHijos(p[4])
    p[0].AddHijos(p[5])
    p[0].AddHijos(p[6])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_QUERY_p7_4(p):
    '''QUERY : EXPR_SELECT EXPR_FROM EXPR_GROUPBY EXPR_HAVING EXPR_ORDERBY EXPR_LIMIT'''
    global cont
    p[0] = Node("QUERY","",cont,0,0)
    cont = cont+1
    p[0].AddHijos(p[1])
    p[0].AddHijos(p[2])
    p[0].AddHijos(p[3])
    p[0].AddHijos(p[4])
    p[0].AddHijos(p[5])
    p[0].AddHijos(p[6])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_QUERY_p7_5(p):
    '''QUERY : EXPR_SELECT EXPR_FROM EXPR_WHERE EXPR_HAVING EXPR_ORDERBY EXPR_LIMIT'''
    global cont
    p[0] = Node("QUERY","",cont,0,0)
    cont = cont+1
    p[0].AddHijos(p[1])
    p[0].AddHijos(p[2])
    p[0].AddHijos(p[3])
    p[0].AddHijos(p[4])
    p[0].AddHijos(p[5])
    p[0].AddHijos(p[6])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_EXPR_SELECT(p):
    '''EXPR_SELECT : select multi
    '''
    # SELECT *  (`multi` is the '*' token; node labels come from the lexemes).
    global cont
    p[0] = Node("EXPR_SELECT","",cont,0,0)
    cont = cont+1
    nodo1 = Node(str(p[1]),p[1],cont,p.lineno(1) ,p.lexpos(1))
    cont = cont+1
    nodo2 = Node(str(p[2]),p[2],cont,p.lineno(2) ,p.lexpos(2))
    cont = cont+1
    p[0].AddHijos(nodo1)
    p[0].AddHijos(nodo2)
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_EXPR_SELECT1(p):
    '''EXPR_SELECT : select distinct EXPR_COLUMNAS'''
    # SELECT DISTINCT <columns>
    global cont
    p[0] = Node("EXPR_SELECT","",cont,0,0)
    cont = cont+1
    nodo1 = Node("select",p[1],cont,p.lineno(1) ,p.lexpos(1))
    cont = cont+1
    nodo2 = Node("distinct",p[2],cont,p.lineno(2) ,p.lexpos(2))
    cont = cont+1
    p[0].AddHijos(nodo1)
    p[0].AddHijos(nodo2)
    p[0].AddHijos(p[3])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_EXPR_SELECT_C(p):
    '''EXPR_SELECT : select EXPR_COLUMNAS'''
    # SELECT <columns>
    global cont
    p[0] = Node("EXPR_SELECT","",cont,0,0)
    cont = cont+1
    nodo1 = Node("select",p[1],cont,p.lineno(1) ,p.lexpos(1))
    cont = cont+1
    p[0].AddHijos(nodo1)
    p[0].AddHijos(p[2])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
# todos los parametros de select - columnas
def p_EXPR_COLUMNAS(p):
    '''EXPR_COLUMNAS : EXPR_COLUMNAS coma EXPR_COLUMNAS1'''
    # Left-recursive column list: append the next column expression.
    p[0]=p[1]
    p[0].AddHijos(p[3])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_LISTA_EXPR_COLUMNAS(p):
    '''EXPR_COLUMNAS : EXPR_COLUMNAS1'''
    # Base case: wrap the first column expression in a list node.
    global cont
    p[0] = Node("EXPR_COLUMNAS","",cont,0,0)
    cont = cont+1
    p[0].AddHijos(p[1])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
#LEN 1 y 3
def p_EXPR_COLUMNAS1(p):
    '''EXPR_COLUMNAS1 : E
    | EXPR_AGREGACION
    | EXPR_MATHS
    | EXPR_TRIG
    | EXPR_BINARIAS
    | EXPR_EXTRA
    | EXPR_FECHA
    | EXPR_CASE
    '''
    # A single column expression without an alias.
    global cont
    p[0] = Node("EXPR_COLUMNAS1","",cont,0,0)
    cont = cont+1
    p[0].AddHijos(p[1])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_EXPR_COLUMNAS2(p):
    '''EXPR_COLUMNAS1 : E as E
    | EXPR_AGREGACION as E
    | EXPR_MATHS as E
    | EXPR_TRIG as E
    | EXPR_BINARIAS as E
    | EXPR_EXTRA as E
    | EXPR_FECHA as E
    | EXPR_CASE as E '''
    # Column expression with an explicit AS alias.
    global cont
    p[0] = Node("EXPR_COLUMNAS1","",cont,0,0)
    cont = cont+1
    nodoas = Node("as",p[2],cont,p.lineno(2) ,p.lexpos(2))
    cont = cont+1
    p[0].AddHijos(p[1])
    p[0].AddHijos(nodoas)
    p[0].AddHijos(p[3])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_EXPR_COLUMNAS3(p):
    '''EXPR_COLUMNAS1 : EXPR_AGREGACION E
    | EXPR_MATHS E
    | EXPR_TRIG E
    | EXPR_BINARIAS E
    | EXPR_EXTRA E
    | EXPR_FECHA E
    | EXPR_CASE E
    | E E'''
    # Column expression with an implicit (juxtaposed) alias, no AS keyword.
    global cont
    p[0] = Node("EXPR_COLUMNAS1","",cont,0,0)
    cont = cont+1
    p[0].AddHijos(p[1])
    p[0].AddHijos(p[2])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_EXPR_COLUMNAS4(p):
    '''EXPR_COLUMNAS1 : E punto multi '''
    # Qualified wildcard:  <expr> . *
    global cont
    p[0] = Node("EXPR_COLUMNAS1","",cont,0,0)
    cont = cont+1
    nodo1 = Node("punto",p[2],cont,p.lineno(2) ,p.lexpos(2))
    cont = cont+1
    nodo2 = Node("multi",p[3],cont,p.lineno(3) ,p.lexpos(3))
    cont = cont+1
    p[0].AddHijos(p[1])
    p[0].AddHijos(nodo1)
    p[0].AddHijos(nodo2)
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
#LEN
def p_EXPR_COLUMNAS1_p1(p):
'''EXPR_COLUMNAS1 : substring parAbre E coma E coma E parCierra
'''
global cont
p[0] = Node("EXPR_COLUMNAS1","",cont,0,0)
cont = cont+1
nodo1 = Node("substring",p[1],cont,p.lineno(1) ,p.lexpos(1))
cont = cont+1
p[0].AddHijos(nodo1)
p[0].AddHijos(p[3])
p[0].AddHijos(p[5])
p[0].AddHijos(p[7])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_EXPR_COLUMNAS1_p2(p):
'''EXPR_COLUMNAS1 : greatest parAbre E_LIST parCierra
| least parAbre E_LIST parCierra '''
global cont
p[0] = Node("EXPR_COLUMNAS1","",cont,0,0)
cont = cont+1
nodo1 = Node(str(p[1]),p[1],cont,p.lineno(1) ,p.lexpos(1))
cont = cont+1
p[0].AddHijos(nodo1)
p[0].AddHijos(p[3])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_EXPR_COLUMNAS1_p3(p):
    '''EXPR_COLUMNAS1 : substring parAbre E coma E coma E parCierra as E '''
    # substring(str, start, len) AS alias.
    # Symbol indices: 1=substring 2=( 3=E 4=, 5=E 6=, 7=E 8=) 9=as 10=E.
    # The 'as' keyword is p[9] and the alias expression is p[10]; the original
    # used p[8]/p[9], which pointed at ')' and the 'as' token (off by one) —
    # compare the correct sibling p_EXPR_COLUMNAS1_p5.
    global cont
    p[0] = Node("EXPR_COLUMNAS1","",cont,0,0)
    cont = cont+1
    nodo1 = Node("substring",p[1],cont,p.lineno(1) ,p.lexpos(1))
    cont = cont+1
    nodo2 = Node("as",p[9],cont,p.lineno(9) ,p.lexpos(9))
    cont = cont+1
    p[0].AddHijos(nodo1)
    p[0].AddHijos(p[3])
    p[0].AddHijos(p[5])
    p[0].AddHijos(p[7])
    p[0].AddHijos(nodo2)
    p[0].AddHijos(p[10])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_EXPR_COLUMNAS1_p4(p):
'''EXPR_COLUMNAS1 : greatest parAbre E_LIST parCierra as E
| least parAbre E_LIST parCierra as E '''
global cont
p[0] = Node("EXPR_COLUMNAS1","",cont,0,0)
cont = cont+1
nodo1 = Node(str(p[1]),p[1],cont,p.lineno(1) ,p.lexpos(1))
cont = cont+1
nodo2 = Node("as",p[5],cont,p.lineno(5) ,p.lexpos(5))
cont = cont+1
p[0].AddHijos(nodo1)
p[0].AddHijos(p[3])
p[0].AddHijos(nodo2)
p[0].AddHijos(p[6])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_EXPR_COLUMNAS1_p5(p):
'''EXPR_COLUMNAS1 : substr parAbre E coma E coma E parCierra as E '''
global cont
p[0] = Node("EXPR_COLUMNAS1","",cont,0,0)
cont = cont+1
nodo1 = Node("substr",p[1],cont,p.lineno(1) ,p.lexpos(1))
cont = cont+1
nodo2 = Node("as",p[9],cont,p.lineno(9) ,p.lexpos(9))
cont = cont+1
p[0].AddHijos(nodo1)
p[0].AddHijos(p[3])
p[0].AddHijos(p[5])
p[0].AddHijos(p[7])
p[0].AddHijos(nodo2)
p[0].AddHijos(p[10])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_EXPR_COLUMNAS1_p6(p):
    '''EXPR_COLUMNAS1 : substr parAbre E coma E coma E parCierra
    '''
    # substr(str, start, len) with no alias.  This production has only 8
    # symbols, so the original Node("as", p[9], ...) was out of range and
    # raised IndexError on every reduction of this rule; the node was never
    # attached to the tree anyway, so it is simply removed.
    global cont
    p[0] = Node("EXPR_COLUMNAS1","",cont,0,0)
    cont = cont+1
    nodo1 = Node("substr",p[1],cont,p.lineno(1) ,p.lexpos(1))
    cont = cont+1
    p[0].AddHijos(nodo1)
    p[0].AddHijos(p[3])
    p[0].AddHijos(p[5])
    p[0].AddHijos(p[7])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_EXPR_COLUMNAS1_p7(p):
'''EXPR_COLUMNAS1 : parAbre QUERY parCierra
'''
global cont
p[0] = Node("EXPR_COLUMNAS1","",cont,0,0)
cont = cont+1
p[0].AddHijos(p[2])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_EXPR_COLUMNAS1_p8(p):
'''EXPR_COLUMNAS1 : parAbre QUERY parCierra E
'''
global cont
p[0] = Node("EXPR_COLUMNAS1","",cont,0,0)
cont = cont+1
p[0].AddHijos(p[2])
p[0].AddHijos(p[4])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_EXPR_COLUMNAS1_p9(p):
'''EXPR_COLUMNAS1 : parAbre QUERY parCierra as E
'''
global cont
p[0] = Node("EXPR_COLUMNAS1","",cont,0,0)
cont = cont+1
nodo2 = Node("as",p[4],cont,p.lineno(4) ,p.lexpos(4))
cont = cont+1
p[0].AddHijos(p[2])
p[0].AddHijos(nodo2)
p[0].AddHijos(p[5])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_EXPR_EXTRA1(p):
'''EXPR_EXTRA : tExtract parAbre FIELDS from E parCierra'''
global cont
p[0] = Node("EXPR_EXTRA","",cont,0,0)
cont = cont+1
nodo1 = Node("tExtract",p[1],cont,p.lineno(1) ,p.lexpos(1))
cont = cont+1
nodo2 = Node("from",p[4],cont,p.lineno(4) ,p.lexpos(4))
cont = cont+1
p[0].AddHijos(nodo1)
p[0].AddHijos(p[3])
p[0].AddHijos(nodo2)
p[0].AddHijos(p[5])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_EXPR_EXTRA2(p):
'''EXPR_EXTRA : tExtract parAbre FIELDS from tTimestamp E parCierra'''
global cont
p[0] = Node("EXPR_EXTRA","",cont,0,0)
cont = cont+1
nodo1 = Node("tExtract",p[1],cont,p.lineno(1) ,p.lexpos(1))
cont = cont+1
nodo2 = Node("from",p[4],cont,p.lineno(4) ,p.lexpos(4))
cont = cont+1
nodo3 = Node("tTimestamp",p[5],cont,p.lineno(5) ,p.lexpos(5))
cont = cont+1
p[0].AddHijos(nodo1)
p[0].AddHijos(p[3])
p[0].AddHijos(nodo2)
p[0].AddHijos(nodo3)
p[0].AddHijos(p[6])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_EXPR_AGREGACION(p):
'''EXPR_AGREGACION : count E
| avg E
| max E
| min E
| sum E '''
global cont
p[0] = Node("EXPR_AGREGACION","",cont,0,0)
cont = cont+1
nodo1 = Node(str(p[1]),p[1],cont,p.lineno(1) ,p.lexpos(1))
cont = cont+1
p[0].AddHijos(nodo1)
p[0].AddHijos(p[2])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_EXPR_AGREGACION1(p):
'''EXPR_AGREGACION : count parAbre multi parCierra
| avg parAbre multi parCierra
| max parAbre multi parCierra
| min parAbre multi parCierra
| sum parAbre multi parCierra'''
global cont
p[0] = Node("EXPR_AGREGACION","",cont,0,0)
cont = cont+1
nodo1 = Node(str(p[1]),p[1],cont,p.lineno(1) ,p.lexpos(1))
cont = cont+1
nodo2 = Node(str(p[3]),p[3],cont,p.lineno(3) ,p.lexpos(3))
cont = cont+1
p[0].AddHijos(nodo1)
p[0].AddHijos(nodo2)
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_EXPR_MATHS(p):
'''EXPR_MATHS : abs E
| cbrt E
| ceil E
| ceiling E
| degrees E
| exp E
| factorial E
| floor E
| ln E
| log E
| radians E
| round E
| sign E
| sqrt E
| trunc E
'''
global cont
p[0] = Node("EXPR_MATHS","",cont,0,0)
cont = cont+1
nodo1 = Node(str(p[1]),p[1],cont,p.lineno(1) ,p.lexpos(1))
cont = cont+1
p[0].AddHijos(nodo1)
p[0].AddHijos(p[2])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_EXPR_MATHS1(p):
'''EXPR_MATHS : div parAbre E coma E parCierra
| gcd parAbre E coma E parCierra
| mod parAbre E coma E parCierra
| power parAbre E coma E parCierra
| round parAbre E coma E parCierra
'''
global cont
p[0] = Node("EXPR_MATHS","",cont,0,0)
cont = cont+1
nodo1 = Node(str(p[1]),p[1],cont,p.lineno(1) ,p.lexpos(1))
cont = cont+1
p[0].AddHijos(nodo1)
p[0].AddHijos(p[3])
p[0].AddHijos(p[5])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_EXPR_MATHS2(p):
'''EXPR_MATHS : pi parAbre parCierra
| random parAbre parCierra'''
global cont
p[0] = Node("EXPR_MATHS","",cont,0,0)
cont = cont+1
nodo1 = Node(str(p[1]),p[1],cont,p.lineno(1) ,p.lexpos(1))
cont = cont+1
p[0].AddHijos(nodo1)
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_EXPR_MATHS3(p):
'''EXPR_MATHS : width_bucket parAbre LISTA_EXP parCierra'''
global cont
p[0] = Node("EXPR_MATHS","",cont,0,0)
cont = cont+1
nodo1 = Node("width_bucket",p[1],cont,p.lineno(1) ,p.lexpos(1))
cont = cont+1
p[0].AddHijos(nodo1)
p[0].AddHijos(p[3])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_EXPR_TRIG(p):
'''EXPR_TRIG : acos E
| acosd E
| asin E
| asind E
| atan E
| atand E
| cos E
| cosd E
| cot E
| cotd E
| sin E
| sind E
| tan E
| sinh E
| cosh E
| tanh E
| tand E
| asinh E
| acosh E
| atanh E'''
global cont
p[0] = Node("EXPR_TRIG","",cont,0,0)
cont = cont+1
nodo1 = Node(str(p[1]),p[1],cont,p.lineno(1) ,p.lexpos(1))
cont = cont+1
p[0].AddHijos(nodo1)
p[0].AddHijos(p[2])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_EXPR_TRIG1(p):
'''EXPR_TRIG : atan2 parAbre E coma E parCierra
| atan2d parAbre E coma E parCierra '''
global cont
p[0] = Node("EXPR_TRIG","",cont,0,0)
cont = cont+1
nodo1 = Node(str(p[1]),p[1],cont,p.lineno(1) ,p.lexpos(1))
cont = cont+1
p[0].AddHijos(nodo1)
p[0].AddHijos(p[3])
p[0].AddHijos(p[5])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_EXPR_BINARIAS(p):
'''EXPR_BINARIAS : length E
| trim E
| md5 E
| sha256 E
| substr E
| barra E
| virgulilla E
| barraDoble E
'''
global cont
p[0] = Node("EXPR_BINARIAS","",cont,0,0)
cont = cont+1
nodo1 = Node(str(p[1]),p[1],cont,p.lineno(1) ,p.lexpos(1))
cont = cont+1
p[0].AddHijos(nodo1)
p[0].AddHijos(p[2])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_EXPR_BINARIAS1(p):
    '''EXPR_BINARIAS : E amp E
                     | E barra E
                     | E numeral E
    '''
    # Infix binary operator: E <op> E.  The operator token is p[2], the
    # right-hand operand subtree is p[3].
    global cont
    p[0] = Node("EXPR_BINARIAS","",cont,0,0)
    cont = cont+1
    nodo1 = Node(str(p[2]),p[2],cont,p.lineno(2) ,p.lexpos(2))
    cont = cont+1
    p[0].AddHijos(p[1])
    p[0].AddHijos(nodo1)
    # Attach the right operand p[3]; the original re-attached the raw
    # operator token p[2], dropping the right-hand expression from the tree.
    p[0].AddHijos(p[3])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_EXPR_BINARIASmay(p):
    '''EXPR_BINARIAS : E menormenor E
                     | E mayormayor E
    '''
    # Shift operators (<< / >>).  The label is prefixed with a backslash so
    # the angle brackets do not break the generated graph output.
    global cont
    p[0] = Node("EXPR_BINARIAS","",cont,0,0)
    cont = cont+1
    nodo1 = Node(str(p[2]),"\\"+str(p[2]),cont,p.lineno(2) ,p.lexpos(2))
    cont = cont+1
    p[0].AddHijos(p[1])
    p[0].AddHijos(nodo1)
    # Attach the right operand p[3]; the original mistakenly attached the
    # operator token p[2] a second time.
    p[0].AddHijos(p[3])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_EXPR_BINARIAS2(p):
    '''EXPR_BINARIAS : encode parAbre E dosPts dosPts bytea coma E parCierra
    '''
    # encode(E::bytea, E).
    global cont
    p[0] = Node("EXPR_BINARIAS","",cont,0,0)
    cont = cont+1
    # Label fixed from the misspelled "enconde" to match the grammar token.
    nodo1 = Node("encode",p[1],cont,p.lineno(1) ,p.lexpos(1))
    cont = cont+1
    nodo2 = Node("bytea",p[6],cont,p.lineno(6) ,p.lexpos(6))
    cont = cont+1
    p[0].AddHijos(nodo1)
    p[0].AddHijos(p[3])
    p[0].AddHijos(nodo2)
    p[0].AddHijos(p[8])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_EXPR_BINARIAS3(p):
'''EXPR_BINARIAS : get_byte parAbre E dosPts dosPts bytea coma E parCierra
'''
global cont
p[0] = Node("EXPR_BINARIAS","",cont,0,0)
cont = cont+1
nodo1 = Node("get_byte",p[1],cont,p.lineno(1) ,p.lexpos(1))
cont = cont+1
nodo2 = Node("bytea",p[6],cont,p.lineno(6) ,p.lexpos(6))
cont = cont+1
p[0].AddHijos(nodo1)
p[0].AddHijos(p[3])
p[0].AddHijos(nodo2)
p[0].AddHijos(p[8])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_EXPR_BINARIAS4(p):
'''EXPR_BINARIAS : set_byte parAbre E dosPts dosPts bytea coma E coma E parCierra
'''
global cont
p[0] = Node("EXPR_BINARIAS","",cont,0,0)
cont = cont+1
nodo1 = Node("set_byte",p[1],cont,p.lineno(1) ,p.lexpos(1))
cont = cont+1
nodo2 = Node("bytea",p[6],cont,p.lineno(6) ,p.lexpos(6))
cont = cont+1
p[0].AddHijos(nodo1)
p[0].AddHijos(p[3])
p[0].AddHijos(nodo2)
p[0].AddHijos(p[8])
p[0].AddHijos(p[10])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_EXPR_BINARIAS5(p):
'''EXPR_BINARIAS : convert parAbre E as TIPO parCierra
'''
global cont
p[0] = Node("EXPR_BINARIAS","",cont,0,0)
cont = cont+1
nodo1 = Node("convert",p[1],cont,p.lineno(1) ,p.lexpos(1))
cont = cont+1
nodo2 = Node("as",p[4],cont,p.lineno(4) ,p.lexpos(4))
cont = cont+1
p[0].AddHijos(nodo1)
p[0].AddHijos(p[3])
p[0].AddHijos(nodo2)
p[0].AddHijos(p[5])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_EXPR_BINARIAS6(p):
'''EXPR_BINARIAS : decode parAbre E coma E parCierra
'''
global cont
p[0] = Node("EXPR_BINARIAS","",cont,0,0)
cont = cont+1
nodo1 = Node("decode",p[1],cont,p.lineno(1) ,p.lexpos(1))
cont = cont+1
p[0].AddHijos(nodo1)
p[0].AddHijos(p[3])
p[0].AddHijos(p[5])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_EXPR_FECHA(p):
'''EXPR_FECHA : current_date
| current_time'''
global cont
p[0] = Node("EXPR_FECHA","",cont,0,0)
cont = cont+1
nodo1 = Node(str(p[1]),p[1],cont,p.lineno(1) ,p.lexpos(1))
cont = cont+1
p[0].AddHijos(nodo1)
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_EXPR_FECHA1(p):
'''EXPR_FECHA : date_part parAbre E coma DATE_TYPES E parCierra'''
global cont
p[0] = Node("EXPR_FECHA","",cont,0,0)
cont = cont+1
nodo1 = Node("date_part",p[1],cont,p.lineno(1) ,p.lexpos(1))
cont = cont+1
p[0].AddHijos(nodo1)
p[0].AddHijos(p[3])
p[0].AddHijos(p[5])
p[0].AddHijos(p[6])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_EXPR_FECHA2(p):
'''EXPR_FECHA : DATE_TYPES E'''
global cont
p[0] = Node("EXPR_FECHA","",cont,0,0)
cont = cont+1
p[0].AddHijos(p[1])
p[0].AddHijos(p[2])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_EXPR_FECHA3(p):
'''EXPR_FECHA : now parAbre parCierra'''
global cont
p[0] = Node("EXPR_FECHA","",cont,0,0)
cont = cont+1
nodo1 = Node("now",p[1],cont,p.lineno(1) ,p.lexpos(1))
cont = cont+1
p[0].AddHijos(nodo1)
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_EXPR_CASE(p):
'''EXPR_CASE : case CASE_LIST end'''
global cont
p[0] = Node("EXPR_CASE","",cont,0,0)
cont = cont+1
nodo1 = Node("case",p[1],cont,p.lineno(1) ,p.lexpos(1))
cont = cont+1
nodo2 = Node("end",p[3],cont,p.lineno(3) ,p.lexpos(3))
cont = cont+1
p[0].AddHijos(nodo1)
p[0].AddHijos(p[2])
p[0].AddHijos(nodo2)
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_EXPR_CASE1(p):
'''EXPR_CASE : case CASE_LIST else E end'''
global cont
p[0] = Node("EXPR_CASE","",cont,0,0)
cont = cont+1
nodo1 = Node("case",p[1],cont,p.lineno(1) ,p.lexpos(1))
cont = cont+1
nodo2 = Node("else",p[3],cont,p.lineno(3) ,p.lexpos(3))
cont = cont+1
nodo3 = Node("end",p[5],cont,p.lineno(5) ,p.lexpos(5))
cont = cont+1
p[0].AddHijos(nodo1)
p[0].AddHijos(p[2])
p[0].AddHijos(nodo2)
p[0].AddHijos(p[4])
p[0].AddHijos(nodo3)
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_CASE_LIST(p):
'''CASE_LIST : CASE_LIST when E then E'''
global cont
p[0] = Node("CASE_LIST","",cont,0,0)
cont = cont+1
nodo1 = Node("when",p[2],cont,p.lineno(2) ,p.lexpos(2))
cont = cont+1
nodo2 = Node("then",p[4],cont,p.lineno(4) ,p.lexpos(4))
cont = cont+1
p[0].AddHijos(p[1])
p[0].AddHijos(nodo1)
p[0].AddHijos(p[3])
p[0].AddHijos(nodo2)
p[0].AddHijos(p[5])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_CASE_LIST1(p):
'''CASE_LIST : when E then E '''
global cont
p[0] = Node("CASE_LIST","",cont,0,0)
cont = cont+1
nodo1 = Node("when",p[1],cont,p.lineno(1) ,p.lexpos(1))
cont = cont+1
nodo2 = Node("then",p[3],cont,p.lineno(3) ,p.lexpos(3))
cont = cont+1
p[0].AddHijos(nodo1)
p[0].AddHijos(p[2])
p[0].AddHijos(nodo2)
p[0].AddHijos(p[4])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_E_LIST(p):
    '''E_LIST : E_LIST coma E_LIST1'''
    # Grow the expression list in place with the next element.
    acumulado = p[1]
    acumulado.AddHijos(p[3])
    p[0] = acumulado
    lista.append(str(recorrerGramatica(acumulado, 0)) + "\n")
def p_E_LIST2(p):
'''E_LIST : E_LIST1'''
global cont
p[0] = Node("E_LIST","",cont,0,0)
cont = cont+1
p[0].AddHijos(p[1])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_E_LIST1(p):
'''E_LIST1 : now parAbre parCierra'''
global cont
p[0] = Node("now",p[1],cont,p.lineno(1) ,p.lexpos(1))
cont = cont+1
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_E_LIST3(p):
    '''E_LIST1 : E'''
    # A bare expression passes straight through as a list element.
    nodo = p[1]
    p[0] = nodo
    lista.append(str(recorrerGramatica(nodo, 0)) + "\n")
def p_EXPR_FROM(p):
'''EXPR_FROM : from L_IDsAlias '''
global cont
p[0] = Node("EXPR_FROM","",cont,0,0)
cont = cont+1
nodo1 = Node("from",p[1],cont,p.lineno(1) ,p.lexpos(1))
cont = cont+1
p[0].AddHijos(nodo1)
p[0].AddHijos(p[2])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_EXPR_FROM2(p):
'''EXPR_FROM : from parAbre QUERY parCierra'''
global cont
p[0] = Node("EXPR_FROM","",cont,0,0)
cont = cont+1
nodo1 = Node("from",p[1],cont,p.lineno(1) ,p.lexpos(1))
cont = cont+1
p[0].AddHijos(nodo1)
p[0].AddHijos(p[3])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_EXPR_FROM3(p):
'''EXPR_FROM : from parAbre QUERY parCierra id'''
global cont
p[0] = Node("EXPR_FROM","",cont,0,0)
cont = cont+1
nodo1 = Node("from",p[1],cont,p.lineno(1) ,p.lexpos(1))
cont = cont+1
nodo2 = Node("id",p[5],cont,p.lineno(5) ,p.lexpos(5))
cont = cont+1
p[0].AddHijos(nodo1)
p[0].AddHijos(p[3])
p[0].AddHijos(nodo2)
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_EXPR_FROM4(p):
    '''EXPR_FROM : from parAbre QUERY parCierra as id'''
    # FROM (subquery) AS alias.
    global cont
    p[0] = Node("EXPR_FROM","",cont,0,0)
    cont = cont+1
    nodo1 = Node("from",p[1],cont,p.lineno(1) ,p.lexpos(1))
    cont = cont+1
    nodoas = Node("as",p[5],cont,p.lineno(5) ,p.lexpos(5))
    cont = cont+1
    nodo2 = Node("id",p[6],cont,p.lineno(6) ,p.lexpos(6))
    cont = cont+1
    p[0].AddHijos(nodo1)
    # Attach the QUERY subtree p[3]; the original attached p[2], which is the
    # raw '(' token (siblings p_EXPR_FROM2/p_EXPR_FROM3 attach p[3]).
    p[0].AddHijos(p[3])
    p[0].AddHijos(nodoas)
    p[0].AddHijos(nodo2)
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_L_IDsAlias(p):
    '''L_IDsAlias : L_IDsAlias coma L_IDsAlias1 '''
    # Left-recursive FROM list: attach the next table/alias entry.
    acumulado = p[1]
    acumulado.AddHijos(p[3])
    p[0] = acumulado
    lista.append(str(recorrerGramatica(acumulado, 0)) + "\n")
def p_L_IDsAlias1(p):
'''L_IDsAlias : L_IDsAlias1 '''
global cont
p[0] = Node("L_IDsAlias","",cont,0,0)
cont = cont+1
p[0].AddHijos(p[1])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_L_IDsAlias_p1(p):
'''L_IDsAlias1 : id id
'''
global cont
p[0] = Node("L_IDsAlias1","",cont,0,0)
cont = cont+1
nodo1 = Node("id",p[1],cont,p.lineno(1) ,p.lexpos(1))
cont = cont+1
nodo2 = Node("id",p[2],cont,p.lineno(2) ,p.lexpos(2))
cont = cont+1
p[0].AddHijos(nodo1)
p[0].AddHijos(nodo2)
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_L_IDsAlias_p2(p):
'''L_IDsAlias1 : id as id
'''
global cont
p[0] = Node("L_IDsAlias1","",cont,0,0)
cont = cont+1
nodo1 = Node("id",p[1],cont,p.lineno(1) ,p.lexpos(1))
cont = cont+1
nodo2 = Node("as",p[2],cont,p.lineno(2) ,p.lexpos(2))
cont = cont+1
nodo3 = Node("id",p[3],cont,p.lineno(3) ,p.lexpos(3))
cont = cont+1
p[0].AddHijos(nodo1)
p[0].AddHijos(nodo2)
p[0].AddHijos(nodo3)
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_L_IDsAlias_p3(p):
'''L_IDsAlias1 : id
'''
global cont
p[0] = Node("L_IDsAlias1","",cont,0,0)
cont = cont+1
nodo1 = Node("id",p[1],cont,p.lineno(1) ,p.lexpos(1))
cont = cont+1
p[0].AddHijos(nodo1)
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_EXPR_WHERE(p):
'''EXPR_WHERE : where LIST_CONDS '''
global cont
p[0] = Node("EXPR_WHERE","",cont,0,0)
cont = cont+1
nodo1 = Node("where",p[1],cont,p.lineno(1) ,p.lexpos(1))
cont = cont+1
p[0].AddHijos(nodo1)
p[0].AddHijos(p[2])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_LIST_CONDS(p):
    '''LIST_CONDS : LIST_CONDS COND1'''
    # Append one more condition to the running condition list.
    acumulado = p[1]
    acumulado.AddHijos(p[2])
    p[0] = acumulado
    lista.append(str(recorrerGramatica(acumulado, 0)) + "\n")
def p_LIST_CONDS1(p):
'''LIST_CONDS : COND1 '''
global cont
p[0] = Node("LIST_CONDS","",cont,0,0)
cont = cont+1
p[0].AddHijos(p[1])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_LIST_CONDS2(p):
    '''LIST_CONDS : LIST_CONDS ORAND COND1'''
    # Append the connective (AND/OR) and the following condition.
    acumulado = p[1]
    for hijo in (p[2], p[3]):
        acumulado.AddHijos(hijo)
    p[0] = acumulado
    lista.append(str(recorrerGramatica(acumulado, 0)) + "\n")
def p_LIST_CONDS3(p):
'''LIST_CONDS : ORAND COND1 '''
global cont
p[0] = Node("LIST_CONDS","",cont,0,0)
cont = cont+1
p[0].AddHijos(p[1])
p[0].AddHijos(p[2])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_LIST_ORAND(p):
'''ORAND : or
| And
| barraDoble
'''
global cont
p[0] = Node("ORAND","",cont,0,0)
cont = cont+1
nodo1 = Node(str(p[1]),p[1],cont,p.lineno(1) ,p.lexpos(1))
cont = cont+1
p[0].AddHijos(nodo1)
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_COND1(p):
'''COND1 : E_FUNC '''
global cont
p[0] = Node("COND1","",cont,0,0)
cont = cont+1
p[0].AddHijos(p[1])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_COND2(p):
'''COND1 : E_FUNC tIs distinct from E_FUNC'''
global cont
p[0] = Node("COND1","",cont,0,0)
cont = cont+1
nodo1 = Node("tIs",p[2],cont,p.lineno(2) ,p.lexpos(2))
cont = cont+1
nodo2 = Node("distinct",p[3],cont,p.lineno(3) ,p.lexpos(3))
cont = cont+1
nodo3 = Node("from",p[4],cont,p.lineno(4) ,p.lexpos(4))
cont = cont+1
p[0].AddHijos(p[1])
p[0].AddHijos(nodo1)
p[0].AddHijos(nodo2)
p[0].AddHijos(nodo3)
p[0].AddHijos(p[5])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_COND3(p):
'''COND1 : E_FUNC tIs not distinct from E_FUNC'''
global cont
p[0] = Node("COND1","",cont,0,0)
cont = cont+1
nodo1 = Node("tIs",p[2],cont,p.lineno(2) ,p.lexpos(2))
cont = cont+1
nodo2 = Node("not",p[3],cont,p.lineno(3) ,p.lexpos(3))
cont = cont+1
nodo3 = Node("distinct",p[4],cont,p.lineno(4) ,p.lexpos(4))
cont = cont+1
nodo4 = Node("from",p[5],cont,p.lineno(5) ,p.lexpos(5))
cont = cont+1
p[0].AddHijos(p[1])
p[0].AddHijos(nodo1)
p[0].AddHijos(nodo2)
p[0].AddHijos(nodo3)
p[0].AddHijos(nodo4)
p[0].AddHijos(p[6])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_COND4(p):
'''COND1 : substring parAbre E_FUNC coma E_FUNC coma E_FUNC parCierra igual E_FUNC'''
global cont
p[0] = Node("COND1","",cont,0,0)
cont = cont+1
nodo1 = Node("substring",p[1],cont,p.lineno(1) ,p.lexpos(1))
cont = cont+1
nodo2 = Node("igual",p[9],cont,p.lineno(9) ,p.lexpos(9))
cont = cont+1
p[0].AddHijos(nodo1)
p[0].AddHijos(p[3])
p[0].AddHijos(p[5])
p[0].AddHijos(p[7])
p[0].AddHijos(nodo2)
p[0].AddHijos(p[10])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_COND5(p):
    '''COND1 : exists parAbre QUERY parCierra'''
    # EXISTS (subquery).
    global cont
    p[0] = Node("COND1","",cont,0,0)
    cont = cont+1
    # Label fixed from "exist" to "exists", matching the token and the label
    # used by the NOT EXISTS rule (p_COND6).
    nodo1 = Node("exists",p[1],cont,p.lineno(1) ,p.lexpos(1))
    cont = cont+1
    p[0].AddHijos(nodo1)
    p[0].AddHijos(p[3])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_COND6(p):
'''COND1 : not exists parAbre QUERY parCierra'''
global cont
p[0] = Node("COND1","",cont,0,0)
cont = cont+1
nodo1 = Node("not",p[1],cont,p.lineno(1) ,p.lexpos(1))
cont = cont+1
nodo2 = Node("exists",p[2],cont,p.lineno(2) ,p.lexpos(2))
cont = cont+1
p[0].AddHijos(nodo1)
p[0].AddHijos(nodo2)
p[0].AddHijos(p[4])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_COND7(p):
'''COND1 : E_FUNC in parAbre QUERY parCierra '''
global cont
p[0] = Node("COND1","",cont,0,0)
cont = cont+1
nodo1 = Node("in",p[2],cont,p.lineno(2) ,p.lexpos(2))
cont = cont+1
p[0].AddHijos(p[1])
p[0].AddHijos(nodo1)
p[0].AddHijos(p[4])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_COND8(p):
'''COND1 : E_FUNC not in parAbre QUERY parCierra'''
global cont
p[0] = Node("COND1","",cont,0,0)
cont = cont+1
nodo1 = Node("not",p[2],cont,p.lineno(2) ,p.lexpos(2))
cont = cont+1
nodo2 = Node("in",p[3],cont,p.lineno(3) ,p.lexpos(3))
cont = cont+1
p[0].AddHijos(p[1])
p[0].AddHijos(nodo1)
p[0].AddHijos(nodo2)
p[0].AddHijos(p[5])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_COND9(p):
'''COND1 : E_FUNC OPERATOR any parAbre QUERY parCierra'''
global cont
p[0] = Node("COND1","",cont,0,0)
cont = cont+1
nodo1 = Node("any",p[3],cont,p.lineno(3) ,p.lexpos(3))
cont = cont+1
p[0].AddHijos(p[1])
p[0].AddHijos(p[2])
p[0].AddHijos(nodo1)
p[0].AddHijos(p[5])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_COND10(p):
'''COND1 : E_FUNC OPERATOR some parAbre QUERY parCierra'''
global cont
p[0] = Node("COND1","",cont,0,0)
cont = cont+1
nodo1 = Node("some",p[3],cont,p.lineno(3) ,p.lexpos(3))
cont = cont+1
p[0].AddHijos(p[1])
p[0].AddHijos(p[2])
p[0].AddHijos(nodo1)
p[0].AddHijos(p[5])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_COND11(p):
'''COND1 : E_FUNC OPERATOR all parAbre QUERY parCierra'''
global cont
p[0] = Node("COND1","",cont,0,0)
cont = cont+1
nodo1 = Node("all",p[3],cont,p.lineno(3) ,p.lexpos(3))
cont = cont+1
p[0].AddHijos(p[1])
p[0].AddHijos(p[2])
p[0].AddHijos(nodo1)
p[0].AddHijos(p[5])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_COND12(p):
'''COND1 : E_FUNC tBetween E_FUNC '''
global cont
p[0] = Node("COND1","",cont,0,0)
cont = cont+1
nodo1 = Node("tBetween",p[2],cont,p.lineno(2) ,p.lexpos(2))
cont = cont+1
p[0].AddHijos(p[1])
p[0].AddHijos(nodo1)
p[0].AddHijos(p[3])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_COND13(p):
'''COND1 : E_FUNC not tBetween E_FUNC '''
global cont
p[0] = Node("COND1","",cont,0,0)
cont = cont+1
nodo1 = Node("not",p[2],cont,p.lineno(2) ,p.lexpos(2))
cont = cont+1
nodo2 = Node("tBetween",p[3],cont,p.lineno(3) ,p.lexpos(3))
cont = cont+1
p[0].AddHijos(p[1])
p[0].AddHijos(nodo1)
p[0].AddHijos(nodo2)
p[0].AddHijos(p[4])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_COND14(p):
'''COND1 : E_FUNC tIs tTrue '''
global cont
p[0] = Node("COND1","",cont,0,0)
cont = cont+1
nodo1 = Node("tIs",p[2],cont,p.lineno(2) ,p.lexpos(2))
cont = cont+1
nodo2 = Node("tTrue",p[3],cont,p.lineno(3) ,p.lexpos(3))
cont = cont+1
p[0].AddHijos(p[1])
p[0].AddHijos(nodo1)
p[0].AddHijos(nodo2)
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_COND15(p):
'''COND1 : E_FUNC tIs not tTrue '''
global cont
p[0] = Node("COND1","",cont,0,0)
cont = cont+1
nodo1 = Node("tIs",p[2],cont,p.lineno(2) ,p.lexpos(2))
cont = cont+1
nodo2 = Node("not",p[3],cont,p.lineno(3) ,p.lexpos(3))
cont = cont+1
nodo3 = Node("tTrue",p[4],cont,p.lineno(4) ,p.lexpos(4))
cont = cont+1
p[0].AddHijos(p[1])
p[0].AddHijos(nodo1)
p[0].AddHijos(nodo2)
p[0].AddHijos(nodo3)
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_COND16(p):
'''COND1 : E_FUNC tIs tFalse '''
global cont
p[0] = Node("COND1","",cont,0,0)
cont = cont+1
nodo1 = Node("tIs",p[2],cont,p.lineno(2) ,p.lexpos(2))
cont = cont+1
nodo2 = Node("tFalse",p[3],cont,p.lineno(3) ,p.lexpos(3))
cont = cont+1
p[0].AddHijos(p[1])
p[0].AddHijos(nodo1)
p[0].AddHijos(nodo2)
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_COND17(p):
'''COND1 : E_FUNC tIs not tFalse '''
global cont
p[0] = Node("COND1","",cont,0,0)
cont = cont+1
nodo1 = Node("tIs",p[2],cont,p.lineno(2) ,p.lexpos(2))
cont = cont+1
nodo2 = Node("not",p[3],cont,p.lineno(3) ,p.lexpos(3))
cont = cont+1
nodo3 = Node("tFalse",p[4],cont,p.lineno(4) ,p.lexpos(4))
cont = cont+1
p[0].AddHijos(p[1])
p[0].AddHijos(nodo1)
p[0].AddHijos(nodo2)
p[0].AddHijos(nodo3)
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_COND18(p):
'''COND1 : E_FUNC tIs unknown '''
global cont
p[0] = Node("COND1","",cont,0,0)
cont = cont+1
nodo1 = Node("tIs",p[2],cont,p.lineno(2) ,p.lexpos(2))
cont = cont+1
nodo2 = Node("unknown",p[3],cont,p.lineno(3) ,p.lexpos(3))
cont = cont+1
p[0].AddHijos(p[1])
p[0].AddHijos(nodo1)
p[0].AddHijos(nodo2)
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_COND19(p):
'''COND1 : E_FUNC tIs not unknown '''
global cont
p[0] = Node("COND1","",cont,0,0)
cont = cont+1
nodo1 = Node("tIs",p[2],cont,p.lineno(2) ,p.lexpos(2))
cont = cont+1
nodo2 = Node("not",p[3],cont,p.lineno(3) ,p.lexpos(3))
cont = cont+1
nodo3 = Node("unknown",p[4],cont,p.lineno(4) ,p.lexpos(4))
cont = cont+1
p[0].AddHijos(p[1])
p[0].AddHijos(nodo1)
p[0].AddHijos(nodo2)
p[0].AddHijos(nodo3)
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_COND20(p):
'''COND1 : E_FUNC tIs null '''
global cont
p[0] = Node("COND1","",cont,0,0)
cont = cont+1
nodo1 = Node("tIs",p[2],cont,p.lineno(2) ,p.lexpos(2))
cont = cont+1
nodo2 = Node("null",p[3],cont,p.lineno(3) ,p.lexpos(3))
cont = cont+1
p[0].AddHijos(p[1])
p[0].AddHijos(nodo1)
p[0].AddHijos(nodo2)
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_COND21(p):
'''COND1 : E_FUNC tIs not null '''
global cont
p[0] = Node("COND1","",cont,0,0)
cont = cont+1
nodo1 = Node("tIs",p[2],cont,p.lineno(2) ,p.lexpos(2))
cont = cont+1
nodo2 = Node("not",p[3],cont,p.lineno(3) ,p.lexpos(3))
cont = cont+1
nodo3 = Node("null",p[4],cont,p.lineno(4) ,p.lexpos(4))
cont = cont+1
p[0].AddHijos(p[1])
p[0].AddHijos(nodo1)
p[0].AddHijos(nodo2)
p[0].AddHijos(nodo3)
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_COND22(p):
'''COND1 : E_FUNC isNull '''
global cont
p[0] = Node("COND1","",cont,0,0)
cont = cont+1
nodo1 = Node("isNull",p[2],cont,p.lineno(2) ,p.lexpos(2))
cont = cont+1
p[0].AddHijos(p[1])
p[0].AddHijos(nodo1)
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_COND23(p):
'''COND1 : E_FUNC notNull '''
global cont
p[0] = Node("COND1","",cont,0,0)
cont = cont+1
nodo1 = Node("notNull",p[2],cont,p.lineno(2) ,p.lexpos(2))
cont = cont+1
p[0].AddHijos(p[1])
p[0].AddHijos(nodo1)
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_COND24(p):
    '''COND1 : substr parAbre E_FUNC coma E_FUNC coma E_FUNC parCierra igual E_FUNC '''
    # substr(s, start, len) = expr.
    global cont
    p[0] = Node("COND1","",cont,0,0)
    cont = cont+1
    nodo1 = Node("substr",p[1],cont,p.lineno(1) ,p.lexpos(1))
    cont = cont+1
    # Use a second variable for the '=' node.  The original reassigned nodo1,
    # which dropped the 'substr' node from the tree and put '=' first; the
    # child order below matches the equivalent substring rule p_COND4.
    nodo2 = Node("igual",p[9],cont,p.lineno(9) ,p.lexpos(9))
    cont = cont+1
    p[0].AddHijos(nodo1)
    p[0].AddHijos(p[3])
    p[0].AddHijos(p[5])
    p[0].AddHijos(p[7])
    p[0].AddHijos(nodo2)
    p[0].AddHijos(p[10])
    lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_COND25(p):
'''COND1 : E_FUNC tILike cadenaLike '''
global cont
p[0] = Node("COND1","",cont,0,0)
cont = cont+1
nodo1 = Node("tIlike",p[2],cont,p.lineno(2) ,p.lexpos(2))
cont = cont+1
nodo2 = Node("cadenaLike",p[3],cont,p.lineno(3) ,p.lexpos(3))
cont = cont+1
p[0].AddHijos(p[1])
p[0].AddHijos(nodo1)
p[0].AddHijos(nodo2)
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_COND26(p):
'''COND1 : E_FUNC like cadenaLike '''
global cont
p[0] = Node("COND1","",cont,0,0)
cont = cont+1
nodo1 = Node("like",p[2],cont,p.lineno(2) ,p.lexpos(2))
cont = cont+1
nodo2 = Node("cadenaLike",p[3],cont,p.lineno(3) ,p.lexpos(3))
cont = cont+1
p[0].AddHijos(p[1])
p[0].AddHijos(nodo1)
p[0].AddHijos(nodo2)
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_COND27(p):
'''COND1 : E_FUNC tSimilar tTo E_FUNC '''
global cont
p[0] = Node("COND1","",cont,0,0)
cont = cont+1
nodo1 = Node("tSimilar",p[2],cont,p.lineno(2) ,p.lexpos(2))
cont = cont+1
nodo2 = Node("tTo",p[3],cont,p.lineno(3) ,p.lexpos(3))
cont = cont+1
p[0].AddHijos(p[1])
p[0].AddHijos(nodo1)
p[0].AddHijos(nodo2)
p[0].AddHijos(p[4])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_OPERATOR(p):
'''OPERATOR : menor
| mayor
| menorIgual
| mayorIgual
'''
global cont
p[0] = Node("OPERATOR","",cont,0,0)
cont = cont+1
nodo1 = Node(str(p[1]),"\\"+str(p[1]),cont,p.lineno(1) ,p.lexpos(1))
cont = cont+1
p[0].AddHijos(nodo1)
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_OPERATOR1(p):
'''OPERATOR : igual
| diferente
| notEqual
'''
global cont
p[0] = Node("OPERATOR","",cont,0,0)
cont = cont+1
nodo1 = Node(str(p[1]),p[1],cont,p.lineno(1) ,p.lexpos(1))
cont = cont+1
p[0].AddHijos(nodo1)
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_EXPR_GROUPBY( p ):
'''EXPR_GROUPBY : group by LISTA_EXP'''
global cont
p[0] = Node("EXPR_GROUPBY","",cont,0,0)
cont = cont+1
nodo1 = Node("group",p[1],cont,p.lineno(1) ,p.lexpos(1))
cont = cont+1
nodo2 = Node("by",p[2],cont,p.lineno(2) ,p.lexpos(2))
cont = cont+1
p[0].AddHijos(nodo1)
p[0].AddHijos(nodo2)
p[0].AddHijos(p[3])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_EXPR_HAVING(p):
'''EXPR_HAVING : having E_FUNC '''
global cont
p[0] = Node("EXPR_HAVING","",cont,0,0)
cont = cont+1
nodo1 = Node("having",p[1],cont,p.lineno(1) ,p.lexpos(1))
cont = cont+1
p[0].AddHijos(nodo1)
p[0].AddHijos(p[2])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_EXPR_E_FUNC( p ):
    '''E_FUNC : EXPR_AGREGACION
               | EXPR_MATHS
               | EXPR_TRIG
               | EXPR_BINARIAS
               | EXPR_FECHA
               | E '''
    # E_FUNC is a pure alias nonterminal: forward the child node unchanged.
    nodo = p[1]
    p[0] = nodo
    lista.append(str(recorrerGramatica(nodo, 0)) + "\n")
def p_EXPR_ORDERBY( p ):
'''EXPR_ORDERBY : order by LIST_ORDERBY'''
global cont
p[0] = Node("EXPR_ORDERBY","",cont,0,0)
cont = cont+1
nodo1 = Node("order",p[1],cont,p.lineno(1) ,p.lexpos(1))
cont = cont+1
nodo2 = Node("by",p[2],cont,p.lineno(2) ,p.lexpos(2))
cont = cont+1
p[0].AddHijos(nodo1)
p[0].AddHijos(nodo2)
p[0].AddHijos(p[3])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_LIST_ORDERBY(p):
    '''LIST_ORDERBY : LIST_ORDERBY coma LIST_ORDERBY_1'''
    # Extend the ORDER BY list with the next sort term.
    acumulado = p[1]
    acumulado.AddHijos(p[3])
    p[0] = acumulado
    lista.append(str(recorrerGramatica(acumulado, 0)) + "\n")
def p_LIST_ORDERBY1(p):
'''LIST_ORDERBY : LIST_ORDERBY_1'''
global cont
p[0] = Node("LIST_ORDERBY","",cont,0,0)
cont = cont+1
p[0].AddHijos(p[1])
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_LIST_ORDERBY_p1(p):
'''LIST_ORDERBY_1 : E asc nulls first
| E asc nulls last
| E desc nulls first
| E desc nulls last
'''
global cont
p[0] = Node("LIST_ORDERBY_1","",cont,0,0)
cont = cont+1
nodo1 = Node(str(p[2]),p[2],cont,p.lineno(2) ,p.lexpos(2))
cont = cont+1
nodo2 = Node("nulls",p[3],cont,p.lineno(3) ,p.lexpos(3))
cont = cont+1
nodo3 = Node(str(p[4]),p[4],cont,p.lineno(4) ,p.lexpos(4))
cont = cont+1
p[0].AddHijos(p[1])
p[0].AddHijos(nodo1)
p[0].AddHijos(nodo2)
p[0].AddHijos(nodo3)
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_LIST_ORDERBY_p2(p):
'''LIST_ORDERBY_1 : E asc '''
global cont
p[0] = Node("LIST_ORDERBY_1","",cont,0,0)
cont = cont+1
nodo1 = Node("asc",p[2],cont,p.lineno(2) ,p.lexpos(2))
cont = cont+1
p[0].AddHijos(p[1])
p[0].AddHijos(nodo1)
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_LIST_ORDERBY_p3(p):
'''LIST_ORDERBY_1 : E desc '''
global cont
p[0] = Node("LIST_ORDERBY_1","",cont,0,0)
cont = cont+1
nodo1 = Node("desc",p[2],cont,p.lineno(2) ,p.lexpos(2))
cont = cont+1
p[0].AddHijos(p[1])
p[0].AddHijos(nodo1)
lista.append(str(recorrerGramatica(p[0],0))+"\n")
def p_LIST_ORDERBY_p4(p):
    '''LIST_ORDERBY_1 : E '''
    # Bare expression element: default ordering, no modifiers.
    global cont
    item = Node("LIST_ORDERBY_1", "", cont, 0, 0)
    cont += 1
    item.AddHijos(p[1])
    p[0] = item
    lista.append(str(recorrerGramatica(item, 0)) + "\n")
def p_LIST_ORDERBY_p5(p):
    '''LIST_ORDERBY_1 : E nulls first
    | E nulls last '''
    # Element with a nulls placement but no explicit direction.
    global cont
    item = Node("LIST_ORDERBY_1", "", cont, 0, 0)
    cont += 1
    nulls_leaf = Node("nulls", p[2], cont, p.lineno(2), p.lexpos(2))
    cont += 1
    placement_leaf = Node(str(p[3]), p[3], cont, p.lineno(3), p.lexpos(3))
    cont += 1
    for child in (p[1], nulls_leaf, placement_leaf):
        item.AddHijos(child)
    p[0] = item
    lista.append(str(recorrerGramatica(item, 0)) + "\n")
def p_EXPR_LIMIT1(p):
    '''EXPR_LIMIT : limit E'''
    # LIMIT with an expression count.
    global cont
    p[0] = Node("EXPR_LIMIT", "", cont, 0, 0)
    cont += 1
    limit_leaf = Node("limit", p[1], cont, p.lineno(1), p.lexpos(1))
    cont += 1
    p[0].AddHijos(limit_leaf)
    p[0].AddHijos(p[2])
    lista.append(str(recorrerGramatica(p[0], 0)) + "\n")
def p_EXPR_LIMIT2(p):
    '''EXPR_LIMIT : limit all'''
    # LIMIT ALL: both tokens become keyword leaves.
    global cont
    p[0] = Node("EXPR_LIMIT", "", cont, 0, 0)
    cont += 1
    limit_leaf = Node("limit", p[1], cont, p.lineno(1), p.lexpos(1))
    cont += 1
    all_leaf = Node("all", p[2], cont, p.lineno(2), p.lexpos(2))
    cont += 1
    for child in (limit_leaf, all_leaf):
        p[0].AddHijos(child)
    lista.append(str(recorrerGramatica(p[0], 0)) + "\n")
def p_EXPR_LIMIT3(p):
    '''EXPR_LIMIT : limit all offset E'''
    # LIMIT ALL OFFSET <expr>.  Leaves are created in token order so the
    # global `cont` numbering stays consistent with the rest of the parser.
    global cont
    p[0] = Node("EXPR_LIMIT", "", cont, 0, 0)
    cont += 1
    limit_leaf = Node("limit", p[1], cont, p.lineno(1), p.lexpos(1))
    cont += 1
    all_leaf = Node("all", p[2], cont, p.lineno(2), p.lexpos(2))
    cont += 1
    offset_leaf = Node("offset", p[3], cont, p.lineno(3), p.lexpos(3))
    cont += 1
    for child in (limit_leaf, all_leaf, offset_leaf, p[4]):
        p[0].AddHijos(child)
    lista.append(str(recorrerGramatica(p[0], 0)) + "\n")
def p_EXPR_LIMIT4(p):
    '''EXPR_LIMIT : limit E offset E'''
    # LIMIT <expr> OFFSET <expr>: keyword leaves interleaved with the
    # two expression subtrees, preserving source order.
    global cont
    p[0] = Node("EXPR_LIMIT", "", cont, 0, 0)
    cont += 1
    limit_leaf = Node("limit", p[1], cont, p.lineno(1), p.lexpos(1))
    cont += 1
    offset_leaf = Node("offset", p[3], cont, p.lineno(3), p.lexpos(3))
    cont += 1
    for child in (limit_leaf, p[2], offset_leaf, p[4]):
        p[0].AddHijos(child)
    lista.append(str(recorrerGramatica(p[0], 0)) + "\n")
# <<<<<<<<<<<<<<<<<<<<<<<<<<<<< FIN DE LAS PRODUCCIONES <<<<<<<<<<<<<<<<<<<<<<<<<<<<<
def p_error(t):
    """PLY syntax-error callback.

    PLY invokes this with ``t is None`` when the error occurs at end of
    input, so the token must be guarded before reading ``t.value``; the
    original crashed with AttributeError in that case.  ``str()`` also
    protects against non-string token values.
    """
    if t is None:
        print("Syntax error in input! Unexpected end of input")
        return
    print("Syntax error in input!" + str(t.value))
# <<<<<<<<<<<<<<<<<<<<<<<<<<<<< FIN DE LAS PRODUCCIONES <<<<<<<<<<<<<<<<<<<<<<<<<<<<<
import ply.yacc as yacc
import sys
from graphviz import render
from graphviz import Source
from graphviz import Digraph
# Capture the interpreter's current recursion limit; the AST walkers
# below (recorrerNodo / recorrerGramatica) are recursive, so this is
# printed as a debugging aid only — the limit itself is not changed.
limit = sys.getrecursionlimit()
print(limit)
def recorrerNodo(raiz):
    """Return the Graphviz statements (labels and edges) for the subtree at raiz.

    For every child whose ``Valor`` is not None, emits a label statement for
    the parent, a label for the child, and the connecting edge, then recurses
    into the child regardless of its ``Valor``.

    The original accumulated with ``+=`` on a string (quadratic); this builds
    a list and joins once.
    """
    parts = []
    for hijo in raiz.getHijos():
        if hijo.Valor is not None:
            parts.append('"' + str(raiz.idNod) + '" [label="' + str(raiz.Etiqueta) + '"]\n')
            parts.append('"' + str(hijo.idNod) + '" [label="' + str(hijo.Valor) + '"]\n')
            parts.append('"' + str(raiz.idNod) + '" -> "' + str(hijo.idNod) + '"\n')
        parts.append(recorrerNodo(hijo))
    return "".join(parts)
def contactenar(aux):
    """Append ``aux`` onto the module-level ``concat`` accumulator."""
    global concat
    concat = concat + aux
def recorrerGramatica(raiz, cont):
    """Render the production rooted at ``raiz`` in BNF-like notation.

    A child with an empty ``Valor`` is a non-terminal and is shown as
    ``<Etiqueta>`` (recursing only when ``cont > 0``); a child with a
    non-empty ``Valor`` is a terminal shown as ``<Valor>``.
    """
    parts = ["\n", "<" + str(raiz.Etiqueta) + "> ::= "]
    for hijo in raiz.getHijos():
        if raiz.Valor == "" and hijo.Valor == "":
            parts.append(" <" + str(hijo.Etiqueta) + "> ")
            if cont > 0:
                parts.append(recorrerGramatica(hijo, cont + 1))
        elif hijo.Valor != "":
            parts.append(" <" + str(hijo.Valor) + "> ")
    return "".join(parts)
def GraficarAST(raiz):
    """Render the AST rooted at ``raiz`` to 'arbol.jpg' with Graphviz.

    The graph is laid out top-to-bottom and written without opening a
    viewer (``view=False``).
    """
    ast = Digraph(
        'AST',
        filename='arbol.jpg',
        node_attr={
            'color': 'black',
            'fillcolor': '#F5BDA2',
            'style': 'filled',
            'shape': 'Mrecord',
        },
    )
    ast.attr(rankdir='TB')
    ast.body.append(recorrerNodo(raiz))
    ast.render('arbol', format='jpg', view=False)
def gramaticaBNF():
    """Return the accumulated grammar report, newest production first.

    ``lista`` is filled as reductions happen (bottom-up), so the report is
    emitted in reverse.  The original built the string with ``+=`` in a
    loop (quadratic); a single ``join`` does the same in linear time.
    """
    return "".join(reversed(lista))
def ReporteGramatical():
    """Write the accumulated BNF grammar report to gramaticaDinamico.txt.

    Uses a context manager so the file handle is closed even if the
    write raises (the original left the handle open on failure).
    """
    with open("gramaticaDinamico.txt", "w") as file:
        file.write(gramaticaBNF())
def analizador(input):
    """Tokenize and parse ``input`` SQL text; return the parser's result.

    Resets the per-run production list ``lista`` and stores the raw text
    in the global ``con``.  The original bound ``lex.lex()`` to an unused
    local and then immediately rebuilt the lexer; a single case-insensitive
    build suffices.  The local result no longer shadows the function name.
    """
    global con
    global lista
    lista = []
    # Build the (module-default) lexer case-insensitively, then the parser.
    lex.lex(reflags=re.IGNORECASE)
    parser = yacc.yacc()
    con = input
    resultado = parser.parse(input)
    return resultado
#nod =analizador("a b c d")
#print(recorrerarbol(nod))
| 28.620172 | 144 | 0.551273 | 28,198 | 179,334 | 3.455103 | 0.03057 | 0.102631 | 0.090437 | 0.113911 | 0.853141 | 0.83794 | 0.824351 | 0.811079 | 0.799306 | 0.791454 | 0 | 0.060684 | 0.238243 | 179,334 | 6,265 | 145 | 28.624741 | 0.652497 | 0.12363 | 0 | 0.773766 | 0 | 0.000427 | 0.07849 | 0.00059 | 0.000641 | 0 | 0 | 0.00016 | 0 | 1 | 0.079897 | false | 0 | 0.001923 | 0.000214 | 0.085238 | 0.001282 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
a85a6898ff58d690a517252b0205cd94ab5a4c57 | 36,740 | py | Python | Python Programs/GPA.CALC.py | Anshhdeep/Hacktoberfest2020-Expert | 8745380deb626d1a06faf5a60ca37081aaeab426 | [
"MIT"
] | 77 | 2020-10-01T10:06:59.000Z | 2021-11-08T08:57:18.000Z | Python Programs/GPA.CALC.py | Anshhdeep/Hacktoberfest2020-Expert | 8745380deb626d1a06faf5a60ca37081aaeab426 | [
"MIT"
] | 46 | 2020-09-27T04:55:36.000Z | 2021-05-14T18:49:06.000Z | Python Programs/GPA.CALC.py | Anshhdeep/Hacktoberfest2020-Expert | 8745380deb626d1a06faf5a60ca37081aaeab426 | [
"MIT"
] | 327 | 2020-09-26T17:06:03.000Z | 2021-10-09T06:04:39.000Z | #Initalizing variables
# Initializing variables
classgradepregpa = 0
wasithonorsAPregular = ""
gpafclass = 0
# Not gonna do half credits cause that would take literal years
numofclass7sem1 = int(input("How many High School Courses did you take in the first semester of seventh grade?: "))
numofclass7sem2 = int(input("How many High School Courses did you take in the second semester of seventh grade?: "))
numofclass8sem1 = int(input("How many High School Courses did you take in the first semester of eighth grade?: "))
numofclass8sem2 = int(input("How many High School Courses did you take in the second semester of eighth grade?: "))
numofclass9sem1 = int(input("How many High School Courses did you take in the first semester of ninth grade?: "))
numofclass9sem2 = int(input("How many High School Courses did you take in the second semester of ninth grade?: "))
numofclass10sem1 = int(input("How many High School Courses did you take in the first semester of tenth grade?: "))
# BUG FIX: the original chained assignment
# (numofclass10sem2 = numofclass10sem1 = int(input(...)))
# overwrote the tenth-grade first-semester count with the
# second-semester answer.
numofclass10sem2 = int(input("How many High School Courses did you take in the second semester of tenth grade?: "))
# For cumulative GPA
everygrade = []
# Calc first sem GPA.
# Weighting scale (same table as the original 30-branch ladder):
# AP tops out at 6.0, Honors at 5.5, Regular at 5.0; each lower grade
# bracket (97-100, 94-96, 90-93, 87-89, 84-86, 80-83, 77-79, 74-76,
# 71-73, exactly 70) subtracts another 0.2; anything else scores 0.
fstsemsevgpalist = []
for i in range(0, numofclass7sem1):
    classgradepregpa = int(input("What was your grade for a class the first semester of 7th grade? "))
    wasithonorsAPregular = input("Was the course Honors/AP/Regular? ANSWER WITH ONLY AP, H or R!")
    if wasithonorsAPregular == "AP":
        base = 6.0
    elif wasithonorsAPregular == "H":
        base = 5.5
    elif wasithonorsAPregular == "R":
        base = 5.0
    else:
        # BUG FIX: the original fell through and appended the stale
        # gpafclass left over from the previous class; skip instead.
        print("Why did you not put H, R, or AP? You just broke the program!")
        continue
    if 97 <= classgradepregpa <= 100:
        step = 0
    elif 94 <= classgradepregpa <= 96:
        step = 1
    elif 90 <= classgradepregpa <= 93:
        step = 2
    elif 87 <= classgradepregpa <= 89:
        step = 3
    elif 84 <= classgradepregpa <= 86:
        step = 4
    elif 80 <= classgradepregpa <= 83:
        step = 5
    elif 77 <= classgradepregpa <= 79:
        step = 6
    elif 74 <= classgradepregpa <= 76:
        step = 7
    elif 71 <= classgradepregpa <= 73:
        step = 8
    elif classgradepregpa == 70:
        step = 9
    else:
        step = None  # failing (or out-of-range) grade earns no GPA credit
    # round(..., 1) keeps the exact one-decimal values of the original table.
    gpafclass = 0.0 if step is None else round(base - 0.2 * step, 1)
    fstsemsevgpalist.append(gpafclass)
    everygrade.append(gpafclass)
if numofclass7sem1 > 0:
    svnfstsemav = sum(fstsemsevgpalist) / len(fstsemsevgpalist)
    svnfstsemav = str(svnfstsemav)
    print("You got a " + svnfstsemav + " the first semester of 7th grade")
# Calc second sem 7th GPA.
# Same weighting table as semester one: AP base 6.0 / Honors 5.5 /
# Regular 5.0, minus 0.2 per lower grade bracket; 0 below 70.
if numofclass7sem2 > 0:
    print("Moving on to second sem of 7th grade")
secsemsevgpalist = []
for i in range(0, numofclass7sem2):
    classgradepregpa = int(input("What was your grade for a class the second semester of seventh grade? "))
    wasithonorsAPregular = input("Was the course Honors/AP/Regular? ANSWER WITH ONLY AP, H or R!")
    if wasithonorsAPregular == "AP":
        base = 6.0
    elif wasithonorsAPregular == "H":
        base = 5.5
    elif wasithonorsAPregular == "R":
        base = 5.0
    else:
        # BUG FIX: skip unrecognized course types instead of appending
        # the previous class's stale GPA value.
        print("Why did you not put H, R, or AP? You just broke the program!")
        continue
    if 97 <= classgradepregpa <= 100:
        step = 0
    elif 94 <= classgradepregpa <= 96:
        step = 1
    elif 90 <= classgradepregpa <= 93:
        step = 2
    elif 87 <= classgradepregpa <= 89:
        step = 3
    elif 84 <= classgradepregpa <= 86:
        step = 4
    elif 80 <= classgradepregpa <= 83:
        step = 5
    elif 77 <= classgradepregpa <= 79:
        step = 6
    elif 74 <= classgradepregpa <= 76:
        step = 7
    elif 71 <= classgradepregpa <= 73:
        step = 8
    elif classgradepregpa == 70:
        step = 9
    else:
        step = None  # failing grade: no credit
    gpafclass = 0.0 if step is None else round(base - 0.2 * step, 1)
    secsemsevgpalist.append(gpafclass)
    everygrade.append(gpafclass)
if numofclass7sem2 > 0:
    svnsecsemav = sum(secsemsevgpalist) / len(secsemsevgpalist)
    svnsecsemav = str(svnsecsemav)
    print("You got a " + svnsecsemav + " the second semester of 7th grade")
# Calc first sem 8th GPA.
# Weighting table: AP base 6.0 / Honors 5.5 / Regular 5.0, minus 0.2
# per lower grade bracket; 0 below 70.
if numofclass8sem1 > 0:
    print("Moving on to the first sem of 8th grade")
fstsem8thgpalist = []
for i in range(0, numofclass8sem1):
    classgradepregpa = int(input("What was your grade for a class the first semester of eighth grade? "))
    wasithonorsAPregular = input("Was the course Honors/AP/Regular? ANSWER WITH ONLY AP, H or R!")
    if wasithonorsAPregular == "AP":
        base = 6.0
    elif wasithonorsAPregular == "H":
        base = 5.5
    elif wasithonorsAPregular == "R":
        base = 5.0
    else:
        # BUG FIX: skip unrecognized course types instead of appending
        # the previous class's stale GPA value.
        print("Why did you not put H, R, or AP? You just broke the program!")
        continue
    if 97 <= classgradepregpa <= 100:
        step = 0
    elif 94 <= classgradepregpa <= 96:
        step = 1
    elif 90 <= classgradepregpa <= 93:
        step = 2
    elif 87 <= classgradepregpa <= 89:
        step = 3
    elif 84 <= classgradepregpa <= 86:
        step = 4
    elif 80 <= classgradepregpa <= 83:
        step = 5
    elif 77 <= classgradepregpa <= 79:
        step = 6
    elif 74 <= classgradepregpa <= 76:
        step = 7
    elif 71 <= classgradepregpa <= 73:
        step = 8
    elif classgradepregpa == 70:
        step = 9
    else:
        step = None  # failing grade: no credit
    gpafclass = 0.0 if step is None else round(base - 0.2 * step, 1)
    fstsem8thgpalist.append(gpafclass)
    everygrade.append(gpafclass)
if numofclass8sem1 > 0:
    eightsemoneav = sum(fstsem8thgpalist) / len(fstsem8thgpalist)
    eightsemoneav = str(eightsemoneav)
    print("You got a " + eightsemoneav + " the first semester of 8th grade")
# Calc second sem 8th GPA.
# Weighting table: AP base 6.0 / Honors 5.5 / Regular 5.0, minus 0.2
# per lower grade bracket; 0 below 70.
if numofclass8sem2 > 0:
    print("Moving on to the second sem of 8th grade")
secsem8thgpalist = []
for i in range(0, numofclass8sem2):
    classgradepregpa = int(input("What was your grade for a class the second semester of eighth grade? "))
    wasithonorsAPregular = input("Was the course Honors/AP/Regular? ANSWER WITH ONLY AP, H or R!")
    if wasithonorsAPregular == "AP":
        base = 6.0
    elif wasithonorsAPregular == "H":
        base = 5.5
    elif wasithonorsAPregular == "R":
        base = 5.0
    else:
        # BUG FIX: skip unrecognized course types instead of appending
        # the previous class's stale GPA value.
        print("Why did you not put H, R, or AP? You just broke the program!")
        continue
    if 97 <= classgradepregpa <= 100:
        step = 0
    elif 94 <= classgradepregpa <= 96:
        step = 1
    elif 90 <= classgradepregpa <= 93:
        step = 2
    elif 87 <= classgradepregpa <= 89:
        step = 3
    elif 84 <= classgradepregpa <= 86:
        step = 4
    elif 80 <= classgradepregpa <= 83:
        step = 5
    elif 77 <= classgradepregpa <= 79:
        step = 6
    elif 74 <= classgradepregpa <= 76:
        step = 7
    elif 71 <= classgradepregpa <= 73:
        step = 8
    elif classgradepregpa == 70:
        step = 9
    else:
        step = None  # failing grade: no credit
    gpafclass = 0.0 if step is None else round(base - 0.2 * step, 1)
    secsem8thgpalist.append(gpafclass)
    everygrade.append(gpafclass)
if numofclass8sem2 > 0:
    eightsemtwoav = sum(secsem8thgpalist) / len(secsem8thgpalist)
    # BUG FIX: the original did `eightsemtwoav = str(eightsemoneav)`,
    # printing semester ONE's average here (and crashing with NameError
    # if semester one had zero classes).
    eightsemtwoav = str(eightsemtwoav)
    print("You got a " + eightsemtwoav + " the second semester of 8th grade")
# Calc first sem 9th GPA.
# Weighting table: AP base 6.0 / Honors 5.5 / Regular 5.0, minus 0.2
# per lower grade bracket; 0 below 70.
if numofclass9sem1 > 0:
    print("Moving on to the first sem of 9th grade")
fstsem9thgpalist = []
for i in range(0, numofclass9sem1):
    classgradepregpa = int(input("What was your grade for a class the first semester of ninth grade? "))
    wasithonorsAPregular = input("Was the course Honors/AP/Regular? ANSWER WITH ONLY AP, H or R!")
    if wasithonorsAPregular == "AP":
        base = 6.0
    elif wasithonorsAPregular == "H":
        base = 5.5
    elif wasithonorsAPregular == "R":
        base = 5.0
    else:
        # BUG FIX: skip unrecognized course types instead of appending
        # the previous class's stale GPA value.
        print("Why did you not put H, R, or AP? You just broke the program!")
        continue
    if 97 <= classgradepregpa <= 100:
        step = 0
    elif 94 <= classgradepregpa <= 96:
        step = 1
    elif 90 <= classgradepregpa <= 93:
        step = 2
    elif 87 <= classgradepregpa <= 89:
        step = 3
    elif 84 <= classgradepregpa <= 86:
        step = 4
    elif 80 <= classgradepregpa <= 83:
        step = 5
    elif 77 <= classgradepregpa <= 79:
        step = 6
    elif 74 <= classgradepregpa <= 76:
        step = 7
    elif 71 <= classgradepregpa <= 73:
        step = 8
    elif classgradepregpa == 70:
        step = 9
    else:
        step = None  # failing grade: no credit
    gpafclass = 0.0 if step is None else round(base - 0.2 * step, 1)
    fstsem9thgpalist.append(gpafclass)
    everygrade.append(gpafclass)
if numofclass9sem1 > 0:
    ninthsemoneav = sum(fstsem9thgpalist) / len(fstsem9thgpalist)
    ninthsemoneav = str(ninthsemoneav)
    print("You got a " + ninthsemoneav + " the first semester of 9th grade")
# Calc second sem 9th GPA.
# Weighting table: AP base 6.0 / Honors 5.5 / Regular 5.0, minus 0.2
# per lower grade bracket; 0 below 70.
if numofclass9sem2 > 0:
    print("Moving on to the second sem of 9th grade")
secsem9thgpalist = []
for i in range(0, numofclass9sem2):
    classgradepregpa = int(input("What was your grade for a class the second semester of ninth grade? "))
    wasithonorsAPregular = input("Was the course Honors/AP/Regular? ANSWER WITH ONLY AP, H or R!")
    if wasithonorsAPregular == "AP":
        base = 6.0
    elif wasithonorsAPregular == "H":
        base = 5.5
    elif wasithonorsAPregular == "R":
        base = 5.0
    else:
        # BUG FIX: skip unrecognized course types instead of appending
        # the previous class's stale GPA value.
        print("Why did you not put H, R, or AP? You just broke the program!")
        continue
    if 97 <= classgradepregpa <= 100:
        step = 0
    elif 94 <= classgradepregpa <= 96:
        step = 1
    elif 90 <= classgradepregpa <= 93:
        step = 2
    elif 87 <= classgradepregpa <= 89:
        step = 3
    elif 84 <= classgradepregpa <= 86:
        step = 4
    elif 80 <= classgradepregpa <= 83:
        step = 5
    elif 77 <= classgradepregpa <= 79:
        step = 6
    elif 74 <= classgradepregpa <= 76:
        step = 7
    elif 71 <= classgradepregpa <= 73:
        step = 8
    elif classgradepregpa == 70:
        step = 9
    else:
        step = None  # failing grade: no credit
    gpafclass = 0.0 if step is None else round(base - 0.2 * step, 1)
    secsem9thgpalist.append(gpafclass)
    everygrade.append(gpafclass)
if numofclass9sem2 > 0:
    ninthsemtwoav = sum(secsem9thgpalist) / len(secsem9thgpalist)
    ninthsemtwoav = str(ninthsemtwoav)
    print("You got a " + ninthsemtwoav + " the second semester of 9th grade")
#Calc first sem 10th GPA
def _tenth_sem1_points(grade, level):
    """Return the weighted GPA points for one tenth-grade class.

    ``grade`` is the integer percentage grade (only 70-100 inclusive earn
    points; anything else earns 0, matching the ladders' ``else`` branches)
    and ``level`` is one of "AP", "H" or "R".  AP uses the 6.0 scale;
    Honors sits 0.5 below AP and Regular 1.0 below AP, exactly as in the
    original if/elif ladders.
    """
    # (lower bound of grade bracket, AP points for that bracket)
    brackets = ((97, 6.0), (94, 5.8), (90, 5.6), (87, 5.4), (84, 5.2),
                (80, 5.0), (77, 4.8), (74, 4.6), (71, 4.4), (70, 4.2))
    offset = {"AP": 0.0, "H": 0.5, "R": 1.0}[level]
    if 70 <= grade <= 100:
        for low, ap_points in brackets:
            if grade >= low:
                return round(ap_points - offset, 1)
    return 0

if numofclass10sem1 > 0:
    print("Moving on to the first sem of 10th grade")
    fstsem10thgpalist = []
    for i in range(0, numofclass10sem1):
        classgradepregpa = int(input("What was your grade for a class the first semester of tenth grade? "))
        wasithonorsAPregular = input("Was the course Honors/AP/Regular? ANSWER WITH ONLY AP, H or R!")
        if wasithonorsAPregular in ("AP", "H", "R"):
            gpafclass = _tenth_sem1_points(classgradepregpa, wasithonorsAPregular)
        else:
            # Same behaviour as the original ladder: complain, then fall
            # through so the previous gpafclass value is appended below.
            print("Why did you not put H, R, or AP? You just broke the program!")
        fstsem10thgpalist.append(gpafclass)
        everygrade.append(gpafclass)
if numofclass10sem1 > 0:
    tenthsemoneav = sum(fstsem10thgpalist) / len(fstsem10thgpalist)
    tenthsemoneav = str(tenthsemoneav)
    print("You got a "+tenthsemoneav+" the first semester of 10th grade")
#Calc second sem 10th GPA
def _tenth_sem2_points(grade, level):
    """Return the weighted GPA points for one tenth-grade class.

    ``grade`` is the integer percentage grade (only 70-100 inclusive earn
    points; anything else earns 0, matching the ladders' ``else`` branches)
    and ``level`` is one of "AP", "H" or "R".  AP uses the 6.0 scale;
    Honors sits 0.5 below AP and Regular 1.0 below AP, exactly as in the
    original if/elif ladders.
    """
    # (lower bound of grade bracket, AP points for that bracket)
    brackets = ((97, 6.0), (94, 5.8), (90, 5.6), (87, 5.4), (84, 5.2),
                (80, 5.0), (77, 4.8), (74, 4.6), (71, 4.4), (70, 4.2))
    offset = {"AP": 0.0, "H": 0.5, "R": 1.0}[level]
    if 70 <= grade <= 100:
        for low, ap_points in brackets:
            if grade >= low:
                return round(ap_points - offset, 1)
    return 0

# BUG FIX: the guard previously tested numofclass10sem1, so second-semester
# tenth-grade classes were never collected (and the average below raised a
# NameError) whenever the first-semester count was zero.
if numofclass10sem2 > 0:
    print("Moving on to the second sem of 10th grade")
    secsem10thgpalist = []
    for i in range(0, numofclass10sem2):
        classgradepregpa = int(input("What was your grade for a class the second semester of tenth grade? "))
        wasithonorsAPregular = input("Was the course Honors/AP/Regular? ANSWER WITH ONLY AP, H or R!")
        if wasithonorsAPregular in ("AP", "H", "R"):
            gpafclass = _tenth_sem2_points(classgradepregpa, wasithonorsAPregular)
        else:
            # Same behaviour as the original ladder: complain, then fall
            # through so the previous gpafclass value is appended below.
            print("Why did you not put H, R, or AP? You just broke the program!")
        secsem10thgpalist.append(gpafclass)
        everygrade.append(gpafclass)
if numofclass10sem2 > 0:
    tenthsemtwoav = sum(secsem10thgpalist) / len(secsem10thgpalist)
    tenthsemtwoav = str(tenthsemtwoav)
    print("You got a "+tenthsemtwoav+" the second semester of 10th grade")
#There done.
# Final report: print the average for every semester that had at least one
# class, then the cumulative GPA over all classes entered.
print("Here are your GPA's")  # BUG FIX: message previously read "Here are you GPA's"
#7th
if numofclass7sem1 > 0:
    svnfstsemav = str(svnfstsemav)
    print("First Semester of Seventh Grade: " + svnfstsemav)
if numofclass7sem2 > 0:
    svnsecsemav = str(svnsecsemav)
    print("Second Semester of Seventh Grade: " + svnsecsemav)
#8th
if numofclass8sem1 > 0:
    eightsemoneav = str(eightsemoneav)
    print("First Semester of Eighth Grade: " + eightsemoneav)
if numofclass8sem2 > 0:
    eightsemtwoav = str(eightsemtwoav)
    print("Second Semester of Eighth Grade: " + eightsemtwoav)
#9th
if numofclass9sem1 > 0:
    ninthsemoneav = str(ninthsemoneav)
    print("First Semester of Ninth Grade: " + ninthsemoneav)
if numofclass9sem2 > 0:
    ninthsemtwoav = str(ninthsemtwoav)
    print("Second Semester of Ninth Grade: " + ninthsemtwoav)
#10th
if numofclass10sem1 > 0:
    tenthsemoneav = str(tenthsemoneav)
    print("First Semester of Tenth Grade: " + tenthsemoneav)
if numofclass10sem2 > 0:
    tenthsemtwoav = str(tenthsemtwoav)
    # BUG FIX: this line previously printed ninthsemtwoav under the
    # tenth-grade label.
    print("Second Semester of Tenth Grade: " + tenthsemtwoav)
# Guard against ZeroDivisionError when no classes were entered at all.
if everygrade:
    CumulativeGPA = sum(everygrade) / len(everygrade)
    CumulativeGPA = str(CumulativeGPA)
    print("Here is your CumulativeGPA: " + CumulativeGPA)
| 51.098748 | 135 | 0.674469 | 4,186 | 36,740 | 5.919732 | 0.036312 | 0.366102 | 0.027119 | 0.022276 | 0.897135 | 0.879056 | 0.858797 | 0.858797 | 0.858797 | 0.853188 | 0 | 0.077799 | 0.225422 | 36,740 | 718 | 136 | 51.169916 | 0.792958 | 0.00822 | 0 | 0.88604 | 0 | 0 | 0.087637 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.047009 | 0 | 0 | 0 | null | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
764c75d5a11c0165a374563a0b15a1e00dd459fb | 427 | py | Python | tests/globals/document/__init__.py | RelevanceAI/RelevanceAI | a0542f35153d9c842f3d2cd0955d6b07f6dfc07b | [
"Apache-2.0"
] | 21 | 2021-11-23T13:01:36.000Z | 2022-03-23T03:45:30.000Z | tests/globals/document/__init__.py | RelevanceAI/RelevanceAI | a0542f35153d9c842f3d2cd0955d6b07f6dfc07b | [
"Apache-2.0"
] | 217 | 2021-11-23T00:11:01.000Z | 2022-03-30T08:11:49.000Z | tests/globals/document/__init__.py | RelevanceAI/RelevanceAI | a0542f35153d9c842f3d2cd0955d6b07f6dfc07b | [
"Apache-2.0"
] | 4 | 2022-01-04T01:48:30.000Z | 2022-02-11T03:19:32.000Z | from tests.globals.document.error_document import *
from tests.globals.document.simple_document import *
from tests.globals.document.datetime_document import *
from tests.globals.document.dataclass_document import *
from tests.globals.document.nested_document import *
from tests.globals.document.numpy_document import *
from tests.globals.document.pandas_document import *
from tests.globals.document.vector_document import *
| 47.444444 | 55 | 0.850117 | 56 | 427 | 6.339286 | 0.232143 | 0.202817 | 0.360563 | 0.540845 | 0.749296 | 0.749296 | 0 | 0 | 0 | 0 | 0 | 0 | 0.074941 | 427 | 8 | 56 | 53.375 | 0.898734 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
766a6a923c5bcc56957b3b86a732a4bfc3412e26 | 169 | py | Python | codewars/6kyu/doha22/twisted_sum/test.py | doha22/Training_one | 0cd7cf86c7da0f6175834146296b763d1841766b | [
"MIT"
] | null | null | null | codewars/6kyu/doha22/twisted_sum/test.py | doha22/Training_one | 0cd7cf86c7da0f6175834146296b763d1841766b | [
"MIT"
] | 2 | 2019-01-22T10:53:42.000Z | 2019-01-31T08:02:48.000Z | codewars/6kyu/doha22/twisted_sum/test.py | doha22/Training_one | 0cd7cf86c7da0f6175834146296b763d1841766b | [
"MIT"
] | 13 | 2019-01-22T10:37:42.000Z | 2019-01-25T13:30:43.000Z | import unittest
from twisted_sum import compute_sum
def test_compute_sum(benchmark):
    """Check compute_sum against known values while benchmarking each call."""
    cases = ((1, 1), (2, 3))
    for argument, expected in cases:
        assert benchmark(compute_sum, argument) == expected
| 21.125 | 40 | 0.769231 | 25 | 169 | 4.96 | 0.52 | 0.322581 | 0.354839 | 0.403226 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.027972 | 0.153846 | 169 | 7 | 41 | 24.142857 | 0.839161 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.4 | 1 | 0.2 | false | 0 | 0.4 | 0 | 0.6 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
766c7bd5a0ff053b1f86fb5505126354f18b6735 | 47 | py | Python | pyQP/__init__.py | salini/pyQP | 88443d1eab67c3dba163b4acd0a85e8da1f3abe5 | [
"MIT"
] | 2 | 2019-07-12T13:22:05.000Z | 2020-04-18T02:24:08.000Z | pyQP/__init__.py | salini/pyQP | 88443d1eab67c3dba163b4acd0a85e8da1f3abe5 | [
"MIT"
] | null | null | null | pyQP/__init__.py | salini/pyQP | 88443d1eab67c3dba163b4acd0a85e8da1f3abe5 | [
"MIT"
] | null | null | null |
from pyQP import solve_qp, solve_qp_as_cvxopt
| 15.666667 | 45 | 0.851064 | 9 | 47 | 4 | 0.777778 | 0.388889 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.12766 | 47 | 2 | 46 | 23.5 | 0.878049 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
767d7c2be53f6981c4e11423dbb797f14381e20c | 164 | py | Python | gym_flp/envs/__init__.py | TejaswiniMedi/gym-flp | 97d1d1b510896ab5b871cfc9f591fbbffd830ff4 | [
"MIT"
] | 1 | 2021-05-10T01:38:21.000Z | 2021-05-10T01:38:21.000Z | gym_flp/envs/__init__.py | TejaswiniMedi/gym-flp | 97d1d1b510896ab5b871cfc9f591fbbffd830ff4 | [
"MIT"
] | 10 | 2021-03-11T15:32:12.000Z | 2021-09-20T19:30:50.000Z | gym_flp/envs/__init__.py | TejaswiniMedi/gym-flp | 97d1d1b510896ab5b871cfc9f591fbbffd830ff4 | [
"MIT"
] | 1 | 2021-05-29T10:23:46.000Z | 2021-05-29T10:23:46.000Z | from gym_flp.envs.flp_env import qapEnv
from gym_flp.envs.flp_env import fbsEnv
from gym_flp.envs.flp_env import ofpEnv
from gym_flp.envs.flp_env import stsEnv
| 32.8 | 40 | 0.829268 | 32 | 164 | 4 | 0.3125 | 0.21875 | 0.3125 | 0.4375 | 0.8125 | 0.8125 | 0.8125 | 0 | 0 | 0 | 0 | 0 | 0.121951 | 164 | 4 | 41 | 41 | 0.888889 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 10 |
76a07c1921370e91fe6c4d4c7ceda7143736c73e | 89,662 | py | Python | dwn_dlg.py | blackcatprog/VK-DCaP | d812b268b6897291e5a4cc69fb4d6b92eecf5287 | [
"MIT"
] | 3 | 2021-05-25T18:22:19.000Z | 2021-06-21T22:21:14.000Z | dwn_dlg.py | blackcatprog/VK-DCaP | d812b268b6897291e5a4cc69fb4d6b92eecf5287 | [
"MIT"
] | 1 | 2021-05-29T16:17:32.000Z | 2021-06-09T06:58:45.000Z | dwn_dlg.py | blackcatprog/VK-DCaP | d812b268b6897291e5a4cc69fb4d6b92eecf5287 | [
"MIT"
] | 2 | 2021-05-25T18:23:12.000Z | 2021-05-29T16:12:40.000Z | try:
try:
    import sys
    import vk_api
    # NOTE(review): `token` here is the project's token.py, which shadows
    # the stdlib `token` module — confirm that is intentional.
    from token import token
    from logs import *
    import os
    import requests
    from datetime import datetime as dt
    import locale
    import shutil
except (ModuleNotFoundError, ImportError) as module_error:
    # Pull the missing name out of the exception message,
    # e.g. "No module named 'vk_api'" -> "vk_api".
    mdl = str(module_error).split("'")[1]
    try:
        error(f"Отсутствует необходимый модуль {mdl}!")
    except NameError:
        # BUG FIX: `error` is provided by `logs`; if importing `logs`
        # itself is what failed, `error` is undefined here, so fall back
        # to a plain print instead of crashing with a NameError.
        print(f"Отсутствует необходимый модуль {mdl}!")
    sys.exit(1)
#set the Russian locale so locale-dependent strftime fields (e.g. %B month names) render in Russian
locale.setlocale(locale.LC_ALL, "ru_RU.UTF-8")
def auth():
    """Authorize with vk.com and store the API session globally.

    The authorized ``vk_api.VkApi`` object is bound to the module-level
    ``session`` variable used by the rest of the script.
    """
    global session
    api_session = vk_api.VkApi(token=token)
    session = api_session
    succes("Авторизация!")
def dwn_dlg(id_, count_, _photo=0, _audio=0, _music=0, _doc=0, _sd=0, _folder=0, _af=0, _ul=0, _cv=0, _all=0, _q="", _ud=0):
global getHistory
global html2
global off
html2 = ""
off = 0
auth()
SIZE_PHOTO = 0
if _q == "s":
SIZE_PHOTO = 5
elif _q == "m":
SIZE_PHOTO = 0
elif _q == "p":
SIZE_PHOTO = 2
elif _q == "q":
SIZE_PHOTO = 3
elif _q == "r":
SIZE_PHOTO = 4
elif _q == "x":
SIZE_PHOTO = 7
elif _q == "y":
SIZE_PHOTO = 8
elif _q == "w":
SIZE_PHOTO = 6
#create folder for saving dialog
try:
if _folder != 0:
os.mkdir(_folder)
except FileExistsError:
info("Папка уже существует!")
#if have parametr _af (all files), set value 1 (download) for variables downloading media files
if _af == 1:
_photo, _audio, _music, _doc, _sd = 1, 1, 1, 1, 1
if count_ > 200:
warn("Нельзя скачать более 200 сообщений. Вы можете скачать весь диалог сразу, воспользовавшись параметром -all.")
sys.exit(1)
if _all == 0:
try:
getHistory = session.method("messages.getHistory", {
"user_id": id_,
"count": count_,
"extended": 1,
})
if getHistory["count"] == 0 :
getHistory = session.method("messages.getHistory", {
"peer_id": "-" + str(id_),
"count": count_,
"extended": 1
})
if getHistory["count"] == 0 and len(str(id_)) < 5:
_id_ = "2000000000"[0:10 - len(str(id_))] + str(id_)
getHistory = session.method("messages.getHistory", {
"peer_id": _id_,
"count": count_,
"extended": 1
})
if getHistory["count"] == 0:
warn("Диалог не существует!")
sys.exit(1)
#out of program if count message in dialog/chat smaller than in you written
elif getHistory["count"] < count_:
warn(f"В диалоге нет {count_} сообщений!")
sys.exit(1)
except vk_api.exceptions.ApiError as err:
err = str(err)
#error sign in vk.com
if err[1] == "5":
error("Ошибка авторизации! Токен неправильный или срок его действия истёк!")
#not valid id user/group
elif err[1:4] == "100":
warn("Неправильный id пользователя/группы")
sys.exit(1)
succes("Получена история диалога!")
#get name dialog
type_ = getHistory["conversations"][0]["peer"]["type"]
if type_ == "chat":
name = getHistory["conversations"][0]["chat_settings"]["title"]
succes("Название диалога получено!")
elif type_ == "user":
name = getHistory["profiles"][0]["first_name"] + " " + getHistory["profiles"][0]["last_name"]
succes("Название диалога получено!")
elif type_ == "group":
name = getHistory["groups"][0]["name"]
succes("Название диалога получено!")
#getting link on user and dont't getting on chat
if getHistory["conversations"][0]["peer"]["type"] == "user":
link_user = "https://vk.com/" + getHistory["profiles"][0]["screen_name"]
succes("Ссылка на пользователя/группу получена!")
elif getHistory["conversations"][0]["peer"]["type"] == "chat":
link_user = "#"
info("Ссылка на беседу не оставляется!")
elif getHistory["conversations"][0]["peer"]["type"] == "group":
link_user = "https://vk.com/" + getHistory["groups"][0]["screen_name"]
succes("Ссылка на пользователя/группу получена!")
#getting count users
if getHistory["conversations"][0]["peer"]["type"] == "user":
users = ""
info("Количество участников недоступно в диалоге с пользователем!")
elif getHistory["conversations"][0]["peer"]["type"] == "chat":
users = f"<center style='color: #fff; padding: 10px'>Участиков: {str(getHistory['conversations'][0]['chat_settings']['members_count'])}</center>"
succes("Количество участников группы/беседы получено!")
else:
users = ""
avatar = ""
try:
if getHistory["conversations"][0]["peer"]["type"] == "user":
avatar = getHistory["profiles"][0]["photo_100"]
avatar = requests.get(avatar)
succes("Аватарка получена!")
elif getHistory["conversations"][0]["peer"]["type"] == "chat":
if "photo" in getHistory["conversations"][0]["chat_settings"].keys():
avatar = getHistory["conversations"][0]["chat_settings"]["photo"]["photo_100"]
avatar = requests.get(avatar)
succes("Аватарка получена!")
else:
pass
elif getHistory["conversations"][0]["peer"]["type"] == "groups":
avatar = getHistory["groups"][0]["photo_100"]
avatar = requests.get(avatar)
succes("Аватарка получена!")
except KeyError:
avatar = ""
info("Аватрка отсутствует!")
#download avatar
if avatar != "":
try:
if _folder == 0:
with open("avatar.jpg", "wb") as avatar_:
avatar_.write(avatar.content)
avatar = "avatar.jpg"
elif _folder != 0:
with open(f"{_folder}/avatar.jpg", "wb") as avatar_:
avatar_.write(avatar.content)
avatar = "avatar.jpg"
succes("Аватарка скачана!")
except AttributeError:
pass
#html templates
style = '''::-webkit-scrollbar {
width: 12px;
}
::-webkit-scrollbar-track {
background-color: rgba(0, 0, 0, 0.3)
}
::-webkit-scrollbar-thumb {
-webkit-border: 1px #fff;
border-radius: 20px;
background-color: #fff;
-webkit-box-shadow: inset 0 0 6px rgba(0,0,0,0.5);
}
* {
font-family: sans-serif;
}'''
if avatar != "":
ava = f"<img src='{avatar}' style='height: 100px; border-radius: 100px; margin-top: 20px; box-shadow: 0px 0px 10px #000'>"
if avatar == "":
ava = "<div style='display: inline-block; margin-top: 10px; width: 100px; height: 100px; border-radius: 200px; background: linear-gradient(to right, #00c6ff, #0072ff); box-shadow: 0px 0px 10px #000;'></div>"
#count messages
msgs_count = "<center style='font-family: sans-serif; color: #fff; padding: 10px'>Количество сообщений: " + str(getHistory["count"]) + "</center>"
html1 = f'''<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>{name}</title>
<style>
{style}
</style>
</head>
<body style='background-color: #333;'>
<div style='width: 750px; margin: 10px auto 10px auto; background: linear-gradient(to right, #3E608A, #69A3EA); border-radius: 20px'>
<center><a href='{link_user}' target='_blank'>{ava}</a></center>
<center style='padding: 10px; font-size: 20px; color: #fff'>{name}</center>
{users}
{msgs_count}
</div>
<div style='width: 750px; box-sizing: padding-box; background: linear-gradient(to right, #3E608A, #69A3EA); border-radius: 20px; margin-left: auto; margin-right: auto'>
<center style='font-size: 20px; color: #fff; padding: 10px'>Закреплённые сообщения</center>
'''
html2 = '''</div>
<div style='width: 750px; box-sizing: padding-box; margin: 10px; background: linear-gradient(to right, #3E608A, #69A3EA); border-radius: 20px; margin-left: auto; margin-right: auto'>'''
html3 = '''
</div>
</body>
</html>'''
succes("HTML шаблоны созданы!")
succes("Начало форматирования диалога!")
try:
for i in range(int(count_)):
try:
getUsers = session.method("users.get", {
"user_ids": getHistory["items"][i]["from_id"],
"fields": "photo_50"
})
except IndexError:
getUsers = session.method("users.get", {
"user_ids": getHistory[i]["from_id"],
"fields": "photo_50"})
#if have parametr _ul (user link) replace username on the link to this user
if _ul == 1:
user = f"<a href='https://vk.com/id{getHistory['items'][i]['from_id']}'>{getUsers[0]['first_name']}</a>"
else:
user = getUsers[0]["first_name"]
#getting avatars users in chat
ava_msg = ""
if type_ == "chat":
if _ud == 0:
ava_msg = getUsers[0]["photo_50"]
ava_msg = f"<div style='padding: 5px; float: left'><img src='{ava_msg}' style='border-radius: 50px; height: 50px'></div>"
#if have parametr _ud (user download) - download avatars
elif _ud == 1:
try:
if _folder != 0:
os.mkdir(f"{_folder}/avatars_msgs")
else:
os.mkdir("avatars_msgs")
except FileExistsError:
pass
ava_msg_ = getUsers[0]["photo_50"]
ava_msg_ = requests.get(ava_msg_)
name_ava_msg = getUsers[0]["first_name"] + getUsers[0]["last_name"] + "_avatar"
try:
if _folder == 0:
with open(f"avatars_msgs/{name_ava_msg}.jpg", "wb") as avatar_msg:
avatar_msg.write(ava_msg_.content)
ava_msg = f"<div style='padding: 5px; float: left'><img src='avatars_msgs/{name_ava_msg}.jpg' style='border-radius: 50px; height: 50px'></div>"
elif _folder != 0:
with open(f"{_folder}/avatars_msgs/{name_ava_msg}.jpg", "wb") as avatar_msg:
avatar_msg.write(ava_msg_.content)
ava_msg = f"<div style='padding: 5px; float: left'><img src='avatars_msgs/{name_ava_msg}.jpg' style='border-radius: 50px; height: 50px'></div>"
except AttributeError:
pass
else:
ava_msg = ""
#get time edition message
edit_msg = ""
if "update_time" in getHistory["items"][i]:
edit_msg = f"(ред. {(dt.fromtimestamp(getHistory['items'][i]['update_time']).strftime('%d %B %Y %H:%M:%S')}"
if len(getHistory["items"][i]["attachments"]) >= 1:
if getHistory["items"][i]["attachments"][0]["type"] == "photo":
sms = getHistory["items"][i]["text"]
if _photo == 0:
url_photo = getHistory["items"][i]["attachments"][0]["photo"]["sizes"][SIZE_PHOTO]["url"]
if getHistory["items"][i]["text"] != "":
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
{sms}<br><br>
<img src='{url_photo}' style='height: 150px; border-radius: 20px; box-shadow: 0px 0px 10px #000'>
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
elif getHistory["items"][i]["text"] == "":
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
<img src='{url_photo}' style='height: 150px; border-radius: 20px; box-shadow: 0px 0px 10px #000'>
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
elif _photo == 1:
url_photo = requests.get(getHistory["items"][i]["attachments"][0]["photo"]["sizes"][SIZE_PHOTO]["url"])
name_photo = f"{(dt.fromtimestamp(getHistory['items'][i]['attachments'][0]['photo']['date'])).strftime('%d.%m.%y-%H.%M.%S')}_{getUsers[0]['first_name']}-{getUsers[0]['last_name']}.jpg"
if _folder == 0:
with open(name_photo, "wb") as file:
file.write(url_photo.content)
elif _folder != 0:
with open(f"{_folder}/{name_photo}", "wb") as file:
file.write(url_photo.content)
if getHistory["items"][i]["text"] != "":
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
{sms}
<br>
<img src='{name_photo}' style='height: 150px; border-radius: 20px; box-shadow: 0px 0px 10px #000'>
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
elif getHistory["items"][i]["text"] == "":
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
<img src='{name_photo}' style='height: 150px; border-radius: 20px;'>
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
elif getHistory["items"][i]["attachments"][0]["type"] == "audio_message":
sms = getHistory["items"][i]["text"]
transcription = getHistory["items"][i]["attachments"][0]["audio_message"]["transcript"]
if _audio == 0:
url_audio = getHistory["items"][i]["attachments"][0]["audio_message"]["link_mp3"]
if getHistory["items"][i]["text"] != "":
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
{sms}
<br>
<audio src='{url_audio}' controls='controls'></audio>
<br>
<span>{transcription}</span>
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
elif getHistory["items"][i]["text"] == "":
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
<audio src='{url_audio}' controls='controls'></audio>
<br>
<span>{transcription}</span>
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
elif _audio == 1:
url_audio = requests.get(getHistory["items"][i]["attachments"][0]["audio_message"]["link_mp3"])
name_audio = f"{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d.%m.%y-%H.%M.%S')}_{getUsers[0]['first_name']}-{getUsers[0]['last_name']}.mp3"
if _folder == 0:
with open(name_audio, "wb") as file:
file.write(url_audio.content)
elif _folder != 0:
with open(f"{_folder}/{name_audio}", "wb") as file:
file.write(url_audio.content)
if getHistory["items"][i]["text"] != "":
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
{sms}
<br>
<audio src='{name_audio}' controls='controls'></audio>
<br>
<span>{transcription}</span>
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
elif getHistory["items"][i]["text"] == "":
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
<audio src='{name_audio}' controls='controls'></audio>
<br>
<span>{transcription}</span>
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
elif getHistory["items"][i]["attachments"][0]["type"] == "audio":
sms = getHistory["items"][i]["text"]
if _music == 0:
url_music = getHistory["items"][i]["attachments"][0]["audio"]["url"]
explicit = getHistory['items'][i]['attachments'][0]['audio']['is_explicit']
if explicit == True:
explicit = "🅴"
else:
explicit = ""
if getHistory["items"][i]["text"] != "":
if _cv != 0:
photo_music = getHistory['items'][0]['attachments'][0]['audio']['album']['thumb']['photo_135']
audio_title = getHistory['items'][0]['attachments'][0]['audio']['title']
artist = getHistory['items'][0]['attachments'][0]['audio']['artist']
album = getHistory['items'][0]['attachments'][0]['audio']['album']['title']
if photo_music != "":
dwn_track_photo = str(input("Скачивать обложки треков (y - да, n - нет): "))
if dwn_track_photo == "y":
title = audio_title.split()
title = ''.join(title)
title = title + "_" + artist
if _folder == 0:
photo_music = requests.get(photo_music)
with open(f"{title}.jpg", "wb") as file:
file.write(photo_music.content)
photo_music = title
else:
photo_music = requests.get(photo_music)
with open(f"{_folder}/{title}.jpg", "wb") as file:
file.write(photo_music.content)
photo_music = title
audio = f'''<div style=''>
<div style='padding: 10px; margin: auto auto auto 50px'>
<div style='padding: 10px; margin: -20px auto auto -70px'>
<img src='{photo_music}.jpg' style='border-radius: 10px; box-shadow: 0px 0px 10px #000'>
</div>
<div style='margin: -80px auto auto 80px; padding: 5px;'>
<span style=''>Трек: {audio_title}</span><br>
<span style=''>Артист: {artist}</span><br>
<span style=''>Альбом: {album}</span><br>
</div>
</div>
<audio src='{url_music}' controls='controls' style='padding: 5px'></audio>
</div>'''
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
{audio}
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
else:
audio = f"<audio src='{url_music}' controls='controls'></audio>"
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
{sms}
<br>
{audio}
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
elif getHistory["items"][i]["text"] == "":
if _cv != 0:
# Metadata for the audio attachment of the *current* message.
# BUG FIX: was indexed with ['items'][0], which always read the first
# message's attachment instead of message i being rendered by this loop.
photo_music = getHistory['items'][i]['attachments'][0]['audio']['album']['thumb']['photo_135']
audio_title = getHistory['items'][i]['attachments'][0]['audio']['title']
artist = getHistory['items'][i]['attachments'][0]['audio']['artist']
album = getHistory['items'][i]['attachments'][0]['audio']['album']['title']
if photo_music != "":
dwn_track_photo = str(input("Скачивать обложки треков (y - да, n - нет): "))
if dwn_track_photo == "y":
title = audio_title.split()
title = ''.join(title)
title = title + "_" + artist
if _folder == 0:
photo_music = requests.get(photo_music)
with open(f"{title}.jpg", "wb") as file:
file.write(photo_music.content)
photo_music = title
else:
photo_music = requests.get(photo_music)
with open(f"{_folder}/{title}.jpg", "wb") as file:
file.write(photo_music.content)
photo_music = title
audio = f'''<div style=''>
<div style='padding: 10px; margin: auto auto auto 50px'>
<div style='padding: 10px; margin: -20px auto auto -70px'>
<img src='{photo_music}.jpg' style='border-radius: 10px; box-shadow: 0px 0px 10px #000'>
</div>
<div style='margin: -80px auto auto 80px; padding: 5px;'>
<span style=''>Трек: {audio_title}</span><br>
<span style=''>Артист: {artist}</span><br>
<span style=''>Альбом: {album}</span><br>
</div>
</div>
<audio src='{url_music}' controls='controls' style='padding: 5px'></audio>
</div>'''
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
{audio}
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
else:
audio = f"<audio src='{url_music}' controls='controls'></audio>"
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
{audio}
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
elif _music == 1:
url_music = requests.get(getHistory["items"][i]["attachments"][0]["audio"]["url"])
name_music = f"{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d.%m.%y-%H.%M.%S')}_{getUsers[0]['first_name']}-{getUsers[0]['last_name']}.mp3"
if getHistory["items"][i]["text"] != "":
if _cv != 0:
# Metadata for the audio attachment of the *current* message.
# BUG FIX: was indexed with ['items'][0], which always read the first
# message's attachment instead of message i being rendered by this loop.
photo_music = getHistory['items'][i]['attachments'][0]['audio']['album']['thumb']['photo_135']
audio_title = getHistory['items'][i]['attachments'][0]['audio']['title']
artist = getHistory['items'][i]['attachments'][0]['audio']['artist']
album = getHistory['items'][i]['attachments'][0]['audio']['album']['title']
if photo_music != "":
dwn_track_photo = str(input("Скачивать обложки треков (y - да, n - нет): "))
if dwn_track_photo == "y":
title = audio_title.split()
title = ''.join(title)
title = title + "_" + artist
if _folder == 0:
photo_music = requests.get(photo_music)
with open(f"{title}.jpg", "wb") as file:
file.write(photo_music.content)
photo_music = title
else:
photo_music = requests.get(photo_music)
with open(f"{_folder}/{title}.jpg", "wb") as file:
file.write(photo_music.content)
photo_music = title
audio = f'''<div style=''>
<div style='padding: 10px; margin: auto auto auto 50px'>
<div style='padding: 10px; margin: -20px auto auto -70px'>
<img src='{photo_music}.jpg' style='border-radius: 10px; box-shadow: 0px 0px 10px #000'>
</div>
<div style='margin: -80px auto auto 80px; padding: 5px;'>
<span style=''>Трек: {audio_title}</span><br>
<span style=''>Артист: {artist}</span><br>
<span style=''>Альбом: {album}</span><br>
</div>
</div>
<audio src='{url_music}' controls='controls' style='padding: 5px'></audio>
</div>'''
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
{audio}
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
else:
audio = f"<audio src='{url_music}' controls='controls'></audio>"
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
{sms}
<br>
{audio}
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
elif getHistory["items"][i]["text"] == "":
if _cv != 0:
# Metadata for the audio attachment of the *current* message.
# BUG FIX: was indexed with ['items'][0], which always read the first
# message's attachment instead of message i being rendered by this loop.
photo_music = getHistory['items'][i]['attachments'][0]['audio']['album']['thumb']['photo_135']
audio_title = getHistory['items'][i]['attachments'][0]['audio']['title']
artist = getHistory['items'][i]['attachments'][0]['audio']['artist']
album = getHistory['items'][i]['attachments'][0]['audio']['album']['title']
if photo_music != "":
dwn_track_photo = str(input("Скачивать обложки треков (y - да, n - нет): "))
if dwn_track_photo == "y":
title = audio_title.split()
title = ''.join(title)
title = title + "_" + artist
if _folder == 0:
photo_music = requests.get(photo_music)
with open(f"{title}.jpg", "wb") as file:
file.write(photo_music.content)
photo_music = title
else:
photo_music = requests.get(photo_music)
with open(f"{_folder}/{title}.jpg", "wb") as file:
file.write(photo_music.content)
photo_music = title
audio = f'''<div style=''>
<div style='padding: 10px; margin: auto auto auto 50px'>
<div style='padding: 10px; margin: -20px auto auto -70px'>
<img src='{photo_music}.jpg' style='border-radius: 10px; box-shadow: 0px 0px 10px #000'>
</div>
<div style='margin: -80px auto auto 80px; padding: 5px;'>
<span style=''>Трек: {audio_title}</span><br>
<span style=''>Артист: {artist}</span><br>
<span style=''>Альбом: {album}</span><br>
</div>
</div>
<audio src='{url_music}' controls='controls' style='padding: 5px'></audio>
</div>'''
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
{audio}
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
else:
audio = f"<audio src='{url_music}' controls='controls'></audio>"
# Message bubble for a text-less audio message (no cover art).
# BUG FIX: the closing tag of the date <div> was written as "/div>",
# producing broken HTML; restored the missing "<".
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
{audio}
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
elif getHistory["items"][i]["attachments"][0]["type"] == "doc":
sms = getHistory["items"][i]["text"]
if _doc == 0:
url_doc = getHistory["items"][i]["attachments"][0]["doc"]["url"]
if getHistory["items"][i]["text"] != "":
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
{sms}
<br>
<a href='{url_doc}'>
ДОКУМЕНТ
</a>
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
elif getHistory["items"][i]["text"] == "":
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
<a href='{url_doc}'>
ДОКУМЕНТ
</a>
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{getUsers[0]["first_name"]}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
elif _doc == 1:
# Download the document and build its local filename.
# BUG FIXES: (1) the document itself must be fetched here — downstream code
# writes url_doc.content to disk, but url_doc was left as a plain URL string;
# (2) the extension is a plain string from the API — requests.get() was
# called on it, so doc_type was a Response object and the
# doc_type == "jpg"/"mp3"/... branches below could never match.
url_doc = requests.get(getHistory["items"][i]["attachments"][0]["doc"]["url"])
doc_type = getHistory["items"][i]["attachments"][0]["doc"]["ext"]
name_doc = f"{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d.%m.%y-%H.%M.%S')}_{getUsers[0]['first_name']}-{getUsers[0]['last_name']}.{doc_type}"
if getHistory["items"][i]["text"] != "":
sms = getHistory["items"][i]["text"]
if doc_type == "jpg" or doc_type == "png" or doc_type == "bmp":
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
{sms}
<br>
<img src='{name_doc}'>
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
# Save the downloaded document next to the script or into _folder.
# BUG FIX: the filename was the literal string "name_doc" (the f-string
# braces were missing); use the computed name_doc variable instead.
if _folder == 0:
    with open(name_doc, "wb") as file:
        file.write(url_doc.content)
elif _folder != 0:
    with open(f"{_folder}/{name_doc}", "wb") as file:
        file.write(url_doc.content)
elif doc_type == "mp3" or doc_type == "wav" or doc_type == "aac":
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
{sms}
<br>
<audio src='{name_doc}' controls='controls'></audio>
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
if _folder == 0:
with open(name_doc, "wb") as file:
file.write(url_doc.content)
elif _folder != 0:
with open(f"{_folder}/{name_doc}", "wb") as file:
file.write(url_doc.content)
elif getHistory["items"][i]["text"] == "":
if doc_type == "jpg" or doc_type == "png" or doc_type == "bmp":
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
{sms}
<br>
<img src='{name_doc}'>
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
# Save the downloaded document next to the script or into _folder.
# BUG FIX: the filename was the literal string "name_doc" (the f-string
# braces were missing); use the computed name_doc variable instead.
if _folder == 0:
    with open(name_doc, "wb") as file:
        file.write(url_doc.content)
elif _folder != 0:
    with open(f"{_folder}/{name_doc}", "wb") as file:
        file.write(url_doc.content)
elif doc_type == "mp3" or doc_type == "wav" or doc_type == "aac":
if getHistory["items"][i]["text"] != "":
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
{sms}
<br>
<audio src='{name_doc}' controls='controls'></audio>
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
# Save the downloaded document next to the script or into _folder.
# BUG FIX: the filename was the literal string "name_doc" (the f-string
# braces were missing); use the computed name_doc variable instead.
if _folder == 0:
    with open(name_doc, "wb") as file:
        file.write(url_doc.content)
elif _folder != 0:
    with open(f"{_folder}/{name_doc}", "wb") as file:
        file.write(url_doc.content)
elif getHistory["items"][i]["attachments"][0]["type"] == "poll":
question = getHistory["items"][i]["attachments"][0]["poll"]["question"]
answers = getHistory["items"][i]["attachments"][0]["poll"]["answers"]
answers_ = ""
# Render one result row per poll answer option (iterate the already-extracted
# `answers` list directly instead of re-indexing getHistory; the unused
# "rate" lookup was dropped).
# BUG FIX: the inline CSS read "float: keft: margin-left" — "keft" is a typo
# for "left" and a colon stood where a semicolon belongs, so neither rule
# ever applied.
for option in answers:
    answer = option["text"]
    vote = option["votes"]
    answers_ += f'''<div style='width: 200px; height: 20px; background-color: rgba(255, 255, 255, .3); padding: 5px; border-radius: 5px; margin: 5px'>
<span style='float: left; margin-left: 10px; font-size: 15px'>{answer} • {vote}</span>
</div>'''
poll = f'''<div style='color: #fff'>
<center style='font-weight: bold; font-size: 15px'>ОПРОС</center>
<center style='padding: 5px'>Вопрос: {question}</center>
<div>
{answers_}
</div>
<span></span>
</div>'''
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 630px; background-color: #249B87; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
{poll}
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 10px;font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
elif getHistory["items"][i]["attachments"][0]["type"] == "money_request":
comment = getHistory["items"][i]["text"]
sum_ = getHistory["items"][i]["attachments"][0]["money_request"]["total_amount"]["amount"]
finish_sum = f"{sum_[:-2:]}.{sum_[-2::]} {getHistory['items'][i]['attachments'][0]['money_request']['total_amount']['currency']['name']}"
transferred = getHistory["items"][i]["attachments"][0]["money_request"]["transferred_amount"]["amount"]
finish_post = f"{transferred[:-2:]}.{transferred[-2::]} {getHistory['items'][i]['attachments'][0]['money_request']['transferred_amount']['currency']['name']}"
# Money-request card.
# BUG FIX: the "collected" line displayed the raw minor-units amount
# (transferred); use finish_post — the formatted value computed just above,
# which was otherwise never used.
money_html = f'''<div style='color: #fff'>
<span style='font-weight: bold; font-size: 15px'>ДЕНЕЖНЫЙ ЗАПРОС</span><br>
<span>Сумма: {finish_sum}</span><br>
<span>Собрано {finish_post} из {finish_sum}</span><br>
<span>Комментарий: {comment}</span>
</div>'''
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 630px; background-color: #249B87; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
{money_html}
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 10px;font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
elif "sticker" in getHistory["items"][i]["attachments"][0]:
sticker = getHistory["items"][i]["attachments"][0]["sticker"]["images"][1]["url"]
if _sd == 0:
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
<img src='{sticker}'>
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
else:
sticker = requests.get(sticker)
stick_name = f"{getHistory['items'][i]['attachments'][0]['sticker']['product_id']}.png"
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
<img src='{stick_name}'>
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
if _folder == 0:
with open(f"{stick_name}", "wb") as file:
file.write(sticker.content)
elif _folder != 0:
with open(f"{_folder}/{stick_name}", "wb") as file:
file.write(sticker.content)
else:
if "action" in getHistory["items"][i]:
if getHistory["items"][i]["action"]["type"] == "chat_unpin_message":
name_ = getUsers[0]["first_name"] + " " + getUsers[0]["last_name"]
msg_ = ""
if "message" in getHistory["items"][i]:
msg_ = getHistory["items"][i]["action"]["message"]
html2 += f'''<div style='display: block; text-align: center; padding: 5px; margin: 0 auto'>
<span style='font-weight: bold'>{name_}</span> <span style='font-weight: liter'>открепил сообщение \"{msg_}\"</span>
</div>
<br>'''
elif getHistory["items"][i]["action"]["type"] == "chat_pin_message":
name_ = getUsers[0]["first_name"] + " " + getUsers[0]["last_name"]
msg_ = ""
if "message" in getHistory["items"][i]:
msg_ = getHistory["items"][i]["action"]["message"]
html2 += f'''<div style='display: block; text-align: center; padding: 5px; margin: 0 auto'>
<span style='font-weight: bold'>{name_}</span> <span style='font-weight: liter'>закрепил сообщение \"{msg_}\"</span>
</div>
<br>'''
html1 += f'''<div>
</div>'''
elif getHistory["items"][i]["action"]["type"] == "chat_kick_user":
name_ = getUsers[0]["first_name"] + " " + getUsers[0]["last_name"]
html2 += f'''<div style='display: block; text-align: center; padding: 5px; margin: 0 auto'>
<span style='font-weight: bold'>{name_}</span> <span style='font-weight: liter'>покинул(а) чат</span>
</div>
<br>'''
elif len(getHistory["items"][0]["fwd_messages"]) >= 1:
sms = getHistory["items"][i]["text"]
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
<code>{sms}</code><br>
<span style="color: red; font-size: 15px; font-weight: bold">ПЕРЕСЛАННЫЕ СООБЩЕНИЯ НЕ ПОДДЕРЖИВАЮТСЯ!</span>
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div><br>'''
else:
sms = getHistory["items"][i]["text"]
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
<code>{sms}</code>
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div><br>'''
# join all of the HTML template fragments into the final page
html_join = html1 + html2 + html3
succes("Создание HTML страницы завершено!")
if _folder == 0:
with open(f"{name}.html", "w", encoding="utf-8") as file:
file.write(html_join)
succes("Диалог сохранен!")
elif _folder != 0:
with open(f"{_folder}/{name}.html", "w", encoding="utf-8") as file:
file.write(html_join)
succes("Диалог сохранен!")
except:
pass
elif _all == 1:
#variables using for downloading all dialog
v1 = 0
v2 = 0
v3 = 0
v4 = 0
v5 = 0
name = ""
avatar = ""
while True:
try:
getHistory = session.method("messages.getHistory", {
"user_id": id_,
"count": 200,
"extended": 1,
"offset": off
})
if getHistory["count"] == 0 :
getHistory = session.method("messages.getHistory", {
"peer_id": "-" + str(id_),
"count": 200,
"extended": 1,
"offset": off
})
if getHistory["count"] == 0 and len(str(id_)) < 5:
_id_ = "2000000000"[0:10 - len(str(id_))] + str(id_)
getHistory = session.method("messages.getHistory", {
"peer_id": _id_,
"count": 200,
"extended": 1,
"offset": off
})
if getHistory["count"] == 0:
sys.exit(1)
# exit if the dialog contains fewer messages than the requested count
elif getHistory["count"] < count_:
warn(f"В диалоге нет {count_} сообщений!")
sys.exit(1)
except vk_api.exceptions.ApiError as err:
err = str(err)
#error sign in vk.com
if err[1] == "5":
error("Ошибка авторизации! Токен неправильный или срок его действия истёк!")
#not valid id user/group
elif err[1:4] == "100":
warn("Неправильный id пользователя/группы")
sys.exit(1)
#if log was shown, the will not be displayed
if v5 != 1:
succes("Получена история диалога!")
v5 = 1
#getting name dialog
type_ = getHistory["conversations"][0]["peer"]["type"]
#if log was shown, the will not be displayed
if v1 != 1:
if name == "":
if type_ == "chat":
name = getHistory["conversations"][0]["chat_settings"]["title"]
succes("Название диалога получено!")
elif type_ == "user":
name = getHistory["profiles"][0]["first_name"] + " " + getHistory["profiles"][0]["last_name"]
succes("Название диалога получено!")
elif type_ == "group":
name = getHistory["groups"][0]["name"]
succes("Название диалога получено!")
if getHistory["conversations"][0]["peer"]["type"] == "user":
link_user = "https://vk.com/" + getHistory["profiles"][0]["screen_name"]
succes("Ссылка на пользователя/группу получена!")
elif getHistory["conversations"][0]["peer"]["type"] == "chat":
link_user = "#"
info("Ссылка на беседу не оставляется!")
elif getHistory["conversations"][0]["peer"]["type"] == "group":
link_user = "https://vk.com/" + getHistory["groups"][0]["screen_name"]
succes("Ссылка на пользователя/группу получена!")
v1 = 1
#getting count users
#if log was shown, the will not be displayed
if v2 != 1:
if getHistory["conversations"][0]["peer"]["type"] == "user":
users = ""
succes("Количество пользователей получено!")
elif getHistory["conversations"][0]["peer"]["type"] == "chat":
users = f"<center style='color: #fff; padding: 10px'>Участиков: {str(getHistory['conversations'][0]['chat_settings']['members_count'])}</center>"
succes("Количество пользователей получено!")
v2 = 1
avatar = ""
#getting and saving avatar
#if log was shown, the will not be displayed
if v3 != 1:
# Fetch the conversation avatar; users, chats and groups keep it in
# different places. A missing photo raises KeyError -> treated as absent.
try:
    if getHistory["conversations"][0]["peer"]["type"] == "user":
        avatar = getHistory["profiles"][0]["photo_100"]
        avatar = requests.get(avatar)
        succes("Аватарка получена!")
    elif getHistory["conversations"][0]["peer"]["type"] == "chat":
        avatar = getHistory["conversations"][0]["chat_settings"]["photo"]["photo_100"]
        avatar = requests.get(avatar)
        succes("Аватарка получена!")
    # BUG FIX: was compared against "groups" — the API peer type is "group"
    # (see the name-resolution branches above), so group avatars were never
    # downloaded.
    elif getHistory["conversations"][0]["peer"]["type"] == "group":
        avatar = getHistory["groups"][0]["photo_100"]
        avatar = requests.get(avatar)
        succes("Аватарка получена!")
except KeyError:
    avatar = ""
    info("Аватарка отсутствует!")
try:
if _folder == 0:
with open("avatar.jpg", "wb") as avatar_:
avatar_.write(avatar.content)
avatar = "avatar.jpg"
elif _folder != 0:
with open(f"{_folder}/avatar.jpg", "wb") as avatar_:
avatar_.write(avatar.content)
avatar = "avatar.jpg"
succes("Аватарка скачана!")
except AttributeError:
pass
v3 = 1
#count posts
#if log was shown, the will not be displayed
if v4 != 1:
msgs_count = "<center style='font-family: sans-serif; color: #fff; padding: 10px'>Количество сообщений: " + str(getHistory["count"]) + "</center>"
v4 = 1
#html components
style = '''::-webkit-scrollbar {
width: 12px;
}
::-webkit-scrollbar-track {
background-color: rgba(0, 0, 0, 0.3)
}
::-webkit-scrollbar-thumb {
-webkit-border: 1px #fff;
border-radius: 20px;
background-color: #fff;
-webkit-box-shadow: inset 0 0 6px rgba(0,0,0,0.5);
}
* {
font-family: sans-serif;
}'''
html1 = f'''<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>{name}</title>
<style>
{style}
</style>
</head>
<body style='background-color: #333;'>
<div style='width: 750px; margin: 10px auto 10px auto; background: linear-gradient(to right, #3E608A, #69A3EA); border-radius: 20px'>
<center><a href='{link_user}' target='_blank'><img src='avatar.jpg' style='height: 100px; border-radius: 100px; margin-top: 20px; box-shadow: 0px 0px 10px #000'></a></center>
<center style='padding: 10px; font-size: 20px; color: #fff'>{name}</center>
{users}
{msgs_count}
</div>
<div style='width: 750px; box-sizing: padding-box; background: linear-gradient(to right, #3E608A, #69A3EA); border-radius: 20px; margin-left: auto; margin-right: auto'>'''
html3 = '''
</div>
</body>
</html>'''
# if this log was already shown, it will not be displayed again
# NOTE(review): dead code — v4 was already set to 1 by the message-count
# section above, so this branch never runs and the success message is never
# printed; a separate flag is presumably intended here. TODO confirm.
if v4 != 1:
succes("HTML шаблоны созданы!")
v4 = 1
try:
for i in range(int(200)):
edit_msg = ""
if "update_time" in getHistory["items"][i]:
edit_msg = (dt.fromtimestamp(getHistory["items"][i]["update_time"])).strftime('%d %B %Y %H:%M:%S')
if edit_msg != "":
edit_msg = f"(ред. {edit_msg})"
try:
getUsers = session.method("users.get", {
"user_ids": getHistory["items"][i]["from_id"],
"fields": "photo_50"
})
except IndexError:
getUsers = session.method("users.get", {
"user_ids": getHistory[i]["from_id"],
"fields": "photo_50"})
#if have parametr _ul (user link), the name user replace on the link
if _ul == 1:
user = f"<a href='https://vk.com/id{getHistory['items'][i]['from_id']}'>{getUsers[0]['first_name']}</a>"
else:
user = getUsers[0]["first_name"]
#if type dialog - chat, download user avatars
if type_ == "chat":
ava_msg = getUsers[0]["photo_50"]
ava_msg = f"<div style='padding: 5px; float: left'><img src='{ava_msg}' style='border-radius: 50px; height: 50px'></div>"
else:
ava_msg = ""
if len(getHistory["items"][i]["attachments"]) >= 1:
if getHistory["items"][i]["attachments"][0]["type"] == "photo":
sms = getHistory["items"][i]["text"]
if _photo == 0:
url_photo = getHistory["items"][i]["attachments"][0]["photo"]["sizes"][SIZE_PHOTO]["url"]
if getHistory["items"][i]["text"] != "":
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
{sms}<br><br>
<img src='{url_photo}' style='height: 200px; border-radius: 20px; box-shadow: 0px 0px 10px #000'>
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
elif getHistory["items"][i]["text"] == "":
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
<img src='{url_photo}' style='height: 200px; border-radius: 20px; box-shadow: 0px 0px 10px #000'>
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
elif _photo == 1:
url_photo = requests.get(getHistory["items"][i]["attachments"][0]["photo"]["sizes"][SIZE_PHOTO]["url"])
name_photo = f"{(dt.fromtimestamp(getHistory['items'][i]['attachments'][0]['photo']['date'])).strftime('%d.%m.%y-%H.%M.%S')}_{getUsers[0]['first_name']}-{getUsers[0]['last_name']}.jpg"
if _folder == 0:
with open(name_photo, "wb") as file:
file.write(url_photo.content)
elif _folder != 0:
with open(f"{_folder}/{name_photo}", "wb") as file:
file.write(url_photo.content)
if getHistory["items"][i]["text"] != "":
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
{sms}
<br>
<img src='{name_photo}' style='height: 200px; border-radius: 20px; box-shadow: 0px 0px 10px #000'>
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
elif getHistory["items"][i]["text"] == "":
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
<img src='{name_photo}' style='height: 200px; border-radius: 20px;'>
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
elif getHistory["items"][i]["attachments"][0]["type"] == "audio_message":
sms = getHistory["items"][i]["text"]
transcription = getHistory["items"][i]["attachments"][0]["audio_message"]["transcript"]
if _audio == 0:
url_audio = getHistory["items"][i]["attachments"][0]["audio_message"]["link_mp3"]
if getHistory["items"][i]["text"] != "":
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
{sms}
<br>
<audio src='{url_audio}' controls='controls'></audio>
<br>
<span>{transcription}</span>
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
elif getHistory["items"][i]["text"] == "":
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
<audio src='{url_audio}' controls='controls'></audio>
<br>
<span>{transcription}</span>
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
elif _audio == 1:
url_audio = requests.get(getHistory["items"][i]["attachments"][0]["audio_message"]["link_mp3"])
name_audio = f"{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d.%m.%y-%H.%M.%S')}_{getUsers[0]['first_name']}-{getUsers[0]['last_name']}.mp3"
if _folder == 0:
with open(name_audio, "wb") as file:
file.write(url_audio.content)
elif _folder != 0:
with open(f"{_folder}/{name_audio}", "wb") as file:
file.write(url_audio.content)
if getHistory["items"][i]["text"] != "":
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
{sms}
<br>
<audio src='{name_audio}' controls='controls'></audio>
<br>
<span>{transcription}</span>
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
elif getHistory["items"][i]["text"] == "":
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
<audio src='{name_audio}' controls='controls'></audio>
<br>
<span>{transcription}</span>
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
elif getHistory["items"][i]["attachments"][0]["type"] == "audio":
sms = getHistory["items"][i]["text"]
if _music == 0:
url_music = getHistory["items"][i]["attachments"][0]["audio"]["url"]
explicit = getHistory['items'][i]['attachments'][0]['audio']['is_explicit']
if explicit == True:
explicit = "🅴"
else:
explicit = ""
if getHistory["items"][i]["text"] != "":
if _cv != 0:
photo_music = getHistory['items'][0]['attachments'][0]['audio']['album']['thumb']['photo_135']
audio_title = getHistory['items'][0]['attachments'][0]['audio']['title']
artist = getHistory['items'][0]['attachments'][0]['audio']['artist']
album = getHistory['items'][0]['attachments'][0]['audio']['album']['title']
if photo_music != "":
dwn_track_photo = str(input("Скачивать обложки треков (y - да, n - нет): "))
if dwn_track_photo == "y":
title = audio_title.split()
title = ''.join(title)
title = title + "_" + artist
if _folder == 0:
photo_music = requests.get(photo_music)
with open(f"{title}.jpg", "wb") as file:
file.write(photo_music.content)
photo_music = title
else:
photo_music = requests.get(photo_music)
with open(f"{_folder}/{title}.jpg", "wb") as file:
file.write(photo_music.content)
photo_music = title
audio = f'''<div style=''>
<div style='padding: 10px; margin: auto auto auto 50px'>
<div style='padding: 10px; margin: -20px auto auto -70px'>
<img src='{photo_music}.jpg' style='border-radius: 10px; box-shadow: 0px 0px 10px #000'>
</div>
<div style='margin: -80px auto auto 80px; padding: 5px;'>
<span style=''>Трек: {audio_title}</span><br>
<span style=''>Артист: {artist}</span><br>
<span style=''>Альбом: {album}</span><br>
</div>
</div>
<audio src='{url_music}' controls='controls' style='padding: 5px'></audio>
</div>'''
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
{audio}
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
else:
audio = f"<audio src='{url_music}' controls='controls'></audio>"
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
{sms}
<br>
{audio}
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
elif getHistory["items"][i]["text"] == "":
if _cv != 0:
photo_music = getHistory['items'][0]['attachments'][0]['audio']['album']['thumb']['photo_135']
audio_title = getHistory['items'][0]['attachments'][0]['audio']['title']
artist = getHistory['items'][0]['attachments'][0]['audio']['artist']
album = getHistory['items'][0]['attachments'][0]['audio']['album']['title']
if photo_music != "":
dwn_track_photo = str(input("Скачивать обложки треков (y - да, n - нет): "))
if dwn_track_photo == "y":
title = audio_title.split()
title = ''.join(title)
title = title + "_" + artist
if _folder == 0:
photo_music = requests.get(photo_music)
with open(f"{title}.jpg", "wb") as file:
file.write(photo_music.content)
photo_music = title
else:
photo_music = requests.get(photo_music)
with open(f"{_folder}/{title}.jpg", "wb") as file:
file.write(photo_music.content)
photo_music = title
audio = f'''<div style=''>
<div style='padding: 10px; margin: auto auto auto 50px'>
<div style='padding: 10px; margin: -20px auto auto -70px'>
<img src='{photo_music}.jpg' style='border-radius: 10px; box-shadow: 0px 0px 10px #000'>
</div>
<div style='margin: -80px auto auto 80px; padding: 5px;'>
<span style=''>Трек: {audio_title}</span><br>
<span style=''>Артист: {artist}</span><br>
<span style=''>Альбом: {album}</span><br>
</div>
</div>
<audio src='{url_music}' controls='controls' style='padding: 5px'></audio>
</div>'''
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
{audio}
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
else:
audio = f"<audio src='{url_music}' controls='controls'></audio>"
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
{audio}
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
elif _music == 1:
url_music = requests.get(getHistory["items"][i]["attachments"][0]["audio"]["url"])
name_music = f"{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d.%m.%y-%H.%M.%S')}_{getUsers[0]['first_name']}-{getUsers[0]['last_name']}.mp3"
if getHistory["items"][i]["text"] != "":
if _cv != 0:
photo_music = getHistory['items'][0]['attachments'][0]['audio']['album']['thumb']['photo_135']
audio_title = getHistory['items'][0]['attachments'][0]['audio']['title']
artist = getHistory['items'][0]['attachments'][0]['audio']['artist']
album = getHistory['items'][0]['attachments'][0]['audio']['album']['title']
if photo_music != "":
dwn_track_photo = str(input("Скачивать обложки треков (y - да, n - нет): "))
if dwn_track_photo == "y":
title = audio_title.split()
title = ''.join(title)
title = title + "_" + artist
if _folder == 0:
photo_music = requests.get(photo_music)
with open(f"{title}.jpg", "wb") as file:
file.write(photo_music.content)
photo_music = title
else:
photo_music = requests.get(photo_music)
with open(f"{_folder}/{title}.jpg", "wb") as file:
file.write(photo_music.content)
photo_music = title
audio = f'''<div style=''>
<div style='padding: 10px; margin: auto auto auto 50px'>
<div style='padding: 10px; margin: -20px auto auto -70px'>
<img src='{photo_music}.jpg' style='border-radius: 10px; box-shadow: 0px 0px 10px #000'>
</div>
<div style='margin: -80px auto auto 80px; padding: 5px;'>
<span style=''>Трек: {audio_title}</span><br>
<span style=''>Артист: {artist}</span><br>
<span style=''>Альбом: {album}</span><br>
</div>
</div>
<audio src='{url_music}' controls='controls' style='padding: 5px'></audio>
</div>'''
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
{audio}
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
else:
audio = f"<audio src='{url_music}' controls='controls'></audio>"
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
{sms}
<br>
{audio}
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
elif getHistory["items"][i]["text"] == "":
if _cv != 0:
photo_music = getHistory['items'][0]['attachments'][0]['audio']['album']['thumb']['photo_135']
audio_title = getHistory['items'][0]['attachments'][0]['audio']['title']
artist = getHistory['items'][0]['attachments'][0]['audio']['artist']
album = getHistory['items'][0]['attachments'][0]['audio']['album']['title']
if photo_music != "":
dwn_track_photo = str(input("Скачивать обложки треков (y - да, n - нет): "))
if dwn_track_photo == "y":
title = audio_title.split()
title = ''.join(title)
title = title + "_" + artist
if _folder == 0:
photo_music = requests.get(photo_music)
with open(f"{title}.jpg", "wb") as file:
file.write(photo_music.content)
photo_music = title
else:
photo_music = requests.get(photo_music)
with open(f"{_folder}/{title}.jpg", "wb") as file:
file.write(photo_music.content)
photo_music = title
audio = f'''<div style=''>
<div style='padding: 10px; margin: auto auto auto 50px'>
<div style='padding: 10px; margin: -20px auto auto -70px'>
<img src='{photo_music}.jpg' style='border-radius: 10px; box-shadow: 0px 0px 10px #000'>
</div>
<div style='margin: -80px auto auto 80px; padding: 5px;'>
<span style=''>Трек: {audio_title}</span><br>
<span style=''>Артист: {artist}</span><br>
<span style=''>Альбом: {album}</span><br>
</div>
</div>
<audio src='{url_music}' controls='controls' style='padding: 5px'></audio>
</div>'''
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
{audio}
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
else:
audio = f"<audio src='{url_music}' controls='controls'></audio>"
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
{audio}
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
elif getHistory["items"][i]["attachments"][0]["type"] == "doc":
sms = getHistory["items"][i]["text"]
if _doc == 0:
url_doc = getHistory["items"][i]["attachments"][0]["doc"]["url"]
if getHistory["items"][i]["text"] != "":
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
{sms}
<br>
<a href='{url_doc}'>
ДОКУМЕНТ
</a>
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
elif getHistory["items"][i]["text"] == "":
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
<a href='{url_doc}'>
ДОКУМЕНТ
</a>
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{getUsers[0]["first_name"]}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
elif _doc == 1:
url_doc = getHistory["items"][i]["attachments"][0]["doc"]["url"]
doc_type = requests.get(getHistory["items"][i]["attachments"][0]["doc"]["ext"])
name_doc = f"{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d.%m.%y-%H.%M.%S')}_{getUsers[0]['first_name']}-{getUsers[0]['last_name']}.{doc_type}"
if getHistory["items"][i]["text"] != "":
sms = getHistory["items"][i]["text"]
if doc_type == "jpg" or doc_type == "png" or doc_type == "bmp":
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
{sms}
<br>
<img src='{name_doc}' style='height: 80px'>
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
if _folder == 0:
with open(f"name_doc", "wb") as file:
file.write(url_doc.content)
elif _folder != 0:
with open(f"{_folder}/{name_doc}", "wb") as file:
file.write(url_doc.content)
elif doc_type == "mp3" or doc_type == "wav" or doc_type == "aac":
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
{sms}
<br>
<audio src='{name_doc}' controls='controls'></audio>
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
if _folder == 0:
with open(name_doc, "wb") as file:
file.write(url_doc.content)
elif _folder != 0:
with open(f"{_folder}/{name_doc}", "wb") as file:
file.write(url_doc.content)
elif getHistory["items"][i]["text"] == "":
if doc_type == "jpg" or doc_type == "png" or doc_type == "bmp":
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
{sms}
<br>
<img src='{name_doc}' style='height: 80px'>
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
if _folder == 0:
with open(f"name_doc", "wb") as file:
file.write(url_doc.content)
elif _folder != 0:
with open(f"{_folder}/{name_doc}", "wb") as file:
file.write(url_doc.content)
elif doc_type == "mp3" or doc_type == "wav" or doc_type == "aac":
if getHistory["items"][i]["text"] != "":
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
{sms}
<br>
<audio src='{name_doc}' controls='controls'></audio>
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
if _folder == 0:
with open(f"name_doc", "wb") as file:
file.write(url_doc.content)
elif _folder != 0:
with open(f"{_folder}/{name_doc}", "wb") as file:
file.write(url_doc.content)
elif getHistory["items"][i]["attachments"][0]["type"] == "poll":
question = getHistory["items"][i]["attachments"][0]["poll"]["question"]
answers = getHistory["items"][i]["attachments"][0]["poll"]["answers"]
answers_ = ""
for j in range(len(answers)):
answer = getHistory["items"][i]["attachments"][0]["poll"]["answers"][j]["text"]
vote = getHistory["items"][i]["attachments"][0]["poll"]["answers"][j]["votes"]
rate = getHistory["items"][i]["attachments"][0]["poll"]["answers"][j]["rate"]
answer_html = f'''<div style='width: 200px; height: 20px; background-color: rgba(255, 255, 255, .3); padding: 5px; border-radius: 5px; margin: 5px'>
<span style='float: keft: margin-left: 10px; font-size: 15px'>{answer} · {vote}</span>
</div>'''
answers_ += answer_html
poll = f'''<div style='color: #fff'>
<center style='font-weight: bold; font-size: 15px'>ОПРОС</center>
<center style='padding: 5px'>Вопрос: {question}</center>
<div>
{answers_}
</div>
<span></span>
</div>'''
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 630px; background-color: #249B87; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
{poll}
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 10px;font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
elif getHistory["items"][i]["attachments"][0]["type"] == "money_request":
comment = getHistory["items"][i]["text"]
sum_ = getHistory["items"][i]["attachments"][0]["money_request"]["total_amount"]["amount"]
finish_sum = f"{sum_[:-2:]}.{sum_[-2::]} {getHistory['items'][i]['attachments'][0]['money_request']['total_amount']['currency']['name']}"
transferred = getHistory["items"][i]["attachments"][0]["money_request"]["transferred_amount"]["amount"]
finish_post = f"{transferred[:-2:]}.{transferred[-2::]} {getHistory['items'][i]['attachments'][0]['money_request']['transferred_amount']['currency']['name']}"
money_html = f'''<div style='color: #fff'>
<span style='font-weight: bold; font-size: 15px'>ДЕНЕЖНЫЙ ЗАПРОС</span><br>
<span>Сумма: {finish_sum}</span><br>
<span>Собрано {transferred} из {finish_sum}</span><br>
<span>Комментарий: {comment}</span>
</div>'''
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 630px; background-color: #249B87; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
{money_html}
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 10px;font-weight: bold; margin-left: 5px'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
else:
if "action" in getHistory["items"][i] and getHistory["items"][i]["action"]["type"] == "chat_unpin_message":
name_ = getUsers[0]["first_name"] + " " + getUsers[0]["last_name"]
msg_ = ""
if "message" in getHistory["items"][i]:
msg_ = getHistory["items"][i]["action"]["message"]
html2 += f'''<div style='display: block; text-align: center; padding: 5px; margin: 0 auto'>
<span style='weight: bold'>{name_}</span> <span style='weight: liter'>открепил сообщение {msg_}</span>
</div>
<br>'''
elif "action" in getHistory["items"][i] and getHistory["items"][i]["action"]["type"] == "chat_pin_message":
name_ = getUsers[0]["first_name"] + " " + getUsers[0]["last_name"]
msg_ = ""
if "message" in getHistory["items"][i]:
msg_ = getHistory["items"][i]["action"]["message"]
html2 += f'''<div style='display: block; text-align: center; padding: 5px; margin: 0 auto'>
<span style='weight: bold'>{name_}</span> <span style='weight: liter'>закрепил сообщение {msg_}</span>
</div>
<br>'''
elif len(getHistory["items"][0]["fwd_messages"]) >= 1:
sms = getHistory["items"][i]["text"]
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
<code>{sms}</code><br>
<span style="color: red; font-size: 15px; font-weight: bold">ПЕРЕСЛАННЫЕ СООБЩЕНИЯ НЕ ПОДДЕРЖИВАЮТСЯ!</span>
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div><br>'''
else:
sms = getHistory["items"][i]["text"]
html2 += f'''{ava_msg}<div style='display: inline-block; max-width: 600px; background-color: #D6E1E7; padding: 10px; border-radius: 20px; margin: 10px -50px auto 5px'>
<code>{sms}</code>
<span style='font-size: 10px; color: #000; font-weight: bold; margin-left: 5px'>
{user}
</span>
</div>
<div style='display: block; padding: 5px; font-size: 12px; font-weight: bold'>
{(dt.fromtimestamp(getHistory['items'][i]['date'])).strftime('%d %B %Y %H:%M:%S')} {edit_msg}
</div>
<br>'''
#offset in messages
off += 200
if getHistory["count"] < 200:
off = getHistory["count"]
elif getHistory["count"] == 0:
sys.exit(1)
#joining all fragments html yemplates
html_join = html1 + html2 + html3
succes("Создание HTML страницы завершено!")
except:
pass
if _folder == 0:
with open(f"{name}.html", "w", encoding="utf-8") as file:
file.write(html_join)
succes(f"Сохранено {off} сообщений!")
elif _folder != 0:
with open(f"{_folder}/{name}.html", "w", encoding="utf-8") as file:
file.write(html_join)
succes(f"Сохранено {off} сообщений!")
#delete folder __pycache__ after the work of the program
try:
shutil.rmtree("__pycache__")
except Exception:
pass
except KeyboardInterrupt:
warn("Выход!")
sys.exit(1) | 50.146532 | 212 | 0.530782 | 10,497 | 89,662 | 4.449938 | 0.041917 | 0.072895 | 0.066109 | 0.043673 | 0.947229 | 0.943589 | 0.939265 | 0.936075 | 0.93128 | 0.924193 | 0 | 0.039113 | 0.286554 | 89,662 | 1,788 | 213 | 50.146532 | 0.691038 | 0.0148 | 0 | 0.917067 | 0 | 0.170673 | 0.662547 | 0.091029 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.004808 | 0.00601 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
76af83faee9a48c3c28f2ff08153ed715b56cffc | 52,306 | py | Python | cosmotech_api/api/organization_api.py | Cosmo-Tech/cosmotech-api-python-client | 49ef58784605da8dd6b290cab15b363671991d8e | [
"MIT"
] | null | null | null | cosmotech_api/api/organization_api.py | Cosmo-Tech/cosmotech-api-python-client | 49ef58784605da8dd6b290cab15b363671991d8e | [
"MIT"
] | null | null | null | cosmotech_api/api/organization_api.py | Cosmo-Tech/cosmotech-api-python-client | 49ef58784605da8dd6b290cab15b363671991d8e | [
"MIT"
] | null | null | null | """
Cosmo Tech Plaform API
Cosmo Tech Platform API # noqa: E501
The version of the OpenAPI document: 0.0.11-SNAPSHOT
Contact: platform@cosmotech.com
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from cosmotech_api.api_client import ApiClient, Endpoint as _Endpoint
from cosmotech_api.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from cosmotech_api.model.organization import Organization
from cosmotech_api.model.organization_service import OrganizationService
from cosmotech_api.model.organization_user import OrganizationUser
class OrganizationApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
self.add_or_replace_users_in_organization_endpoint = _Endpoint(
settings={
'response_type': ([OrganizationUser],),
'auth': [
'oAuth2AuthCode'
],
'endpoint_path': '/organizations/{organization_id}/users',
'operation_id': 'add_or_replace_users_in_organization',
'http_method': 'POST',
'servers': None,
},
params_map={
'all': [
'organization_id',
'organization_user',
],
'required': [
'organization_id',
'organization_user',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'organization_id':
(str,),
'organization_user':
([OrganizationUser],),
},
'attribute_map': {
'organization_id': 'organization_id',
},
'location_map': {
'organization_id': 'path',
'organization_user': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client
)
self.find_all_organizations_endpoint = _Endpoint(
settings={
'response_type': ([Organization],),
'auth': [
'oAuth2AuthCode'
],
'endpoint_path': '/organizations',
'operation_id': 'find_all_organizations',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
],
'required': [],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
},
'attribute_map': {
},
'location_map': {
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.find_organization_by_id_endpoint = _Endpoint(
settings={
'response_type': (Organization,),
'auth': [
'oAuth2AuthCode'
],
'endpoint_path': '/organizations/{organization_id}',
'operation_id': 'find_organization_by_id',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'organization_id',
],
'required': [
'organization_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'organization_id':
(str,),
},
'attribute_map': {
'organization_id': 'organization_id',
},
'location_map': {
'organization_id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
# POST /organizations -> Organization. Required body parameter
# 'organization'; request body may be sent as JSON or YAML.
self.register_organization_endpoint = _Endpoint(
settings={
'response_type': (Organization,),
'auth': [
'oAuth2AuthCode'
],
'endpoint_path': '/organizations',
'operation_id': 'register_organization',
'http_method': 'POST',
'servers': None,
},
params_map={
'all': [
'organization',
],
'required': [
'organization',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'organization':
(Organization,),
},
'attribute_map': {
},
'location_map': {
'organization': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json',
'application/yaml'
]
},
api_client=api_client
)
# DELETE /organizations/{organization_id}/users -> no response body.
# Required path parameter 'organization_id'; empty accept/content-type.
self.remove_all_users_in_organization_endpoint = _Endpoint(
settings={
'response_type': None,
'auth': [
'oAuth2AuthCode'
],
'endpoint_path': '/organizations/{organization_id}/users',
'operation_id': 'remove_all_users_in_organization',
'http_method': 'DELETE',
'servers': None,
},
params_map={
'all': [
'organization_id',
],
'required': [
'organization_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'organization_id':
(str,),
},
'attribute_map': {
'organization_id': 'organization_id',
},
'location_map': {
'organization_id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [],
'content_type': [],
},
api_client=api_client
)
# DELETE /organizations/{organization_id}/users/{user_id} -> no response
# body. Two required path parameters.
self.remove_user_from_organization_endpoint = _Endpoint(
settings={
'response_type': None,
'auth': [
'oAuth2AuthCode'
],
'endpoint_path': '/organizations/{organization_id}/users/{user_id}',
'operation_id': 'remove_user_from_organization',
'http_method': 'DELETE',
'servers': None,
},
params_map={
'all': [
'organization_id',
'user_id',
],
'required': [
'organization_id',
'user_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'organization_id':
(str,),
'user_id':
(str,),
},
'attribute_map': {
'organization_id': 'organization_id',
'user_id': 'user_id',
},
'location_map': {
'organization_id': 'path',
'user_id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [],
'content_type': [],
},
api_client=api_client
)
# DELETE /organizations/{organization_id} -> no response body.
# Required path parameter 'organization_id'.
self.unregister_organization_endpoint = _Endpoint(
settings={
'response_type': None,
'auth': [
'oAuth2AuthCode'
],
'endpoint_path': '/organizations/{organization_id}',
'operation_id': 'unregister_organization',
'http_method': 'DELETE',
'servers': None,
},
params_map={
'all': [
'organization_id',
],
'required': [
'organization_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'organization_id':
(str,),
},
'attribute_map': {
'organization_id': 'organization_id',
},
'location_map': {
'organization_id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [],
'content_type': [],
},
api_client=api_client
)
# PATCH /organizations/{organization_id} -> Organization. Path parameter
# 'organization_id' plus body 'organization'; body may be JSON or YAML.
self.update_organization_endpoint = _Endpoint(
settings={
'response_type': (Organization,),
'auth': [
'oAuth2AuthCode'
],
'endpoint_path': '/organizations/{organization_id}',
'operation_id': 'update_organization',
'http_method': 'PATCH',
'servers': None,
},
params_map={
'all': [
'organization_id',
'organization',
],
'required': [
'organization_id',
'organization',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'organization_id':
(str,),
'organization':
(Organization,),
},
'attribute_map': {
'organization_id': 'organization_id',
},
'location_map': {
'organization_id': 'path',
'organization': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json',
'application/yaml'
]
},
api_client=api_client
)
# PATCH /organizations/{organization_id}/services/solutionsContainerRegistry
# -> OrganizationService. Path parameter plus 'organization_service' body;
# body may be JSON or YAML.
self.update_solutions_container_registry_by_organization_id_endpoint = _Endpoint(
settings={
'response_type': (OrganizationService,),
'auth': [
'oAuth2AuthCode'
],
'endpoint_path': '/organizations/{organization_id}/services/solutionsContainerRegistry',
'operation_id': 'update_solutions_container_registry_by_organization_id',
'http_method': 'PATCH',
'servers': None,
},
params_map={
'all': [
'organization_id',
'organization_service',
],
'required': [
'organization_id',
'organization_service',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'organization_id':
(str,),
'organization_service':
(OrganizationService,),
},
'attribute_map': {
'organization_id': 'organization_id',
},
'location_map': {
'organization_id': 'path',
'organization_service': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json',
'application/yaml'
]
},
api_client=api_client
)
# PATCH /organizations/{organization_id}/services/storage
# -> OrganizationService. Same shape as the container-registry endpoint
# above: path parameter plus 'organization_service' body (JSON or YAML).
self.update_storage_by_organization_id_endpoint = _Endpoint(
settings={
'response_type': (OrganizationService,),
'auth': [
'oAuth2AuthCode'
],
'endpoint_path': '/organizations/{organization_id}/services/storage',
'operation_id': 'update_storage_by_organization_id',
'http_method': 'PATCH',
'servers': None,
},
params_map={
'all': [
'organization_id',
'organization_service',
],
'required': [
'organization_id',
'organization_service',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'organization_id':
(str,),
'organization_service':
(OrganizationService,),
},
'attribute_map': {
'organization_id': 'organization_id',
},
'location_map': {
'organization_id': 'path',
'organization_service': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json',
'application/yaml'
]
},
api_client=api_client
)
# PATCH /organizations/{organization_id}/services/tenantCredentials.
# Request body and response are free-form JSON objects (str keys mapped
# to any JSON-compatible value), hence the open-ended type tuple.
self.update_tenant_credentials_by_organization_id_endpoint = _Endpoint(
settings={
'response_type': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},),
'auth': [
'oAuth2AuthCode'
],
'endpoint_path': '/organizations/{organization_id}/services/tenantCredentials',
'operation_id': 'update_tenant_credentials_by_organization_id',
'http_method': 'PATCH',
'servers': None,
},
params_map={
'all': [
'organization_id',
'request_body',
],
'required': [
'organization_id',
'request_body',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'organization_id':
(str,),
'request_body':
({str: (bool, date, datetime, dict, float, int, list, str, none_type)},),
},
'attribute_map': {
'organization_id': 'organization_id',
},
'location_map': {
'organization_id': 'path',
'request_body': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client
)
def add_or_replace_users_in_organization(
    self,
    organization_id,
    organization_user,
    **kwargs
):
    """Add (or replace) users in the Organization specified  # noqa: E501

    The request runs synchronously unless ``async_req=True`` is passed, in
    which case a thread is returned and ``thread.get()`` yields the result.

    Args:
        organization_id (str): the Organization identifier
        organization_user ([OrganizationUser]): the Users to add. Any User
            with the same ID is overwritten

    Keyword Args:
        async_req (bool): execute the request asynchronously. Default False.
        _return_http_data_only (bool): drop status code and headers from the
            response. Default True.
        _preload_content (bool): when False, return the raw
            urllib3.HTTPResponse without decoding it. Default True.
        _request_timeout (int/float/tuple): total timeout, or a
            (connect, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server.
            Default True.
        _check_return_type (bool): type-check data received from the server.
            Default True.
        _content_type (str/None): force the body content-type; None lets it
            be predicted from allowed content-types and body. Default None.
        _host_index (int/None): server index to use; None means read it
            from the configuration.

    Returns:
        [OrganizationUser], or the request thread when called asynchronously.
    """
    # Fill in every transport option the endpoint expects while keeping
    # anything the caller supplied explicitly.
    for _opt, _default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_content_type', None),
        ('_host_index', None),
    ):
        kwargs.setdefault(_opt, _default)
    kwargs['organization_id'] = organization_id
    kwargs['organization_user'] = organization_user
    return self.add_or_replace_users_in_organization_endpoint.call_with_http_info(**kwargs)
def find_all_organizations(
    self,
    **kwargs
):
    """List all Organizations  # noqa: E501

    The request runs synchronously unless ``async_req=True`` is passed, in
    which case a thread is returned and ``thread.get()`` yields the result.

    Keyword Args:
        async_req (bool): execute the request asynchronously. Default False.
        _return_http_data_only (bool): drop status code and headers from the
            response. Default True.
        _preload_content (bool): when False, return the raw
            urllib3.HTTPResponse without decoding it. Default True.
        _request_timeout (int/float/tuple): total timeout, or a
            (connect, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server.
            Default True.
        _check_return_type (bool): type-check data received from the server.
            Default True.
        _content_type (str/None): force the body content-type; None lets it
            be predicted from allowed content-types and body. Default None.
        _host_index (int/None): server index to use; None means read it
            from the configuration.

    Returns:
        [Organization], or the request thread when called asynchronously.
    """
    # Apply the standard transport defaults without clobbering explicit
    # caller-supplied values.
    for _opt, _default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_content_type', None),
        ('_host_index', None),
    ):
        kwargs.setdefault(_opt, _default)
    return self.find_all_organizations_endpoint.call_with_http_info(**kwargs)
def find_organization_by_id(
    self,
    organization_id,
    **kwargs
):
    """Get the details of an Organization  # noqa: E501

    The request runs synchronously unless ``async_req=True`` is passed, in
    which case a thread is returned and ``thread.get()`` yields the result.

    Args:
        organization_id (str): the Organization identifier

    Keyword Args:
        async_req (bool): execute the request asynchronously. Default False.
        _return_http_data_only (bool): drop status code and headers from the
            response. Default True.
        _preload_content (bool): when False, return the raw
            urllib3.HTTPResponse without decoding it. Default True.
        _request_timeout (int/float/tuple): total timeout, or a
            (connect, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server.
            Default True.
        _check_return_type (bool): type-check data received from the server.
            Default True.
        _content_type (str/None): force the body content-type; None lets it
            be predicted from allowed content-types and body. Default None.
        _host_index (int/None): server index to use; None means read it
            from the configuration.

    Returns:
        Organization, or the request thread when called asynchronously.
    """
    for _opt, _default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_content_type', None),
        ('_host_index', None),
    ):
        kwargs.setdefault(_opt, _default)
    kwargs['organization_id'] = organization_id
    return self.find_organization_by_id_endpoint.call_with_http_info(**kwargs)
def register_organization(
    self,
    organization,
    **kwargs
):
    """Register a new organization  # noqa: E501

    The request runs synchronously unless ``async_req=True`` is passed, in
    which case a thread is returned and ``thread.get()`` yields the result.

    Args:
        organization (Organization): the Organization to register

    Keyword Args:
        async_req (bool): execute the request asynchronously. Default False.
        _return_http_data_only (bool): drop status code and headers from the
            response. Default True.
        _preload_content (bool): when False, return the raw
            urllib3.HTTPResponse without decoding it. Default True.
        _request_timeout (int/float/tuple): total timeout, or a
            (connect, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server.
            Default True.
        _check_return_type (bool): type-check data received from the server.
            Default True.
        _content_type (str/None): force the body content-type; None lets it
            be predicted from allowed content-types and body. Default None.
        _host_index (int/None): server index to use; None means read it
            from the configuration.

    Returns:
        Organization, or the request thread when called asynchronously.
    """
    for _opt, _default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_content_type', None),
        ('_host_index', None),
    ):
        kwargs.setdefault(_opt, _default)
    kwargs['organization'] = organization
    return self.register_organization_endpoint.call_with_http_info(**kwargs)
def remove_all_users_in_organization(
    self,
    organization_id,
    **kwargs
):
    """Remove all users from the Organization specified  # noqa: E501

    The request runs synchronously unless ``async_req=True`` is passed, in
    which case a thread is returned and ``thread.get()`` yields the result.

    Args:
        organization_id (str): the Organization identifier

    Keyword Args:
        async_req (bool): execute the request asynchronously. Default False.
        _return_http_data_only (bool): drop status code and headers from the
            response. Default True.
        _preload_content (bool): when False, return the raw
            urllib3.HTTPResponse without decoding it. Default True.
        _request_timeout (int/float/tuple): total timeout, or a
            (connect, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server.
            Default True.
        _check_return_type (bool): type-check data received from the server.
            Default True.
        _content_type (str/None): force the body content-type; None lets it
            be predicted from allowed content-types and body. Default None.
        _host_index (int/None): server index to use; None means read it
            from the configuration.

    Returns:
        None, or the request thread when called asynchronously.
    """
    for _opt, _default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_content_type', None),
        ('_host_index', None),
    ):
        kwargs.setdefault(_opt, _default)
    kwargs['organization_id'] = organization_id
    return self.remove_all_users_in_organization_endpoint.call_with_http_info(**kwargs)
def remove_user_from_organization(
    self,
    organization_id,
    user_id,
    **kwargs
):
    """Remove the specified user from the given Organization  # noqa: E501

    The request runs synchronously unless ``async_req=True`` is passed, in
    which case a thread is returned and ``thread.get()`` yields the result.

    Args:
        organization_id (str): the Organization identifier
        user_id (str): the User identifier

    Keyword Args:
        async_req (bool): execute the request asynchronously. Default False.
        _return_http_data_only (bool): drop status code and headers from the
            response. Default True.
        _preload_content (bool): when False, return the raw
            urllib3.HTTPResponse without decoding it. Default True.
        _request_timeout (int/float/tuple): total timeout, or a
            (connect, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server.
            Default True.
        _check_return_type (bool): type-check data received from the server.
            Default True.
        _content_type (str/None): force the body content-type; None lets it
            be predicted from allowed content-types and body. Default None.
        _host_index (int/None): server index to use; None means read it
            from the configuration.

    Returns:
        None, or the request thread when called asynchronously.
    """
    for _opt, _default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_content_type', None),
        ('_host_index', None),
    ):
        kwargs.setdefault(_opt, _default)
    kwargs['organization_id'] = organization_id
    kwargs['user_id'] = user_id
    return self.remove_user_from_organization_endpoint.call_with_http_info(**kwargs)
def unregister_organization(
    self,
    organization_id,
    **kwargs
):
    """Unregister an organization  # noqa: E501

    The request runs synchronously unless ``async_req=True`` is passed, in
    which case a thread is returned and ``thread.get()`` yields the result.

    Args:
        organization_id (str): the Organization identifier

    Keyword Args:
        async_req (bool): execute the request asynchronously. Default False.
        _return_http_data_only (bool): drop status code and headers from the
            response. Default True.
        _preload_content (bool): when False, return the raw
            urllib3.HTTPResponse without decoding it. Default True.
        _request_timeout (int/float/tuple): total timeout, or a
            (connect, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server.
            Default True.
        _check_return_type (bool): type-check data received from the server.
            Default True.
        _content_type (str/None): force the body content-type; None lets it
            be predicted from allowed content-types and body. Default None.
        _host_index (int/None): server index to use; None means read it
            from the configuration.

    Returns:
        None, or the request thread when called asynchronously.
    """
    for _opt, _default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_content_type', None),
        ('_host_index', None),
    ):
        kwargs.setdefault(_opt, _default)
    kwargs['organization_id'] = organization_id
    return self.unregister_organization_endpoint.call_with_http_info(**kwargs)
def update_organization(
    self,
    organization_id,
    organization,
    **kwargs
):
    """Update an Organization  # noqa: E501

    The request runs synchronously unless ``async_req=True`` is passed, in
    which case a thread is returned and ``thread.get()`` yields the result.

    Args:
        organization_id (str): the Organization identifier
        organization (Organization): the new Organization details

    Keyword Args:
        async_req (bool): execute the request asynchronously. Default False.
        _return_http_data_only (bool): drop status code and headers from the
            response. Default True.
        _preload_content (bool): when False, return the raw
            urllib3.HTTPResponse without decoding it. Default True.
        _request_timeout (int/float/tuple): total timeout, or a
            (connect, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server.
            Default True.
        _check_return_type (bool): type-check data received from the server.
            Default True.
        _content_type (str/None): force the body content-type; None lets it
            be predicted from allowed content-types and body. Default None.
        _host_index (int/None): server index to use; None means read it
            from the configuration.

    Returns:
        Organization, or the request thread when called asynchronously.
    """
    for _opt, _default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_content_type', None),
        ('_host_index', None),
    ):
        kwargs.setdefault(_opt, _default)
    kwargs['organization_id'] = organization_id
    kwargs['organization'] = organization
    return self.update_organization_endpoint.call_with_http_info(**kwargs)
def update_solutions_container_registry_by_organization_id(
    self,
    organization_id,
    organization_service,
    **kwargs
):
    """Update the solutions container registry configuration for the Organization specified  # noqa: E501

    The request runs synchronously unless ``async_req=True`` is passed, in
    which case a thread is returned and ``thread.get()`` yields the result.

    Args:
        organization_id (str): the Organization identifier
        organization_service (OrganizationService): the new solutions
            container registry configuration to use

    Keyword Args:
        async_req (bool): execute the request asynchronously. Default False.
        _return_http_data_only (bool): drop status code and headers from the
            response. Default True.
        _preload_content (bool): when False, return the raw
            urllib3.HTTPResponse without decoding it. Default True.
        _request_timeout (int/float/tuple): total timeout, or a
            (connect, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server.
            Default True.
        _check_return_type (bool): type-check data received from the server.
            Default True.
        _content_type (str/None): force the body content-type; None lets it
            be predicted from allowed content-types and body. Default None.
        _host_index (int/None): server index to use; None means read it
            from the configuration.

    Returns:
        OrganizationService, or the request thread when called asynchronously.
    """
    for _opt, _default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_content_type', None),
        ('_host_index', None),
    ):
        kwargs.setdefault(_opt, _default)
    kwargs['organization_id'] = organization_id
    kwargs['organization_service'] = organization_service
    return self.update_solutions_container_registry_by_organization_id_endpoint.call_with_http_info(**kwargs)
def update_storage_by_organization_id(
    self,
    organization_id,
    organization_service,
    **kwargs
):
    """Update storage configuration for the Organization specified  # noqa: E501

    The request runs synchronously unless ``async_req=True`` is passed, in
    which case a thread is returned and ``thread.get()`` yields the result.

    Args:
        organization_id (str): the Organization identifier
        organization_service (OrganizationService): the new Storage
            configuration to use

    Keyword Args:
        async_req (bool): execute the request asynchronously. Default False.
        _return_http_data_only (bool): drop status code and headers from the
            response. Default True.
        _preload_content (bool): when False, return the raw
            urllib3.HTTPResponse without decoding it. Default True.
        _request_timeout (int/float/tuple): total timeout, or a
            (connect, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server.
            Default True.
        _check_return_type (bool): type-check data received from the server.
            Default True.
        _content_type (str/None): force the body content-type; None lets it
            be predicted from allowed content-types and body. Default None.
        _host_index (int/None): server index to use; None means read it
            from the configuration.

    Returns:
        OrganizationService, or the request thread when called asynchronously.
    """
    for _opt, _default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_content_type', None),
        ('_host_index', None),
    ):
        kwargs.setdefault(_opt, _default)
    kwargs['organization_id'] = organization_id
    kwargs['organization_service'] = organization_service
    return self.update_storage_by_organization_id_endpoint.call_with_http_info(**kwargs)
def update_tenant_credentials_by_organization_id(
    self,
    organization_id,
    request_body,
    **kwargs
):
    """Update tenant credentials for the Organization specified  # noqa: E501

    The request runs synchronously unless ``async_req=True`` is passed, in
    which case a thread is returned and ``thread.get()`` yields the result.

    Args:
        organization_id (str): the Organization identifier
        request_body ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}):
            the new Tenant Credentials to use

    Keyword Args:
        async_req (bool): execute the request asynchronously. Default False.
        _return_http_data_only (bool): drop status code and headers from the
            response. Default True.
        _preload_content (bool): when False, return the raw
            urllib3.HTTPResponse without decoding it. Default True.
        _request_timeout (int/float/tuple): total timeout, or a
            (connect, read) pair. Default None.
        _check_input_type (bool): type-check data sent to the server.
            Default True.
        _check_return_type (bool): type-check data received from the server.
            Default True.
        _content_type (str/None): force the body content-type; None lets it
            be predicted from allowed content-types and body. Default None.
        _host_index (int/None): server index to use; None means read it
            from the configuration.

    Returns:
        {str: (bool, date, datetime, dict, float, int, list, str, none_type)},
        or the request thread when called asynchronously.
    """
    for _opt, _default in (
        ('async_req', False),
        ('_return_http_data_only', True),
        ('_preload_content', True),
        ('_request_timeout', None),
        ('_check_input_type', True),
        ('_check_return_type', True),
        ('_content_type', None),
        ('_host_index', None),
    ):
        kwargs.setdefault(_opt, _default)
    kwargs['organization_id'] = organization_id
    kwargs['request_body'] = request_body
    return self.update_tenant_credentials_by_organization_id_endpoint.call_with_http_info(**kwargs)
| 36.705965 | 134 | 0.508221 | 4,687 | 52,306 | 5.40431 | 0.044378 | 0.067983 | 0.022582 | 0.02345 | 0.9212 | 0.905409 | 0.890209 | 0.88563 | 0.866522 | 0.853296 | 0 | 0.0023 | 0.409953 | 52,306 | 1,424 | 135 | 36.731742 | 0.818423 | 0.358544 | 0 | 0.689873 | 1 | 0 | 0.25009 | 0.046954 | 0 | 0 | 0 | 0 | 0 | 1 | 0.012658 | false | 0 | 0.007384 | 0 | 0.0327 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
76cd569bad8ab91584a795c6a542aba65b1ac9a2 | 1,775 | py | Python | core/migrations/0002_auto_20190606_0041.py | MubongwoNdasi/pms | 0cc5dcbc25b31e13631672e1a03c88e2ad46bc92 | [
"MIT"
] | null | null | null | core/migrations/0002_auto_20190606_0041.py | MubongwoNdasi/pms | 0cc5dcbc25b31e13631672e1a03c88e2ad46bc92 | [
"MIT"
] | 8 | 2021-03-18T22:27:44.000Z | 2022-02-10T09:18:50.000Z | core/migrations/0002_auto_20190606_0041.py | MubongwoNdasi/pms | 0cc5dcbc25b31e13631672e1a03c88e2ad46bc92 | [
"MIT"
] | 1 | 2021-09-20T06:37:41.000Z | 2021-09-20T06:37:41.000Z | # Generated by Django 2.2 on 2019-06-06 00:41
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
    """Add created_on/updated_on timestamp columns to the core models.

    Auto-generated by Django 2.2 (see header). ``created_on`` fields use a
    one-off ``timezone.now`` default with ``preserve_default=False`` purely to
    back-fill rows that already exist; afterwards ``auto_now_add`` governs.
    """
    dependencies = [
        ('core', '0001_initial'),
    ]
    operations = [
        # city: creation + last-modification timestamps.
        migrations.AddField(
            model_name='city',
            name='created_on',
            field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='added on'),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='city',
            name='updated_on',
            field=models.DateTimeField(auto_now=True, verbose_name='updated on'),
        ),
        # pharmacy: creation + last-modification timestamps.
        migrations.AddField(
            model_name='pharmacy',
            name='created_on',
            field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='added on'),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='pharmacy',
            name='updated_on',
            field=models.DateTimeField(auto_now=True, verbose_name='updated on'),
        ),
        # pharmacyuser only gains updated_on here — presumably a creation
        # timestamp already exists on it; verify against 0001_initial.
        migrations.AddField(
            model_name='pharmacyuser',
            name='updated_on',
            field=models.DateTimeField(auto_now=True, verbose_name='updated on'),
        ),
        # profile: creation + last-modification timestamps.
        migrations.AddField(
            model_name='profile',
            name='created_on',
            field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='added on'),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='profile',
            name='updated_on',
            field=models.DateTimeField(auto_now=True, verbose_name='updated on'),
        ),
    ]
| 33.490566 | 118 | 0.59831 | 183 | 1,775 | 5.612022 | 0.234973 | 0.085686 | 0.101266 | 0.184031 | 0.830574 | 0.830574 | 0.767283 | 0.767283 | 0.767283 | 0.767283 | 0 | 0.014241 | 0.287887 | 1,775 | 52 | 119 | 34.134615 | 0.798259 | 0.024225 | 0 | 0.804348 | 1 | 0 | 0.115607 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.043478 | 0 | 0.108696 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
96dd3c567c5ea20c7d4d5f57ede9e17a0c80ba43 | 5,046 | py | Python | tests/test_api.py | datahappy1/flask_mvc_github_boilerplate | ef30b993b30fe940a1643bb008b825f1e7c7f2bc | [
"MIT"
] | null | null | null | tests/test_api.py | datahappy1/flask_mvc_github_boilerplate | ef30b993b30fe940a1643bb008b825f1e7c7f2bc | [
"MIT"
] | null | null | null | tests/test_api.py | datahappy1/flask_mvc_github_boilerplate | ef30b993b30fe940a1643bb008b825f1e7c7f2bc | [
"MIT"
] | 1 | 2020-02-23T15:57:24.000Z | 2020-02-23T15:57:24.000Z | """
test api endpoints
"""
import uuid
import os
from flaskr.settings import API_BASE_ENDPOINT
from flaskr.app import APP
# Unique id per test run so parallel/repeated runs don't collide on branch names.
TEST_RUNNER_ID = uuid.uuid4()
# Scratch branch that the tests below create, exercise, and finally delete.
TEST_BRANCH_NAME = 'requests_test_{}'.format(TEST_RUNNER_ID)
# Fixture file; expected to exist under ./files relative to the current
# working directory — TODO confirm the suite is always run from tests/.
TEST_FILE_NAME = 'test_file_99d4c5aa-4a57-4e76-9962-e38ea5a54895.txt'
def test_request_create_branch():
    """POSTing a new branch (sourced from master) returns HTTP 201."""
    url = '{}/branches'.format(API_BASE_ENDPOINT)
    payload = {'branch_name_tgt': TEST_BRANCH_NAME, 'branch_name_src': 'master'}
    response = APP.test_client().post(url, data=payload)
    assert response.status_code == 201
def test_request_get_branches():
    """Listing branches returns HTTP 200."""
    url = '{}/branches'.format(API_BASE_ENDPOINT)
    response = APP.test_client().get(url)
    assert response.status_code == 200
def test_request_create_file1_upload():
    """Uploading a file as multipart form data returns HTTP 201."""
    url = '{}/branches/{}/files'.format(API_BASE_ENDPOINT, TEST_BRANCH_NAME)
    fixture_path = os.path.join(os.getcwd(), 'files', TEST_FILE_NAME)
    with open(fixture_path, 'rb') as fp:
        form = {'commit_message': 'pytest', 'uploaded_file': (fp, fp.name)}
        response = APP.test_client().post(url,
                                          content_type="multipart/form-data",
                                          data=form)
    assert response.status_code == 201
def test_request_edit_file1():
    """PUTting new raw contents for an existing file returns HTTP 200."""
    url = '{}/branches/{}/files/{}'.format(API_BASE_ENDPOINT, TEST_BRANCH_NAME, TEST_FILE_NAME)
    fixture_path = os.path.join(os.getcwd(), 'files', TEST_FILE_NAME)
    with open(fixture_path, 'rb') as fp:
        contents = fp.read()
    form = {'commit_message': 'pytest', 'file_contents': contents}
    response = APP.test_client().put(url, data=form)
    assert response.status_code == 200
def test_request_delete_file1():
    """DELETEing the uploaded file returns HTTP 200."""
    url = '{}/branches/{}/files/{}'.format(API_BASE_ENDPOINT, TEST_BRANCH_NAME, TEST_FILE_NAME)
    response = APP.test_client().delete(url, data={'commit_message': 'pytest'})
    assert response.status_code == 200
def test_request_create_file2_form():
    """Creating a file by posting its raw contents in a form returns HTTP 201."""
    url = '{}/branches/{}/files'.format(API_BASE_ENDPOINT, TEST_BRANCH_NAME)
    fixture_path = os.path.join(os.getcwd(), 'files', TEST_FILE_NAME)
    with open(fixture_path, 'rb') as fp:
        contents = fp.read()
    form = {'commit_message': 'pytest',
            'file_name': TEST_FILE_NAME,
            'file_contents': contents}
    response = APP.test_client().post(url, data=form)
    assert response.status_code == 201
def test_request_override_file2():
    """Overriding an existing file via multipart PUT returns HTTP 200."""
    url = '{}/branches/{}/files/{}'.format(API_BASE_ENDPOINT, TEST_BRANCH_NAME, TEST_FILE_NAME)
    fixture_path = os.path.join(os.getcwd(), 'files', TEST_FILE_NAME)
    with open(fixture_path, 'rb') as fp:
        form = {'commit_message': 'pytest', 'uploaded_file': (fp, fp.name)}
        response = APP.test_client().put(url,
                                         content_type="multipart/form-data",
                                         data=form)
    assert response.status_code == 200
def test_request_get_files():
    """Listing the files on the scratch branch returns HTTP 200."""
    url = '{}/branches/{}/files'.format(API_BASE_ENDPOINT, TEST_BRANCH_NAME)
    response = APP.test_client().get(url)
    assert response.status_code == 200
def test_request_delete_file2():
    """DELETEing the form-created file returns HTTP 200."""
    url = '{}/branches/{}/files/{}'.format(API_BASE_ENDPOINT, TEST_BRANCH_NAME, TEST_FILE_NAME)
    response = APP.test_client().delete(url, data={'commit_message': 'pytest'})
    assert response.status_code == 200
def test_request_delete_branch():
    """Tearing down the scratch branch returns HTTP 200."""
    url = '{}/branches/{}'.format(API_BASE_ENDPOINT, TEST_BRANCH_NAME)
    response = APP.test_client().delete(url, data={})
    assert response.status_code == 200
| 37.656716 | 110 | 0.691241 | 644 | 5,046 | 4.954969 | 0.113354 | 0.140395 | 0.100282 | 0.124099 | 0.894704 | 0.894704 | 0.864306 | 0.864306 | 0.861172 | 0.852084 | 0 | 0.014735 | 0.2065 | 5,046 | 133 | 111 | 37.93985 | 0.782218 | 0.003567 | 0 | 0.741573 | 0 | 0 | 0.106972 | 0.028287 | 0 | 0 | 0 | 0 | 0.11236 | 1 | 0.11236 | false | 0 | 0.044944 | 0 | 0.157303 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
96e656c68925ddb92507da08efb7e596a8d0d719 | 148 | py | Python | snippets - machine learning sklearn/cross-validation.py | bjfisica/MachineLearning | 20349301ae7f82cd5048410b0cf1f7a5f7d7e5a2 | [
"MIT"
] | 52 | 2019-02-15T16:37:13.000Z | 2022-02-17T18:34:30.000Z | snippets - machine learning sklearn/cross-validation.py | RodeoBlues/Complete-Data-Science-Toolkits | c5e83889e24af825ec3baed6e8198debb135f1ff | [
"MIT"
] | null | null | null | snippets - machine learning sklearn/cross-validation.py | RodeoBlues/Complete-Data-Science-Toolkits | c5e83889e24af825ec3baed6e8198debb135f1ff | [
"MIT"
] | 22 | 2019-02-25T23:52:09.000Z | 2021-09-21T03:09:35.000Z | from sklearn.cross_validation import cross_val_score
print(cross_val_score(knn, X_train, y_train, cv = 5))
print(cross_val_score(lr, X, y, cv = 4))
| 37 | 53 | 0.777027 | 28 | 148 | 3.785714 | 0.571429 | 0.226415 | 0.367925 | 0.339623 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.015152 | 0.108108 | 148 | 3 | 54 | 49.333333 | 0.787879 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.333333 | 0 | 0.333333 | 0.666667 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 1 | 0 | 8 |
96e88362a591582982d17dc7a9d2d03cf808c205 | 19,677 | py | Python | tests/test_decorators.py | Jeketam/supertokens-flask | 36a4b798f81e03caac8b0985c8c6f736b0aa186b | [
"Apache-2.0"
] | null | null | null | tests/test_decorators.py | Jeketam/supertokens-flask | 36a4b798f81e03caac8b0985c8c6f736b0aa186b | [
"Apache-2.0"
] | null | null | null | tests/test_decorators.py | Jeketam/supertokens-flask | 36a4b798f81e03caac8b0985c8c6f736b0aa186b | [
"Apache-2.0"
] | null | null | null | """
Copyright (c) 2020, VRAI Labs and/or its affiliates. All rights reserved.
This software is licensed under the Apache License, Version 2.0 (the
"License") as published by the Apache Software Foundation.
You may not use this file except in compliance with the License. You may
obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
"""
from supertokens_flask.constants import (
COOKIE_DOMAIN_CONFIG,
HOSTS_CONFIG, ACCESS_TOKEN_PATH_CONFIG, REFRESH_TOKEN_PATH_CONFIG, COOKIE_SAME_SITE_CONFIG, COOKIE_SECURE_CONFIG
)
from .utils import (
reset, setup_st, clean_st, start_st,
extract_all_cookies,
get_unix_timestamp, TEST_DRIVER_CONFIG_COOKIE_DOMAIN, TEST_DRIVER_CONFIG_ACCESS_TOKEN_PATH,
TEST_DRIVER_CONFIG_REFRESH_TOKEN_PATH, TEST_DRIVER_CONFIG_COOKIE_SAME_SITE, TEST_DRIVER_CONFIG_COOKIE_SECURE
)
from supertokens_flask import (
supertokens_middleware,
create_new_session,
SuperTokens
)
from pytest import fixture
from flask import request, Flask, jsonify, g, make_response
def setup_function(f):
    """Give each test a pristine SuperTokens state before it runs."""
    for step in (reset, clean_st, setup_st):
        step()
def teardown_function(f):
    """Drop all SuperTokens state once the test has finished."""
    for step in (reset, clean_st):
        step()
@fixture(scope='function')
def app():
    """Flask app fixture wired with SuperTokens using the default paths.

    Routes: /login (creates a session), /refresh, /info, /logout.
    NOTE: the routes deliberately mix the bare decorator form
    (@supertokens_middleware) and the called form (@supertokens_middleware())
    so both usages stay covered.
    """
    app = Flask(__name__)
    app.config[COOKIE_DOMAIN_CONFIG] = 'supertokens.io'
    supertokens = SuperTokens(app)
    # Surface "try refresh token" errors as a JSON 401 the tests can assert on.
    def ff(e):
        return jsonify({'error_msg': 'try refresh token'}), 401
    supertokens.set_try_refresh_token_error_handler(ff)
    @app.route('/login')
    def login():
        # Start a SuperTokens session for a fixed user id, no JWT/session payload.
        user_id = 'userId'
        response = make_response(jsonify({'userId': user_id}), 200)
        create_new_session(response, user_id, {}, {})
        return response
    @app.route('/refresh', methods=['POST'])
    @supertokens_middleware
    def refresh():
        return {'userId': g.supertokens.get_user_id()}
    @app.route('/info', methods=['GET', 'OPTIONS'])
    @supertokens_middleware
    def info():
        # OPTIONS must pass through without requiring a session (CORS preflight).
        if request.method == 'OPTIONS':
            return jsonify({'method': 'option'})
        return jsonify({'userId': g.supertokens.get_user_id()})
    @app.route('/logout', methods=['POST'])
    @supertokens_middleware()
    def logout():
        g.supertokens.revoke_session()
        return jsonify({'success': True})
    return app
@fixture(scope='function')
def driver_config_app():
    """Flask app fixture with custom cookie domain/paths/same-site/secure config.

    The session routes live under /custom/... so tests can verify the custom
    access-token path config (the bare /info route intentionally 404s).
    """
    app = Flask(__name__)
    app.config[COOKIE_DOMAIN_CONFIG] = TEST_DRIVER_CONFIG_COOKIE_DOMAIN
    app.config[ACCESS_TOKEN_PATH_CONFIG] = TEST_DRIVER_CONFIG_ACCESS_TOKEN_PATH
    app.config[REFRESH_TOKEN_PATH_CONFIG] = TEST_DRIVER_CONFIG_REFRESH_TOKEN_PATH
    app.config[COOKIE_SAME_SITE_CONFIG] = TEST_DRIVER_CONFIG_COOKIE_SAME_SITE
    app.config[COOKIE_SECURE_CONFIG] = TEST_DRIVER_CONFIG_COOKIE_SECURE
    app.config[HOSTS_CONFIG] = 'https://try.supertokens.io'
    supertokens = SuperTokens(app)
    # Surface "try refresh token" errors as a JSON 401 the tests can assert on.
    def ff(e):
        return jsonify({'error_msg': 'try refresh token'}), 401
    supertokens.set_try_refresh_token_error_handler(ff)
    @app.route('/login')
    def login():
        # Start a SuperTokens session for a fixed user id, no JWT/session payload.
        user_id = 'userId'
        response = make_response(jsonify({'userId': user_id}), 200)
        create_new_session(response, user_id, {}, {})
        return response
    @app.route('/custom/refresh', methods=['POST'])
    @supertokens_middleware
    def refresh():
        return {'userId': g.supertokens.get_user_id()}
    @app.route('/custom/info', methods=['GET', 'OPTIONS'])
    @supertokens_middleware
    def info():
        # OPTIONS must pass through without requiring a session (CORS preflight).
        if request.method == 'OPTIONS':
            return jsonify({'method': 'option'})
        return jsonify({'userId': g.supertokens.get_user_id()})
    @app.route('/custom/logout', methods=['POST'])
    @supertokens_middleware()
    def logout():
        g.supertokens.revoke_session()
        return jsonify({'success': True})
    return app
def test_decorators_with_app(app):
    """End-to-end session lifecycle against the default-config app.

    Walks login -> verify -> refresh (token rotation) -> verify -> logout,
    plus the failure paths: missing access token, no cookies at all, and
    reuse of an already-rotated refresh token (token theft detection).
    The steps are strictly order-dependent: later requests reuse cookies
    captured from earlier responses.
    """
    start_st()
    # Phase 1: login creates a session and sets the session cookies.
    response_1 = app.test_client().get('/login')
    assert response_1.json == {'userId': 'userId'}
    assert response_1.status_code == 200
    cookies_1 = extract_all_cookies(response_1)
    # Phase 2: /info succeeds with access + id-refresh cookies and anti-csrf header.
    request_2 = app.test_client()
    request_2.set_cookie(
        'localhost',
        'sAccessToken',
        cookies_1['sAccessToken']['value'])
    request_2.set_cookie(
        'localhost',
        'sIdRefreshToken',
        cookies_1['sIdRefreshToken']['value'])
    response_2 = request_2.get(
        '/info',
        headers={
            'anti-csrf': response_1.headers.get('anti-csrf')})
    assert response_2.json == {'userId': 'userId'}
    assert response_2.status_code == 200
    # Phase 3: /refresh rotates all three tokens and re-issues cookie attributes.
    request_3 = app.test_client()
    request_3.set_cookie(
        'localhost',
        'sRefreshToken',
        cookies_1['sRefreshToken']['value'])
    response_3 = request_3.post('/refresh', headers={
        'anti-csrf': response_1.headers.get('anti-csrf')})
    assert response_3.json == {'userId': 'userId'}
    assert response_3.status_code == 200
    cookies_3 = extract_all_cookies(response_3)
    # Every token value must have changed after the refresh.
    assert cookies_1['sAccessToken']['value'] != cookies_3['sAccessToken']['value']
    assert cookies_1['sRefreshToken']['value'] != cookies_3['sRefreshToken']['value']
    assert cookies_1['sIdRefreshToken']['value'] != cookies_3['sIdRefreshToken']['value']
    assert response_3.headers.get('anti-csrf') is not None
    # Default cookie attributes: configured domain, '/' paths except the
    # refresh cookie which is scoped to '/refresh', httponly, Lax/None, not secure.
    assert cookies_3['sAccessToken']['domain'] == 'supertokens.io'
    assert cookies_3['sRefreshToken']['domain'] == 'supertokens.io'
    assert cookies_3['sIdRefreshToken']['domain'] == 'supertokens.io'
    assert cookies_3['sAccessToken']['path'] == '/'
    assert cookies_3['sRefreshToken']['path'] == '/refresh'
    assert cookies_3['sIdRefreshToken']['path'] == '/'
    assert cookies_3['sAccessToken']['httponly']
    assert cookies_3['sRefreshToken']['httponly']
    assert cookies_3['sIdRefreshToken']['httponly']
    assert cookies_3['sAccessToken'].get('samesite') == 'Lax' or cookies_3['sAccessToken'].get('samesite') == 'None'
    assert cookies_3['sRefreshToken'].get('samesite') == 'Lax' or cookies_3['sRefreshToken'].get('samesite') == 'None'
    assert cookies_3['sIdRefreshToken'].get('samesite') == 'Lax' or cookies_3['sIdRefreshToken'].get('samesite') == 'None'
    assert cookies_3['sAccessToken'].get('secure') is None
    assert cookies_3['sRefreshToken'].get('secure') is None
    assert cookies_3['sIdRefreshToken'].get('secure') is None
    # Phase 4: /info with the refreshed access token; server re-issues only
    # a new access token (no refresh cookies, no anti-csrf header).
    request_4 = app.test_client()
    request_4.set_cookie(
        'localhost',
        'sAccessToken',
        cookies_3['sAccessToken']['value'])
    request_4.set_cookie(
        'localhost',
        'sIdRefreshToken',
        cookies_3['sIdRefreshToken']['value'])
    response_4 = request_4.get(
        '/info',
        headers={
            'anti-csrf': response_3.headers.get('anti-csrf')})
    assert response_4.json == {'userId': 'userId'}
    assert response_4.status_code == 200
    cookies_4 = extract_all_cookies(response_4)
    assert cookies_4['sAccessToken']['value'] != cookies_3['sAccessToken']['value']
    assert response_4.headers.get('anti-csrf') is None
    assert cookies_4.get('sRefreshToken') is None
    assert cookies_4.get('sIdRefreshToken') is None
    assert cookies_4['sAccessToken']['domain'] == 'supertokens.io'
    assert cookies_4['sAccessToken']['path'] == '/'
    assert cookies_4['sAccessToken']['httponly']
    assert cookies_4['sAccessToken'].get('samesite') == 'Lax' or cookies_4['sAccessToken'].get('samesite') == 'None'
    assert cookies_4['sAccessToken'].get('secure') is None
    # Phase 5: OPTIONS bypasses session verification entirely.
    response_5 = app.test_client().options('/info')
    assert response_5.json == {'method': 'option'}
    assert response_5.status_code == 200
    # Phase 6: the re-issued access token from phase 4 is also valid.
    request_6 = app.test_client()
    request_6.set_cookie(
        'localhost',
        'sAccessToken',
        cookies_4['sAccessToken']['value'])
    request_6.set_cookie(
        'localhost',
        'sIdRefreshToken',
        cookies_3['sIdRefreshToken']['value'])
    response_6 = request_6.get(
        '/info',
        headers={
            'anti-csrf': response_3.headers.get('anti-csrf')})
    assert response_6.json == {'userId': 'userId'}
    assert response_6.status_code == 200
    # Phase 7: id-refresh cookie without an access token -> "try refresh token".
    request_7 = app.test_client()
    request_7.set_cookie(
        'localhost',
        'sIdRefreshToken',
        cookies_3['sIdRefreshToken']['value'])
    response_7 = request_7.get('/info')
    assert response_7.json == {'error_msg': 'try refresh token'}
    assert response_7.status_code == 401 or response_7.status_code == 440
    # Phase 8: no cookies at all -> unauthorised, and all cookies are cleared
    # (empty values, epoch expiry).
    response_8 = app.test_client().get('/info')
    assert response_8.json == {'error': 'unauthorised'}
    assert response_8.status_code == 401 or response_8.status_code == 440
    cookies_8 = extract_all_cookies(response_8)
    assert cookies_8['sAccessToken']['value'] == ''
    assert cookies_8['sRefreshToken']['value'] == ''
    assert cookies_8['sIdRefreshToken']['value'] == ''
    assert get_unix_timestamp(cookies_8['sAccessToken']['expires']) == 0
    assert get_unix_timestamp(cookies_8['sRefreshToken']['expires']) == 0
    assert get_unix_timestamp(cookies_8['sIdRefreshToken']['expires']) == 0
    # Phase 9: reusing the phase-1 refresh token (already rotated in phase 3)
    # must trip token-theft detection and clear the session cookies.
    request_9 = app.test_client()
    request_9.set_cookie(
        'localhost',
        'sRefreshToken',
        cookies_1['sRefreshToken']['value'])
    response_9 = request_9.post('/refresh', headers={
        'anti-csrf': response_1.headers.get('anti-csrf')})
    assert response_9.json == {'error': 'token theft detected'}
    assert response_9.status_code == 401 or response_9.status_code == 440
    cookies_9 = extract_all_cookies(response_9)
    assert cookies_9['sAccessToken']['value'] == ''
    assert cookies_9['sRefreshToken']['value'] == ''
    assert cookies_9['sIdRefreshToken']['value'] == ''
    assert get_unix_timestamp(cookies_9['sAccessToken']['expires']) == 0
    assert get_unix_timestamp(cookies_9['sRefreshToken']['expires']) == 0
    assert get_unix_timestamp(cookies_9['sIdRefreshToken']['expires']) == 0
    # Phase 10: fresh login, then explicit logout revokes the session.
    response_10 = app.test_client().get('/login')
    cookies_10 = extract_all_cookies(response_10)
    request_11 = app.test_client()
    request_11.set_cookie(
        'localhost',
        'sAccessToken',
        cookies_10['sAccessToken']['value'])
    request_11.set_cookie(
        'localhost',
        'sIdRefreshToken',
        cookies_10['sIdRefreshToken']['value'])
    response_11 = request_11.post(
        '/logout',
        headers={
            'anti-csrf': response_10.headers.get('anti-csrf')})
    assert response_11.json == {'success': True}
    assert response_11.status_code == 200
    # Phase 11: refreshing the revoked session is unauthorised; cookies cleared.
    request_12 = app.test_client()
    request_12.set_cookie(
        'localhost',
        'sRefreshToken',
        cookies_10['sRefreshToken']['value'])
    response_12 = request_12.post('/refresh')
    assert response_12.json == {'error': 'unauthorised'}
    assert response_12.status_code == 401 or response_12.status_code == 440
    cookies_12 = extract_all_cookies(response_12)
    assert cookies_12['sAccessToken']['value'] == ''
    assert cookies_12['sRefreshToken']['value'] == ''
    assert cookies_12['sIdRefreshToken']['value'] == ''
    assert get_unix_timestamp(cookies_12['sAccessToken']['expires']) == 0
    assert get_unix_timestamp(cookies_12['sRefreshToken']['expires']) == 0
    assert get_unix_timestamp(cookies_12['sIdRefreshToken']['expires']) == 0
def test_decorators_with_driver_config_app(driver_config_app):
    """Same session lifecycle as test_decorators_with_app, but against the
    custom-config app (custom cookie domain, /custom/... token paths,
    same-site and secure flags, remote hosts config).

    Note: no start_st() here — HOSTS_CONFIG points this app at the remote
    try.supertokens.io core instead of a locally started one.
    The steps are strictly order-dependent: later requests reuse cookies
    captured from earlier responses.
    """
    # Phase 1: login creates a session and sets the session cookies.
    response_1 = driver_config_app.test_client().get('/login')
    assert response_1.json == {'userId': 'userId'}
    assert response_1.status_code == 200
    cookies_1 = extract_all_cookies(response_1)
    # Phase 2a: the default /info path is not registered on this app -> 404.
    request_2_a = driver_config_app.test_client()
    request_2_a.set_cookie(
        'localhost',
        'sAccessToken',
        cookies_1['sAccessToken']['value'])
    request_2_a.set_cookie(
        'localhost',
        'sIdRefreshToken',
        cookies_1['sIdRefreshToken']['value'])
    response_2_a = request_2_a.get(
        '/info',
        headers={
            'anti-csrf': response_1.headers.get('anti-csrf')})
    assert response_2_a.status_code == 404
    # Phase 2b: the custom path works with the same cookies.
    request_2_b = driver_config_app.test_client()
    request_2_b.set_cookie(
        'localhost',
        'sAccessToken',
        cookies_1['sAccessToken']['value'])
    request_2_b.set_cookie(
        'localhost',
        'sIdRefreshToken',
        cookies_1['sIdRefreshToken']['value'])
    response_2_b = request_2_b.get(
        '/custom/info',
        headers={
            'anti-csrf': response_1.headers.get('anti-csrf')})
    assert response_2_b.json == {'userId': 'userId'}
    assert response_2_b.status_code == 200
    # Phase 3: /custom/refresh rotates all three tokens.
    request_3 = driver_config_app.test_client()
    request_3.set_cookie(
        'localhost',
        'sRefreshToken',
        cookies_1['sRefreshToken']['value'])
    response_3 = request_3.post('/custom/refresh')
    assert response_3.json == {'userId': 'userId'}
    assert response_3.status_code == 200
    cookies_3 = extract_all_cookies(response_3)
    assert cookies_1['sAccessToken']['value'] != cookies_3['sAccessToken']['value']
    assert cookies_1['sRefreshToken']['value'] != cookies_3['sRefreshToken']['value']
    assert cookies_1['sIdRefreshToken']['value'] != cookies_3['sIdRefreshToken']['value']
    assert response_3.headers.get('anti-csrf') is not None
    # Custom cookie attributes must come from the driver config, and the
    # secure flag is now set.
    assert cookies_3['sAccessToken']['domain'] == TEST_DRIVER_CONFIG_COOKIE_DOMAIN
    assert cookies_3['sRefreshToken']['domain'] == TEST_DRIVER_CONFIG_COOKIE_DOMAIN
    assert cookies_3['sIdRefreshToken']['domain'] == TEST_DRIVER_CONFIG_COOKIE_DOMAIN
    assert cookies_3['sAccessToken']['path'] == TEST_DRIVER_CONFIG_ACCESS_TOKEN_PATH
    assert cookies_3['sRefreshToken']['path'] == TEST_DRIVER_CONFIG_REFRESH_TOKEN_PATH
    assert cookies_3['sIdRefreshToken']['path'] == TEST_DRIVER_CONFIG_ACCESS_TOKEN_PATH
    assert cookies_3['sAccessToken']['httponly']
    assert cookies_3['sRefreshToken']['httponly']
    assert cookies_3['sIdRefreshToken']['httponly']
    assert cookies_3['sAccessToken']['samesite'] == 'Lax' or cookies_3['sAccessToken']['samesite'] == 'None'
    assert cookies_3['sRefreshToken']['samesite'] == 'Lax' or cookies_3['sRefreshToken']['samesite'] == 'None'
    assert cookies_3['sIdRefreshToken']['samesite'] == 'Lax' or cookies_3['sIdRefreshToken']['samesite'] == 'None'
    assert cookies_3['sAccessToken']['secure']
    assert cookies_3['sRefreshToken']['secure']
    assert cookies_3['sIdRefreshToken']['secure']
    # Phase 4: /custom/info with the refreshed access token; only a new
    # access token is re-issued (no refresh cookies, no anti-csrf header).
    request_4 = driver_config_app.test_client()
    request_4.set_cookie(
        'localhost',
        'sAccessToken',
        cookies_3['sAccessToken']['value'])
    request_4.set_cookie(
        'localhost',
        'sIdRefreshToken',
        cookies_3['sIdRefreshToken']['value'])
    response_4 = request_4.get(
        '/custom/info',
        headers={
            'anti-csrf': response_3.headers.get('anti-csrf')})
    assert response_4.json == {'userId': 'userId'}
    assert response_4.status_code == 200
    cookies_4 = extract_all_cookies(response_4)
    assert cookies_4['sAccessToken']['value'] != cookies_3['sAccessToken']['value']
    assert response_4.headers.get('anti-csrf') is None
    assert cookies_4.get('sRefreshToken') is None
    assert cookies_4.get('sIdRefreshToken') is None
    assert cookies_4['sAccessToken']['domain'] == TEST_DRIVER_CONFIG_COOKIE_DOMAIN
    assert cookies_4['sAccessToken']['path'] == TEST_DRIVER_CONFIG_ACCESS_TOKEN_PATH
    assert cookies_4['sAccessToken']['httponly']
    assert cookies_4['sAccessToken'].get('samesite') == 'Lax' or cookies_4['sAccessToken'].get('samesite') == 'None'
    assert cookies_4['sAccessToken']['secure']
    # Phase 5: OPTIONS bypasses session verification entirely.
    response_5 = driver_config_app.test_client().options('/custom/info')
    assert response_5.json == {'method': 'option'}
    assert response_5.status_code == 200
    # Phase 6: the re-issued access token from phase 4 is also valid.
    request_6 = driver_config_app.test_client()
    request_6.set_cookie(
        'localhost',
        'sAccessToken',
        cookies_4['sAccessToken']['value'])
    request_6.set_cookie(
        'localhost',
        'sIdRefreshToken',
        cookies_3['sIdRefreshToken']['value'])
    response_6 = request_6.get(
        '/custom/info',
        headers={
            'anti-csrf': response_3.headers.get('anti-csrf')})
    assert response_6.json == {'userId': 'userId'}
    assert response_6.status_code == 200
    # Phase 7: id-refresh cookie without an access token -> "try refresh token".
    request_7 = driver_config_app.test_client()
    request_7.set_cookie(
        'localhost',
        'sIdRefreshToken',
        cookies_3['sIdRefreshToken']['value'])
    response_7 = request_7.get('/custom/info')
    assert response_7.json == {'error_msg': 'try refresh token'}
    assert response_7.status_code == 401 or response_7.status_code == 440
    # Phase 8: no cookies at all -> unauthorised, all cookies cleared.
    response_8 = driver_config_app.test_client().get('/custom/info')
    assert response_8.json == {'error': 'unauthorised'}
    assert response_8.status_code == 401 or response_8.status_code == 440
    cookies_8 = extract_all_cookies(response_8)
    assert cookies_8['sAccessToken']['value'] == ''
    assert cookies_8['sRefreshToken']['value'] == ''
    assert cookies_8['sIdRefreshToken']['value'] == ''
    assert get_unix_timestamp(cookies_8['sAccessToken']['expires']) == 0
    assert get_unix_timestamp(cookies_8['sRefreshToken']['expires']) == 0
    assert get_unix_timestamp(cookies_8['sIdRefreshToken']['expires']) == 0
    # Phase 9: reusing the phase-1 refresh token (already rotated in phase 3)
    # must trip token-theft detection and clear the session cookies.
    request_9 = driver_config_app.test_client()
    request_9.set_cookie(
        'localhost',
        'sRefreshToken',
        cookies_1['sRefreshToken']['value'])
    response_9 = request_9.post('/custom/refresh')
    assert response_9.json == {'error': 'token theft detected'}
    assert response_9.status_code == 401 or response_9.status_code == 440
    cookies_9 = extract_all_cookies(response_9)
    assert cookies_9['sAccessToken']['value'] == ''
    assert cookies_9['sRefreshToken']['value'] == ''
    assert cookies_9['sIdRefreshToken']['value'] == ''
    assert get_unix_timestamp(cookies_9['sAccessToken']['expires']) == 0
    assert get_unix_timestamp(cookies_9['sRefreshToken']['expires']) == 0
    assert get_unix_timestamp(cookies_9['sIdRefreshToken']['expires']) == 0
    # Phase 10: fresh login, then explicit logout revokes the session.
    response_10 = driver_config_app.test_client().get('/login')
    cookies_10 = extract_all_cookies(response_10)
    request_11 = driver_config_app.test_client()
    request_11.set_cookie(
        'localhost',
        'sAccessToken',
        cookies_10['sAccessToken']['value'])
    request_11.set_cookie(
        'localhost',
        'sIdRefreshToken',
        cookies_10['sIdRefreshToken']['value'])
    response_11 = request_11.post(
        '/custom/logout',
        headers={
            'anti-csrf': response_10.headers.get('anti-csrf')})
    assert response_11.json == {'success': True}
    assert response_11.status_code == 200
    # Phase 11: refreshing the revoked session is unauthorised; cookies cleared.
    request_12 = driver_config_app.test_client()
    request_12.set_cookie(
        'localhost',
        'sRefreshToken',
        cookies_10['sRefreshToken']['value'])
    response_12 = request_12.post('/custom/refresh')
    assert response_12.json == {'error': 'unauthorised'}
    assert response_12.status_code == 401 or response_12.status_code == 440
    cookies_12 = extract_all_cookies(response_12)
    assert cookies_12['sAccessToken']['value'] == ''
    assert cookies_12['sRefreshToken']['value'] == ''
    assert cookies_12['sIdRefreshToken']['value'] == ''
    assert get_unix_timestamp(cookies_12['sAccessToken']['expires']) == 0
    assert get_unix_timestamp(cookies_12['sRefreshToken']['expires']) == 0
    assert get_unix_timestamp(cookies_12['sIdRefreshToken']['expires']) == 0
| 40.73913 | 122 | 0.679016 | 2,338 | 19,677 | 5.420017 | 0.078272 | 0.071812 | 0.033144 | 0.03125 | 0.896307 | 0.851563 | 0.771386 | 0.753314 | 0.753314 | 0.731613 | 0 | 0.030864 | 0.176704 | 19,677 | 482 | 123 | 40.823651 | 0.751358 | 0.03344 | 0 | 0.73445 | 0 | 0 | 0.224765 | 0 | 0 | 0 | 0 | 0 | 0.327751 | 1 | 0.038278 | false | 0 | 0.011962 | 0.009569 | 0.083732 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
8c42d4ce40dd3bee78c758216320bc1f1d610663 | 111 | py | Python | hquote.py | HyphenGroup/hive | e3cae481f4c25ce8956ac19179fb540e01ef7422 | [
"MIT"
] | 6 | 2021-03-18T20:44:20.000Z | 2021-09-17T19:21:31.000Z | hquote.py | HyphenGroup/hive | e3cae481f4c25ce8956ac19179fb540e01ef7422 | [
"MIT"
] | 7 | 2021-05-03T06:13:30.000Z | 2021-11-08T01:18:47.000Z | hquote.py | HyphenGroup/hive | e3cae481f4c25ce8956ac19179fb540e01ef7422 | [
"MIT"
] | 6 | 2021-03-18T20:44:19.000Z | 2021-09-29T22:46:34.000Z | import hive
def hqoute():
    """Placeholder quote command: announces (twice) that it is unimplemented."""
    for _ in range(2):
        print('This Feature is coming soon!')
8c5ea6fe0b2269744543983cdfec35a89b90f383 | 140 | py | Python | Q14.py | devasheeshG/Project-file-XII_21-22 | 9ef06a09c36197a2a1a13305a745578c8cbf3578 | [
"MIT"
] | null | null | null | Q14.py | devasheeshG/Project-file-XII_21-22 | 9ef06a09c36197a2a1a13305a745578c8cbf3578 | [
"MIT"
] | null | null | null | Q14.py | devasheeshG/Project-file-XII_21-22 | 9ef06a09c36197a2a1a13305a745578c8cbf3578 | [
"MIT"
] | null | null | null | # Q14. Write a program to input a string and convert all upper case letters to lower case letters and all lower case letters to upper case letters.
4ff0cd643e7357173a2835d578cbc624a6f61b73 | 47,054 | py | Python | tests/core/tcp/test_tcp.py | pfeairheller/hio | 44669adb62c81357491f9f6157312bc1313b56cf | [
"Apache-2.0"
] | 1 | 2021-04-07T19:10:28.000Z | 2021-04-07T19:10:28.000Z | tests/core/tcp/test_tcp.py | pfeairheller/hio | 44669adb62c81357491f9f6157312bc1313b56cf | [
"Apache-2.0"
] | 4 | 2021-03-30T20:50:19.000Z | 2022-01-06T17:16:18.000Z | tests/core/tcp/test_tcp.py | pfeairheller/hio | 44669adb62c81357491f9f6157312bc1313b56cf | [
"Apache-2.0"
] | 3 | 2021-04-08T19:35:36.000Z | 2021-06-03T13:39:05.000Z | # -*- encoding: utf-8 -*-
"""
tests.core.test_tcp module
"""
import pytest
import sys
import os
import time
import socket
from collections import deque
import ssl
from hio.base import tyming, doing
from hio.core import tcp
def test_tcp_basic():
    """
    Test the tcp connection between client and server.

    client send from and receive to port is ephemeral
    server receive to and send from port is well known
    Server listens on its well known receive to and send from port
    So incoming to server.
    Source address is client host and client ephemeral port
    Destination address is server host and server well known port
    Each accept socket on server is a different duple of client source, server dest
    all the dest are the same but each source is different so can route
    based on the source.
    Server routes incoming packets to accept socket port. The routing uses
    the clients send from ephemeral port to do the routing to the
    correct accept socket. All the accept sockets have the same local
    port but a different remote IP host.
    The servers accept socket port is the well known port so still receives to
    and sends from its well known port.
    The server sends to and receives from the clients ephemeral port number.
    """
    # default client attributes before opening
    tymist = tyming.Tymist()
    client = tcp.Client(tymth=tymist.tymen())
    assert client.tymeout == 0.0
    assert isinstance(client.tymer, tyming.Tymer)
    assert client.tymer.duration == client.tymeout
    assert client.ha == ('127.0.0.1', 56000)
    assert (client.host, client.port) == client.ha
    assert client.hostname == client.host
    assert client.cs == None
    assert client.ca == (None, None)
    assert client.accepted == False
    assert client.cutoff == False
    assert client.reconnectable == False
    assert client.opened == False
    assert client.bs == 8096
    assert isinstance(client.txbs, bytearray)
    assert isinstance(client.rxbs, bytearray)
    assert client.wl == None

    # context manager opens then closes client
    tymist = tyming.Tymist()
    with tcp.openClient(tymth=tymist.tymen(), tymeout=0.5) as client:
        assert client.tymeout == 0.5
        assert client.ha == ('127.0.0.1', 56000)
        assert client.opened == True
        assert client.accepted == False
        assert client.cutoff == False
        assert client.reconnectable == False

    assert client.opened == False
    assert client.accepted == False
    assert client.cutoff == False

    # default server attributes before opening
    server = tcp.Server()
    assert server.tymeout == 1.0
    assert server.ha == ('', 56000)
    assert server.eha == ('127.0.0.1', 56000)
    assert server.opened == False
    assert server.bs == 8096
    assert isinstance(server.axes, deque)
    assert isinstance(server.ixes, dict)
    assert server.wl == None

    # context manager opens then closes server
    with tcp.openServer(tymth=tymist.tymen(), tymeout=1.5) as server:
        assert server.ha == ('0.0.0.0', 56000)
        assert server.eha == ('127.0.0.1', 56000)
        assert server.opened == True

    assert server.opened == False

    tymist = tyming.Tymist()
    with tcp.openServer(tymth=tymist.tymen(), ha=("", 6101)) as server, \
         tcp.openClient(tymth=tymist.tymen(), ha=("127.0.0.1", 6101)) as beta, \
         tcp.openClient(tymth=tymist.tymen(), ha=("127.0.0.1", 6101)) as gamma:

        assert server.opened == True
        assert beta.opened == True
        assert gamma.opened == True

        assert server.ha == ('0.0.0.0', 6101)  # listen interface
        assert server.eha == ('127.0.0.1', 6101)  # normalized listen/accept external interface
        assert beta.ha == ('127.0.0.1', 6101)  # server listen/accept maybe sha (server host address)

        assert beta.accepted == False
        assert beta.connected == False
        assert beta.cutoff == False

        assert gamma.accepted == False
        assert gamma.connected == False
        assert gamma.cutoff == False

        # connect beta to server
        while not (beta.connected and beta.ca in server.ixes):
            beta.serviceConnect()
            server.serviceConnects()
            time.sleep(0.05)

        assert beta.accepted == True
        assert beta.connected == True
        assert beta.cutoff == False
        assert beta.ca == beta.cs.getsockname()  # local connection address
        assert beta.ha == beta.cs.getpeername()  # remote connection address
        assert server.eha == beta.ha  # server external, beta external for server
        ixBeta = server.ixes[beta.ca]
        assert ixBeta.cs.getsockname() == beta.cs.getpeername()  # ixBeta local beta remote
        assert ixBeta.cs.getpeername() == beta.cs.getsockname()  # ixBeta remote beta local
        assert ixBeta.ca == beta.ca == ixBeta.cs.getpeername()
        assert ixBeta.ha == beta.ha == ixBeta.cs.getsockname()

        # send from beta to server
        msgOut = b"Beta sends to Server"
        count = beta.send(msgOut)
        assert count == len(msgOut)
        time.sleep(0.05)
        msgIn = ixBeta.receive()
        assert msgOut == msgIn

        # receive without sending
        msgIn = ixBeta.receive()
        assert msgIn is None

        # send multiple
        msgOut1 = b"First Message"
        count = beta.send(msgOut1)
        assert count == len(msgOut1)
        msgOut2 = b"Second Message"
        count = beta.send(msgOut2)
        assert count == len(msgOut2)
        time.sleep(0.05)
        msgIn = ixBeta.receive()
        assert msgIn == msgOut1 + msgOut2

        # send from server to beta
        msgOut = b"Server sends to Beta"
        count = ixBeta.send(msgOut)
        assert count == len(msgOut)
        time.sleep(0.05)
        msgIn = beta.receive()
        assert msgOut == msgIn

        # receive without sending
        msgIn = beta.receive()
        assert msgIn is None

        # build message too big to fit in buffer
        size = beta.actualBufSizes()[0]
        msgOut = bytearray()
        count = 0
        while (len(msgOut) <= size * 4):
            msgOut.extend(b"%032x_" % (count))  # 33-byte chunks of hex counter
            count += 1
        assert len(msgOut) >= size * 4

        # send in pieces, draining the tx copy as the socket accepts bytes
        msgIn = bytearray()
        txbs = bytearray(msgOut)  # make copy
        size = 0
        while len(msgIn) < len(msgOut):
            count = beta.send(txbs)
            del txbs[:count]
            size += count
            time.sleep(0.05)
            msgIn.extend(ixBeta.receive())
        assert size == len(msgOut)
        assert msgOut == msgIn

        # connect gamma to server
        while not (gamma.connected and gamma.ca in server.ixes):
            gamma.serviceConnect()
            server.serviceConnects()
            time.sleep(0.05)

        assert gamma.accepted == True
        assert gamma.connected == True
        assert gamma.cutoff == False
        assert gamma.ca == gamma.cs.getsockname()
        assert gamma.ha == gamma.cs.getpeername()
        # was: `assert server.eha, gamma.ha` which is an assert-with-message
        # that can never fail; the intended equality check is:
        assert server.eha == gamma.ha
        ixGamma = server.ixes[gamma.ca]
        assert ixGamma.cs.getsockname() == gamma.cs.getpeername()
        assert ixGamma.cs.getpeername() == gamma.cs.getsockname()
        assert ixGamma.ca == gamma.ca
        assert ixGamma.ha == gamma.ha

        # send from gamma to server
        msgOut = b"Gamma sends to Server"
        count = gamma.send(msgOut)
        assert count == len(msgOut)
        time.sleep(0.05)
        msgIn = ixGamma.receive()
        assert msgOut == msgIn

        # receive without sending
        msgIn = ixGamma.receive()
        assert msgIn is None

        # send from server to gamma
        msgOut = b"Server sends to Gamma"
        count = ixGamma.send(msgOut)
        assert count == len(msgOut)
        time.sleep(0.05)
        msgIn = gamma.receive()
        assert msgOut == msgIn

        # receive without sending
        msgIn = gamma.receive()
        assert msgIn is None

        # close beta and then attempt to send
        beta.close()
        msgOut = b"Beta send on closed socket"
        with pytest.raises(AttributeError):
            count = beta.send(msgOut)

        # attempt to receive on closed socket
        with pytest.raises(AttributeError):
            msgIn = beta.receive()

        # read on server after closed beta
        msgIn = ixBeta.receive()
        assert msgIn == b''

        # send on server after closed beta
        msgOut = b"Servers sends to Beta after close"
        count = ixBeta.send(msgOut)
        assert count == len(msgOut)  # apparently works

        # close ixBeta manually
        ixBeta.close()
        del server.ixes[ixBeta.ca]
        time.sleep(0.05)
        # after close no socket .cs so can't receive
        with pytest.raises(AttributeError):
            msgIn = ixBeta.receive()
        assert ixBeta.cutoff == True

        # send on gamma to server first then shutdown gamma sends
        msgOut = b"Gamma sends to server"
        count = gamma.send(msgOut)
        assert count == len(msgOut)
        gamma.shutdownSend()
        time.sleep(0.05)
        msgIn = ixGamma.receive()
        assert msgOut == msgIn  # send before shutdown worked
        msgIn = ixGamma.receive()
        assert msgIn == b''  # gamma shutdown detected, not None
        assert ixGamma.cutoff == True

        # send from server to gamma first then shutdown server send
        msgOut = b"Server sends to Gamma"
        count = ixGamma.send(msgOut)
        assert count == len(msgOut)
        ixGamma.shutdown()  # shutdown server connection to gamma
        time.sleep(0.05)
        msgIn = gamma.receive()
        assert msgOut == msgIn
        msgIn = gamma.receive()
        if 'linux' in sys.platform:
            assert msgIn == b''  # server shutdown detected not None
            assert gamma.cutoff == True
        else:
            assert msgIn == None  # server shutdown not detected
            assert gamma.cutoff == False
        time.sleep(0.05)
        msgIn = gamma.receive()
        if 'linux' in sys.platform:
            assert msgIn == b''  # server shutdown detected not None
            assert gamma.cutoff == True
        else:
            assert msgIn == None  # server shutdown not detected
            assert gamma.cutoff == False

        ixGamma.close()  # close server connection to gamma
        del server.ixes[ixGamma.ca]
        time.sleep(0.05)
        msgIn = gamma.receive()
        assert msgIn == b''  # server close is detected
        assert gamma.cutoff == True

        # reopen beta
        assert beta.reopen() == True
        assert beta.accepted == False
        assert beta.connected == False
        assert beta.cutoff == False

        # reconnect beta to server
        while not (beta.connected and beta.ca in server.ixes):
            beta.serviceConnect()
            server.serviceConnects()
            time.sleep(0.05)

        assert beta.accepted == True
        assert beta.connected == True
        assert beta.cutoff == False
        assert beta.ca == beta.cs.getsockname()
        assert beta.ha == beta.cs.getpeername()
        assert server.eha == beta.ha
        ixBeta = server.ixes[beta.ca]
        assert ixBeta.cs.getsockname() == beta.cs.getpeername()
        assert ixBeta.cs.getpeername() == beta.cs.getsockname()
        assert ixBeta.ca == beta.ca
        assert ixBeta.ha == beta.ha

        # send from beta to server
        msgOut = b"Beta sends to server"
        count = beta.send(msgOut)
        assert count == len(msgOut)
        time.sleep(0.05)
        msgIn = ixBeta.receive()
        assert msgOut == msgIn

        # send from server to beta
        msgOut = b"Server sends to Beta"
        count = ixBeta.send(msgOut)
        assert count == len(msgOut)
        time.sleep(0.05)
        msgIn = beta.receive()
        assert msgOut == msgIn

        # send from server to beta then shutdown server and attempt to send again
        msgOut1 = b"Server sends to Beta"
        count = ixBeta.send(msgOut1)  # was send(msgOut); msgOut1 was defined for this
        assert count == len(msgOut1)
        ixBeta.shutdownSend()
        msgOut2 = b"Server send again after server shutdowns socket"
        with pytest.raises(OSError) as ex:
            count = ixBeta.send(msgOut2)  # was send(msgOut); msgOut2 was defined for this
        assert ex.typename == 'BrokenPipeError'
        time.sleep(0.05)
        msgIn = beta.receive()
        assert msgOut1 == msgIn
        msgIn = beta.receive()
        assert msgIn == b''  # beta detects shutdown socket
        assert beta.cutoff == True

        # send from beta to server then shutdown beta
        msgOut = b"Beta sends to server"
        count = beta.send(msgOut)
        assert count == len(msgOut)
        beta.shutdown()
        time.sleep(0.05)
        msgIn = ixBeta.receive()
        assert msgOut == msgIn
        time.sleep(0.05)
        msgIn = ixBeta.receive()
        if 'linux' in sys.platform:
            assert ixBeta.cutoff == True
            assert msgIn == b''  # server does detect shutdown
        else:
            assert ixBeta.cutoff == False
            assert msgIn == None  # server does not detect shutdown
        beta.close()
        time.sleep(0.05)
        msgIn = ixBeta.receive()
        assert msgIn == b''  # server detects closed socket
        ixBeta.close()
        del server.ixes[ixBeta.ca]

        # reopen gamma
        assert gamma.reopen() == True
        assert gamma.accepted == False
        assert gamma.connected == False
        assert gamma.cutoff == False

        # reconnect gamma to server
        while not (gamma.connected and gamma.ca in server.ixes):
            gamma.serviceConnect()
            server.serviceConnects()
            time.sleep(0.05)

        assert gamma.accepted == True
        assert gamma.connected == True
        assert gamma.cutoff == False
        assert gamma.ca == gamma.cs.getsockname()
        assert gamma.ha == gamma.cs.getpeername()
        assert server.eha == gamma.ha
        ixGamma = server.ixes[gamma.ca]
        assert ixGamma.cs.getsockname() == gamma.cs.getpeername()
        assert ixGamma.cs.getpeername() == gamma.cs.getsockname()
        assert ixGamma.ca == gamma.ca
        assert ixGamma.ha == gamma.ha

        # send from gamma to server
        msgOut = b"Gamma sends to server"
        count = gamma.send(msgOut)
        assert count == len(msgOut)
        time.sleep(0.05)
        msgIn = ixGamma.receive()
        assert msgOut == msgIn

        # close both sides and reopen Gamma
        gamma.close()
        time.sleep(0.05)
        msgIn = ixGamma.receive()
        assert ixGamma.cutoff == True  # closed on other end
        assert msgIn == b''  # server detects close
        ixGamma.close()
        del server.ixes[ixGamma.ca]

        # reopen gamma
        assert gamma.reopen() == True
        assert gamma.accepted == False
        assert gamma.connected == False
        assert gamma.cutoff == False

        # reconnect gamma to server
        while not (gamma.connected and gamma.ca in server.ixes):
            gamma.serviceConnect()
            server.serviceConnects()
            time.sleep(0.05)

        assert gamma.accepted == True
        assert gamma.connected == True
        assert gamma.cutoff == False
        assert gamma.ca == gamma.cs.getsockname()
        assert gamma.ha == gamma.cs.getpeername()
        assert server.eha == gamma.ha
        ixGamma = server.ixes[gamma.ca]
        assert ixGamma.cs.getsockname() == gamma.cs.getpeername()
        assert ixGamma.cs.getpeername() == gamma.cs.getsockname()
        assert ixGamma.ca == gamma.ca
        assert ixGamma.ha == gamma.ha

        # send from server to gamma
        msgOut = b"Server sends to Gamma"
        count = ixGamma.send(msgOut)
        assert count == len(msgOut)
        time.sleep(0.05)
        msgIn = gamma.receive()
        assert msgOut == msgIn

        ixGamma.close()
        del server.ixes[ixGamma.ca]
        time.sleep(0.05)
        msgIn = gamma.receive()
        assert msgIn == b''  # gamma detects close
        assert gamma.cutoff == True

    assert beta.opened == False
    assert gamma.opened == False
    assert server.opened == False
    """Done Test"""
def test_tcp_service():
    """
    Test Classes tcp service methods

    Exercises the buffered tx/rx service API (.tx, .serviceSends,
    .serviceReceives*, .rxbs/.txbs byte buffers) rather than raw
    .send/.receive as in test_tcp_basic.
    """
    tymist = tyming.Tymist()
    with tcp.openServer(tymth=tymist.tymen(), ha=("", 6101)) as server, \
         tcp.openClient(tymth=tymist.tymen(), ha=("127.0.0.1", 6101)) as beta:

        assert server.opened == True
        assert server.ha == ('0.0.0.0', 6101)
        assert server.eha == ('127.0.0.1', 6101)
        assert beta.opened == True
        assert beta.accepted == False
        assert beta.connected == False
        assert beta.cutoff == False

        # connect beta to server
        while not (beta.connected and beta.ca in server.ixes):
            beta.serviceConnect()
            server.serviceConnects()
            time.sleep(0.05)

        assert beta.accepted == True
        assert beta.connected == True
        assert beta.cutoff == False
        assert beta.ca == beta.cs.getsockname()
        assert beta.ha == beta.cs.getpeername() == server.eha
        ixBeta = server.ixes[beta.ca]
        assert ixBeta.cs.getsockname() == beta.cs.getpeername()
        assert ixBeta.cs.getpeername() == beta.cs.getsockname()
        assert ixBeta.ca == beta.ca
        assert ixBeta.ha == beta.ha

        # queue one message and service the send/receive cycle until delivered
        msgOut1 = b"Beta sends to Server first"
        beta.tx(msgOut1)
        while not ixBeta.rxbs and beta.txbs:
            beta.serviceSends()
            time.sleep(0.05)
            server.serviceReceivesAllIx()
            time.sleep(0.05)
        msgIn = bytes(ixBeta.rxbs)
        assert msgIn == msgOut1
        offset = len(ixBeta.rxbs)  # offset into .rxbs of first message

        # send multiple additional messages; they accumulate in ixBeta.rxbs
        msgOut2 = b"Beta sends to Server second"
        beta.tx(msgOut2)
        msgOut3 = b"Beta sends to Server third"
        beta.tx(msgOut3)
        while len(ixBeta.rxbs) < len(msgOut1) + len(msgOut2) + len(msgOut3):
            beta.serviceSends()
            server.serviceReceivesAllIx()
            time.sleep(0.05)
        msgIn = bytes(ixBeta.rxbs)
        assert msgIn == msgOut1 + msgOut2 + msgOut3
        ixBeta.clearRxbs()  # clear out the receive buffer

        # build message too big to fit in buffer
        size = beta.actualBufSizes()[0]
        msgOutBig = bytearray()
        count = 0
        while (len(msgOutBig) <= size * 4):
            msgOutBig.extend(b"%032x_" % (count))
            count += 1
        assert len(msgOutBig) >= size * 4

        # service loop fragments and reassembles the big message
        beta.tx(msgOutBig)
        while len(ixBeta.rxbs) < len(msgOutBig):
            beta.serviceSends()
            time.sleep(0.05)
            server.serviceReceivesAllIx()
            time.sleep(0.05)
        msgIn = bytes(ixBeta.rxbs)
        ixBeta.clearRxbs()
        assert msgIn == msgOutBig

        # send from server to beta
        msgOut = b"Server sends to Beta"
        ixBeta.tx(msgOut)
        while len(beta.rxbs) < len(msgOut):
            server.serviceSendsAllIx()
            beta.serviceReceives()
            time.sleep(0.05)
        msgIn = bytes(beta.rxbs)
        beta.clearRxbs()
        assert msgIn == msgOut

        # send big from server to beta
        ixBeta.tx(msgOutBig)
        while len(beta.rxbs) < len(msgOutBig):
            server.serviceSendsAllIx()
            time.sleep(0.05)
            beta.serviceReceives()
            time.sleep(0.05)
        msgIn = bytes(beta.rxbs)
        beta.clearRxbs()
        assert msgIn == msgOutBig

    assert beta.opened == False
    assert server.opened == False
    """Done Test"""
def test_client_auto_reconnect():
    """
    Test client auto reconnect when .reconnectable

    A reconnectable client whose connect attempts fail while the server is
    down must succeed automatically once the server is reopened, driven by
    the client's retry tymer advancing with tymist.tick().
    """
    tymist = tyming.Tymist(tock=0.05)
    with tcp.openServer(tymth=tymist.tymen(), ha=("", 6101)) as server, \
         tcp.openClient(tymth=tymist.tymen(), tymeout=0.2, reconnectable=True,
                        ha=("127.0.0.1", 6101)) as beta:

        # close server so the client has nothing to connect to
        server.close()
        assert server.opened == False
        assert beta.opened == True
        assert beta.accepted == False
        assert beta.connected == False
        assert beta.cutoff == False
        assert beta.reconnectable == True

        # attempt to connect beta to server while server down (closed)
        while tymist.tyme <= 0.25:
            beta.serviceConnect()
            tymist.tick()
            time.sleep(0.05)
        assert beta.accepted == False
        assert beta.connected == False

        assert server.reopen() == True
        assert server.ha == ('0.0.0.0', 6101)
        assert server.eha == ('127.0.0.1', 6101)
        assert beta.ha == server.eha

        # attempt to connect beta to server while server up (opened)
        while not (beta.connected and beta.ca in server.ixes):
            beta.serviceConnect()
            server.serviceConnects()
            tymist.tick()  # advances clients reconnect retry tymer
            time.sleep(0.05)

        assert beta.accepted == True
        assert beta.connected == True
        assert beta.cutoff == False
        assert beta.ca == beta.cs.getsockname()
        assert beta.ha == beta.cs.getpeername()
        assert server.eha == beta.ha
        ixBeta = server.ixes[beta.ca]
        assert ixBeta.cs.getsockname() == beta.cs.getpeername()
        assert ixBeta.cs.getpeername() == beta.cs.getsockname()
        assert ixBeta.ca == beta.ca
        assert ixBeta.ha == beta.ha

        # verify data flows over the auto-reconnected connection
        msgOut = b"Beta sends to Server on reconnect"
        beta.tx(msgOut)
        while not ixBeta.rxbs and beta.txbs:
            beta.serviceSends()
            time.sleep(0.05)
            server.serviceReceivesAllIx()
            time.sleep(0.05)
        msgIn = bytes(ixBeta.rxbs)
        assert msgIn == msgOut
        # removed unused local `index = len(ixBeta.rxbs)`

    assert beta.opened == False
    assert server.opened == False
    """Done Test"""
def localTestCertDirPath():
    """
    Return the local testing directory path for TLS certs.

    The path is the 'certs' subdirectory alongside this test module.
    """
    moduleFilePath = sys.modules.get(__name__).__file__
    baseDirPath = os.path.dirname(os.path.abspath(moduleFilePath))
    return os.path.join(baseDirPath, 'certs')
def test_tcp_tls_default_context():
    """
    Test tcp connection with tls default context

    Uses ServerTls/ClientTls with their default certify/hostify settings;
    only the key, cert, and CA file paths are supplied explicitly.
    """
    certDirPath = localTestCertDirPath()
    assert os.path.exists(certDirPath)

    # removed commented-out legacy /etc/pki/tls/certs/... paths
    serverKeyPath = os.path.join(certDirPath, 'server_key.pem')  # local server private key
    serverCertPath = os.path.join(certDirPath, 'server_cert.pem')  # local server public cert
    clientCaPath = os.path.join(certDirPath, 'client.pem')  # remote client public cert

    clientKeyPath = os.path.join(certDirPath, 'client_key.pem')  # local client private key
    clientCertPath = os.path.join(certDirPath, 'client_cert.pem')  # local client public cert
    serverCaPath = os.path.join(certDirPath, 'server.pem')  # remote server public cert

    assert os.path.exists(serverKeyPath)
    assert os.path.exists(serverCertPath)
    assert os.path.exists(clientCaPath)
    assert os.path.exists(clientKeyPath)
    assert os.path.exists(clientCertPath)
    assert os.path.exists(serverCaPath)

    serverCertCommonName = 'localhost'  # match hostname uses server's cert commonname

    tymist = tyming.Tymist()
    with tcp.openServer(cls=tcp.ServerTls,
                        tymth=tymist.tymen(),
                        ha=("", 6101),
                        bs=16192,
                        keypath=serverKeyPath,
                        certpath=serverCertPath,
                        cafilepath=clientCaPath) as server, \
         tcp.openClient(cls=tcp.ClientTls,
                        tymth=tymist.tymen(),
                        ha=("127.0.0.1", 6101),
                        bs=16192,
                        certedhost=serverCertCommonName,
                        keypath=clientKeyPath,
                        certpath=clientCertPath,
                        cafilepath=serverCaPath,) as beta:

        assert server.opened == True
        assert server.eha == ('127.0.0.1', 6101)
        assert server.ha == ('0.0.0.0', 6101)
        assert beta.opened == True
        assert beta.accepted == False
        assert beta.connected == False
        assert beta.cutoff == False

        # Connect beta to server (includes TLS handshake servicing)
        while not (beta.connected and len(server.ixes) >= 1):
            beta.serviceConnect()
            server.serviceConnects()
            time.sleep(0.01)

        assert beta.accepted == True
        assert beta.connected == True
        assert beta.cutoff == False
        assert beta.ca == beta.cs.getsockname()
        assert beta.ha == beta.cs.getpeername()
        ixBeta = server.ixes[beta.ca]
        assert ixBeta.cs.getsockname() == beta.cs.getpeername()
        assert ixBeta.cs.getpeername() == beta.cs.getsockname()
        assert ixBeta.ca == beta.ca
        assert ixBeta.ha == beta.ha

        # send from beta to server over the TLS channel
        msgOut = b"Beta sends to Server\n"
        beta.tx(msgOut)
        while not (not beta.txbs and ixBeta.rxbs):
            beta.serviceSends()
            server.serviceReceivesAllIx()
            time.sleep(0.01)
        time.sleep(0.05)
        server.serviceReceivesAllIx()
        msgIn = bytes(ixBeta.rxbs)
        assert msgIn == msgOut
        ixBeta.clearRxbs()

        # send from server to beta over the TLS channel
        msgOut = b'Server sends to Beta\n'
        ixBeta.tx(msgOut)
        while not (not ixBeta.txbs and beta.rxbs):
            server.serviceSendsAllIx()
            beta.serviceReceives()
            time.sleep(0.01)
        msgIn = bytes(beta.rxbs)
        assert msgIn == msgOut
        beta.clearRxbs()

    assert beta.opened == False
    assert server.opened == False
    """Done Test"""
def test_tcp_tls_verify_both():
    """
    Test TCP TLS client server connection with verify certs for both client and server

    Server: certify=ssl.CERT_REQUIRED (requires and verifies client cert).
    Client: certify=ssl.CERT_REQUIRED, hostify=True (verifies server cert
    and checks its hostname against serverCertCommonName).
    """
    certDirPath = localTestCertDirPath()
    assert os.path.exists(certDirPath)

    serverKeyPath = os.path.join(certDirPath, 'server_key.pem')  # local server private key
    serverCertPath = os.path.join(certDirPath, 'server_cert.pem')  # local server public cert
    clientCaPath = os.path.join(certDirPath, 'client.pem')  # remote client public cert

    clientKeyPath = os.path.join(certDirPath, 'client_key.pem')  # local client private key
    clientCertPath = os.path.join(certDirPath, 'client_cert.pem')  # local client public cert
    serverCaPath = os.path.join(certDirPath, 'server.pem')  # remote server public cert

    assert os.path.exists(serverKeyPath)
    assert os.path.exists(serverCertPath)
    assert os.path.exists(clientCaPath)
    assert os.path.exists(clientKeyPath)
    assert os.path.exists(clientCertPath)
    assert os.path.exists(serverCaPath)

    serverCertCommonName = 'localhost' # match hostname uses servers's cert commonname

    tymist = tyming.Tymist()
    with tcp.openServer(cls=tcp.ServerTls,
                        tymth=tymist.tymen(),
                        ha=("", 6101),
                        bs=16192,
                        keypath=serverKeyPath,
                        certpath=serverCertPath,
                        cafilepath=clientCaPath,
                        certify=ssl.CERT_REQUIRED,) as server, \
         tcp.openClient(cls=tcp.ClientTls,
                        tymth=tymist.tymen(),
                        ha=("127.0.0.1", 6101),
                        bs=16192,
                        certedhost=serverCertCommonName,
                        keypath=clientKeyPath,
                        certpath=clientCertPath,
                        cafilepath=serverCaPath,
                        certify=ssl.CERT_REQUIRED,
                        hostify=True,) as beta:

        assert server.opened == True
        assert server.eha == ('127.0.0.1', 6101)
        assert server.ha == ('0.0.0.0', 6101)
        assert beta.opened == True
        assert beta.accepted == False
        assert beta.connected == False
        assert beta.cutoff == False

        # Connect beta to server (includes mutual TLS handshake servicing)
        while not(beta.connected and len(server.ixes) >= 1):
            beta.serviceConnect()
            server.serviceConnects()
            time.sleep(0.01)

        assert beta.accepted == True
        assert beta.connected == True
        assert beta.cutoff == False
        assert beta.ca == beta.cs.getsockname()
        assert beta.ha == beta.cs.getpeername()
        ixBeta = server.ixes[beta.ca]
        assert ixBeta.cs.getsockname() == beta.cs.getpeername()
        assert ixBeta.cs.getpeername() == beta.cs.getsockname()
        assert ixBeta.ca == beta.ca
        assert ixBeta.ha == beta.ha

        # send from beta to server
        msgOut = b"Beta sends to Server\n"
        beta.tx(msgOut)
        while not( not beta.txbs and ixBeta.rxbs):
            beta.serviceSends()
            server.serviceReceivesAllIx()
            time.sleep(0.01)
        time.sleep(0.05)
        server.serviceReceivesAllIx()
        msgIn = bytes(ixBeta.rxbs)
        assert msgIn == msgOut
        ixBeta.clearRxbs()

        # send from server to beta
        msgOut = b'Server sends to Beta\n'
        ixBeta.tx(msgOut)
        while not (not ixBeta.txbs and beta.rxbs):
            server.serviceSendsAllIx()
            beta.serviceReceives()
            time.sleep(0.01)
        msgIn = bytes(beta.rxbs)
        assert msgIn == msgOut
        beta.clearRxbs()

    assert beta.opened == False
    assert server.opened == False
    """Done Test"""
def test_tcp_tls_verify_client():
    """
    Test TCP TLS client server connection with verify certs for client not server

    Server: certify=ssl.CERT_REQUIRED (verifies the client cert).
    Client: certify=ssl.CERT_NONE, hostify=False (does not verify the server).
    """
    certDirPath = localTestCertDirPath()
    assert os.path.exists(certDirPath)

    serverKeyPath = os.path.join(certDirPath, 'server_key.pem')  # local server private key
    serverCertPath = os.path.join(certDirPath, 'server_cert.pem')  # local server public cert
    clientCaPath = os.path.join(certDirPath, 'client.pem')  # remote client public cert

    clientKeyPath = os.path.join(certDirPath, 'client_key.pem')  # local client private key
    clientCertPath = os.path.join(certDirPath, 'client_cert.pem')  # local client public cert
    serverCaPath = os.path.join(certDirPath, 'server.pem')  # remote server public cert

    assert os.path.exists(serverKeyPath)
    assert os.path.exists(serverCertPath)
    assert os.path.exists(clientCaPath)
    assert os.path.exists(clientKeyPath)
    assert os.path.exists(clientCertPath)
    assert os.path.exists(serverCaPath)

    serverCertCommonName = 'localhost' # match hostname uses servers's cert commonname

    tymist = tyming.Tymist()
    with tcp.openServer(cls=tcp.ServerTls,
                        tymth=tymist.tymen(),
                        ha=("", 6101),
                        bs=16192,
                        keypath=serverKeyPath,
                        certpath=serverCertPath,
                        cafilepath=clientCaPath,
                        certify=ssl.CERT_REQUIRED,) as server, \
         tcp.openClient(cls=tcp.ClientTls,
                        tymth=tymist.tymen(),
                        ha=("127.0.0.1", 6101), bs=16192,
                        certedhost=serverCertCommonName,
                        keypath=clientKeyPath,
                        certpath=clientCertPath,
                        cafilepath=serverCaPath,
                        certify=ssl.CERT_NONE,
                        hostify=False,) as beta:

        assert server.opened == True
        assert server.eha == ('127.0.0.1', 6101)
        assert server.ha == ('0.0.0.0', 6101)
        assert beta.opened == True
        assert beta.accepted == False
        assert beta.connected == False
        assert beta.cutoff == False

        # Connect beta to server (includes TLS handshake servicing)
        while not(beta.connected and len(server.ixes) >= 1):
            beta.serviceConnect()
            server.serviceConnects()
            time.sleep(0.01)

        assert beta.accepted == True
        assert beta.connected == True
        assert beta.cutoff == False
        assert beta.ca == beta.cs.getsockname()
        assert beta.ha == beta.cs.getpeername()
        ixBeta = server.ixes[beta.ca]
        assert ixBeta.cs.getsockname() == beta.cs.getpeername()
        assert ixBeta.cs.getpeername() == beta.cs.getsockname()
        assert ixBeta.ca == beta.ca
        assert ixBeta.ha == beta.ha

        # send from beta to server
        msgOut = b"Beta sends to Server\n"
        beta.tx(msgOut)
        while not( not beta.txbs and ixBeta.rxbs):
            beta.serviceSends()
            server.serviceReceivesAllIx()
            time.sleep(0.01)
        time.sleep(0.05)
        server.serviceReceivesAllIx()
        msgIn = bytes(ixBeta.rxbs)
        assert msgIn == msgOut
        ixBeta.clearRxbs()

        # send from server to beta
        msgOut = b'Server sends to Beta\n'
        ixBeta.tx(msgOut)
        while not (not ixBeta.txbs and beta.rxbs):
            server.serviceSendsAllIx()
            beta.serviceReceives()
            time.sleep(0.01)
        msgIn = bytes(beta.rxbs)
        assert msgIn == msgOut
        beta.clearRxbs()

    assert beta.opened == False
    assert server.opened == False
    """Done Test"""
def test_tcp_tls_verify_server():
    """
    Test TCP TLS client server connection with verify certs for server not client

    Server: certify=ssl.CERT_NONE (does not verify the client cert).
    Client: certify=ssl.CERT_REQUIRED, hostify=True (verifies the server cert
    and checks its hostname against serverCertCommonName).
    """
    certDirPath = localTestCertDirPath()
    assert os.path.exists(certDirPath)

    serverKeyPath = os.path.join(certDirPath, 'server_key.pem')  # local server private key
    serverCertPath = os.path.join(certDirPath, 'server_cert.pem')  # local server public cert
    clientCaPath = os.path.join(certDirPath, 'client.pem')  # remote client public cert

    clientKeyPath = os.path.join(certDirPath, 'client_key.pem')  # local client private key
    clientCertPath = os.path.join(certDirPath, 'client_cert.pem')  # local client public cert
    serverCaPath = os.path.join(certDirPath, 'server.pem')  # remote server public cert

    assert os.path.exists(serverKeyPath)
    assert os.path.exists(serverCertPath)
    assert os.path.exists(clientCaPath)
    assert os.path.exists(clientKeyPath)
    assert os.path.exists(clientCertPath)
    assert os.path.exists(serverCaPath)

    serverCertCommonName = 'localhost' # match hostname uses servers's cert commonname

    tymist = tyming.Tymist()
    with tcp.openServer(cls=tcp.ServerTls,
                        tymth=tymist.tymen(),
                        ha=("", 6101),
                        bs=16192,
                        keypath=serverKeyPath,
                        certpath=serverCertPath,
                        cafilepath=clientCaPath,
                        certify=ssl.CERT_NONE,) as server, \
         tcp.openClient(cls=tcp.ClientTls,
                        tymth=tymist.tymen(),
                        ha=("127.0.0.1", 6101),
                        bs=16192,
                        certedhost=serverCertCommonName,
                        keypath=clientKeyPath,
                        certpath=clientCertPath,
                        cafilepath=serverCaPath,
                        certify=ssl.CERT_REQUIRED,
                        hostify=True,) as beta:

        assert server.opened == True
        assert server.eha == ('127.0.0.1', 6101)
        assert server.ha == ('0.0.0.0', 6101)
        assert beta.opened == True
        assert beta.accepted == False
        assert beta.connected == False
        assert beta.cutoff == False

        # Connect beta to server (includes TLS handshake servicing)
        while not(beta.connected and len(server.ixes) >= 1):
            beta.serviceConnect()
            server.serviceConnects()
            time.sleep(0.01)

        assert beta.accepted == True
        assert beta.connected == True
        assert beta.cutoff == False
        assert beta.ca == beta.cs.getsockname()
        assert beta.ha == beta.cs.getpeername()
        ixBeta = server.ixes[beta.ca]
        assert ixBeta.cs.getsockname() == beta.cs.getpeername()
        assert ixBeta.cs.getpeername() == beta.cs.getsockname()
        assert ixBeta.ca == beta.ca
        assert ixBeta.ha == beta.ha

        # send from beta to server
        msgOut = b"Beta sends to Server\n"
        beta.tx(msgOut)
        while not( not beta.txbs and ixBeta.rxbs):
            beta.serviceSends()
            server.serviceReceivesAllIx()
            time.sleep(0.01)
        time.sleep(0.05)
        server.serviceReceivesAllIx()
        msgIn = bytes(ixBeta.rxbs)
        assert msgIn == msgOut
        ixBeta.clearRxbs()

        # send from server to beta
        msgOut = b'Server sends to Beta\n'
        ixBeta.tx(msgOut)
        while not (not ixBeta.txbs and beta.rxbs):
            server.serviceSendsAllIx()
            beta.serviceReceives()
            time.sleep(0.01)
        msgIn = bytes(beta.rxbs)
        assert msgIn == msgOut
        beta.clearRxbs()

    assert beta.opened == False
    assert server.opened == False
    """Done Test"""
def test_tcp_tls_verify_neither():
    """
    Test TCP TLS client server connection with verify certs for neither server nor client

    Server: certify=ssl.CERT_NONE (does not verify the client cert).
    Client: certify=ssl.CERT_NONE, hostify=False (does not verify the server).
    """
    certDirPath = localTestCertDirPath()
    assert os.path.exists(certDirPath)

    serverKeyPath = os.path.join(certDirPath, 'server_key.pem')  # local server private key
    serverCertPath = os.path.join(certDirPath, 'server_cert.pem')  # local server public cert
    clientCaPath = os.path.join(certDirPath, 'client.pem')  # remote client public cert

    clientKeyPath = os.path.join(certDirPath, 'client_key.pem')  # local client private key
    clientCertPath = os.path.join(certDirPath, 'client_cert.pem')  # local client public cert
    serverCaPath = os.path.join(certDirPath, 'server.pem')  # remote server public cert

    assert os.path.exists(serverKeyPath)
    assert os.path.exists(serverCertPath)
    assert os.path.exists(clientCaPath)
    assert os.path.exists(clientKeyPath)
    assert os.path.exists(clientCertPath)
    assert os.path.exists(serverCaPath)

    serverCertCommonName = 'localhost' # match hostname uses servers's cert commonname

    tymist = tyming.Tymist()
    with tcp.openServer(cls=tcp.ServerTls,
                        tymth=tymist.tymen(),
                        ha=("", 6101),
                        bs=16192,
                        keypath=serverKeyPath,
                        certpath=serverCertPath,
                        cafilepath=clientCaPath,
                        certify=ssl.CERT_NONE,) as server, \
         tcp.openClient(cls=tcp.ClientTls,
                        tymth=tymist.tymen(),
                        ha=("127.0.0.1", 6101),
                        bs=16192,
                        certedhost=serverCertCommonName,
                        keypath=clientKeyPath,
                        certpath=clientCertPath,
                        cafilepath=serverCaPath,
                        certify=ssl.CERT_NONE,
                        hostify=False,) as beta:

        assert server.opened == True
        assert server.eha == ('127.0.0.1', 6101)
        assert server.ha == ('0.0.0.0', 6101)
        assert beta.opened == True
        assert beta.accepted == False
        assert beta.connected == False
        assert beta.cutoff == False

        # Connect beta to server (includes TLS handshake servicing)
        while not(beta.connected and len(server.ixes) >= 1):
            beta.serviceConnect()
            server.serviceConnects()
            time.sleep(0.01)

        assert beta.accepted == True
        assert beta.connected == True
        assert beta.cutoff == False
        assert beta.ca == beta.cs.getsockname()
        assert beta.ha == beta.cs.getpeername()
        ixBeta = server.ixes[beta.ca]
        assert ixBeta.cs.getsockname() == beta.cs.getpeername()
        assert ixBeta.cs.getpeername() == beta.cs.getsockname()
        assert ixBeta.ca == beta.ca
        assert ixBeta.ha == beta.ha

        # send from beta to server
        msgOut = b"Beta sends to Server\n"
        beta.tx(msgOut)
        while not( not beta.txbs and ixBeta.rxbs):
            beta.serviceSends()
            server.serviceReceivesAllIx()
            time.sleep(0.01)
        time.sleep(0.05)
        server.serviceReceivesAllIx()
        msgIn = bytes(ixBeta.rxbs)
        assert msgIn == msgOut
        ixBeta.clearRxbs()

        # send from server to beta
        msgOut = b'Server sends to Beta\n'
        ixBeta.tx(msgOut)
        while not (not ixBeta.txbs and beta.rxbs):
            server.serviceSendsAllIx()
            beta.serviceReceives()
            time.sleep(0.01)
        msgIn = bytes(beta.rxbs)
        assert msgIn == msgOut
        beta.clearRxbs()

    assert beta.opened == False
    assert server.opened == False
    """Done Test"""
def test_tcp_tls_verify_both_tlsv12():
    """
    Test TCP TLS client server connection with verify certs for both client and server
    """
    certDirPath = localTestCertDirPath()
    assert os.path.exists(certDirPath)

    # Key/cert material for mutual (two-way) TLS verification.
    serverKeyPath = os.path.join(certDirPath, 'server_key.pem')  # local server private key
    serverCertPath = os.path.join(certDirPath, 'server_cert.pem')  # local server public cert
    clientCaPath = os.path.join(certDirPath, 'client.pem')  # remote client public cert
    clientKeyPath = os.path.join(certDirPath, 'client_key.pem')  # local client private key
    clientCertPath = os.path.join(certDirPath, 'client_cert.pem')  # local client public cert
    serverCaPath = os.path.join(certDirPath, 'server.pem')  # remote server public cert
    for credPath in (serverKeyPath, serverCertPath, clientCaPath,
                     clientKeyPath, clientCertPath, serverCaPath):
        assert os.path.exists(credPath)

    serverCertCommonName = 'localhost'  # hostify matches the server cert's common name
    tymist = tyming.Tymist()
    with tcp.openServer(cls=tcp.ServerTls,
                        tymth=tymist.tymen(),
                        ha=("", 6101),
                        bs=16192,
                        keypath=serverKeyPath,
                        certpath=serverCertPath,
                        cafilepath=clientCaPath,
                        certify=ssl.CERT_REQUIRED,
                        version=ssl.PROTOCOL_TLSv1_2,) as server, \
         tcp.openClient(cls=tcp.ClientTls,
                        tymth=tymist.tymen(),
                        ha=("127.0.0.1", 6101),
                        bs=16192,
                        certedhost=serverCertCommonName,
                        keypath=clientKeyPath,
                        certpath=clientCertPath,
                        cafilepath=serverCaPath,
                        certify=ssl.CERT_REQUIRED,
                        hostify=True,
                        version=ssl.PROTOCOL_TLSv1_2,) as client:

        assert server.opened == True
        assert server.eha == ('127.0.0.1', 6101)
        assert server.ha == ('0.0.0.0', 6101)

        assert client.opened == True
        assert client.accepted == False
        assert client.connected == False
        assert client.cutoff == False

        # Service both ends until the TLS handshake completes and the
        # server has allocated an incoming connection (ix) for the client.
        while not (client.connected and len(server.ixes) >= 1):
            client.serviceConnect()
            server.serviceConnects()
            time.sleep(0.01)

        assert client.accepted == True
        assert client.connected == True
        assert client.cutoff == False
        assert client.ca == client.cs.getsockname()
        assert client.ha == client.cs.getpeername()

        ixClient = server.ixes[client.ca]
        assert ixClient.cs.getsockname() == client.cs.getpeername()
        assert ixClient.cs.getpeername() == client.cs.getsockname()
        assert ixClient.ca == client.ca
        assert ixClient.ha == client.ha

        # Client to server transfer over the encrypted channel.
        txMsg = b"Beta sends to Server\n"
        client.tx(txMsg)
        while client.txbs or not ixClient.rxbs:
            client.serviceSends()
            server.serviceReceivesAllIx()
            time.sleep(0.01)
        time.sleep(0.05)
        server.serviceReceivesAllIx()
        rxMsg = bytes(ixClient.rxbs)
        assert rxMsg == txMsg
        ixClient.clearRxbs()

        # Server to client transfer back over the same connection.
        txMsg = b'Server sends to Beta\n'
        ixClient.tx(txMsg)
        while ixClient.txbs or not client.rxbs:
            server.serviceSendsAllIx()
            client.serviceReceives()
            time.sleep(0.01)
        rxMsg = bytes(client.rxbs)
        assert rxMsg == txMsg
        client.clearRxbs()

    # Context exit closes both endpoints.
    assert client.opened == False
    assert server.opened == False
    """Done Test"""
def test_server_client_doers():
    """
    Test ServerDoer ClientDoer classes
    """
    tock = 0.03125
    ticks = 16
    limit = ticks * tock
    doist = doing.Doist(tock=tock, real=True, limit=limit)
    assert doist.tyme == 0.0  # advances on next cycle
    assert doist.tock == tock == 0.03125
    assert doist.real == True
    assert doist.limit == limit == 0.5
    assert doist.doers == []

    port = 6120
    server = tcp.Server(host="", port=port)
    # client needs tymth in order to init its .tymer
    client = tcp.Client(tymth=doist.tymen(), host="localhost", port=port)
    assert client.tyme == doist.tyme

    serverDoer = tcp.ServerDoer(tymth=doist.tymen(), server=server)
    assert serverDoer.server == server
    assert serverDoer.tyme == serverDoer.server.tyme == doist.tyme

    clientDoer = tcp.ClientDoer(tymth=doist.tymen(), client=client)
    assert clientDoer.client == client
    assert clientDoer.tyme == clientDoer.client.tyme == doist.tyme

    assert serverDoer.tock == 0.0  # ASAP
    assert clientDoer.tock == 0.0  # ASAP

    # Queue a message before running so the doers deliver it during the run.
    txMsg = b"Hello me maties!"
    clientDoer.client.tx(txMsg)
    doist.do(doers=[serverDoer, clientDoer])
    assert doist.tyme == limit

    # Doers close their resources when the run completes.
    assert server.opened == False
    assert client.opened == False
    assert not client.txbs

    ca, ix = list(server.ixes.items())[0]
    assert bytes(ix.rxbs) == txMsg
    """End Test """
def test_echo_server_client_doers():
    """
    Test EchoServerDoer ClientDoer classes
    """
    tock = 0.03125
    ticks = 16
    limit = ticks * tock
    doist = doing.Doist(tock=tock, real=True, limit=limit)
    assert doist.tyme == 0.0  # advances on next cycle
    assert doist.tock == tock == 0.03125
    assert doist.real == True
    assert doist.limit == limit == 0.5
    assert doist.doers == []

    port = 6120
    server = tcp.Server(host="", port=port)
    client = tcp.Client(tymth=doist.tymen(), host="localhost", port=port)

    echoDoer = tcp.EchoServerDoer(tymth=doist.tymen(), server=server)
    assert echoDoer.server == server
    assert echoDoer.tyme == echoDoer.server.tyme == doist.tyme

    clientDoer = tcp.ClientDoer(tymth=doist.tymen(), client=client)
    assert clientDoer.client == client
    assert clientDoer.tyme == clientDoer.client.tyme == doist.tyme

    assert echoDoer.tock == 0.0  # ASAP
    assert clientDoer.tock == 0.0  # ASAP

    # Queue a message; the echo server should send it straight back.
    txMsg = b"Hello me maties!"
    clientDoer.client.tx(txMsg)
    doist.do(doers=[echoDoer, clientDoer])
    assert doist.tyme == limit

    # Doers close their resources when the run completes.
    assert server.opened == False
    assert client.opened == False
    assert not client.txbs

    echoMsg = bytes(client.rxbs)  # echoed back message
    assert echoMsg == txMsg

    ca, ix = list(server.ixes.items())[0]
    assert bytes(ix.rxbs) == b""  # server rxbs empty because message was echoed
    """End Test """
if __name__ == "__main__":
    # Run a single representative test when executed directly
    # (the full suite is normally run via pytest).
    test_server_client()
| 34.121827 | 102 | 0.599226 | 5,427 | 47,054 | 5.176709 | 0.062281 | 0.037375 | 0.022425 | 0.019221 | 0.831743 | 0.799495 | 0.788318 | 0.76753 | 0.751584 | 0.746067 | 0 | 0.022673 | 0.296999 | 47,054 | 1,378 | 103 | 34.146589 | 0.826627 | 0.121626 | 0 | 0.862205 | 0 | 0 | 0.0414 | 0 | 0 | 0 | 0 | 0 | 0.413386 | 1 | 0.011811 | false | 0 | 0.008858 | 0 | 0.020669 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
8b09b989c5fafdfbe80b88fa68cd8f369d9e5741 | 30 | py | Python | template/src/example.py | mobilityhouse/dojo | 0d2d5fbe1ea240da7343f95a253b2aa61f45aed2 | [
"BSD-3-Clause"
] | 1 | 2020-02-28T21:35:44.000Z | 2020-02-28T21:35:44.000Z | template/src/example.py | mobilityhouse/dojo | 0d2d5fbe1ea240da7343f95a253b2aa61f45aed2 | [
"BSD-3-Clause"
] | null | null | null | template/src/example.py | mobilityhouse/dojo | 0d2d5fbe1ea240da7343f95a253b2aa61f45aed2 | [
"BSD-3-Clause"
] | null | null | null | def method():
return None
| 10 | 15 | 0.633333 | 4 | 30 | 4.75 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.266667 | 30 | 2 | 16 | 15 | 0.863636 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | true | 0 | 0 | 0.5 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
8c7d4c81be97817a9534532ee350dd6efcf9f263 | 213 | py | Python | python/src/tensor/autograd/__init__.py | dawidkski/space | d1536ef1e8235c85fe7b83e43eca579ecb17bab4 | [
"MIT"
] | 3 | 2020-12-17T15:19:11.000Z | 2021-08-04T00:00:08.000Z | python/src/tensor/autograd/__init__.py | dawidkski/space | d1536ef1e8235c85fe7b83e43eca579ecb17bab4 | [
"MIT"
] | 29 | 2020-11-09T17:54:18.000Z | 2021-07-08T23:31:28.000Z | python/src/tensor/autograd/__init__.py | dawidkski/space | d1536ef1e8235c85fe7b83e43eca579ecb17bab4 | [
"MIT"
] | null | null | null | from .autograd import Variable, Op
from .autograd import matmul, add, log, reshape, var, print_graph
from . import viz
__all__ = ["Variable", "Op", "matmul", "add", "log", "reshape", "var", "print_graph", "viz"]
| 35.5 | 92 | 0.680751 | 29 | 213 | 4.793103 | 0.482759 | 0.172662 | 0.258993 | 0.273381 | 0.460432 | 0.460432 | 0.460432 | 0 | 0 | 0 | 0 | 0 | 0.140845 | 213 | 5 | 93 | 42.6 | 0.759563 | 0 | 0 | 0 | 0 | 0 | 0.215962 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.75 | 0 | 0.75 | 0.5 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 1 | 0 | 7 |
8ca0959c4cfdcebf242114a3d0536a6a6ec5a566 | 4,627 | py | Python | dashboard/migrations/0004_stock_stockdailyfifteen_stockdailyfive_stockdailythirty_stockmonthly_stockweekly.py | MelroyFrank/StockMarketDashboard | 76a6b1fc89e5f2469b5f7bdfffdebbf5fb897f9e | [
"MIT"
] | 1 | 2021-09-21T22:52:46.000Z | 2021-09-21T22:52:46.000Z | dashboard/migrations/0004_stock_stockdailyfifteen_stockdailyfive_stockdailythirty_stockmonthly_stockweekly.py | MelroyFrank/StockMarketDashboard | 76a6b1fc89e5f2469b5f7bdfffdebbf5fb897f9e | [
"MIT"
] | null | null | null | dashboard/migrations/0004_stock_stockdailyfifteen_stockdailyfive_stockdailythirty_stockmonthly_stockweekly.py | MelroyFrank/StockMarketDashboard | 76a6b1fc89e5f2469b5f7bdfffdebbf5fb897f9e | [
"MIT"
] | 1 | 2019-04-10T05:18:49.000Z | 2019-04-10T05:18:49.000Z | # -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2018-11-11 13:12
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
def _ohlcv_fields():
    """Return a fresh field list shared by all stock time-series models.

    A new list (with new field instances) is built on every call because
    Django field objects must not be shared between model definitions.
    """
    return [
        ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
        ('opening', models.DecimalField(decimal_places=8, max_digits=10)),
        ('closing', models.DecimalField(decimal_places=8, max_digits=10)),
        ('high', models.DecimalField(decimal_places=8, max_digits=10)),
        ('low', models.DecimalField(decimal_places=8, max_digits=10)),
        ('volume', models.DecimalField(decimal_places=8, max_digits=10)),
        ('timestamp', models.DateTimeField()),
        ('name', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='dashboard.Stock')),
    ]


class Migration(migrations.Migration):
    """Create the Stock model plus five OHLCV time-series models keyed to it.

    The five time-series models (15-min, 5-min, 30-min, monthly, weekly)
    have identical schemas, so their field lists are generated by
    `_ohlcv_fields` instead of being repeated inline.
    """

    dependencies = [
        ('dashboard', '0003_search_timestamp'),
    ]

    operations = [
        migrations.CreateModel(
            name='Stock',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255)),
            ],
        ),
    ] + [
        # Order preserved from the originally generated migration.
        migrations.CreateModel(name=seriesName, fields=_ohlcv_fields())
        for seriesName in (
            'StockDailyFifteen',
            'StockDailyFive',
            'StockDailyThirty',
            'StockMonthly',
            'StockWeekly',
        )
    ]
| 51.988764 | 114 | 0.593905 | 466 | 4,627 | 5.723176 | 0.145923 | 0.168729 | 0.234346 | 0.290589 | 0.858268 | 0.858268 | 0.858268 | 0.858268 | 0.858268 | 0.858268 | 0 | 0.029274 | 0.261725 | 4,627 | 88 | 115 | 52.579545 | 0.751464 | 0.014912 | 0 | 0.802469 | 1 | 0 | 0.089572 | 0.00461 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.037037 | 0 | 0.074074 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
8cc135e10e1ccc84cc9015850a97d5e5b52bb8c1 | 59,774 | py | Python | pypureclient/flashblade/FB_2_2/api/arrays_api.py | Flav-STOR-WL/py-pure-client | 03b889c997d90380ac5d6380ca5d5432792d3e89 | [
"BSD-2-Clause"
] | 14 | 2018-12-07T18:30:27.000Z | 2022-02-22T09:12:33.000Z | pypureclient/flashblade/FB_2_2/api/arrays_api.py | Flav-STOR-WL/py-pure-client | 03b889c997d90380ac5d6380ca5d5432792d3e89 | [
"BSD-2-Clause"
] | 28 | 2019-09-17T21:03:52.000Z | 2022-03-29T22:07:35.000Z | pypureclient/flashblade/FB_2_2/api/arrays_api.py | Flav-STOR-WL/py-pure-client | 03b889c997d90380ac5d6380ca5d5432792d3e89 | [
"BSD-2-Clause"
] | 15 | 2020-06-11T15:50:08.000Z | 2022-03-21T09:27:25.000Z | # coding: utf-8
"""
FlashBlade REST API
A lightweight client for FlashBlade REST API 2.2, developed by Pure Storage, Inc. (http://www.purestorage.com/).
OpenAPI spec version: 2.2
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re
# python 2 and python 3 compatibility library
import six
from typing import List, Optional
from .. import models
class ArraysApi(object):
    def __init__(self, api_client):
        # Client used to issue the actual REST calls for all Arrays endpoints.
        self.api_client = api_client
def api22_arrays_eula_get_with_http_info(
self,
continuation_token=None, # type: str
filter=None, # type: str
limit=None, # type: int
offset=None, # type: int
sort=None, # type: List[str]
async_req=False, # type: bool
_return_http_data_only=False, # type: bool
_preload_content=True, # type: bool
_request_timeout=None, # type: Optional[int]
):
# type: (...) -> models.EulaGetResponse
"""GET arrays/eula
List the End User Agreement and signature.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.api22_arrays_eula_get_with_http_info(async_req=True)
>>> result = thread.get()
:param str continuation_token: An opaque token used to iterate over a collection. The token to use on the next request is returned in the `continuation_token` field of the result.
:param str filter: Exclude resources that don't match the specified criteria.
:param int limit: Limit the size of the response to the specified number of resources. A `limit` of `0` can be used to get the number of resources without getting all of the resources. It will be returned in the `total_item_count` field. If a client asks for a page size larger than the maximum number, the request is still valid. In that case the server just returns the maximum number of items, disregarding the client's page size request.
:param int offset: The offset of the first resource to return from a collection.
:param list[str] sort: Sort the response by the specified fields (in descending order if '-' is appended to the field name). NOTE: If you provide a sort you will not get a `continuation_token` in the response.
:param bool async_req: Request runs in separate thread and method returns multiprocessing.pool.ApplyResult.
:param bool _return_http_data_only: Returns only data field.
:param bool _preload_content: Response is converted into objects.
:param int _request_timeout: Total request timeout in seconds.
It can also be a tuple of (connection time, read time) timeouts.
:return: EulaGetResponse
If the method is called asynchronously,
returns the request thread.
"""
if sort is not None:
if not isinstance(sort, list):
sort = [sort]
params = {k: v for k, v in six.iteritems(locals()) if v is not None}
# Convert the filter into a string
if params.get('filter'):
params['filter'] = str(params['filter'])
if params.get('sort'):
params['sort'] = [str(_x) for _x in params['sort']]
if 'limit' in params and params['limit'] < 1:
raise ValueError("Invalid value for parameter `limit` when calling `api22_arrays_eula_get`, must be a value greater than or equal to `1`")
if 'offset' in params and params['offset'] < 0:
raise ValueError("Invalid value for parameter `offset` when calling `api22_arrays_eula_get`, must be a value greater than or equal to `0`")
collection_formats = {}
path_params = {}
query_params = []
if 'continuation_token' in params:
query_params.append(('continuation_token', params['continuation_token']))
if 'filter' in params:
query_params.append(('filter', params['filter']))
if 'limit' in params:
query_params.append(('limit', params['limit']))
if 'offset' in params:
query_params.append(('offset', params['offset']))
if 'sort' in params:
query_params.append(('sort', params['sort']))
collection_formats['sort'] = 'csv'
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type(
['application/json'])
# Authentication setting
auth_settings = ['AuthorizationHeader']
return self.api_client.call_api(
'/api/2.2/arrays/eula', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EulaGetResponse',
auth_settings=auth_settings,
async_req=async_req,
_return_http_data_only=_return_http_data_only,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
collection_formats=collection_formats,
)
def api22_arrays_eula_patch_with_http_info(
self,
eula=None, # type: models.Eula
async_req=False, # type: bool
_return_http_data_only=False, # type: bool
_preload_content=True, # type: bool
_request_timeout=None, # type: Optional[int]
):
# type: (...) -> models.EulaResponse
"""PATCH arrays/eula
Modifies the signature on the End User Agreement.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.api22_arrays_eula_patch_with_http_info(eula, async_req=True)
>>> result = thread.get()
:param Eula eula: (required)
:param bool async_req: Request runs in separate thread and method returns multiprocessing.pool.ApplyResult.
:param bool _return_http_data_only: Returns only data field.
:param bool _preload_content: Response is converted into objects.
:param int _request_timeout: Total request timeout in seconds.
It can also be a tuple of (connection time, read time) timeouts.
:return: EulaResponse
If the method is called asynchronously,
returns the request thread.
"""
params = {k: v for k, v in six.iteritems(locals()) if v is not None}
# Convert the filter into a string
if params.get('filter'):
params['filter'] = str(params['filter'])
if params.get('sort'):
params['sort'] = [str(_x) for _x in params['sort']]
# verify the required parameter 'eula' is set
if eula is None:
raise TypeError("Missing the required parameter `eula` when calling `api22_arrays_eula_patch`")
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'eula' in params:
body_params = params['eula']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type(
['application/json'])
# Authentication setting
auth_settings = ['AuthorizationHeader']
return self.api_client.call_api(
'/api/2.2/arrays/eula', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EulaResponse',
auth_settings=auth_settings,
async_req=async_req,
_return_http_data_only=_return_http_data_only,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
collection_formats=collection_formats,
)
def api22_arrays_factory_reset_token_delete_with_http_info(
self,
async_req=False, # type: bool
_return_http_data_only=False, # type: bool
_preload_content=True, # type: bool
_request_timeout=None, # type: Optional[int]
):
# type: (...) -> None
"""Delete a factory reset token
Deletes any existing token that could be used to perform a factory reset on the array.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.api22_arrays_factory_reset_token_delete_with_http_info(async_req=True)
>>> result = thread.get()
:param bool async_req: Request runs in separate thread and method returns multiprocessing.pool.ApplyResult.
:param bool _return_http_data_only: Returns only data field.
:param bool _preload_content: Response is converted into objects.
:param int _request_timeout: Total request timeout in seconds.
It can also be a tuple of (connection time, read time) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
params = {k: v for k, v in six.iteritems(locals()) if v is not None}
# Convert the filter into a string
if params.get('filter'):
params['filter'] = str(params['filter'])
if params.get('sort'):
params['sort'] = [str(_x) for _x in params['sort']]
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type(
['application/json'])
# Authentication setting
auth_settings = ['AuthorizationHeader']
return self.api_client.call_api(
'/api/2.2/arrays/factory-reset-token', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
async_req=async_req,
_return_http_data_only=_return_http_data_only,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
collection_formats=collection_formats,
)
def api22_arrays_factory_reset_token_get_with_http_info(
self,
continuation_token=None, # type: str
filter=None, # type: str
limit=None, # type: int
offset=None, # type: int
sort=None, # type: List[str]
async_req=False, # type: bool
_return_http_data_only=False, # type: bool
_preload_content=True, # type: bool
_request_timeout=None, # type: Optional[int]
):
# type: (...) -> models.ArrayFactoryResetTokenGetResponse
"""List factory reset tokens
Displays a list of tokens used to perform a factory reset on the array.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.api22_arrays_factory_reset_token_get_with_http_info(async_req=True)
>>> result = thread.get()
:param str continuation_token: An opaque token used to iterate over a collection. The token to use on the next request is returned in the `continuation_token` field of the result.
:param str filter: Exclude resources that don't match the specified criteria.
:param int limit: Limit the size of the response to the specified number of resources. A `limit` of `0` can be used to get the number of resources without getting all of the resources. It will be returned in the `total_item_count` field. If a client asks for a page size larger than the maximum number, the request is still valid. In that case the server just returns the maximum number of items, disregarding the client's page size request.
:param int offset: The offset of the first resource to return from a collection.
:param list[str] sort: Sort the response by the specified fields (in descending order if '-' is appended to the field name). NOTE: If you provide a sort you will not get a `continuation_token` in the response.
:param bool async_req: Request runs in separate thread and method returns multiprocessing.pool.ApplyResult.
:param bool _return_http_data_only: Returns only data field.
:param bool _preload_content: Response is converted into objects.
:param int _request_timeout: Total request timeout in seconds.
It can also be a tuple of (connection time, read time) timeouts.
:return: ArrayFactoryResetTokenGetResponse
If the method is called asynchronously,
returns the request thread.
"""
if sort is not None:
if not isinstance(sort, list):
sort = [sort]
params = {k: v for k, v in six.iteritems(locals()) if v is not None}
# Convert the filter into a string
if params.get('filter'):
params['filter'] = str(params['filter'])
if params.get('sort'):
params['sort'] = [str(_x) for _x in params['sort']]
if 'limit' in params and params['limit'] < 1:
raise ValueError("Invalid value for parameter `limit` when calling `api22_arrays_factory_reset_token_get`, must be a value greater than or equal to `1`")
if 'offset' in params and params['offset'] < 0:
raise ValueError("Invalid value for parameter `offset` when calling `api22_arrays_factory_reset_token_get`, must be a value greater than or equal to `0`")
collection_formats = {}
path_params = {}
query_params = []
if 'continuation_token' in params:
query_params.append(('continuation_token', params['continuation_token']))
if 'filter' in params:
query_params.append(('filter', params['filter']))
if 'limit' in params:
query_params.append(('limit', params['limit']))
if 'offset' in params:
query_params.append(('offset', params['offset']))
if 'sort' in params:
query_params.append(('sort', params['sort']))
collection_formats['sort'] = 'csv'
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type(
['application/json'])
# Authentication setting
auth_settings = ['AuthorizationHeader']
return self.api_client.call_api(
'/api/2.2/arrays/factory-reset-token', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ArrayFactoryResetTokenGetResponse',
auth_settings=auth_settings,
async_req=async_req,
_return_http_data_only=_return_http_data_only,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
collection_formats=collection_formats,
)
def api22_arrays_factory_reset_token_post_with_http_info(
self,
async_req=False, # type: bool
_return_http_data_only=False, # type: bool
_preload_content=True, # type: bool
_request_timeout=None, # type: Optional[int]
):
# type: (...) -> models.ArrayFactoryResetTokenResponse
"""Create a factory reset token
Creates a token that can be used to perform a factory reset on the array. Factory reset tokens can only be created after the array has been prepared for reset (e.g., all file systems, buckets, and snapshots must first be eradicated). After a token has been created, operations that would take the array out of the prepared state (e.g., creating file systems) are disabled until all tokens have been deleted.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.api22_arrays_factory_reset_token_post_with_http_info(async_req=True)
>>> result = thread.get()
:param bool async_req: Request runs in separate thread and method returns multiprocessing.pool.ApplyResult.
:param bool _return_http_data_only: Returns only data field.
:param bool _preload_content: Response is converted into objects.
:param int _request_timeout: Total request timeout in seconds.
It can also be a tuple of (connection time, read time) timeouts.
:return: ArrayFactoryResetTokenResponse
If the method is called asynchronously,
returns the request thread.
"""
params = {k: v for k, v in six.iteritems(locals()) if v is not None}
# Convert the filter into a string
if params.get('filter'):
params['filter'] = str(params['filter'])
if params.get('sort'):
params['sort'] = [str(_x) for _x in params['sort']]
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type(
['application/json'])
# Authentication setting
auth_settings = ['AuthorizationHeader']
return self.api_client.call_api(
'/api/2.2/arrays/factory-reset-token', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ArrayFactoryResetTokenResponse',
auth_settings=auth_settings,
async_req=async_req,
_return_http_data_only=_return_http_data_only,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
collection_formats=collection_formats,
)
def api22_arrays_get_with_http_info(
    self,
    continuation_token=None,  # type: str
    filter=None,  # type: str
    limit=None,  # type: int
    offset=None,  # type: int
    sort=None,  # type: List[str]
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.ArrayGetResponse
    """GET arrays

    List array attributes such as the array name, ID, version, and NTP servers.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.api22_arrays_get_with_http_info(async_req=True)
    >>> result = thread.get()

    :param str continuation_token: An opaque token used to iterate over a collection. The token to use on the next request is returned in the `continuation_token` field of the result.
    :param str filter: Exclude resources that don't match the specified criteria.
    :param int limit: Limit the size of the response to the specified number of resources. A `limit` of `0` can be used to get the number of resources without getting all of the resources. It will be returned in the `total_item_count` field. If a client asks for a page size larger than the maximum number, the request is still valid. In that case the server just returns the maximum number of items, disregarding the client's page size request.
    :param int offset: The offset of the first resource to return from a collection.
    :param list[str] sort: Sort the response by the specified fields (in descending order if '-' is appended to the field name). NOTE: If you provide a sort you will not get a `continuation_token` in the response.
    :param bool async_req: Request runs in separate thread and method returns multiprocessing.pool.ApplyResult.
    :param bool _return_http_data_only: Returns only data field.
    :param bool _preload_content: Response is converted into objects.
    :param int _request_timeout: Total request timeout in seconds.
        It can also be a tuple of (connection time, read time) timeouts.
    :return: ArrayGetResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # Accept a bare value for `sort` and promote it to a one-element list.
    if sort is not None and not isinstance(sort, list):
        sort = [sort]
    # Stringify `filter`/`sort` so callers may pass non-string expression
    # objects; falsy values (e.g. '' or []) are forwarded unconverted.
    if filter:
        filter = str(filter)
    if sort:
        sort = [str(entry) for entry in sort]
    # Validate the pagination arguments before issuing the request.
    # NOTE(review): the docstring says `limit=0` is accepted by the server,
    # but this generated validator rejects values below 1 — confirm against
    # the REST spec before relying on limit=0.
    if limit is not None and limit < 1:
        raise ValueError("Invalid value for parameter `limit` when calling `api22_arrays_get`, must be a value greater than or equal to `1`")
    if offset is not None and offset < 0:
        raise ValueError("Invalid value for parameter `offset` when calling `api22_arrays_get`, must be a value greater than or equal to `0`")

    # Assemble the query string from the parameters that were supplied.
    collection_formats = {}
    query_params = []
    if continuation_token is not None:
        query_params.append(('continuation_token', continuation_token))
    if filter is not None:
        query_params.append(('filter', filter))
    if limit is not None:
        query_params.append(('limit', limit))
    if offset is not None:
        query_params.append(('offset', offset))
    if sort is not None:
        query_params.append(('sort', sort))
        # `sort` is serialized as a comma-separated list.
        collection_formats['sort'] = 'csv'

    # This endpoint speaks JSON in both directions.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),
    }

    return self.api_client.call_api(
        '/api/2.2/arrays', 'GET',
        {},  # no path parameters
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='ArrayGetResponse',
        auth_settings=['AuthorizationHeader'],
        async_req=async_req,
        _return_http_data_only=_return_http_data_only,
        _preload_content=_preload_content,
        _request_timeout=_request_timeout,
        collection_formats=collection_formats,
    )
def api22_arrays_http_specific_performance_get_with_http_info(
    self,
    end_time=None,  # type: int
    resolution=None,  # type: int
    start_time=None,  # type: int
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.ArrayHttpSpecificPerformanceGet
    """GET arrays/http-specific-performance

    List the HTTP performance metrics of the array.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.api22_arrays_http_specific_performance_get_with_http_info(async_req=True)
    >>> result = thread.get()

    :param int end_time: When the time window ends (in milliseconds since epoch).
    :param int resolution: The desired ms between samples. Available resolutions may depend on data type, `start_time` and `end_time`. In general `1000`, `30000`, `300000`, `1800000`, `7200000`, and `86400000` are possible values.
    :param int start_time: When the time window starts (in milliseconds since epoch).
    :param bool async_req: Request runs in separate thread and method returns multiprocessing.pool.ApplyResult.
    :param bool _return_http_data_only: Returns only data field.
    :param bool _preload_content: Response is converted into objects.
    :param int _request_timeout: Total request timeout in seconds.
        It can also be a tuple of (connection time, read time) timeouts.
    :return: ArrayHttpSpecificPerformanceGet
        If the method is called asynchronously,
        returns the request thread.
    """
    window_args = (
        ('end_time', end_time),
        ('resolution', resolution),
        ('start_time', start_time),
    )
    # Every time-window argument must be non-negative when supplied.
    for arg_name, arg_value in window_args:
        if arg_value is not None and arg_value < 0:
            raise ValueError(
                "Invalid value for parameter `{}` when calling "
                "`api22_arrays_http_specific_performance_get`, must be a value "
                "greater than or equal to `0`".format(arg_name))

    # Forward only the arguments that were actually supplied.
    query_params = [(arg_name, arg_value)
                    for arg_name, arg_value in window_args
                    if arg_value is not None]

    # This endpoint speaks JSON in both directions.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),
    }

    return self.api_client.call_api(
        '/api/2.2/arrays/http-specific-performance', 'GET',
        {},  # no path parameters
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='ArrayHttpSpecificPerformanceGet',
        auth_settings=['AuthorizationHeader'],
        async_req=async_req,
        _return_http_data_only=_return_http_data_only,
        _preload_content=_preload_content,
        _request_timeout=_request_timeout,
        collection_formats={},
    )
def api22_arrays_nfs_specific_performance_get_with_http_info(
    self,
    end_time=None,  # type: int
    resolution=None,  # type: int
    start_time=None,  # type: int
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.ArrayNfsSpecificPerformanceGet
    """GET arrays/nfs-specific-performance

    List the NFS performance metrics of the array.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.api22_arrays_nfs_specific_performance_get_with_http_info(async_req=True)
    >>> result = thread.get()

    :param int end_time: When the time window ends (in milliseconds since epoch).
    :param int resolution: The desired ms between samples. Available resolutions may depend on data type, `start_time` and `end_time`. In general `1000`, `30000`, `300000`, `1800000`, `7200000`, and `86400000` are possible values.
    :param int start_time: When the time window starts (in milliseconds since epoch).
    :param bool async_req: Request runs in separate thread and method returns multiprocessing.pool.ApplyResult.
    :param bool _return_http_data_only: Returns only data field.
    :param bool _preload_content: Response is converted into objects.
    :param int _request_timeout: Total request timeout in seconds.
        It can also be a tuple of (connection time, read time) timeouts.
    :return: ArrayNfsSpecificPerformanceGet
        If the method is called asynchronously,
        returns the request thread.
    """
    window_args = (
        ('end_time', end_time),
        ('resolution', resolution),
        ('start_time', start_time),
    )
    # Every time-window argument must be non-negative when supplied.
    for arg_name, arg_value in window_args:
        if arg_value is not None and arg_value < 0:
            raise ValueError(
                "Invalid value for parameter `{}` when calling "
                "`api22_arrays_nfs_specific_performance_get`, must be a value "
                "greater than or equal to `0`".format(arg_name))

    # Forward only the arguments that were actually supplied.
    query_params = [(arg_name, arg_value)
                    for arg_name, arg_value in window_args
                    if arg_value is not None]

    # This endpoint speaks JSON in both directions.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),
    }

    return self.api_client.call_api(
        '/api/2.2/arrays/nfs-specific-performance', 'GET',
        {},  # no path parameters
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='ArrayNfsSpecificPerformanceGet',
        auth_settings=['AuthorizationHeader'],
        async_req=async_req,
        _return_http_data_only=_return_http_data_only,
        _preload_content=_preload_content,
        _request_timeout=_request_timeout,
        collection_formats={},
    )
def api22_arrays_patch_with_http_info(
    self,
    array=None,  # type: list
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.ArrayResponse
    """PATCH arrays

    Modify the general configuration of the array including banner text, array name, NTP servers, and time zone.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.api22_arrays_patch_with_http_info(array, async_req=True)
    >>> result = thread.get()

    :param Array array: (required)
    :param bool async_req: Request runs in separate thread and method returns multiprocessing.pool.ApplyResult.
    :param bool _return_http_data_only: Returns only data field.
    :param bool _preload_content: Response is converted into objects.
    :param int _request_timeout: Total request timeout in seconds.
        It can also be a tuple of (connection time, read time) timeouts.
    :return: ArrayResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # `array` carries the whole request body and is mandatory, even though
    # the generated signature defaults it to None.
    if array is None:
        raise TypeError("Missing the required parameter `array` when calling `api22_arrays_patch`")

    # This endpoint speaks JSON in both directions.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),
    }

    return self.api_client.call_api(
        '/api/2.2/arrays', 'PATCH',
        {},  # no path parameters
        [],  # no query parameters
        header_params,
        body=array,
        post_params=[],
        files={},
        response_type='ArrayResponse',
        auth_settings=['AuthorizationHeader'],
        async_req=async_req,
        _return_http_data_only=_return_http_data_only,
        _preload_content=_preload_content,
        _request_timeout=_request_timeout,
        collection_formats={},
    )
def api22_arrays_performance_get_with_http_info(
    self,
    end_time=None,  # type: int
    protocol=None,  # type: str
    resolution=None,  # type: int
    start_time=None,  # type: int
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.ArrayPerformanceGetResponse
    """GET arrays/performance

    Lists the overall performance metrics of the array.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.api22_arrays_performance_get_with_http_info(async_req=True)
    >>> result = thread.get()

    :param int end_time: When the time window ends (in milliseconds since epoch).
    :param str protocol: Display the performance of a specified protocol. Valid values are `all`, `HTTP`, `SMB`, `NFS`, and `S3`. If not specified, defaults to `all`, which will provide the combined performance of all available protocols.
    :param int resolution: The desired ms between samples. Available resolutions may depend on data type, `start_time` and `end_time`. In general `1000`, `30000`, `300000`, `1800000`, `7200000`, and `86400000` are possible values.
    :param int start_time: When the time window starts (in milliseconds since epoch).
    :param bool async_req: Request runs in separate thread and method returns multiprocessing.pool.ApplyResult.
    :param bool _return_http_data_only: Returns only data field.
    :param bool _preload_content: Response is converted into objects.
    :param int _request_timeout: Total request timeout in seconds.
        It can also be a tuple of (connection time, read time) timeouts.
    :return: ArrayPerformanceGetResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # Every time-window argument must be non-negative when supplied
    # (`protocol` is passed through unvalidated; the server checks it).
    for arg_name, arg_value in (('end_time', end_time),
                                ('resolution', resolution),
                                ('start_time', start_time)):
        if arg_value is not None and arg_value < 0:
            raise ValueError(
                "Invalid value for parameter `{}` when calling "
                "`api22_arrays_performance_get`, must be a value "
                "greater than or equal to `0`".format(arg_name))

    # Forward only the arguments that were actually supplied.
    query_params = [(arg_name, arg_value)
                    for arg_name, arg_value in (('end_time', end_time),
                                                ('protocol', protocol),
                                                ('resolution', resolution),
                                                ('start_time', start_time))
                    if arg_value is not None]

    # This endpoint speaks JSON in both directions.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),
    }

    return self.api_client.call_api(
        '/api/2.2/arrays/performance', 'GET',
        {},  # no path parameters
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='ArrayPerformanceGetResponse',
        auth_settings=['AuthorizationHeader'],
        async_req=async_req,
        _return_http_data_only=_return_http_data_only,
        _preload_content=_preload_content,
        _request_timeout=_request_timeout,
        collection_formats={},
    )
def api22_arrays_performance_replication_get_with_http_info(
    self,
    end_time=None,  # type: int
    resolution=None,  # type: int
    start_time=None,  # type: int
    type=None,  # type: str
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.ArrayPerformanceReplicationGetResp
    """GET arrays/performance/replication

    List replication performance metrics.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.api22_arrays_performance_replication_get_with_http_info(async_req=True)
    >>> result = thread.get()

    :param int end_time: When the time window ends (in milliseconds since epoch).
    :param int resolution: The desired ms between samples. Available resolutions may depend on data type, `start_time` and `end_time`. In general `1000`, `30000`, `300000`, `1800000`, `7200000`, and `86400000` are possible values.
    :param int start_time: When the time window starts (in milliseconds since epoch).
    :param str type: Display the metric of a specified object type. Valid values are `all`, `file-system`, and `object-store`. If not specified, defaults to `all`.
    :param bool async_req: Request runs in separate thread and method returns multiprocessing.pool.ApplyResult.
    :param bool _return_http_data_only: Returns only data field.
    :param bool _preload_content: Response is converted into objects.
    :param int _request_timeout: Total request timeout in seconds.
        It can also be a tuple of (connection time, read time) timeouts.
    :return: ArrayPerformanceReplicationGetResp
        If the method is called asynchronously,
        returns the request thread.
    """
    # Every time-window argument must be non-negative when supplied
    # (`type` is passed through unvalidated; the server checks it).
    for arg_name, arg_value in (('end_time', end_time),
                                ('resolution', resolution),
                                ('start_time', start_time)):
        if arg_value is not None and arg_value < 0:
            raise ValueError(
                "Invalid value for parameter `{}` when calling "
                "`api22_arrays_performance_replication_get`, must be a value "
                "greater than or equal to `0`".format(arg_name))

    # Forward only the arguments that were actually supplied.
    query_params = [(arg_name, arg_value)
                    for arg_name, arg_value in (('end_time', end_time),
                                                ('resolution', resolution),
                                                ('start_time', start_time),
                                                ('type', type))
                    if arg_value is not None]

    # This endpoint speaks JSON in both directions.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),
    }

    return self.api_client.call_api(
        '/api/2.2/arrays/performance/replication', 'GET',
        {},  # no path parameters
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='ArrayPerformanceReplicationGetResp',
        auth_settings=['AuthorizationHeader'],
        async_req=async_req,
        _return_http_data_only=_return_http_data_only,
        _preload_content=_preload_content,
        _request_timeout=_request_timeout,
        collection_formats={},
    )
def api22_arrays_s3_specific_performance_get_with_http_info(
    self,
    end_time=None,  # type: int
    resolution=None,  # type: int
    start_time=None,  # type: int
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.ArrayS3SpecificPerformanceGetResp
    """GET arrays/s3-specific-performance

    List the S3 performance metrics of the array.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.api22_arrays_s3_specific_performance_get_with_http_info(async_req=True)
    >>> result = thread.get()

    :param int end_time: When the time window ends (in milliseconds since epoch).
    :param int resolution: The desired ms between samples. Available resolutions may depend on data type, `start_time` and `end_time`. In general `1000`, `30000`, `300000`, `1800000`, `7200000`, and `86400000` are possible values.
    :param int start_time: When the time window starts (in milliseconds since epoch).
    :param bool async_req: Request runs in separate thread and method returns multiprocessing.pool.ApplyResult.
    :param bool _return_http_data_only: Returns only data field.
    :param bool _preload_content: Response is converted into objects.
    :param int _request_timeout: Total request timeout in seconds.
        It can also be a tuple of (connection time, read time) timeouts.
    :return: ArrayS3SpecificPerformanceGetResp
        If the method is called asynchronously,
        returns the request thread.
    """
    window_args = (
        ('end_time', end_time),
        ('resolution', resolution),
        ('start_time', start_time),
    )
    # Every time-window argument must be non-negative when supplied.
    for arg_name, arg_value in window_args:
        if arg_value is not None and arg_value < 0:
            raise ValueError(
                "Invalid value for parameter `{}` when calling "
                "`api22_arrays_s3_specific_performance_get`, must be a value "
                "greater than or equal to `0`".format(arg_name))

    # Forward only the arguments that were actually supplied.
    query_params = [(arg_name, arg_value)
                    for arg_name, arg_value in window_args
                    if arg_value is not None]

    # This endpoint speaks JSON in both directions.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),
    }

    return self.api_client.call_api(
        '/api/2.2/arrays/s3-specific-performance', 'GET',
        {},  # no path parameters
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='ArrayS3SpecificPerformanceGetResp',
        auth_settings=['AuthorizationHeader'],
        async_req=async_req,
        _return_http_data_only=_return_http_data_only,
        _preload_content=_preload_content,
        _request_timeout=_request_timeout,
        collection_formats={},
    )
def api22_arrays_space_get_with_http_info(
    self,
    end_time=None,  # type: int
    resolution=None,  # type: int
    start_time=None,  # type: int
    type=None,  # type: str
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.ArraySpaceGetResponse
    """GET arrays/space

    List available and used storage space on the array.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.api22_arrays_space_get_with_http_info(async_req=True)
    >>> result = thread.get()

    :param int end_time: When the time window ends (in milliseconds since epoch).
    :param int resolution: The desired ms between samples. Available resolutions may depend on data type, `start_time` and `end_time`. In general `1000`, `30000`, `300000`, `1800000`, `7200000`, and `86400000` are possible values.
    :param int start_time: When the time window starts (in milliseconds since epoch).
    :param str type: Display the metric of a specified object type. Valid values are `array`, `file-system`, and `object-store`. If not specified, defaults to `array`.
    :param bool async_req: Request runs in separate thread and method returns multiprocessing.pool.ApplyResult.
    :param bool _return_http_data_only: Returns only data field.
    :param bool _preload_content: Response is converted into objects.
    :param int _request_timeout: Total request timeout in seconds.
        It can also be a tuple of (connection time, read time) timeouts.
    :return: ArraySpaceGetResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # Every time-window argument must be non-negative when supplied
    # (`type` is passed through unvalidated; the server checks it).
    for arg_name, arg_value in (('end_time', end_time),
                                ('resolution', resolution),
                                ('start_time', start_time)):
        if arg_value is not None and arg_value < 0:
            raise ValueError(
                "Invalid value for parameter `{}` when calling "
                "`api22_arrays_space_get`, must be a value "
                "greater than or equal to `0`".format(arg_name))

    # Forward only the arguments that were actually supplied.
    query_params = [(arg_name, arg_value)
                    for arg_name, arg_value in (('end_time', end_time),
                                                ('resolution', resolution),
                                                ('start_time', start_time),
                                                ('type', type))
                    if arg_value is not None]

    # This endpoint speaks JSON in both directions.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),
    }

    return self.api_client.call_api(
        '/api/2.2/arrays/space', 'GET',
        {},  # no path parameters
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='ArraySpaceGetResponse',
        auth_settings=['AuthorizationHeader'],
        async_req=async_req,
        _return_http_data_only=_return_http_data_only,
        _preload_content=_preload_content,
        _request_timeout=_request_timeout,
        collection_formats={},
    )
def api22_arrays_supported_time_zones_get_with_http_info(
    self,
    continuation_token=None,  # type: str
    filter=None,  # type: str
    limit=None,  # type: int
    names=None,  # type: List[str]
    offset=None,  # type: int
    sort=None,  # type: List[str]
    async_req=False,  # type: bool
    _return_http_data_only=False,  # type: bool
    _preload_content=True,  # type: bool
    _request_timeout=None,  # type: Optional[int]
):
    # type: (...) -> models.ArraysSupportedTimeZonesGetResponse
    """GET arrays/supported-time-zones

    List supported time zones for the array.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.api22_arrays_supported_time_zones_get_with_http_info(async_req=True)
    >>> result = thread.get()

    :param str continuation_token: An opaque token used to iterate over a collection. The token to use on the next request is returned in the `continuation_token` field of the result.
    :param str filter: Exclude resources that don't match the specified criteria.
    :param int limit: Limit the size of the response to the specified number of resources. A `limit` of `0` can be used to get the number of resources without getting all of the resources. It will be returned in the `total_item_count` field. If a client asks for a page size larger than the maximum number, the request is still valid. In that case the server just returns the maximum number of items, disregarding the client's page size request.
    :param list[str] names: A comma-separated list of resource names. If there is not at least one resource that matches each of the elements of `names`, then an error is returned.
    :param int offset: The offset of the first resource to return from a collection.
    :param list[str] sort: Sort the response by the specified fields (in descending order if '-' is appended to the field name). NOTE: If you provide a sort you will not get a `continuation_token` in the response.
    :param bool async_req: Request runs in separate thread and method returns multiprocessing.pool.ApplyResult.
    :param bool _return_http_data_only: Returns only data field.
    :param bool _preload_content: Response is converted into objects.
    :param int _request_timeout: Total request timeout in seconds.
        It can also be a tuple of (connection time, read time) timeouts.
    :return: ArraysSupportedTimeZonesGetResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # Accept bare values for the list-typed arguments.
    if names is not None and not isinstance(names, list):
        names = [names]
    if sort is not None and not isinstance(sort, list):
        sort = [sort]
    # Stringify `filter`/`sort` so callers may pass non-string expression
    # objects; falsy values (e.g. '' or []) are forwarded unconverted.
    # (`names` entries are forwarded as given, without conversion.)
    if filter:
        filter = str(filter)
    if sort:
        sort = [str(entry) for entry in sort]
    # Validate the pagination arguments before issuing the request.
    # NOTE(review): the docstring says `limit=0` is accepted by the server,
    # but this generated validator rejects values below 1 — confirm against
    # the REST spec before relying on limit=0.
    if limit is not None and limit < 1:
        raise ValueError("Invalid value for parameter `limit` when calling `api22_arrays_supported_time_zones_get`, must be a value greater than or equal to `1`")
    if offset is not None and offset < 0:
        raise ValueError("Invalid value for parameter `offset` when calling `api22_arrays_supported_time_zones_get`, must be a value greater than or equal to `0`")

    # Assemble the query string from the parameters that were supplied.
    collection_formats = {}
    query_params = []
    if continuation_token is not None:
        query_params.append(('continuation_token', continuation_token))
    if filter is not None:
        query_params.append(('filter', filter))
    if limit is not None:
        query_params.append(('limit', limit))
    if names is not None:
        query_params.append(('names', names))
        # `names` is serialized as a comma-separated list.
        collection_formats['names'] = 'csv'
    if offset is not None:
        query_params.append(('offset', offset))
    if sort is not None:
        query_params.append(('sort', sort))
        # `sort` is serialized as a comma-separated list.
        collection_formats['sort'] = 'csv'

    # This endpoint speaks JSON in both directions.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),
    }

    return self.api_client.call_api(
        '/api/2.2/arrays/supported-time-zones', 'GET',
        {},  # no path parameters
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='ArraysSupportedTimeZonesGetResponse',
        auth_settings=['AuthorizationHeader'],
        async_req=async_req,
        _return_http_data_only=_return_http_data_only,
        _preload_content=_preload_content,
        _request_timeout=_request_timeout,
        collection_formats=collection_formats,
    )
| 46.336434 | 449 | 0.641132 | 7,310 | 59,774 | 5.033105 | 0.047059 | 0.018265 | 0.032344 | 0.027397 | 0.919357 | 0.914764 | 0.912291 | 0.908839 | 0.905686 | 0.903675 | 0 | 0.010047 | 0.272326 | 59,774 | 1,289 | 450 | 46.372382 | 0.835824 | 0.375297 | 0 | 0.858247 | 0 | 0.033505 | 0.207992 | 0.044 | 0 | 0 | 0 | 0 | 0 | 1 | 0.01933 | false | 0 | 0.006443 | 0 | 0.045103 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
50adb4dd5d7a8eba44858cd07155d402ee85f175 | 58 | py | Python | cloudiscovery/provider/az/common_az.py | ecorbett135/cloud-disco | d047f6ef46136320f1d8939983d0040a57087dc4 | [
"Apache-2.0"
] | null | null | null | cloudiscovery/provider/az/common_az.py | ecorbett135/cloud-disco | d047f6ef46136320f1d8939983d0040a57087dc4 | [
"Apache-2.0"
] | null | null | null | cloudiscovery/provider/az/common_az.py | ecorbett135/cloud-disco | d047f6ef46136320f1d8939983d0040a57087dc4 | [
"Apache-2.0"
] | 1 | 2022-02-22T13:37:28.000Z | 2022-02-22T13:37:28.000Z | def generate_session():
print("Generate Session Stub") | 29 | 34 | 0.741379 | 7 | 58 | 6 | 0.714286 | 0.714286 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.137931 | 58 | 2 | 34 | 29 | 0.84 | 0 | 0 | 0 | 1 | 0 | 0.355932 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | true | 0 | 0 | 0 | 0.5 | 0.5 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 7 |
0ff77a7939a009c43d9e95418eb76a27a9f87f72 | 1,283 | py | Python | sallyforth/operator_words.py | russolsen/sallyforth | 480f6df6a5e2678829bc86b73f89c88565e28696 | [
"Apache-2.0"
] | 13 | 2020-04-14T16:48:10.000Z | 2022-02-04T22:18:00.000Z | sallyforth/operator_words.py | russolsen/sallyforth | 480f6df6a5e2678829bc86b73f89c88565e28696 | [
"Apache-2.0"
] | 1 | 2020-06-13T12:56:14.000Z | 2020-06-28T19:52:46.000Z | sallyforth/operator_words.py | russolsen/sallyforth | 480f6df6a5e2678829bc86b73f89c88565e28696 | [
"Apache-2.0"
] | 1 | 2021-09-11T09:36:29.000Z | 2021-09-11T09:36:29.000Z | from util import word
@word('>')
def gt(forth):
    # Forth `>` : ( lhs rhs -- lhs>rhs )
    rhs = forth.stack.pop()
    lhs = forth.stack.pop()
    forth.stack.push(lhs > rhs)
@word('<')
def lt(forth):
    # Forth `<` : ( lhs rhs -- lhs<rhs )
    rhs = forth.stack.pop()
    lhs = forth.stack.pop()
    forth.stack.push(lhs < rhs)
@word('=')
def eq(forth):
    # Forth `=` : ( lhs rhs -- lhs==rhs )
    rhs = forth.stack.pop()
    lhs = forth.stack.pop()
    forth.stack.push(rhs == lhs)
@word('<=')
def le(forth):
    # Forth `<=` : ( lhs rhs -- lhs<=rhs )
    rhs = forth.stack.pop()
    lhs = forth.stack.pop()
    forth.stack.push(lhs <= rhs)
@word('>=')
def ge(forth):
    # Forth `>=` : ( lhs rhs -- lhs>=rhs )
    rhs = forth.stack.pop()
    lhs = forth.stack.pop()
    forth.stack.push(lhs >= rhs)
@word('+')
def add(forth):
    # Forth `+` : ( lhs rhs -- lhs+rhs )
    rhs = forth.stack.pop()
    lhs = forth.stack.pop()
    forth.stack.push(lhs + rhs)
@word('*')
def mul(forth):
    # Forth `*` : ( lhs rhs -- lhs*rhs )
    rhs = forth.stack.pop()
    lhs = forth.stack.pop()
    forth.stack.push(lhs * rhs)
@word('-')
def sub(forth):
    # Forth `-` : ( lhs rhs -- lhs-rhs )
    rhs = forth.stack.pop()
    lhs = forth.stack.pop()
    forth.stack.push(lhs - rhs)
@word('/')
def div(forth):
    # Forth `/` : ( lhs rhs -- lhs/rhs ), Python true division.
    rhs = forth.stack.pop()
    lhs = forth.stack.pop()
    forth.stack.push(lhs / rhs)
@word('and')
def w_and(forth):
    # Forth `and` : pops top then second; pushes `top and second`
    # (operand order kept so Python's short-circuit value is unchanged).
    top = forth.stack.pop()
    second = forth.stack.pop()
    forth.stack.push(top and second)
@word('or')
def w_or(forth):
    # Forth `or` : pops top then second; pushes `top or second`
    # (operand order kept so Python's short-circuit value is unchanged).
    top = forth.stack.pop()
    second = forth.stack.pop()
    forth.stack.push(top or second)
@word('not')
def w_not(forth):
    # Forth `not` : ( a -- not a )
    top = forth.stack.pop()
    forth.stack.push(not top)
| 17.337838 | 43 | 0.565082 | 210 | 1,283 | 3.438095 | 0.114286 | 0.484765 | 0.414127 | 0.243767 | 0.815789 | 0.815789 | 0.815789 | 0.815789 | 0.815789 | 0.815789 | 0 | 0 | 0.2159 | 1,283 | 73 | 44 | 17.575342 | 0.717694 | 0 | 0 | 0.372881 | 0 | 0 | 0.014832 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.20339 | false | 0 | 0.016949 | 0 | 0.220339 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
ba249a5a8cdd158738229d70672d959788aead26 | 12,416 | py | Python | python/dg_tools/generate_traj.py | machines-in-motion/dg_tools | 1c8d0c09da5185113e0dd69e6b59cd5c8270afa6 | [
"BSD-3-Clause"
] | null | null | null | python/dg_tools/generate_traj.py | machines-in-motion/dg_tools | 1c8d0c09da5185113e0dd69e6b59cd5c8270afa6 | [
"BSD-3-Clause"
] | 5 | 2019-11-29T11:58:51.000Z | 2021-06-04T14:52:26.000Z | python/dg_tools/generate_traj.py | machines-in-motion/dg_tools | 1c8d0c09da5185113e0dd69e6b59cd5c8270afa6 | [
"BSD-3-Clause"
] | null | null | null | """
@package py_dg_tools
@author Maximilien Naveau
@license License BSD-3-Clause
@copyright Copyright (c) 2019, New York University and Max Planck Gesellschaft.
@date 2019-08-01
@brief Generate trajectories in python using scypy
"""
import numpy as np
from scipy import interpolate
import matplotlib.pyplot as plt
def traj_generator(time_way_points, way_points, time):
    """Interpolate a 3D way-point trajectory with non-smoothing B-splines.

    Args:
        time_way_points: strictly increasing times, one per way-point.
        way_points: length-3 sequence ``[xs, ys, zs]``; each axis is a list of
            coordinates, one per entry of ``time_way_points``.
        time: scalar or array of query times at which to evaluate the splines.

    Returns:
        ``(pos, vel)``: two ``(3, len(time))`` numpy arrays holding the
        interpolated positions and velocities (first spline derivative).
    """
    splines = []
    for axis_points in way_points:
        # scipy's splrep requires more data points than the spline degree
        # (m > k).  The original code only dropped to k=1 when len < 3 and
        # therefore crashed on exactly 3 way-points; pick the largest
        # supported degree <= 3 instead (k must stay >= 1).
        k = max(1, min(3, len(axis_points) - 1))
        splines.append(
            interpolate.splrep(time_way_points, np.array(axis_points), s=0, k=k))
    pos = np.array([interpolate.splev(time, tck, der=0) for tck in splines])
    vel = np.array([interpolate.splev(time, tck, der=1) for tck in splines])
    return pos, vel
if __name__ == "__main__":
    # Script entry point: builds a four-phase quadruped motion
    #   phase 0   : raise the hind feet from the ground to their reference heights
    #   phase 0.5 : swing the front-left foot forward
    #   phase 1   : swing the front-right foot forward
    #   phase 2   : lower the hind feet back onto the ground
    # then writes the stacked trajectories to /tmp/*.dat and plots foot positions.
    ### Parameters
    time_phase = 1.2  # duration of each phase [s]
    T = 0.001         # sampling period [s]
    time_0 = 0.0
    time_1 = time_phase
    phase = np.linspace(time_0, time_1, num=int((time_1-time_0)/T), endpoint=True)
    nb_pt_phase = phase.shape[0]  # NOTE(review): computed but never used below
    hl_zref = -0.15   # hind-left foot lifted height (body frame z)
    hr_zref = -0.05   # hind-right foot lifted height (body frame z)
    dx = +0.00        # mid-swing forward offset of the stepping foot (currently zero)
    z_final = 0.0     # final height of the stepping front feet
    ### Phase 0 ################################################################
    ## CoM
    # oMcom ('com: ', matrix([[0.2 , 0.00044412, 0.19240999]]))
    # bMcom
    com0_x = 0.2
    com0_y = 0.00044412
    com0_z = 0.19240999
    # CoM held constant over the whole phase (two identical way-points).
    com, dcom = traj_generator([time_0, time_1],
        [[0.2, 0.2], [0.00044412, 0.00044412], [0.19240999, 0.19240999]], phase)
    ## HL
    # oMhl ('hl: ', matrix([[ 0.01 , 0.14205 , -0.00294615]]))
    # bMhl ('hl: ', matrix([[-0.19 , 0.14205 , -0.22294615]]))
    # Hind-left z goes from the ground (-0.22294615) up to hl_zref; the
    # repeated way-points at both ends enforce near-zero start/end velocity.
    hl, dhl = traj_generator([time_0, time_0 + T, time_0 + 2*T, time_1 - 2*T, time_1 - T, time_1],
        [[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
         [0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
         [-0.22294615, -0.22294615, -0.22294615, hl_zref, hl_zref, hl_zref]], phase)
    ## HR
    # ('hr: ', matrix([[ 0.01 , -0.14205 , -0.00294615]]))
    # bMhr ('hr: ', matrix([[-0.19 , -0.14205 , -0.22294615]]))
    # Same profile for the hind-right foot, ending at hr_zref.
    hr, dhr = traj_generator([time_0, time_0 + T, time_0 + 2*T, time_1 - 2*T, time_1 - T, time_1],
        [[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
         [0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
         [-0.22294615, -0.22294615, -0.22294615, hr_zref, hr_zref, hr_zref]], phase)
    ### FL
    # ('fl: ', matrix([[ 0.39 , 0.14205 , -0.00294615]]))
    # bMfl ('fl: ', matrix([[ 0.19 , 0.14205 , -0.22294615]]))
    # Both front feet stay planted during phase 0.
    fl, dfl = traj_generator([time_0, time_1],
        [[0.0, 0.0], [0.0, 0.0], [-0.22294615, -0.22294615]], phase)
    ### FR
    # ('fr: ', matrix([[ 0.39 , -0.14205 , -0.00294615]]))
    # bMfr ('fr: ', matrix([[ 0.19 , -0.14205 , -0.22294615]]))
    fr, dfr = traj_generator([time_0, time_1],
        [[0.0, 0.0], [0.0, 0.0], [-0.22294615, -0.22294615]], phase)
    ### Phase 0.5 ################################################################
    ## CoM
    # oMcom ('com: ', matrix([[0.2 , 0.00044412, 0.19240999]]))
    # bMcom
    com0_x = 0.2
    com0_y = 0.00044412
    com0_z = 0.19240999
    com_phase, dcom_phase = traj_generator([time_0, time_1],
        [[0.2, 0.2], [0.00044412, 0.00044412], [0.19240999, 0.19240999]], phase)
    ## HL
    # oMhl ('hl: ', matrix([[ 0.01 , 0.14205 , -0.00294615]]))
    # bMhl ('hl: ', matrix([[-0.19 , 0.14205 , -0.22294615]]))
    # Hind feet hold their lifted reference heights during this phase.
    hl_phase, dhl_phase = traj_generator([time_0, time_1],
        [[0.0, 0.0], [0.0, 0.0], [hl_zref, hl_zref]], phase)
    ## HR
    # ('hr: ', matrix([[ 0.01 , -0.14205 , -0.00294615]]))
    # bMhr ('hr: ', matrix([[-0.19 , -0.14205 , -0.22294615]]))
    hr_phase, dhr_phase = traj_generator([time_0, time_1],
        [[0.0, 0.0], [0.0, 0.0], [hr_zref, hr_zref]], phase)
    ### FL
    # ('fl: ', matrix([[ 0.39 , 0.14205 , -0.00294615]]))
    # bMfl ('fl: ', matrix([[ 0.19 , 0.14205 , -0.22294615]]))
    # Front-left swing: x advances x_init -> x_final while z lifts off the
    # ground, peaks at z_final + 0.1 mid-swing, and lands at z_final.
    x_init = 0.0
    z_init = -0.22294615
    x_final = 0.1
    t_s = [time_0, time_0+T, time_0+2*T, (time_1-time_0)/3.0, 2.0*(time_1-time_0)/3.0, time_1-2*T, time_1-T, time_1]
    x = [x_init, x_init, x_init, x_init + dx, x_init + dx, x_final, x_final, x_final]
    y = [0.0] * len(t_s)
    z = [z_init, z_init, z_init, z_final, z_final + 0.1, z_final, z_final, z_final]
    fl_phase, dfl_phase = traj_generator(t_s, [x, y, z], phase)
    ### FR
    # ('fr: ', matrix([[ 0.39 , -0.14205 , -0.00294615]]))
    # bMfr ('fr: ', matrix([[ 0.19 , -0.14205 , -0.22294615]]))
    fr_phase, dfr_phase = traj_generator([time_0, time_1],
        [[0.0, 0.0], [0.0, 0.0], [-0.22294615, -0.22294615]], phase)
    # stack the trajectories
    # NOTE(review): dcom_phase is never concatenated/saved — only positions of
    # the CoM are kept; confirm the CoM velocity is intentionally discarded.
    com = np.concatenate((com, com_phase), axis=1)
    hl = np.concatenate((hl, hl_phase), axis=1)
    dhl = np.concatenate((dhl, dhl_phase), axis=1)
    hr = np.concatenate((hr, hr_phase), axis=1)
    dhr = np.concatenate((dhr, dhr_phase), axis=1)
    fl = np.concatenate((fl, fl_phase), axis=1)
    dfl = np.concatenate((dfl, dfl_phase), axis=1)
    fr = np.concatenate((fr, fr_phase), axis=1)
    dfr = np.concatenate((dfr, dfr_phase), axis=1)
    ### Phase 1 ################################################################
    ## CoM
    # oMcom ('com: ', matrix([[0.2 , 0.00044412, 0.19240999]]))
    # bMcom
    com0_x = 0.2
    com0_y = 0.00044412
    com0_z = 0.19240999
    com_phase, dcom_phase = traj_generator([time_0, time_1],
        [[0.2, 0.2], [0.00044412, 0.00044412], [0.19240999, 0.19240999]], phase)
    ## HL
    # oMhl ('hl: ', matrix([[ 0.01 , 0.14205 , -0.00294615]]))
    # bMhl ('hl: ', matrix([[-0.19 , 0.14205 , -0.22294615]]))
    hl_phase, dhl_phase = traj_generator([time_0, time_1],
        [[0.0, 0.0], [0.0, 0.0], [hl_zref, hl_zref]], phase)
    ## HR
    # ('hr: ', matrix([[ 0.01 , -0.14205 , -0.00294615]]))
    # bMhr ('hr: ', matrix([[-0.19 , -0.14205 , -0.22294615]]))
    hr_phase, dhr_phase = traj_generator([time_0, time_1],
        [[0.0, 0.0], [0.0, 0.0], [hr_zref, hr_zref]], phase)
    ### FL
    # ('fl: ', matrix([[ 0.39 , 0.14205 , -0.00294615]]))
    # bMfl ('fl: ', matrix([[ 0.19 , 0.14205 , -0.22294615]]))
    # Front-left foot now holds its post-swing position (x = 0.1, z = z_final).
    fl_phase, dfl_phase = traj_generator([time_0, time_1],
        [[0.1, 0.1], [0.0, 0.0], [z_final, z_final]], phase)
    ### FR
    # ('fr: ', matrix([[ 0.39 , -0.14205 , -0.00294615]]))
    # bMfr ('fr: ', matrix([[ 0.19 , -0.14205 , -0.22294615]]))
    # Front-right swing, mirroring the FL swing of phase 0.5.
    x_init = 0.0
    z_init = -0.22294615
    x_final = 0.1
    t_s = [time_0, time_0+T, time_0+2*T, (time_1-time_0)/3.0, 2.0*(time_1-time_0)/3.0, time_1-2*T, time_1-T, time_1]
    x = [x_init, x_init, x_init, x_init + dx, x_init + dx, x_final, x_final, x_final]
    y = [0.0] * len(t_s)
    # NOTE(review): z_final/3.0 differs from the plain z_final used in the FL
    # swing above; with z_final = 0.0 both evaluate to 0, but confirm which
    # was intended if z_final ever becomes non-zero.
    z = [z_init, z_init, z_init, z_final/3.0, z_final + 0.1, z_final, z_final, z_final]
    fr_phase, dfr_phase = traj_generator(t_s, [x, y, z], phase)
    # stack the trajectories
    com = np.concatenate((com, com_phase), axis=1)
    hl = np.concatenate((hl, hl_phase), axis=1)
    dhl = np.concatenate((dhl, dhl_phase), axis=1)
    hr = np.concatenate((hr, hr_phase), axis=1)
    dhr = np.concatenate((dhr, dhr_phase), axis=1)
    fl = np.concatenate((fl, fl_phase), axis=1)
    dfl = np.concatenate((dfl, dfl_phase), axis=1)
    fr = np.concatenate((fr, fr_phase), axis=1)
    dfr = np.concatenate((dfr, dfr_phase), axis=1)
    ### Phase 2 ################################################################
    ## CoM
    # oMcom ('com: ', matrix([[0.2 , 0.00044412, 0.19240999]]))
    # bMcom
    com0_x = 0.2
    com0_y = 0.00044412
    com0_z = 0.19240999
    com_phase, dcom_phase = traj_generator([time_0, time_1],
        [[0.2, 0.2], [0.00044412, 0.00044412], [0.19240999, 0.19240999]], phase)
    ## HL
    # oMhl ('hl: ', matrix([[ 0.01 , 0.14205 , -0.00294615]]))
    # bMhl ('hl: ', matrix([[-0.19 , 0.14205 , -0.22294615]]))
    # Hind feet descend from their lifted heights back to the ground.
    hl_phase, dhl_phase = traj_generator([time_0, time_0 + T, time_0 + 2*T, time_1 - 2*T, time_1 - T, time_1],
        [[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
         [0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
         [hl_zref, hl_zref, hl_zref, -0.22294615, -0.22294615, -0.22294615]], phase)
    ## HR
    # ('hr: ', matrix([[ 0.01 , -0.14205 , -0.00294615]]))
    # bMhr ('hr: ', matrix([[-0.19 , -0.14205 , -0.22294615]]))
    hr_phase, dhr_phase = traj_generator([time_0, time_0 + T, time_0 + 2*T, time_1 - 2*T, time_1 - T, time_1],
        [[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
         [0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
         [hr_zref, hr_zref, hr_zref, -0.22294615, -0.22294615, -0.22294615]], phase)
    ### FL
    # ('fl: ', matrix([[ 0.39 , 0.14205 , -0.00294615]]))
    # bMfl ('fl: ', matrix([[ 0.19 , 0.14205 , -0.22294615]]))
    fl_phase, dfl_phase = traj_generator([time_0, time_1],
        [[0.1, 0.1], [0.0, 0.0], [z_final, z_final]], phase)
    ### FR
    # ('fr: ', matrix([[ 0.39 , -0.14205 , -0.00294615]]))
    # bMfr ('fr: ', matrix([[ 0.19 , -0.14205 , -0.22294615]]))
    fr_phase, dfr_phase = traj_generator([time_0, time_1],
        [[0.1, 0.1], [0.0, 0.0], [z_final, z_final]], phase)
    # stack the final phase onto the accumulated trajectories
    com = np.concatenate((com, com_phase), axis=1)
    hl = np.concatenate((hl, hl_phase), axis=1)
    dhl = np.concatenate((dhl, dhl_phase), axis=1)
    hr = np.concatenate((hr, hr_phase), axis=1)
    dhr = np.concatenate((dhr, dhr_phase), axis=1)
    fl = np.concatenate((fl, fl_phase), axis=1)
    dfl = np.concatenate((dfl, dfl_phase), axis=1)
    fr = np.concatenate((fr, fr_phase), axis=1)
    dfr = np.concatenate((dfr, dfr_phase), axis=1)
    ### creation of the data files #############################################
    # Each .dat file is one row per sample: a time column followed by the
    # stacked signals, written transposed so rows are time steps.
    nb_pt = com.shape[1]
    time = np.linspace(0.0, nb_pt*T, num=nb_pt, endpoint=True)
    print ("com.shape", com.shape)
    print ("time.shape", time.shape)
    quadruped_com = np.vstack((time, com))
    np.savetxt("/tmp/quadruped_com.dat", quadruped_com.T, delimiter=" ")
    #
    # End-effector layout: for each foot (fl, fr, hl, hr) three position rows
    # followed by three zero rows (presumably orientation placeholders —
    # TODO confirm against the consumer of these files).
    quadruped_positions_eff = np.vstack((
        fl, np.zeros((3,nb_pt)), fr, np.zeros((3,nb_pt)),
        hl, np.zeros((3,nb_pt)), hr, np.zeros((3,nb_pt)),
    ))
    np.savetxt("/tmp/quadruped_positions_eff.dat", quadruped_positions_eff.T, delimiter=" ")
    #
    quadruped_velocities_eff = np.vstack((
        dfl, np.zeros((3,nb_pt)), dfr, np.zeros((3,nb_pt)),
        dhl, np.zeros((3,nb_pt)), dhr, np.zeros((3,nb_pt)),
    ))
    np.savetxt("/tmp/quadruped_velocities_eff.dat", quadruped_velocities_eff.T, delimiter=" ")
    ### plots
    # plt.figure("com")
    # plt.plot(time, com_x, ':',
    #          time, com_y, '-',
    #          time, com_z, '--')
    # plt.legend(['com_x', 'com_y', 'com_z'], loc='best')
    #
    # plt.figure("fl")
    # plt.plot(time, fl_x, ':',
    #          time, fl_y, '-',
    #          time, fl_z, '--')
    # plt.legend(['fl_x', 'fl_y', 'fl_z'], loc='best')
    #
    # plt.figure("fx")
    # plt.plot(time, fl_x, ':',
    #          time, fl_y, '-',
    #          time, fl_z, '--')
    # plt.legend(['fl_x', 'fl_y', 'fl_z'], loc='best')
    #
    plt.figure("pos")
    plt.plot(time, fl[0,:], ':',
             time, fl[1,:], '-',
             time, fl[2,:], '--',
             time, fr[0,:], ':',
             time, fr[1,:], '-',
             time, fr[2,:], '--',
             time, hl[0,:], ':',
             time, hl[1,:], '-',
             time, hl[2,:], '--',
             time, hr[0,:], ':',
             time, hr[1,:], '-',
             time, hr[2,:], '--',
             )
    plt.legend(['fl_x', 'fl_y', 'fl_z', 'fr_x', 'fr_y', 'fr_z'], loc='best')
    # #
    # plt.figure("vel")
    # plt.plot(time, dfl[0,:], ':',
    #          time, dfl[1,:], '-',
    #          time, dfl[2,:], '--',
    #          time, dfr[0,:], ':',
    #          time, dfr[1,:], '-',
    #          time, dfr[2,:], '--')
    # plt.legend(['fl_dx', 'fl_dy', 'fl_dz', 'fr_dx', 'fr_dy', 'fr_dz'], loc='best')
    plt.show()
| 39.044025 | 116 | 0.514497 | 1,948 | 12,416 | 3.102156 | 0.073922 | 0.053947 | 0.070495 | 0.084064 | 0.78471 | 0.759722 | 0.736555 | 0.736555 | 0.712725 | 0.694192 | 0 | 0.162803 | 0.249517 | 12,416 | 317 | 117 | 39.167192 | 0.485727 | 0.291237 | 0 | 0.530864 | 1 | 0 | 0.01965 | 0.010424 | 0 | 0 | 0 | 0 | 0 | 1 | 0.006173 | false | 0 | 0.018519 | 0 | 0.030864 | 0.012346 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
e8bc88b3556b7bf2e461a2445405267c48c5d95e | 3,526 | py | Python | api-client/pangea_api/contrib/tagging/cli.py | LongTailBio/pangea-django | 630551dded7f9e38f95eda8c36039e0de46961e7 | [
"MIT"
] | null | null | null | api-client/pangea_api/contrib/tagging/cli.py | LongTailBio/pangea-django | 630551dded7f9e38f95eda8c36039e0de46961e7 | [
"MIT"
] | 27 | 2020-03-26T02:55:12.000Z | 2022-03-12T00:55:04.000Z | api-client/pangea_api/contrib/tagging/cli.py | LongTailBio/pangea-django | 630551dded7f9e38f95eda8c36039e0de46961e7 | [
"MIT"
] | 1 | 2021-09-14T08:15:54.000Z | 2021-09-14T08:15:54.000Z |
import click
from .tag import Tag
from ... import (
Knex,
User,
Organization,
)
import logging
logger = logging.getLogger(__name__)
logger.setLevel(10)
logger.addHandler(logging.StreamHandler())
@click.group('tag')
def tag_main():
    # Top-level `tag` CLI group; concrete actions register below via
    # @tag_main.command(...) and @tag_main.group(...).  (Comment, not a
    # docstring, so the generated --help text is unchanged.)
    pass
@tag_main.command('create')
@click.option('-e', '--email', envvar='PANGEA_USER')
@click.option('-p', '--password', envvar='PANGEA_PASS')
@click.option('--endpoint', default='https://pangea.gimmebio.com')
@click.argument('tag_names', nargs=-1)
def create_tags(email, password, endpoint, tag_names):
    # Idempotently create each named tag on the server and echo it to stderr.
    conn = Knex(endpoint)
    if email and password:
        User(conn, email, password).login()
    for name in tag_names:
        created = Tag(conn, name).idem()
        click.echo(created, err=True)
@tag_main.command('samples-in-group')
@click.option('-e', '--email', envvar='PANGEA_USER')
@click.option('-p', '--password', envvar='PANGEA_PASS')
@click.option('--endpoint', default='https://pangea.gimmebio.com')
@click.argument('org_name')
@click.argument('group_name')
@click.argument('tag_names', nargs=-1)
def cli_tag_samples_in_group(email, password, endpoint, org_name, group_name, tag_names):
    # Apply every named tag to every sample of ORG_NAME/GROUP_NAME,
    # echoing each processed sample to stderr.
    conn = Knex(endpoint)
    if email and password:
        User(conn, email, password).login()
    wanted = [Tag(conn, name).get() for name in tag_names]
    group = Organization(conn, org_name).get().sample_group(group_name).get()
    for sample in group.get_samples():
        for tag in wanted:
            tag(sample)
        click.echo(sample, err=True)
@tag_main.command('group')
@click.option('-e', '--email', envvar='PANGEA_USER')
@click.option('-p', '--password', envvar='PANGEA_PASS')
@click.option('--endpoint', default='https://pangea.gimmebio.com')
@click.argument('org_name')
@click.argument('group_name')
@click.argument('tag_names', nargs=-1)
def cli_tag_group(email, password, endpoint, org_name, group_name, tag_names):
    """Apply each named tag to the sample group itself (not its samples)."""
    # Renamed from cli_tag_samples_in_group: this file reused that one
    # identifier for several different commands, so later defs shadowed
    # earlier ones at module level (flake8 F811).  The CLI surface — command
    # name 'tag group', options, and arguments — is unchanged.
    knex = Knex(endpoint)
    if email and password:
        User(knex, email, password).login()
    tags = [Tag(knex, tag_name).get() for tag_name in tag_names]
    org = Organization(knex, org_name).get()
    grp = org.sample_group(group_name).get()
    for tag in tags:
        tag(grp)
@tag_main.group('list')
def tag_list():
    # `tag list` sub-group: read-only queries over existing tags
    # (subcommands register below via @tag_list.command(...)).
    pass
@tag_list.command('random-samples')
@click.option('-e', '--email', envvar='PANGEA_USER')
@click.option('-p', '--password', envvar='PANGEA_PASS')
@click.option('--endpoint', default='https://pangea.gimmebio.com')
@click.option('-n', '--num-samples', default=100, help='maximum number of samples')
@click.argument('tag_name')
def cli_list_random_samples(email, password, endpoint, num_samples, tag_name):
    """Print up to NUM_SAMPLES random samples carrying TAG_NAME."""
    # Renamed from cli_tag_samples_in_group: four commands in this file shared
    # that identifier, shadowing each other at module level (flake8 F811).
    # The CLI surface ('tag list random-samples', options, argument) is unchanged.
    knex = Knex(endpoint)
    if email and password:
        User(knex, email, password).login()
    tag = Tag(knex, tag_name).get()
    for sample in tag.get_random_samples(n=num_samples):
        click.echo(sample, err=True)
@tag_list.command('groups')
@click.option('-e', '--email', envvar='PANGEA_USER')
@click.option('-p', '--password', envvar='PANGEA_PASS')
@click.option('--endpoint', default='https://pangea.gimmebio.com')
@click.option('-n', '--num-samples', default=100, help='maximum number of samples')
@click.argument('tag_name')
def cli_list_groups(email, password, endpoint, num_samples, tag_name):
    """Print the sample groups carrying TAG_NAME."""
    # Renamed from cli_tag_samples_in_group: four commands in this file shared
    # that identifier, shadowing each other at module level (flake8 F811).
    # NOTE(review): --num-samples is accepted but unused in this command; it is
    # kept so the CLI surface stays backward compatible — TODO confirm whether
    # it should cap get_sample_groups().
    knex = Knex(endpoint)
    if email and password:
        User(knex, email, password).login()
    tag = Tag(knex, tag_name).get()
    for sample_group in tag.get_sample_groups():
        click.echo(sample_group, err=True)
| 33.264151 | 89 | 0.685196 | 493 | 3,526 | 4.730223 | 0.127789 | 0.080189 | 0.025729 | 0.036449 | 0.816038 | 0.77916 | 0.749142 | 0.741852 | 0.741852 | 0.741852 | 0 | 0.003627 | 0.139818 | 3,526 | 105 | 90 | 33.580952 | 0.765249 | 0 | 0 | 0.662921 | 0 | 0 | 0.173901 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.078652 | false | 0.247191 | 0.044944 | 0 | 0.123596 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 7 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.