code
stringlengths
22
1.05M
apis
listlengths
1
3.31k
extract_api
stringlengths
75
3.25M
import unittest import json from kit_test_helper import TestHelper from bit_extension import BitExtension # selenium stuff from selenium import webdriver from selenium.webdriver.common.keys import Keys from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions as EC from selenium.common.exceptions import TimeoutException from selenium.webdriver.common.by import By from selenium.webdriver.common.alert import Alert from selenium.webdriver.support.select import Select # Kit Tree class KitTreeTestCase(unittest.TestCase): filename = None CONST_HOST = None def setUp(self): self.browser = webdriver.Firefox() def getTestCase(self, file_path): with open(file_path) as data_file: data = json.load(data_file) return data def runTest(self): helper = TestHelper() helper.dbEstablishConnection() if helper.dbTestConnection() == False: return data = self.getTestCase("json_test_cases/"+self.filename) existing, existing_lookup, matching, matching_lookup = data["existing"]["list"], data["existing"]["lookup"], data["matching"]["kit_tree"], data["matching"]["lookup"] # self.prepareTest(existing, existing_lookup, helper) print("--"*5, "BEGIN TEST ALL", "--"*5) for i in matching: self.search(i["pn"], i["revision_name"]) self.checkKitNavigation(i["kit_navigation"]) self.checkParentNavigation(i["parent_navigation"]) self.checkMasterNavigation(i["master_navigation"]) self.checkMainGrid(i["gridview"]) print("--"*5, "BEGIN TEST DIRECT", "--"*5) for i in matching: self.search(i["pn"], i["revision_name"], type="DIRECT") self.checkKitNavigation(i["kit_navigation"], type="DIRECT") self.checkParentNavigation(i["parent_navigation"]) self.checkMasterNavigation(i["master_navigation"]) self.checkMainGrid(i["direct_gridview"], type="DIRECT") print("--"*5, "BEGIN TEST DROPDOWN CHANGE", "--"*5) self.checkRevisionDropDownChanged(matching[len(matching)-2], matching[len(matching)-1]) self.checkRevisionDropDownChanged(matching[len(matching)-2], 
matching[len(matching)-1], type="DIRECT") helper.dbCloseConnection() def prepareTest(self, existing, existing_lookup, helper): helper.prepareTest() print("***SETTING UP EXISTING DATA IN KIT LIST***") for kitlist in existing: helper.insertKitList(kitlist["pn"], kitlist["master_pn"], kitlist["parent_pn"], kitlist["revision"], kitlist["subkit_revision"], kitlist["qty"], kitlist["uom"], kitlist["is_kit"], kitlist["description"]) for lookup in existing_lookup: helper.insertKitLookup(lookup["subkit_pn"], lookup["master_pn"], lookup["subkit_revision"], lookup["master_revision"]) print("***DONE SETTING UP EXISTING DATA***") def search(self, pn, revision, type="ALL"): print(pn, revision) self.browser.get(self.CONST_HOST+"UApplication3/wh/kitrec/kit_tree.aspx") wait = WebDriverWait(self.browser, 10) wait.until(EC.presence_of_element_located((By.ID, "ctl00_ContentPlaceHolder1_txt_PN"))) kitPN = self.browser.find_element_by_id("ctl00_ContentPlaceHolder1_txt_PN") if type == "ALL": kitPN.send_keys(str(pn) + Keys.RETURN) else: kitPN.send_keys(str(pn)) wait.until(EC.element_to_be_clickable((By.ID, "ctl00_ContentPlaceHolder1_btn_searchDirect"))) self.browser.find_element_by_id("ctl00_ContentPlaceHolder1_btn_searchDirect").click() wait.until(EC.invisibility_of_element_located((By.ID, "ctl00_ContentPlaceHolder1_img_loading"))) self.searchStep1(pn, revision, wait) def searchStep1(self, pn, revision, wait): try: wait.until(lambda driver: self.browser.find_element_by_id("ctl00_ContentPlaceHolder1_pnl_candidates").is_displayed() or self.browser.find_element_by_id("ctl00_ContentPlaceHolder1_btn_searchAll").is_enabled()) except Exception: self.searchStep1(pn, revision, wait) return try: if not self.browser.find_element_by_id("ctl00_ContentPlaceHolder1_pnl_candidates").is_displayed(): raise Exception("") wait.until(EC.presence_of_element_located((By.ID, "ctl00_ContentPlaceHolder1_CandidatesGridView"))) revision_qs = "&revision="+str(BitExtension().revisionNameToLong(revision)) if revision 
!= "All" and revision != "ALL" else "" self.browser.find_element_by_css_selector("table#ctl00_ContentPlaceHolder1_CandidatesGridView.MyDataGridCaption tr.gridBody[onclick*='kit_tree.aspx?pn="+pn+revision_qs+"']").click() if revision_qs != "": self.checkRevisionDropDown(wait) Select(self.browser.find_element_by_css_selector("select#ctl00_ContentPlaceHolder1_ddl_kitRevision")).select_by_value(str(BitExtension().revisionNameToLong(revision))) wait.until(EC.invisibility_of_element_located((By.ID, "ctl00_ContentPlaceHolder1_img_loading"))) wait.until(lambda driver: self.browser.find_element_by_css_selector("select#ctl00_ContentPlaceHolder1_ddl_kitRevision option[value='"+str(BitExtension().revisionNameToLong(revision))+"']").is_selected()) except Exception as e: self.checkRevisionDropDown(wait) Select(self.browser.find_element_by_css_selector("select#ctl00_ContentPlaceHolder1_ddl_kitRevision")).select_by_value(str(BitExtension().revisionNameToLong(revision))) wait.until(EC.invisibility_of_element_located((By.ID, "ctl00_ContentPlaceHolder1_img_loading"))) wait.until(lambda driver: self.browser.find_element_by_css_selector("select#ctl00_ContentPlaceHolder1_ddl_kitRevision option[value='"+str(BitExtension().revisionNameToLong(revision))+"']").is_selected()) def checkRevisionDropDown(self, wait): wait.until(EC.presence_of_element_located((By.ID, "ctl00_ContentPlaceHolder1_ddl_kitRevision"))) self.assertTrue(self.browser.find_element_by_id("ctl00_ContentPlaceHolder1_ddl_kitRevision").is_displayed(), msg="kit revision ddl not displayed") def checkRevisionDropDownChanged(self, child1, child2, type="ALL"): if len(child1["kit_navigation"]["children"]) > 0 and len(child2["kit_navigation"]["children"]) > 0 and child1["pn"] == child2["pn"]: wait = WebDriverWait(self.browser, 10) self.search(child1["pn"], child1["revision_name"], type=type) self.checkRevisionDropDown(wait) print("SWITCH TO", child2["pn"], child2["revision_name"]) 
Select(self.browser.find_element_by_css_selector("select#ctl00_ContentPlaceHolder1_ddl_kitRevision")).select_by_value(str(BitExtension().revisionNameToLong(child2["revision_name"]))) wait.until(EC.invisibility_of_element_located((By.ID, "ctl00_ContentPlaceHolder1_img_loading"))) wait.until(lambda driver: self.browser.find_element_by_css_selector("select#ctl00_ContentPlaceHolder1_ddl_kitRevision option[value='"+str(BitExtension().revisionNameToLong(child2["revision_name"]))+"']").is_selected()) if type == "ALL": self.checkMainGrid(child2["gridview"]) else: self.checkMainGrid(child2["direct_gridview"]) def checkMainGrid(self, children, type="ALL"): print("--"*5, "CHECK MAIN GRID", "--"*5) testChildren = [] for child in children: if child["is_kit"] != "1" or type == "DIRECT": testChildren.append(child) rows = self.browser.find_elements_by_css_selector("table#ctl00_ContentPlaceHolder1_MainGridView.MyDataGridCaption tbody tr.gridBody") self.assertEqual(len(rows), len(testChildren), msg="MainGridView: Number of children shown does not match test children") matchAllChildren = True for row in rows: pn = row.find_element_by_css_selector("td:nth-child(1)").text quantity = row.find_element_by_css_selector("td:nth-child(2)").text uom = row.find_element_by_css_selector("td:nth-child(3)").text is_kit = "1" if row.find_element_by_css_selector("input[type='checkbox']").is_selected() else "0" description = row.find_element_by_css_selector("td:nth-child(5)").text childMatched = False index = 0 for child in testChildren: if child["pn"] == pn and child["qty"] == quantity and child["uom"] == uom and child["is_kit"] == is_kit and child["description"] == description: print("===", child["pn"], "matched") del testChildren[index] childMatched = True break index += 1 if not childMatched: print(pn, quantity, uom, is_kit, description) matchAllChildren = False break self.assertTrue(matchAllChildren, msg="MainGridView: Not every children matches!") self.assertTrue(len(testChildren) == 0, 
msg="MainGridView: Not every children is displayed!") def checkKitNavigation(self, tree, type="ALL"): print("--"*5, "CHECK KIT NAVIGATION", "--"*5) levels = "" string = "div#ctl00_ContentPlaceHolder1_tree_kitNavigation > div "+levels+"> table > tbody > tr > td > a" if type != "ALL": string = "div#ctl00_ContentPlaceHolder1_tree_kitNavigation "+levels+"> table > tbody > tr > td > a" stack = [] stack.append(tree) while len(stack) > 0: print("Testing level:", len(levels.split("> div "))) for x in range(len(stack)): current = stack.pop(0) for i in current["children"]: stack.append(i) rows = self.browser.find_elements_by_css_selector(string) tree_level = [] for row in rows: if row.get_attribute("textContent") != "" and row.get_attribute("textContent") != "[Edit]": tree_level.append((row.get_attribute("textContent"), row.get_attribute("href"))) self.assertEqual(len(stack), len(tree_level)) matchAllChildren = True for child in stack: childMatched = False index = 0 for (td, href) in tree_level: if child["pn"] == td and str(BitExtension().revisionNameToLong(child["revision_name"])) in href: childMatched = True print("===", child["pn"], "matched") del tree_level[index] index += 1 if not childMatched: print("Missing", child["pn"], child["revision_name"]) matchAllChildren = False break self.assertTrue(matchAllChildren, msg="kitNavigation: Not every children matches!") self.assertTrue(len(tree_level) == 0, msg="kitNavigation: Not every children is displayed!") levels = levels + "> div " string = "div#ctl00_ContentPlaceHolder1_tree_kitNavigation > div "+levels+"> table > tbody > tr > td > a" if type == "DIRECT": break def checkParentNavigation(self, parents): print("--"*5, "CHECK PARENT NAVIGATION", "--"*5) testParents = [] for i in parents: testParents.append(i) element = self.browser.find_element_by_id("ctl00_ContentPlaceHolder1_tree_parent") rows = element.find_elements_by_css_selector("div a") self.assertEqual(len(rows), len(testParents), msg="parentNavigation: wrong number 
of parents displayed") matchAllParents = True for row in rows: parentMatched = False index = 0 for parent in testParents: if row.get_attribute("textContent") == parent["pn"] and str(BitExtension().revisionNameToLong(parent["revision_name"])) in row.get_attribute("href"): parentMatched = True print("===", parent["pn"], "matched") del testParents[index] break index += 1 if not parentMatched: print("Missing", row.get_attribute("textContent")) matchAllParents = False break self.assertTrue(matchAllParents, msg="parentNavigation: Not every parent matches!") self.assertTrue(len(testParents) == 0, msg="parentNavigation: Not every parent is displayed!") def checkMasterNavigation(self, masters): print("--"*5, "CHECK MASTER NAVIGATION", "--"*5) testMasters = [] for i in masters: testMasters.append(i) element = self.browser.find_element_by_id("ctl00_ContentPlaceHolder1_tree_master") rows = element.find_elements_by_css_selector("div a") self.assertEqual(len(rows), len(testMasters), msg="masterNavigation: wrong number of masters displayed") matchAllMasters = True for row in rows: masterMatched = False index = 0 for master in testMasters: if row.get_attribute("textContent") == master["pn"] and str(BitExtension().revisionNameToLong(master["revision_name"])) in row.get_attribute("href"): masterMatched = True print("===", master["pn"], "matched") del testMasters[index] break index += 1 if not masterMatched: print("Missing", row.get_attribute("textContent")) matchAllMasters = False break self.assertTrue(matchAllMasters, msg="masterNavigation: Not every master matches!") self.assertTrue(len(testMasters) == 0, msg="masterNavigation: Not every master is displayed!")
[ "selenium.webdriver.support.expected_conditions.presence_of_element_located", "json.load", "selenium.webdriver.support.expected_conditions.element_to_be_clickable", "bit_extension.BitExtension", "selenium.webdriver.Firefox", "selenium.webdriver.support.expected_conditions.invisibility_of_element_located", "kit_test_helper.TestHelper", "selenium.webdriver.support.ui.WebDriverWait" ]
[((652, 671), 'selenium.webdriver.Firefox', 'webdriver.Firefox', ([], {}), '()\n', (669, 671), False, 'from selenium import webdriver\n'), ((822, 834), 'kit_test_helper.TestHelper', 'TestHelper', ([], {}), '()\n', (832, 834), False, 'from kit_test_helper import TestHelper\n'), ((2865, 2896), 'selenium.webdriver.support.ui.WebDriverWait', 'WebDriverWait', (['self.browser', '(10)'], {}), '(self.browser, 10)\n', (2878, 2896), False, 'from selenium.webdriver.support.ui import WebDriverWait\n'), ((755, 775), 'json.load', 'json.load', (['data_file'], {}), '(data_file)\n', (764, 775), False, 'import json\n'), ((2910, 2985), 'selenium.webdriver.support.expected_conditions.presence_of_element_located', 'EC.presence_of_element_located', (["(By.ID, 'ctl00_ContentPlaceHolder1_txt_PN')"], {}), "((By.ID, 'ctl00_ContentPlaceHolder1_txt_PN'))\n", (2940, 2985), True, 'from selenium.webdriver.support import expected_conditions as EC\n'), ((3362, 3450), 'selenium.webdriver.support.expected_conditions.invisibility_of_element_located', 'EC.invisibility_of_element_located', (["(By.ID, 'ctl00_ContentPlaceHolder1_img_loading')"], {}), "((By.ID,\n 'ctl00_ContentPlaceHolder1_img_loading'))\n", (3396, 3450), True, 'from selenium.webdriver.support import expected_conditions as EC\n'), ((5508, 5596), 'selenium.webdriver.support.expected_conditions.presence_of_element_located', 'EC.presence_of_element_located', (["(By.ID, 'ctl00_ContentPlaceHolder1_ddl_kitRevision')"], {}), "((By.ID,\n 'ctl00_ContentPlaceHolder1_ddl_kitRevision'))\n", (5538, 5596), True, 'from selenium.webdriver.support import expected_conditions as EC\n'), ((5958, 5989), 'selenium.webdriver.support.ui.WebDriverWait', 'WebDriverWait', (['self.browser', '(10)'], {}), '(self.browser, 10)\n', (5971, 5989), False, 'from selenium.webdriver.support.ui import WebDriverWait\n'), ((3177, 3262), 'selenium.webdriver.support.expected_conditions.element_to_be_clickable', 'EC.element_to_be_clickable', (["(By.ID, 
'ctl00_ContentPlaceHolder1_btn_searchDirect')"], {}), "((By.ID,\n 'ctl00_ContentPlaceHolder1_btn_searchDirect'))\n", (3203, 3262), True, 'from selenium.webdriver.support import expected_conditions as EC\n'), ((3968, 4059), 'selenium.webdriver.support.expected_conditions.presence_of_element_located', 'EC.presence_of_element_located', (["(By.ID, 'ctl00_ContentPlaceHolder1_CandidatesGridView')"], {}), "((By.ID,\n 'ctl00_ContentPlaceHolder1_CandidatesGridView'))\n", (3998, 4059), True, 'from selenium.webdriver.support import expected_conditions as EC\n'), ((6352, 6440), 'selenium.webdriver.support.expected_conditions.invisibility_of_element_located', 'EC.invisibility_of_element_located', (["(By.ID, 'ctl00_ContentPlaceHolder1_img_loading')"], {}), "((By.ID,\n 'ctl00_ContentPlaceHolder1_img_loading'))\n", (6386, 6440), True, 'from selenium.webdriver.support import expected_conditions as EC\n'), ((4621, 4709), 'selenium.webdriver.support.expected_conditions.invisibility_of_element_located', 'EC.invisibility_of_element_located', (["(By.ID, 'ctl00_ContentPlaceHolder1_img_loading')"], {}), "((By.ID,\n 'ctl00_ContentPlaceHolder1_img_loading'))\n", (4655, 4709), True, 'from selenium.webdriver.support import expected_conditions as EC\n'), ((5161, 5249), 'selenium.webdriver.support.expected_conditions.invisibility_of_element_located', 'EC.invisibility_of_element_located', (["(By.ID, 'ctl00_ContentPlaceHolder1_img_loading')"], {}), "((By.ID,\n 'ctl00_ContentPlaceHolder1_img_loading'))\n", (5195, 5249), True, 'from selenium.webdriver.support import expected_conditions as EC\n'), ((6277, 6291), 'bit_extension.BitExtension', 'BitExtension', ([], {}), '()\n', (6289, 6291), False, 'from bit_extension import BitExtension\n'), ((4091, 4105), 'bit_extension.BitExtension', 'BitExtension', ([], {}), '()\n', (4103, 4105), False, 'from bit_extension import BitExtension\n'), ((4560, 4574), 'bit_extension.BitExtension', 'BitExtension', ([], {}), '()\n', (4572, 4574), False, 'from bit_extension 
import BitExtension\n'), ((5101, 5115), 'bit_extension.BitExtension', 'BitExtension', ([], {}), '()\n', (5113, 5115), False, 'from bit_extension import BitExtension\n'), ((10625, 10639), 'bit_extension.BitExtension', 'BitExtension', ([], {}), '()\n', (10637, 10639), False, 'from bit_extension import BitExtension\n'), ((11722, 11736), 'bit_extension.BitExtension', 'BitExtension', ([], {}), '()\n', (11734, 11736), False, 'from bit_extension import BitExtension\n'), ((9378, 9392), 'bit_extension.BitExtension', 'BitExtension', ([], {}), '()\n', (9390, 9392), False, 'from bit_extension import BitExtension\n'), ((6579, 6593), 'bit_extension.BitExtension', 'BitExtension', ([], {}), '()\n', (6591, 6593), False, 'from bit_extension import BitExtension\n'), ((4849, 4863), 'bit_extension.BitExtension', 'BitExtension', ([], {}), '()\n', (4861, 4863), False, 'from bit_extension import BitExtension\n'), ((5388, 5402), 'bit_extension.BitExtension', 'BitExtension', ([], {}), '()\n', (5400, 5402), False, 'from bit_extension import BitExtension\n')]
# Roll 'n' Jump # Written in 2020, 2021 by <NAME>, <NAME>, # <NAME>, <NAME> # To the extent possible under law, the author(s) have dedicated all # copyright and related and neighboring rights to this software to the # public domain worldwide. This software is distributed without any warranty. # You should have received a copy of the CC0 Public Domain Dedication along # with this software. If not, see # <http://creativecommons.org/publicdomain/zero/1.0/>. """Fichier de test pour score.""" import os from hypothesis import given from hypothesis.strategies import characters, integers, text, lists, tuples import rollnjump.main as main import rollnjump.conf as cf import rollnjump.score as scre cf.SCORES = os.path.join(os.path.dirname(__file__), "test_score.txt") @given(integers()) def test_print(number): """Test pour les fonctions d'affichage.""" # Simples appels aux fonctions main.initialization(False) scre.score(number) scre.score_endgame(number) cf.LANG = "fr" scre.winner_endgame() cf.LANG = "en" scre.winner_endgame() alphanum_char = characters(min_codepoint=0x30, max_codepoint=0x7A, blacklist_characters=[':', ';', '<', '=', '>', '?', '@', '[', '\\', ']', '^', '_', '`']) score_list = tuples(integers(min_value=0), text(alphanum_char)) @given(lists(score_list, min_size=1, max_size=5)) def test_scoreboard(scores): """Test pour les fonctions relatives au tableau.""" scre.init_best_score() for (score, name) in scores: scre.PLAYER = name scre.set_best_score(score) read_scores = scre.get_scores() scores = list(sorted(scores, key=lambda x: -x[0])) assert read_scores == scores last_score = scre.get_last_best_score() assert last_score == scores[-1][0] scre.PLAYER = scores[0][1] assert scre.maj(scores[0][0] + 1) assert scre.get_scores()[0] == scores[0] for _ in range(5): if scre.maj(10): scre.set_best_score(10) assert not scre.maj(1) scre.init_best_score() @given(lists(text())) def test_corrupted_board_random(contents): """Test de robustesse en cas d'erreur dans le fichier des 
scores.""" with open(cf.SCORES, 'w') as board: for line in contents: board.write(line + '\n') scre.get_scores() def test_corrupted_board(): """Test similaire non randomisé pour assurer la couverture.""" with open(cf.SCORES, 'w') as board: for line in ["fsdq;0;vd", "s;s", "bcds"]: board.write(line + '\n') assert scre.get_scores() == []
[ "rollnjump.score.winner_endgame", "hypothesis.strategies.lists", "rollnjump.main.initialization", "rollnjump.score.get_scores", "hypothesis.strategies.characters", "os.path.dirname", "rollnjump.score.init_best_score", "rollnjump.score.set_best_score", "hypothesis.strategies.text", "rollnjump.score.get_last_best_score", "hypothesis.strategies.integers", "rollnjump.score.maj", "rollnjump.score.score_endgame", "rollnjump.score.score" ]
[((1090, 1230), 'hypothesis.strategies.characters', 'characters', ([], {'min_codepoint': '(48)', 'max_codepoint': '(122)', 'blacklist_characters': "[':', ';', '<', '=', '>', '?', '@', '[', '\\\\', ']', '^', '_', '`']"}), "(min_codepoint=48, max_codepoint=122, blacklist_characters=[':',\n ';', '<', '=', '>', '?', '@', '[', '\\\\', ']', '^', '_', '`'])\n", (1100, 1230), False, 'from hypothesis.strategies import characters, integers, text, lists, tuples\n'), ((725, 750), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (740, 750), False, 'import os\n'), ((901, 927), 'rollnjump.main.initialization', 'main.initialization', (['(False)'], {}), '(False)\n', (920, 927), True, 'import rollnjump.main as main\n'), ((932, 950), 'rollnjump.score.score', 'scre.score', (['number'], {}), '(number)\n', (942, 950), True, 'import rollnjump.score as scre\n'), ((955, 981), 'rollnjump.score.score_endgame', 'scre.score_endgame', (['number'], {}), '(number)\n', (973, 981), True, 'import rollnjump.score as scre\n'), ((1005, 1026), 'rollnjump.score.winner_endgame', 'scre.winner_endgame', ([], {}), '()\n', (1024, 1026), True, 'import rollnjump.score as scre\n'), ((1050, 1071), 'rollnjump.score.winner_endgame', 'scre.winner_endgame', ([], {}), '()\n', (1069, 1071), True, 'import rollnjump.score as scre\n'), ((779, 789), 'hypothesis.strategies.integers', 'integers', ([], {}), '()\n', (787, 789), False, 'from hypothesis.strategies import characters, integers, text, lists, tuples\n'), ((1500, 1521), 'hypothesis.strategies.integers', 'integers', ([], {'min_value': '(0)'}), '(min_value=0)\n', (1508, 1521), False, 'from hypothesis.strategies import characters, integers, text, lists, tuples\n'), ((1523, 1542), 'hypothesis.strategies.text', 'text', (['alphanum_char'], {}), '(alphanum_char)\n', (1527, 1542), False, 'from hypothesis.strategies import characters, integers, text, lists, tuples\n'), ((1685, 1707), 'rollnjump.score.init_best_score', 'scre.init_best_score', ([], {}), 
'()\n', (1705, 1707), True, 'import rollnjump.score as scre\n'), ((1821, 1838), 'rollnjump.score.get_scores', 'scre.get_scores', ([], {}), '()\n', (1836, 1838), True, 'import rollnjump.score as scre\n'), ((1945, 1971), 'rollnjump.score.get_last_best_score', 'scre.get_last_best_score', ([], {}), '()\n', (1969, 1971), True, 'import rollnjump.score as scre\n'), ((2054, 2080), 'rollnjump.score.maj', 'scre.maj', (['(scores[0][0] + 1)'], {}), '(scores[0][0] + 1)\n', (2062, 2080), True, 'import rollnjump.score as scre\n'), ((2241, 2263), 'rollnjump.score.init_best_score', 'scre.init_best_score', ([], {}), '()\n', (2261, 2263), True, 'import rollnjump.score as scre\n'), ((1553, 1594), 'hypothesis.strategies.lists', 'lists', (['score_list'], {'min_size': '(1)', 'max_size': '(5)'}), '(score_list, min_size=1, max_size=5)\n', (1558, 1594), False, 'from hypothesis.strategies import characters, integers, text, lists, tuples\n'), ((2515, 2532), 'rollnjump.score.get_scores', 'scre.get_scores', ([], {}), '()\n', (2530, 2532), True, 'import rollnjump.score as scre\n'), ((1776, 1802), 'rollnjump.score.set_best_score', 'scre.set_best_score', (['score'], {}), '(score)\n', (1795, 1802), True, 'import rollnjump.score as scre\n'), ((2160, 2172), 'rollnjump.score.maj', 'scre.maj', (['(10)'], {}), '(10)\n', (2168, 2172), True, 'import rollnjump.score as scre\n'), ((2225, 2236), 'rollnjump.score.maj', 'scre.maj', (['(1)'], {}), '(1)\n', (2233, 2236), True, 'import rollnjump.score as scre\n'), ((2279, 2285), 'hypothesis.strategies.text', 'text', ([], {}), '()\n', (2283, 2285), False, 'from hypothesis.strategies import characters, integers, text, lists, tuples\n'), ((2768, 2785), 'rollnjump.score.get_scores', 'scre.get_scores', ([], {}), '()\n', (2783, 2785), True, 'import rollnjump.score as scre\n'), ((2092, 2109), 'rollnjump.score.get_scores', 'scre.get_scores', ([], {}), '()\n', (2107, 2109), True, 'import rollnjump.score as scre\n'), ((2186, 2209), 'rollnjump.score.set_best_score', 
'scre.set_best_score', (['(10)'], {}), '(10)\n', (2205, 2209), True, 'import rollnjump.score as scre\n')]
# pyenchant # # Copyright (C) 2004-2011, <NAME> # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the # Free Software Foundation, Inc., 59 Temple Place - Suite 330, # Boston, MA 02111-1307, USA. # # In addition, as a special exception, you are # given permission to link the code of this program with # non-LGPL Spelling Provider libraries (eg: a MSFT Office # spell checker backend) and distribute linked combinations including # the two. You must obey the GNU Lesser General Public License in all # respects for all of the code used other than said providers. If you modify # this file, you may extend this exception to your version of the # file, but you are not obligated to do so. If you do not wish to # do so, delete this exception statement from your version. # """ enchant: Access to the enchant spellchecking library ===================================================== This module provides several classes for performing spell checking via the Enchant spellchecking library. For more details on Enchant, visit the project website: https://abiword.github.io/enchant/ Spellchecking is performed using 'Dict' objects, which represent a language dictionary. 
Their use is best demonstrated by a quick example:: >>> import enchant >>> d = enchant.Dict("en_US") # create dictionary for US English >>> d.check("enchant") True >>> d.check("enchnt") False >>> d.suggest("enchnt") ['enchant', 'enchants', 'enchanter', 'penchant', 'incant', 'enchain', 'enchanted'] Languages are identified by standard string tags such as "en" (English) and "fr" (French). Specific language dialects can be specified by including an additional code - for example, "en_AU" refers to Australian English. The later form is preferred as it is more widely supported. To check whether a dictionary exists for a given language, the function 'dict_exists' is available. Dictionaries may also be created using the function 'request_dict'. A finer degree of control over the dictionaries and how they are created can be obtained using one or more 'Broker' objects. These objects are responsible for locating dictionaries for a specific language. Note that unicode strings are expected throughout the entire API. Bytestrings should not be passed into any function. Errors that occur in this module are reported by raising subclasses of 'Error'. """ _DOC_ERRORS = ["enchnt", "enchnt", "incant", "fr"] __version__ = "3.2.0" import os import warnings try: from enchant import _enchant as _e except ImportError: if not os.environ.get("PYENCHANT_IGNORE_MISSING_LIB", False): raise _e = None from enchant.errors import Error, DictNotFoundError from enchant.utils import get_default_language from enchant.pypwl import PyPWL class ProviderDesc: """Simple class describing an Enchant provider. Each provider has the following information associated with it: * name: Internal provider name (e.g. "aspell") * desc: Human-readable description (e.g. 
"Aspell Provider") * file: Location of the library containing the provider """ _DOC_ERRORS = ["desc"] def __init__(self, name, desc, file): self.name = name self.desc = desc self.file = file def __str__(self): return "<Enchant: %s>" % self.desc def __repr__(self): return str(self) def __eq__(self, pd): """Equality operator on ProviderDesc objects.""" return self.name == pd.name and self.desc == pd.desc and self.file == pd.file def __hash__(self): """Hash operator on ProviderDesc objects.""" return hash(self.name + self.desc + self.file) class _EnchantObject: """Base class for enchant objects. This class implements some general functionality for interfacing with the '_enchant' C-library in a consistent way. All public objects from the 'enchant' module are subclasses of this class. All enchant objects have an attribute '_this' which contains the pointer to the underlying C-library object. The method '_check_this' can be called to ensure that this point is not None, raising an exception if it is. """ def __init__(self): """_EnchantObject constructor.""" self._this = None # To be importable when enchant C lib is missing, we need # to create a dummy default broker. if _e is not None: self._init_this() def _check_this(self, msg=None): """Check that self._this is set to a pointer, rather than None.""" if self._this is None: if msg is None: msg = "%s unusable: the underlying C-library object has been freed." msg = msg % (self.__class__.__name__,) raise Error(msg) def _init_this(self): """Initialise the underlying C-library object pointer.""" raise NotImplementedError def _raise_error(self, default="Unspecified Error", eclass=Error): """Raise an exception based on available error messages. This method causes an Error to be raised. Subclasses should override it to retrieve an error indication from the underlying API if possible. If such a message cannot be retrieved, the argument value <default> is used. 
The class of the exception can be specified using the argument <eclass> """ raise eclass(default) _raise_error._DOC_ERRORS = ["eclass"] def __getstate__(self): """Customize pickling of PyEnchant objects. Since it's not safe for multiple objects to share the same C-library object, we make sure it's unset when pickling. """ state = self.__dict__.copy() state["_this"] = None return state def __setstate__(self, state): self.__dict__.update(state) self._init_this() class Broker(_EnchantObject): """Broker object for the Enchant spellchecker. Broker objects are responsible for locating and managing dictionaries. Unless custom functionality is required, there is no need to use Broker objects directly. The 'enchant' module provides a default broker object so that 'Dict' objects can be created directly. The most important methods of this class include: * :py:meth:`dict_exists`: check existence of a specific language dictionary * :py:meth:`request_dict`: obtain a dictionary for specific language * :py:meth:`set_ordering`: specify which dictionaries to try for a given language. """ def __init__(self): """Broker object constructor. This method is the constructor for the 'Broker' object. No arguments are required. """ super().__init__() def _init_this(self): self._this = _e.broker_init() if not self._this: raise Error("Could not initialise an enchant broker.") self._live_dicts = {} def __del__(self): """Broker object destructor.""" # Calling free() might fail if python is shutting down try: self._free() except (AttributeError, TypeError): pass def __getstate__(self): state = super().__getstate__() state.pop("_live_dicts") return state def _raise_error(self, default="Unspecified Error", eclass=Error): """Overrides _EnchantObject._raise_error to check broker errors.""" err = _e.broker_get_error(self._this) if err == "" or err is None: raise eclass(default) raise eclass(err.decode()) def _free(self): """Free system resource associated with a Broker object. 
This method can be called to free the underlying system resources associated with a Broker object. It is called automatically when the object is garbage collected. If called explicitly, the Broker and any associated Dict objects must no longer be used. """ if self._this is not None: # During shutdown, this finalizer may be called before # some Dict finalizers. Ensure all pointers are freed. for (dict, count) in list(self._live_dicts.items()): while count: self._free_dict_data(dict) count -= 1 _e.broker_free(self._this) self._this = None def request_dict(self, tag=None): """Request a Dict object for the language specified by <tag>. This method constructs and returns a Dict object for the requested language. 'tag' should be a string of the appropriate form for specifying a language, such as "fr" (French) or "en_AU" (Australian English). The existence of a specific language can be tested using the 'dict_exists' method. If <tag> is not given or is None, an attempt is made to determine the current language in use. If this cannot be determined, Error is raised. .. note:: this method is functionally equivalent to calling the Dict() constructor and passing in the <broker> argument. """ return Dict(tag, self) request_dict._DOC_ERRORS = ["fr"] def _request_dict_data(self, tag): """Request raw C pointer data for a dictionary. This method call passes on the call to the C library, and does some internal bookkeeping. """ self._check_this() new_dict = _e.broker_request_dict(self._this, tag.encode()) if new_dict is None: e_str = "Dictionary for language '%s' could not be found\n" e_str += "Please check https://pyenchant.github.io/pyenchant/ for details" self._raise_error(e_str % (tag,), DictNotFoundError) if new_dict not in self._live_dicts: self._live_dicts[new_dict] = 1 else: self._live_dicts[new_dict] += 1 return new_dict def request_pwl_dict(self, pwl): """Request a Dict object for a personal word list. 
This method behaves as 'request_dict' but rather than returning a dictionary for a specific language, it returns a dictionary referencing a personal word list. A personal word list is a file of custom dictionary entries, one word per line. """ self._check_this() new_dict = _e.broker_request_pwl_dict(self._this, pwl.encode()) if new_dict is None: e_str = "Personal Word List file '%s' could not be loaded" self._raise_error(e_str % (pwl,)) if new_dict not in self._live_dicts: self._live_dicts[new_dict] = 1 else: self._live_dicts[new_dict] += 1 d = Dict(False) d._switch_this(new_dict, self) return d def _free_dict(self, dict): """Free memory associated with a dictionary. This method frees system resources associated with a Dict object. It is equivalent to calling the object's 'free' method. Once this method has been called on a dictionary, it must not be used again. """ self._free_dict_data(dict._this) dict._this = None dict._broker = None def _free_dict_data(self, dict): """Free the underlying pointer for a dict.""" self._check_this() _e.broker_free_dict(self._this, dict) self._live_dicts[dict] -= 1 if self._live_dicts[dict] == 0: del self._live_dicts[dict] def dict_exists(self, tag): """Check availability of a dictionary. This method checks whether there is a dictionary available for the language specified by 'tag'. It returns True if a dictionary is available, and False otherwise. """ self._check_this() val = _e.broker_dict_exists(self._this, tag.encode()) return bool(val) def set_ordering(self, tag, ordering): """Set dictionary preferences for a language. The Enchant library supports the use of multiple dictionary programs and multiple languages. This method specifies which dictionaries the broker should prefer when dealing with a given language. 'tag' must be an appropriate language specification and 'ordering' is a string listing the dictionaries in order of preference. For example a valid ordering might be "aspell,myspell,ispell". 
The value of 'tag' can also be set to "*" to set a default ordering for all languages for which one has not been set explicitly. """ self._check_this() _e.broker_set_ordering(self._this, tag.encode(), ordering.encode()) def describe(self): """Return list of provider descriptions. This method returns a list of descriptions of each of the dictionary providers available. Each entry in the list is a ProviderDesc object. """ self._check_this() self.__describe_result = [] _e.broker_describe(self._this, self.__describe_callback) return [ProviderDesc(*r) for r in self.__describe_result] def __describe_callback(self, name, desc, file): """Collector callback for dictionary description. This method is used as a callback into the _enchant function 'enchant_broker_describe'. It collects the given arguments in a tuple and appends them to the list '__describe_result'. """ name = name.decode() desc = desc.decode() file = file.decode() self.__describe_result.append((name, desc, file)) def list_dicts(self): """Return list of available dictionaries. This method returns a list of dictionaries available to the broker. Each entry in the list is a two-tuple of the form: (tag,provider) where <tag> is the language lag for the dictionary and <provider> is a ProviderDesc object describing the provider through which that dictionary can be obtained. """ self._check_this() self.__list_dicts_result = [] _e.broker_list_dicts(self._this, self.__list_dicts_callback) return [(r[0], ProviderDesc(*r[1])) for r in self.__list_dicts_result] def __list_dicts_callback(self, tag, name, desc, file): """Collector callback for listing dictionaries. This method is used as a callback into the _enchant function 'enchant_broker_list_dicts'. It collects the given arguments into an appropriate tuple and appends them to '__list_dicts_result'. 
""" tag = tag.decode() name = name.decode() desc = desc.decode() file = file.decode() self.__list_dicts_result.append((tag, (name, desc, file))) def list_languages(self): """List languages for which dictionaries are available. This function returns a list of language tags for which a dictionary is available. """ langs = [] for (tag, prov) in self.list_dicts(): if tag not in langs: langs.append(tag) return langs def __describe_dict(self, dict_data): """Get the description tuple for a dict data object. <dict_data> must be a C-library pointer to an enchant dictionary. The return value is a tuple of the form: (<tag>,<name>,<desc>,<file>) """ # Define local callback function cb_result = [] def cb_func(tag, name, desc, file): tag = tag.decode() name = name.decode() desc = desc.decode() file = file.decode() cb_result.append((tag, name, desc, file)) # Actually call the describer function _e.dict_describe(dict_data, cb_func) return cb_result[0] __describe_dict._DOC_ERRORS = ["desc"] def get_param(self, name): """Get the value of a named parameter on this broker. Parameters are used to provide runtime information to individual provider backends. See the method :py:meth:`set_param` for more details. .. warning:: This method does **not** work when using the Enchant C library version 2.0 and above """ param = _e.broker_get_param(self._this, name.encode()) if param is not None: param = param.decode() return param get_param._DOC_ERRORS = ["param"] def set_param(self, name, value): """Set the value of a named parameter on this broker. Parameters are used to provide runtime information to individual provider backends. .. warning:: This method does **not** work when using the Enchant C library version 2.0 and above """ name = name.encode() if value is not None: value = value.encode() _e.broker_set_param(self._this, name, value) class Dict(_EnchantObject): """Dictionary object for the Enchant spellchecker. 
Dictionary objects are responsible for checking the spelling of words and suggesting possible corrections. Each dictionary is owned by a Broker object, but unless a new Broker has explicitly been created then this will be the 'enchant' module default Broker and is of little interest. The important methods of this class include: * check(): check whether a word id spelled correctly * suggest(): suggest correct spellings for a word * add(): add a word to the user's personal dictionary * remove(): add a word to the user's personal exclude list * add_to_session(): add a word to the current spellcheck session * store_replacement(): indicate a replacement for a given word Information about the dictionary is available using the following attributes: * tag: the language tag of the dictionary * provider: a ProviderDesc object for the dictionary provider """ def __init__(self, tag=None, broker=None): """Dict object constructor. A dictionary belongs to a specific language, identified by the string <tag>. If the tag is not given or is None, an attempt to determine the language currently in use is made using the 'locale' module. If the current language cannot be determined, Error is raised. If <tag> is instead given the value of False, a 'dead' Dict object is created without any reference to a language. This is typically only useful within PyEnchant itself. Any other non-string value for <tag> raises Error. Each dictionary must also have an associated Broker object which obtains the dictionary information from the underlying system. This may be specified using <broker>. If not given, the default broker is used. """ # Initialise misc object attributes to None self.provider = None # If no tag was given, use the default language if tag is None: tag = get_default_language() if tag is None: err = "No tag specified and default language could not " err = err + "be determined." 
raise Error(err) self.tag = tag # If no broker was given, use the default broker if broker is None: broker = _broker self._broker = broker # Now let the superclass initialise the C-library object super().__init__() def _init_this(self): # Create dead object if False was given as the tag. # Otherwise, use the broker to get C-library pointer data. self._this = None if self.tag: this = self._broker._request_dict_data(self.tag) self._switch_this(this, self._broker) def __del__(self): """Dict object destructor.""" # Calling free() might fail if python is shutting down try: self._free() except AttributeError: pass def _switch_this(self, this, broker): """Switch the underlying C-library pointer for this object. As all useful state for a Dict is stored by the underlying C-library pointer, it is very convenient to allow this to be switched at run-time. Pass a new dict data object into this method to affect the necessary changes. The creating Broker object (at the Python level) must also be provided. This should *never* *ever* be used by application code. It's a convenience for developers only, replacing the clunkier <data> parameter to __init__ from earlier versions. """ # Free old dict data Dict._free(self) # Hook in the new stuff self._this = this self._broker = broker # Update object properties desc = self.__describe(check_this=False) self.tag = desc[0] self.provider = ProviderDesc(*desc[1:]) _switch_this._DOC_ERRORS = ["init"] def _check_this(self, msg=None): """Extend _EnchantObject._check_this() to check Broker validity. It is possible for the managing Broker object to be freed without freeing the Dict. Thus validity checking must take into account self._broker._this as well as self._this. 
""" if self._broker is None or self._broker._this is None: self._this = None super()._check_this(msg) def _raise_error(self, default="Unspecified Error", eclass=Error): """Overrides _EnchantObject._raise_error to check dict errors.""" err = _e.dict_get_error(self._this) if err == "" or err is None: raise eclass(default) raise eclass(err.decode()) def _free(self): """Free the system resources associated with a Dict object. This method frees underlying system resources for a Dict object. Once it has been called, the Dict object must no longer be used. It is called automatically when the object is garbage collected. """ if self._this is not None: # The broker may have been freed before the dict. # It will have freed the underlying pointers already. if self._broker is not None and self._broker._this is not None: self._broker._free_dict(self) def check(self, word): """Check spelling of a word. This method takes a word in the dictionary language and returns True if it is correctly spelled, and false otherwise. """ self._check_this() # Enchant asserts that the word is non-empty. # Check it up-front to avoid nasty warnings on stderr. if len(word) == 0: raise ValueError("can't check spelling of empty string") val = _e.dict_check(self._this, word.encode()) if val == 0: return True if val > 0: return False self._raise_error() def suggest(self, word): """Suggest possible spellings for a word. This method tries to guess the correct spelling for a given word, returning the possibilities in a list. """ self._check_this() # Enchant asserts that the word is non-empty. # Check it up-front to avoid nasty warnings on stderr. 
if len(word) == 0: raise ValueError("can't suggest spellings for empty string") suggs = _e.dict_suggest(self._this, word.encode()) return [w.decode() for w in suggs] def add(self, word): """Add a word to the user's personal word list.""" self._check_this() _e.dict_add(self._this, word.encode()) def remove(self, word): """Add a word to the user's personal exclude list.""" self._check_this() _e.dict_remove(self._this, word.encode()) def add_to_pwl(self, word): """Add a word to the user's personal word list.""" warnings.warn( "Dict.add_to_pwl is deprecated, please use Dict.add", category=DeprecationWarning, stacklevel=2, ) self._check_this() _e.dict_add_to_pwl(self._this, word.encode()) def add_to_session(self, word): """Add a word to the session personal list.""" self._check_this() _e.dict_add_to_session(self._this, word.encode()) def remove_from_session(self, word): """Add a word to the session exclude list.""" self._check_this() _e.dict_remove_from_session(self._this, word.encode()) def is_added(self, word): """Check whether a word is in the personal word list.""" self._check_this() return _e.dict_is_added(self._this, word.encode()) def is_removed(self, word): """Check whether a word is in the personal exclude list.""" self._check_this() return _e.dict_is_removed(self._this, word.encode()) def store_replacement(self, mis, cor): """Store a replacement spelling for a miss-spelled word. This method makes a suggestion to the spellchecking engine that the miss-spelled word <mis> is in fact correctly spelled as <cor>. Such a suggestion will typically mean that <cor> appears early in the list of suggested spellings offered for later instances of <mis>. 
""" if not mis: raise ValueError("can't store replacement for an empty string") if not cor: raise ValueError("can't store empty string as a replacement") self._check_this() _e.dict_store_replacement(self._this, mis.encode(), cor.encode()) store_replacement._DOC_ERRORS = ["mis", "mis"] def __describe(self, check_this=True): """Return a tuple describing the dictionary. This method returns a four-element tuple describing the underlying spellchecker system providing the dictionary. It will contain the following strings: * language tag * name of dictionary provider * description of dictionary provider * dictionary file Direct use of this method is not recommended - instead, access this information through the 'tag' and 'provider' attributes. """ if check_this: self._check_this() _e.dict_describe(self._this, self.__describe_callback) return self.__describe_result def __describe_callback(self, tag, name, desc, file): """Collector callback for dictionary description. This method is used as a callback into the _enchant function 'enchant_dict_describe'. It collects the given arguments in a tuple and stores them in the attribute '__describe_result'. """ tag = tag.decode() name = name.decode() desc = desc.decode() file = file.decode() self.__describe_result = (tag, name, desc, file) class DictWithPWL(Dict): """Dictionary with separately-managed personal word list. .. note:: As of version 1.4.0, enchant manages a per-user pwl and exclude list. This class is now only needed if you want to explicitly maintain a separate word list in addition to the default one. This class behaves as the standard Dict class, but also manages a personal word list stored in a separate file. The file must be specified at creation time by the 'pwl' argument to the constructor. Words added to the dictionary are automatically appended to the pwl file. A personal exclude list can also be managed, by passing another filename to the constructor in the optional 'pel' argument. 
If this is not given, requests to exclude words are ignored. If either 'pwl' or 'pel' are None, an in-memory word list is used. This will prevent calls to add() and remove() from affecting the user's default word lists. The Dict object managing the PWL is available as the 'pwl' attribute. The Dict object managing the PEL is available as the 'pel' attribute. To create a DictWithPWL from the user's default language, use None as the 'tag' argument. """ _DOC_ERRORS = ["pel", "pel", "PEL", "pel"] def __init__(self, tag, pwl=None, pel=None, broker=None): """DictWithPWL constructor. The argument 'pwl', if not None, names a file containing the personal word list. If this file does not exist, it is created with default permissions. The argument 'pel', if not None, names a file containing the personal exclude list. If this file does not exist, it is created with default permissions. """ super().__init__(tag, broker) if pwl is not None: if not os.path.exists(pwl): f = open(pwl, "wt") f.close() del f self.pwl = self._broker.request_pwl_dict(pwl) else: self.pwl = PyPWL() if pel is not None: if not os.path.exists(pel): f = open(pel, "wt") f.close() del f self.pel = self._broker.request_pwl_dict(pel) else: self.pel = PyPWL() def _check_this(self, msg=None): """Extend Dict._check_this() to check PWL validity.""" if self.pwl is None: self._free() if self.pel is None: self._free() super()._check_this(msg) self.pwl._check_this(msg) self.pel._check_this(msg) def _free(self): """Extend Dict._free() to free the PWL as well.""" if self.pwl is not None: self.pwl._free() self.pwl = None if self.pel is not None: self.pel._free() self.pel = None super()._free() def check(self, word): """Check spelling of a word. This method takes a word in the dictionary language and returns True if it is correctly spelled, and false otherwise. It checks both the dictionary and the personal word list. 
""" if self.pel.check(word): return False if self.pwl.check(word): return True if super().check(word): return True return False def suggest(self, word): """Suggest possible spellings for a word. This method tries to guess the correct spelling for a given word, returning the possibilities in a list. """ suggs = super().suggest(word) suggs.extend([w for w in self.pwl.suggest(word) if w not in suggs]) for i in range(len(suggs) - 1, -1, -1): if self.pel.check(suggs[i]): del suggs[i] return suggs def add(self, word): """Add a word to the associated personal word list. This method adds the given word to the personal word list, and automatically saves the list to disk. """ self._check_this() self.pwl.add(word) self.pel.remove(word) def remove(self, word): """Add a word to the associated exclude list.""" self._check_this() self.pwl.remove(word) self.pel.add(word) def add_to_pwl(self, word): """Add a word to the associated personal word list. This method adds the given word to the personal word list, and automatically saves the list to disk. """ self._check_this() self.pwl.add_to_pwl(word) self.pel.remove(word) def is_added(self, word): """Check whether a word is in the personal word list.""" self._check_this() return self.pwl.is_added(word) def is_removed(self, word): """Check whether a word is in the personal exclude list.""" self._check_this() return self.pel.is_added(word) ## Create a module-level default broker object, and make its important ## methods available at the module level. _broker = Broker() request_dict = _broker.request_dict request_pwl_dict = _broker.request_pwl_dict dict_exists = _broker.dict_exists list_dicts = _broker.list_dicts list_languages = _broker.list_languages get_param = _broker.get_param set_param = _broker.set_param # Expose the "get_version" function. def get_enchant_version(): """Get the version string for the underlying enchant library.""" return _e.get_version().decode() # Expose the "set_prefix_dir" function. 
def set_prefix_dir(path): """Set the prefix used by the Enchant library to find its plugins Called automatically when the Python library is imported when required. """ return _e.set_prefix_dir(path) set_prefix_dir._DOC_ERRORS = ["plugins"] def get_user_config_dir(): """Return the path that will be used by some Enchant providers to look for custom dictionaries. """ return _e.get_user_config_dir().decode()
[ "enchant._enchant.dict_describe", "enchant.utils.get_default_language", "enchant._enchant.get_version", "enchant._enchant.broker_free_dict", "enchant._enchant.broker_list_dicts", "enchant._enchant.dict_get_error", "enchant._enchant.broker_get_error", "enchant._enchant.get_user_config_dir", "os.path.exists", "os.environ.get", "enchant._enchant.broker_free", "enchant._enchant.broker_describe", "enchant.pypwl.PyPWL", "enchant._enchant.broker_init", "enchant._enchant.set_prefix_dir", "enchant._enchant.broker_set_param", "enchant.errors.Error", "warnings.warn" ]
[((33475, 33498), 'enchant._enchant.set_prefix_dir', '_e.set_prefix_dir', (['path'], {}), '(path)\n', (33492, 33498), True, 'from enchant import _enchant as _e\n'), ((7430, 7446), 'enchant._enchant.broker_init', '_e.broker_init', ([], {}), '()\n', (7444, 7446), True, 'from enchant import _enchant as _e\n'), ((8081, 8112), 'enchant._enchant.broker_get_error', '_e.broker_get_error', (['self._this'], {}), '(self._this)\n', (8100, 8112), True, 'from enchant import _enchant as _e\n'), ((11997, 12034), 'enchant._enchant.broker_free_dict', '_e.broker_free_dict', (['self._this', 'dict'], {}), '(self._this, dict)\n', (12016, 12034), True, 'from enchant import _enchant as _e\n'), ((13663, 13719), 'enchant._enchant.broker_describe', '_e.broker_describe', (['self._this', 'self.__describe_callback'], {}), '(self._this, self.__describe_callback)\n', (13681, 13719), True, 'from enchant import _enchant as _e\n'), ((14776, 14836), 'enchant._enchant.broker_list_dicts', '_e.broker_list_dicts', (['self._this', 'self.__list_dicts_callback'], {}), '(self._this, self.__list_dicts_callback)\n', (14796, 14836), True, 'from enchant import _enchant as _e\n'), ((16436, 16472), 'enchant._enchant.dict_describe', '_e.dict_describe', (['dict_data', 'cb_func'], {}), '(dict_data, cb_func)\n', (16452, 16472), True, 'from enchant import _enchant as _e\n'), ((17575, 17619), 'enchant._enchant.broker_set_param', '_e.broker_set_param', (['self._this', 'name', 'value'], {}), '(self._this, name, value)\n', (17594, 17619), True, 'from enchant import _enchant as _e\n'), ((22397, 22426), 'enchant._enchant.dict_get_error', '_e.dict_get_error', (['self._this'], {}), '(self._this)\n', (22414, 22426), True, 'from enchant import _enchant as _e\n'), ((24752, 24866), 'warnings.warn', 'warnings.warn', (['"""Dict.add_to_pwl is deprecated, please use Dict.add"""'], {'category': 'DeprecationWarning', 'stacklevel': '(2)'}), "('Dict.add_to_pwl is deprecated, please use Dict.add',\n category=DeprecationWarning, 
stacklevel=2)\n", (24765, 24866), False, 'import warnings\n'), ((27130, 27184), 'enchant._enchant.dict_describe', '_e.dict_describe', (['self._this', 'self.__describe_callback'], {}), '(self._this, self.__describe_callback)\n', (27146, 27184), True, 'from enchant import _enchant as _e\n'), ((3137, 3190), 'os.environ.get', 'os.environ.get', (['"""PYENCHANT_IGNORE_MISSING_LIB"""', '(False)'], {}), "('PYENCHANT_IGNORE_MISSING_LIB', False)\n", (3151, 3190), False, 'import os\n'), ((5383, 5393), 'enchant.errors.Error', 'Error', (['msg'], {}), '(msg)\n', (5388, 5393), False, 'from enchant.errors import Error, DictNotFoundError\n'), ((7492, 7540), 'enchant.errors.Error', 'Error', (['"""Could not initialise an enchant broker."""'], {}), "('Could not initialise an enchant broker.')\n", (7497, 7540), False, 'from enchant.errors import Error, DictNotFoundError\n'), ((8960, 8986), 'enchant._enchant.broker_free', '_e.broker_free', (['self._this'], {}), '(self._this)\n', (8974, 8986), True, 'from enchant import _enchant as _e\n'), ((19810, 19832), 'enchant.utils.get_default_language', 'get_default_language', ([], {}), '()\n', (19830, 19832), False, 'from enchant.utils import get_default_language\n'), ((29773, 29780), 'enchant.pypwl.PyPWL', 'PyPWL', ([], {}), '()\n', (29778, 29780), False, 'from enchant.pypwl import PyPWL\n'), ((30028, 30035), 'enchant.pypwl.PyPWL', 'PyPWL', ([], {}), '()\n', (30033, 30035), False, 'from enchant.pypwl import PyPWL\n'), ((33210, 33226), 'enchant._enchant.get_version', '_e.get_version', ([], {}), '()\n', (33224, 33226), True, 'from enchant import _enchant as _e\n'), ((33697, 33721), 'enchant._enchant.get_user_config_dir', '_e.get_user_config_dir', ([], {}), '()\n', (33719, 33721), True, 'from enchant import _enchant as _e\n'), ((20001, 20011), 'enchant.errors.Error', 'Error', (['err'], {}), '(err)\n', (20006, 20011), False, 'from enchant.errors import Error, DictNotFoundError\n'), ((29573, 29592), 'os.path.exists', 'os.path.exists', (['pwl'], {}), 
'(pwl)\n', (29587, 29592), False, 'import os\n'), ((29828, 29847), 'os.path.exists', 'os.path.exists', (['pel'], {}), '(pel)\n', (29842, 29847), False, 'import os\n')]
# AUTOGENERATED! DO NOT EDIT! File to edit: 01_utils.ipynb (unless otherwise specified). __all__ = ['to_hhmmss', 'to_secs', 'display_video', 'check_resolution', 'check_fps', 'play_audio', 'change_audio_format', 'trim_audio', 'change_volume', 'loop_audio', 'concat_audios'] # Internal Cell from collections import defaultdict import os import subprocess import time from pathlib import Path from subprocess import CalledProcessError from typing import Dict, Union import cv2 from fastcore.test import * import imageio from IPython.core.display import Video from IPython.display import Audio from nbdev.showdoc import * from pydub import AudioSegment from tqdm import tqdm import numpy as np import platform # Internal Cell class URLs: base = 'https://login.deepword.co:3000/api' credits_url = f'{base}/api_get_credits/' list_vids_url = f'{base}/list_video_api/' txt2speech_url = f'{base}/api_text_to_speech/' download_vid_url = f'{base}/api_download_video/' download_yt_vid_url = f'{base}/api_download_youtube_video/' generate_vid_url = f'{base}/generate_video_api' validate_token_url = f'{base}/check_apikey' api_get_audio_sample = f'{base}/api_get_audio_sample' api_get_video_actors = f'{base}/api_get_video_actors' trim_video = 'https://youtube.deepword.co:5000/api_trim_video' # Internal Cell class AzureDicts: langs = ["arabic_egypt", "arabic_saudi_arabia", "bulgarian", "catalan", "czech", "welsh", "danish", "german_austria", "german_switzerland", "german_germany", "greek", "english_australia", "english_canada", "english_uk", "english_hongkong", "english_ireland", "english_india", "english_new_zealand", "english_philippines", "english_singapore", "english_us", "english_south_africa", "spanish_argentina", "spanish_colombia", "spanish_spain", "spanish_mexico", "spanish_us", "estonian", "finnish", "french_belgium", "french_canada", "french_switzerland", "french_france", "irish", "gujarati", "hebrew", "hindi", "croatian", "hungarian", "indonesian", "italian", "japanese", "korean", 
"lithuanian", "latvia", "marathi", "malay", "maltese", "norwegian", "dutch_belgium", "dutch_netherlands", "polish", "portuguese_brazil", "portuguese_portugal", "romanian", "russian", "slovak", "slovanian", "swedish", "swahili", "tamil", "telugu", "thai", "turkish", "ukranian", "urdu", "vietnamese", "chinese_mandarin", "chinese_cantonese", "chinese_taiwanese"] codes = ["ar-EG","ar-SA","bg-BG","ca-ES","cs-CZ","cy-GB","da-DK","de-AT", "de-CH","de-DE","el-GR","en-AU","en-CA","en-GB", "en-HK","en-IE","en-IN","en-NZ","en-PH", "en-SG","en-US","en-ZA","es-AR","es-CO", "es-ES","es-MX","es-US","et-EE","fi-FI","fr-BE","fr-CA", "fr-CH","fr-FR","ga-IE","gu-IN","he-IL","hi-IN","hr-HR","hu-HU", "id-ID","it-IT","ja-JP","ko-KR","lt-LT","lv-LV","mr-IN","ms-MY","mt-MT", "nb-NO","nl-BE","nl-NL","pl-PL","pt-BR","pt-PT", "ro-RO","ru-RU","sk-SK","sl-SI","sv-SE","sw-KE","ta-IN","te-IN","th-TH","tr-TR", "uk-UA","ur-PK","vi-VN","zh-CN","zh-HK","zh-TW"] lang2code = dict(zip(langs, codes)) all_speakers = ["ar-EG-SalmaNeural Female","ar-EG-ShakirNeural Male","ar-SA-HamedNeural Male","ar-SA-ZariyahNeural Female","bg-BG-BorislavNeural Male", "bg-BG-KalinaNeural Female","ca-ES-JoanaNeural Female","ca-ES-AlbaNeural Female","ca-ES-EnricNeural Male","cs-CZ-AntoninNeural Male", "cs-CZ-VlastaNeural Female","cy-GB-AledNeural Male","cy-GB-NiaNeural Female","da-DK-ChristelNeural Female","da-DK-JeppeNeural Male", "de-AT-IngridNeural Female","de-AT-JonasNeural Male","de-CH-JanNeural Male","de-CH-LeniNeural Female","de-DE-KatjaNeural Female", "de-DE-ConradNeural Male","el-GR-AthinaNeural Female","el-GR-NestorasNeural Male","en-AU-NatashaNeural Female","en-AU-WilliamNeural Male", "en-CA-ClaraNeural Female","en-CA-LiamNeural Male","en-GB-LibbyNeural Female","en-GB-MiaNeural Female","en-GB-RyanNeural Male", "en-HK-SamNeural Male","en-HK-YanNeural Female","en-IE-ConnorNeural Male","en-IE-EmilyNeural Female","en-IN-NeerjaNeural Female", "en-IN-PrabhatNeural Male","en-NZ-MitchellNeural Male","en-NZ-MollyNeural 
Female","en-PH-JamesNeural Male","en-PH-RosaNeural Female", "en-SG-LunaNeural Female","en-SG-WayneNeural Male","en-US-JennyNeural Female","en-US-JennyMultilingualNeural Female","en-US-GuyNeural Male", "en-US-AriaNeural Female","en-US-AmberNeural Female","en-US-AnaNeural Female","en-US-AshleyNeural Female","en-US-BrandonNeural Male", "en-US-ChristopherNeural Male","en-US-CoraNeural Female","en-US-ElizabethNeural Female","en-US-EricNeural Male","en-US-JacobNeural Male", "en-US-MichelleNeural Female","en-US-MonicaNeural Female","en-ZA-LeahNeural Female","en-ZA-LukeNeural Male","es-AR-ElenaNeural Female", "es-AR-TomasNeural Male","es-CO-GonzaloNeural Male","es-CO-SalomeNeural Female","es-ES-AlvaroNeural Male","es-ES-ElviraNeural Female", "es-MX-DaliaNeural Female","es-MX-JorgeNeural Male","es-US-AlonsoNeural Male","es-US-PalomaNeural Female","et-EE-AnuNeural Female", "et-EE-KertNeural Male","fi-FI-SelmaNeural Female","fi-FI-HarriNeural Male","fi-FI-NooraNeural Female","fr-BE-CharlineNeural Female", "fr-BE-GerardNeural Male","fr-CA-SylvieNeural Female","fr-CA-AntoineNeural Male","fr-CA-JeanNeural Male","fr-CH-ArianeNeural Female", "fr-CH-FabriceNeural Male","fr-FR-DeniseNeural Female","fr-FR-HenriNeural Male","ga-IE-ColmNeural Male","ga-IE-OrlaNeural Female", "gu-IN-DhwaniNeural Female","gu-IN-NiranjanNeural Male","he-IL-AvriNeural Male","he-IL-HilaNeural Male","hi-IN-MadhurNeural Male", "hi-IN-SwaraNeural Female","hr-HR-GabrijelaNeural Female","hr-HR-SreckoNeural Male","hu-HU-NoemiNeural Female","hu-HU-TamasNeural Male", "id-ID-ArdiNeural Female","id-ID-GadisNeural Male","it-IT-IsabellaNeural Female","it-IT-DiegoNeural Male","it-IT-ElsaNeural Female", "ja-JP-NanamiNeural Female","ja-JP-KeitaNeural Male","ko-KR-SunHiNeural Female","ko-KR-InJoonNeural Male","lt-LT-LeonasNeural Male", "lt-LT-OnaNeural Female","lv-LV-EveritaNeural Female","lv-LV-NilsNeural Male","mr-IN-AarohiNeural Female","mr-IN-ManoharNeural Male", "ms-MY-OsmanNeural Male","ms-MY-YasminNeural 
Female","mt-MT-GraceNeural Female","mt-MT-JosephNeural Male","nb-NO-PernilleNeural Female", "nb-NO-FinnNeural Male","nb-NO-IselinNeural Female","nl-BE-ArnaudNeural Male","nl-BE-DenaNeural Female","nl-NL-ColetteNeural Female", "nl-NL-FennaNeural Female","nl-NL-MaartenNeural Male","pl-PL-AgnieszkaNeural Female","pl-PL-MarekNeural Male","pl-PL-ZofiaNeural Female", "pt-BR-FranciscaNeural Female","pt-BR-AntonioNeural Male","pt-PT-DuarteNeural Male","pt-PT-FernandaNeural Female","pt-PT-RaquelNeural Female", "ro-RO-AlinaNeural Female","ro-RO-EmilNeural Male","ru-RU-SvetlanaNeural Female","ru-RU-DariyaNeural Female","ru-RU-DmitryNeural Male", "sk-SK-LukasNeural Male","sk-SK-ViktoriaNeural Female","sl-SI-PetraNeural Female","sl-SI-RokNeural Male","sv-SE-SofieNeural Female", "sv-SE-HilleviNeural Female","sv-SE-MattiasNeural Male","sw-KE-RafikiNeural Male","sw-KE-ZuriNeural Female","ta-IN-PallaviNeural Female", "ta-IN-ValluvarNeural Male","te-IN-MohanNeural Male","te-IN-ShrutiNeural Female","th-TH-PremwadeeNeural Female","th-TH-AcharaNeural Female", "th-TH-NiwatNeural Male","tr-TR-AhmetNeural Male","tr-TR-EmelNeural Female","uk-UA-OstapNeural Male","uk-UA-PolinaNeural Female", "ur-PK-AsadNeural Male","ur-PK-UzmaNeural Female","vi-VN-HoaiMyNeural Female","vi-VN-NamMinhNeural Male","zh-CN-XiaoxiaoNeural Female", "zh-CN-YunyangNeural Male","zh-CN-XiaohanNeural Female","zh-CN-XiaomoNeural Female","zh-CN-XiaoruiNeural Female","zh-CN-XiaoxuanNeural Female", "zh-CN-XiaoyouNeural Female","zh-CN-YunxiNeural Male","zh-CN-YunyeNeural Male","zh-HK-HiuMaanNeural Female","zh-HK-HiuGaaiNeural Female", "zh-HK-WanLungNeural Male","zh-TW-HsiaoChenNeural Female","zh-TW-HsiaoYuNeural Female","zh-TW-YunJhe<NAME>"] speakers = defaultdict(list) for lang, code in lang2code.items(): relevant_speakers = [] for s in all_speakers: if code in s: relevant_speakers.append(s) speakers[lang] = relevant_speakers # Internal Cell # class TextDicts: # langs = ["arabic", "bengali", "chinese", "czech", "danish", 
"dutch", "english_aus", "english_ind", # "english_uk", "english_us", "filipino", "finnish", "french_canada", "french", "german", # "greek", "gujarati", "hindi", "hungarian", "indonesian", "italian", "japanese", "kannada", # "korean", "malayalam", "mandarin", "mandarin_taiwan", "norwegian", "polish", "portuguese_brazil", "portuguese", # "russian", "slovak", "spanish", "swedish", "tamil", "telugu", "thai", "turkish", "ukrainian"] # codes = ["ar-XA", "bn-IN", "yue-HK", "cs-CZ", "da-DK", "nl-NL", "en-AU", "en-IN", "en-GB", # "en-US", "fil-PH", "fi-FI", "fr-CA", "fr-FR", "de-DE", "el-GR", "gu-IN", "hi-IN", # "hu-HU", "id-ID", "it-IT", "ja-JP", "kn-IN", "ko-KR", "ml-IN", "cmn-CN", "cmn-TW", "nb-NO", # "pl-PL", "pt-BR", "pt-PT", "ru-RU", "sk-SK", "es-ES", "sv-SE", "ta-IN", "te-IN", # "th-TH", "tr-TR", "uk-UA", "vi-VN"] # lang2code = dict(zip(langs, codes)) # speakers = { # "arabic": ["ar-XA-Wavenet-A FEMALE","ar-XA-Wavenet-B MALE","ar-XA-Wavenet-C MALE","ar-XA-Standard-A FEMALE","ar-XA-Standard-B MALE","ar-XA-Standard-C MALE","ar-XA-Standard-D FEMALE"], # "bengali": ["bn-IN-Standard-A FEMALE","bn-IN-Standard-B MALE"], # "chinese": ["yue-HK-Standard-A FEMALE","yue-HK-Standard-B MALE","yue-HK-Standard-C FEMALE","yue-HK-Standard-D MALE"], # "czech": ["cs-CZ-Wavenet-A FEMALE","cs-CZ-Standard-A FEMALE"], # "danish": ["da-DK-Wavenet-A FEMALE","da-DK-Wavenet-C MALE","da-DK-Wavenet-D FEMALE","da-DK-Wavenet-E FEMALE","da-DK-Standard-A FEMALE","da-DK-Standard-C MALE","da-DK-Standard-D FEMALE","da-DK-Standard-E FEMALE"], # "dutch": ["nl-NL-Wavenet-A FEMALE","nl-NL-Wavenet-B MALE","nl-NL-Wavenet-C MALE","nl-NL-Wavenet-D FEMALE","nl-NL-Wavenet-E FEMALE","nl-NL-Standard-A FEMALE","nl-NL-Standard-B MALE","nl-NL-Standard-C MALE","nl-NL-Standard-D FEMALE","nl-NL-Standard-E FEMALE"], # "english_aus": ["en-AU-Wavenet-A FEMALE","en-AU-Wavenet-B MALE","en-AU-Wavenet-C FEMALE","en-AU-Wavenet-D MALE","en-AU-Standard-A FEMALE","en-AU-Standard-B MALE","en-AU-Standard-C FEMALE","en-AU-Standard-D 
MALE"], # "english_ind": ["en-IN-Wavenet-A FEMALE","en-IN-Wavenet-B MALE","en-IN-Wavenet-C MALE","en-IN-Wavenet-D FEMALE","en-IN-Standard-A FEMALE","en-IN-Standard-B MALE","en-IN-Standard-C MALE","en-IN-Standard-D FEMALE"], # "english_uk": ["en-GB-Wavenet-A FEMALE","en-GB-Wavenet-B MALE","en-GB-Wavenet-C FEMALE","en-GB-Wavenet-D MALE","en-GB-Wavenet-F FEMALE","en-GB-Standard-A FEMALE","en-GB-Standard-B MALE","en-GB-Standard-C FEMALE","en-GB-Standard-D MALE","en-GB-Standard-F FEMALE"], # "english_us": ["en-US-Wavenet-A MALE","en-US-Wavenet-B MALE","en-US-Wavenet-C FEMALE","en-US-Wavenet-D MALE","en-US-Wavenet-E FEMALE","en-US-Wavenet-F FEMALE","en-US-Wavenet-G FEMALE","en-US-Wavenet-H FEMALE","en-US-Wavenet-I MALE","en-US-Wavenet-J MALE" ,"en-US-Standard-B MALE","en-US-Standard-C FEMALE","en-US-Standard-D MALE","en-US-Standard-E FEMALE","en-US-Standard-G FEMALE","en-US-Standard-H FEMALE","en-US-Standard-I MALE","en-US-Standard-J MALE"], # "filipino": ["fil-PH-Wavenet-A FEMALE","fil-PH-Wavenet-B FEMALE","fil-PH-Wavenet-C MALE","fil-PH-Wavenet-D MALE","fil-PH-Standard-A FEMALE","fil-PH-Standard-B FEMALE","fil-PH-Standard-C MALE","fil-PH-Standard-D MALE"], # "finnish": ["fi-FI-Wavenet-A FEMALE","fi-FI-Standard-A FEMALE"], # "french_canada": ["fr-CA-Wavenet-A FEMALE","fr-CA-Wavenet-B MALE","fr-CA-Wavenet-C FEMALE","fr-CA-Wavenet-D MALE","fr-CA-Standard-A FEMALE","fr-CA-Standard-B MALE","fr-CA-Standard-C FEMALE","fr-CA-Standard-D MALE"], # "french": ["fr-FR-Wavenet-A FEMALE","fr-FR-Wavenet-B MALE","fr-FR-Wavenet-C FEMALE","fr-FR-Wavenet-D MALE","fr-FR-Wavenet-E FEMALE","fr-FR-Standard-A FEMALE","fr-FR-Standard-B MALE","fr-FR-Standard-C FEMALE","fr-FR-Standard-D MALE","fr-FR-Standard-E FEMALE"], # "german": ["de-DE-Wavenet-A FEMALE","de-DE-Wavenet-B MALE","de-DE-Wavenet-C FEMALE","de-DE-Wavenet-D MALE","de-DE-Wavenet-E MALE","de-DE-Wavenet-F FEMALE","de-DE-Standard-A FEMALE","de-DE-Standard-B MALE","de-DE-Standard-E MALE","de-DE-Standard-F FEMALE"], # "greek": 
["el-GR-Wavenet-A FEMALE","el-GR-Standard-A FEMALE"], # "gujarati": ["gu-IN-Standard-A FEMALE","gu-IN-Standard-B MALE"], # "hindi": ["hi-IN-Wavenet-A FEMALE","hi-IN-Wavenet-B MALE","hi-IN-Wavenet-C MALE","hi-IN-Wavenet-D FEMALE","hi-IN-Standard-A FEMALE","hi-IN-Standard-B MALE","hi-IN-Standard-C MALE","hi-IN-Standard-D FEMALE"], # "hungarian": ["hu-HU-Wavenet-A FEMALE","hu-HU-Standard-A FEMALE"], # "indonesian": ["id-ID-Wavenet-A FEMALE","id-ID-Wavenet-B MALE","id-ID-Wavenet-C MALE","id-ID-Wavenet-D FEMALE","id-ID-Standard-A FEMALE","id-ID-Standard-B MALE","id-ID-Standard-C MALE","id-ID-Standard-D FEMALE"], # "italian": ["it-IT-Wavenet-A FEMALE","it-IT-Wavenet-B FEMALE","it-IT-Wavenet-C MALE","it-IT-Wavenet-D MALE","it-IT-Standard-A FEMALE","it-IT-Standard-B FEMALE","it-IT-Standard-C MALE","it-IT-Standard-D MALE"], # "japanese": ["ja-JP-Wavenet-A FEMALE","ja-JP-Wavenet-B FEMALE","ja-JP-Wavenet-C MALE","ja-JP-Wavenet-D MALE","ja-JP-Standard-A FEMALE","ja-JP-Standard-B FEMALE","ja-JP-Standard-C MALE","ja-JP-Standard-D MALE"], # "kannada": ["kn-IN-Standard-A FEMALE","kn-IN-Standard-B MALE"], # "korean": ["ko-KR-Wavenet-A FEMALE","ko-KR-Wavenet-B FEMALE","ko-KR-Wavenet-C MALE","ko-KR-Wavenet-D MALE","ko-KR-Standard-A FEMALE","ko-KR-Standard-B FEMALE","ko-KR-Standard-C MALE","ko-KR-Standard-D MALE"], # "malayalam": ["ml-IN-Standard-A FEMALE","ml-IN-Standard-B MALE"], # "mandarin": ["cmn-CN-Wavenet-A FEMALE","cmn-CN-Wavenet-B MALE","cmn-CN-Wavenet-C MALE","cmn-CN-Wavenet-D FEMALE", "cmn-CN-Standard-A FEMALE","cmn-CN-Standard-B MALE","cmn-CN-Standard-C MALE","cmn-CN-Standard-D FEMALE"], # "mandarin_taiwan": ["cmn-TW-Wavenet-A FEMALE","cmn-TW-Wavenet-B MALE","cmn-TW-Wavenet-C MALE", "cmn-TW-Standard-A FEMALE","cmn-TW-Standard-B MALE","cmn-TW-Standard-C MALE"], # "norwegian": ["nb-NO-Wavenet-A FEMALE","nb-NO-Wavenet-B MALE","nb-no-Wavenet-E FEMALE","nb-NO-Wavenet-C FEMALE","nb-NO-Wavenet-D MALE","nb-NO-Standard-A FEMALE","nb-NO-Standard-B MALE","nb-NO-Standard-C 
FEMALE","nb-NO-Standard-D MALE","nb-no-Standard-E FEMALE"], # "polish": ["pl-PL-Wavenet-A FEMALE","pl-PL-Wavenet-B MALE","pl-PL-Wavenet-C MALE","pl-PL-Wavenet-D FEMALE","pl-PL-Wavenet-E FEMALE","pl-PL-Standard-A FEMALE","pl-PL-Standard-B MALE","pl-PL-Standard-C MALE","pl-PL-Standard-D FEMALE","pl-PL-Standard-E FEMALE"], # "portuguese_brazil": ["pt-BR-Wavenet-A FEMALE","pt-BR-Standard-A FEMALE"], # "portuguese": ["pt-PT-Wavenet-A FEMALE","pt-PT-Wavenet-B MALE","pt-PT-Wavenet-C MALE","pt-PT-Wavenet-D FEMALE","pt-PT-Standard-A FEMALE","pt-PT-Standard-B MALE","pt-PT-Standard-C MALE","pt-PT-Standard-D FEMALE"], # "russian": ["ru-RU-Wavenet-A FEMALE","ru-RU-Wavenet-B MALE","ru-RU-Wavenet-C FEMALE","ru-RU-Wavenet-D MALE","ru-RU-Wavenet-E FEMALE","ru-RU-Standard-A FEMALE","ru-RU-Standard-B MALE","ru-RU-Standard-C FEMALE","ru-RU-Standard-D MALE","ru-RU-Standard-E FEMALE"], # "slovak": ["sk-SK-Wavenet-A FEMALE","sk-SK-Standard-A FEMALE"], # "spanish": ["es-ES-Wavenet-B MALE","es-ES-Standard-A FEMALE","es-ES-Standard-B MALE"], # "swedish": ["sv-SE-Wavenet-A FEMALE","sv-SE-Standard-A FEMALE"], # "tamil": ["ta-IN-Standard-A FEMALE","ta-IN-Standard-B MALE"], # "telugu": ["te-IN-Standard-A FEMALE","te-IN-Standard-B MALE"], # "thai": ["th-TH-Standard-A FEMALE"], # "turkish": ["tr-TR-Wavenet-A FEMALE","tr-TR-Wavenet-B MALE","tr-TR-Wavenet-C FEMALE","tr-TR-Wavenet-D FEMALE","tr-TR-Wavenet-E MALE","tr-TR-Standard-A FEMALE","tr-TR-Standard-B MALE","tr-TR-Standard-C FEMALE","tr-TR-Standard-D FEMALE","tr-TR-Standard-E MALE"], # "ukrainian": ["uk-UA-Wavenet-A FEMALE","uk-UA-Standard-A FEMALE"], # "vietnamese": ["vi-VN-Wavenet-A FEMALE","vi-VN-Wavenet-B MALE","vi-VN-Wavenet-C FEMALE","vi-VN-Wavenet-D MALE","vi-VN-Standard-A FEMALE FEMALE","vi-VN-Standard-B MALE","vi-VN-Standard-C FEMALE","vi-VN-Standard-D MALE"] # } # Cell def to_hhmmss(x: int) -> str: """Convert time from secs (int) to hh:mm:ss (str). 
""" if not x >= 0: raise Exception(f'seconds cannot be negative, got {x}') return time.strftime("%H:%M:%S", time.gmtime(x)) # Cell def to_secs(x: str) -> int: """Convert time from hh:mm:ss (str) format to seconds (int). """ h, m, s = x.split(':') return int(h) * 3600 + int(m) * 60 + int(s) # Internal Cell def _remove_duplicate(outfile): if Path(outfile).exists(): os.remove(f'{outfile}') # Internal Cell def _exists(x): return Path(x).exists() # Cell def display_video(video): return Video(video, height = 400, width = 400) # Cell def check_resolution(video: Union[str, Path]) -> Dict: """Check the resolution of a video. """ try: vid = cv2.VideoCapture(video) h, w = vid.get(cv2.CAP_PROP_FRAME_HEIGHT), vid.get(cv2.CAP_PROP_FRAME_WIDTH) return {'height': int(h), 'width': int(w)} except Exception as e: raise ValueError(e) # Cell def check_fps(video: Union[str, Path], round_res = False) -> float: """Get the fps of a video """ reader = imageio.get_reader(video) fps = reader.get_meta_data()['fps'] return fps if not round_res else round(fps) # Cell def play_audio(audio): return Audio(audio) # Internal Cell def _get_parts(x): x = Path(x) ext = x.suffix[1:] return x, ext # Internal Cell def _read_audio(x, ext): return AudioSegment.from_file(x, ext) # Cell def change_audio_format(audio: Union[str, Path], outfile: Union[str, Path]) -> None: """Change the format of audio file. Example, converting mp3 to wav. Works with all formats supported by ffmpeg. """ _remove_duplicate(outfile) outfile, o_ext = _get_parts(outfile) audio, ext = _get_parts(audio) f = _read_audio(audio, ext) f.export(outfile, format = o_ext) # Cell def trim_audio(audio: Union[str, Path], start_time: int, end_time: int, outfile: Union[str, Path] = 'trimmed_audio.mp3') -> None: """Trim an audio file. Start and end times are in seconds. Works with all formats supported by ffmpeg. 
""" _remove_duplicate(outfile) outfile, o_ext = _get_parts(outfile) audio, ext = _get_parts(audio) f = _read_audio(audio, ext) start_time = start_time * 1000 end_time = end_time * 1000 f = f[start_time:end_time] f.export(outfile, format = o_ext) return outfile # Cell def change_volume(audio: Union[str, Path], vol, outfile = 'changed_vol.mp3'): """Increase or decrease the volume of an audio by 'vol' dB. """ _remove_duplicate(outfile) outfile, o_ext = _get_parts(outfile) audio, ext = _get_parts(audio) f = _read_audio(audio, ext) f += vol f.export(outfile, format = o_ext) return outfile # Cell def loop_audio(audio, times = 2, outfile = 'looped_audio.mp3'): """Loop an audio `times` times. """ _remove_duplicate(outfile) outfile, o_ext = _get_parts(outfile) audio, ext = _get_parts(audio) f = _read_audio(audio, ext) f *= times f.export(outfile, format = o_ext) return outfile # Cell def concat_audios(audio, other_audios, outfile = 'concat_audios.mp3'): """concat audios. Pass a main audio and one or more (list of audios) to concat """ _remove_duplicate(outfile) outfile, o_ext = _get_parts(outfile) audio, ext = _get_parts(audio) f = _read_audio(audio, ext) if not isinstance(other_audios, list): other_audios = [other_audios] for aud in other_audios: aud, extn = _get_parts(aud) f_ = _read_audio(aud, extn) f += f_ f.export(outfile, format = o_ext) return outfile
[ "os.remove", "IPython.core.display.Video", "time.gmtime", "IPython.display.Audio", "collections.defaultdict", "cv2.VideoCapture", "pathlib.Path", "imageio.get_reader", "pydub.AudioSegment.from_file" ]
[((8700, 8717), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (8711, 8717), False, 'from collections import defaultdict\n'), ((17886, 17921), 'IPython.core.display.Video', 'Video', (['video'], {'height': '(400)', 'width': '(400)'}), '(video, height=400, width=400)\n', (17891, 17921), False, 'from IPython.core.display import Video\n'), ((18402, 18427), 'imageio.get_reader', 'imageio.get_reader', (['video'], {}), '(video)\n', (18420, 18427), False, 'import imageio\n'), ((18554, 18566), 'IPython.display.Audio', 'Audio', (['audio'], {}), '(audio)\n', (18559, 18566), False, 'from IPython.display import Audio\n'), ((18611, 18618), 'pathlib.Path', 'Path', (['x'], {}), '(x)\n', (18615, 18618), False, 'from pathlib import Path\n'), ((18713, 18743), 'pydub.AudioSegment.from_file', 'AudioSegment.from_file', (['x', 'ext'], {}), '(x, ext)\n', (18735, 18743), False, 'from pydub import AudioSegment\n'), ((17484, 17498), 'time.gmtime', 'time.gmtime', (['x'], {}), '(x)\n', (17495, 17498), False, 'import time\n'), ((17764, 17787), 'os.remove', 'os.remove', (['f"""{outfile}"""'], {}), "(f'{outfile}')\n", (17773, 17787), False, 'import os\n'), ((18060, 18083), 'cv2.VideoCapture', 'cv2.VideoCapture', (['video'], {}), '(video)\n', (18076, 18083), False, 'import cv2\n'), ((17740, 17753), 'pathlib.Path', 'Path', (['outfile'], {}), '(outfile)\n', (17744, 17753), False, 'from pathlib import Path\n'), ((17828, 17835), 'pathlib.Path', 'Path', (['x'], {}), '(x)\n', (17832, 17835), False, 'from pathlib import Path\n')]
# Copyright (c) 2018 <NAME>, <NAME> # All rights reserved. # # Use of this source code is governed by a BSD-style license that can be found in the LICENSE file. import mSCM import sys import numpy as np from numpy.random import choice from numpy.random import seed import random nbr = int(sys.argv[1]) random.seed(nbr) np.random.seed(nbr) # change the following to point to the directory in which you have cloned the code repository rootdir = "~/vcs/sigmasep" # change the following to point to the directory in which you want to save the output outdir = "/dev/shm/jmooij1/sigmasep" # change the following to point to the directory in which clingo lives clingodir = "/zfs/ivi/causality/opt/clingo-4.5.4-linux-x86_64/" for nbr_do in range(6): mSCM.sample_mSCM_run_all_and_save( d=5,k=2,p=0.3,m=0,nbr=nbr,add_ind_noise_to_A=False, add_ind_noise_to_W=True, include_latent=True, folderpath=outdir+"/mSCM_data/experiment_"+str(nbr_do)+"/", AF=[np.tanh],SC=[1],NOI=['normal'],SD=[1],n=10000, AL =[0.001],MUL=[1000],infty=1000,nbr_do=nbr_do,max_do=1,do_strategy=2, clingodir=clingodir, aspdir=rootdir+"/ASP/" )
[ "numpy.random.seed", "random.seed" ]
[((305, 321), 'random.seed', 'random.seed', (['nbr'], {}), '(nbr)\n', (316, 321), False, 'import random\n'), ((322, 341), 'numpy.random.seed', 'np.random.seed', (['nbr'], {}), '(nbr)\n', (336, 341), True, 'import numpy as np\n')]
import torch import torch.nn as nn class Swish(nn.Module): def __init__(self): super().__init__() def forward(self, x): return x * torch.sigmoid(x) """ class Swish(nn.Module): def forward(self, input): return (input * torch.sigmoid(input)) def __repr__(self): return self.__class__.__name__ + ' ()' """
[ "torch.sigmoid" ]
[((161, 177), 'torch.sigmoid', 'torch.sigmoid', (['x'], {}), '(x)\n', (174, 177), False, 'import torch\n')]
import os import json import socket import logging import asyncio from typing import List from discord import Forbidden from discord.ext import commands from bot import constants from bot.utils.embed_handler import info, thumbnail, success from bot.utils.members import get_member_activity, get_member_status from bot.utils.checks import check_if_it_is_tortoise_guild, tortoise_bot_developer_only from bot.utils.exceptions import ( EndpointNotFound, EndpointBadArguments, EndpointError, EndpointSuccess, InternalServerError, DiscordIDNotFound ) logger = logging.getLogger(__name__) # Keys are endpoint names, values are their functions to be called. _endpoints_mapping = {} buffer_size = 255 maximum_buffer = 10240 def endpoint_register(*, endpoint_key: str = None): """ Decorator to register new socket endpoint. Both sync and async functions can be registered. If endpoint_key is not passed then the name of decorated function is used. Endpoint function return is optional, if there is a return then that return is passed back as key `data` to client, this is dealt in process_request function. Default return is EndpointSuccess().response , see process_request. In case of error, decorated function should raise one of the EndpointError sub-types. If it doesn't explicitly raise but error does happen it is handled in process_request and appropriate response code will be returned to client, this is dealt in process_request function. :param endpoint_key: optional name to use as endpoint key. """ def decorator(function): nonlocal endpoint_key if not endpoint_key: endpoint_key = function.__name__ if endpoint_key in _endpoints_mapping: raise Exception(f"Endpoint {endpoint_key} already registered.") _endpoints_mapping[endpoint_key] = function def wrapper(*args, **kwargs): # Both sync and async support. 
async_function = asyncio.coroutine(function) loop = asyncio.get_event_loop() loop.run_until_complete(async_function(*args, **kwargs)) return wrapper return decorator class SocketCommunication(commands.Cog): """ Cog dealing with socket communication between the bot and website server. How to register new endpoint: Just decorate it with @endpoint_register Read the docstring of that decorator to know what your endpoint should return/raise. """ def __init__(self, bot): self.bot = bot self.tortoise_guild = bot.get_guild(constants.tortoise_guild_id) self.verified_role = self.tortoise_guild.get_role(constants.verified_role_id) self.new_member_role = self.tortoise_guild.get_role(constants.new_member_role) self.successful_verifications_channel = bot.get_channel(constants.successful_verifications_channel_id) self.general_channel = bot.get_channel(constants.general_channel_id) self.welcome_channel = bot.get_channel(constants.welcome_channel_id) self.verified_emoji = bot.get_emoji(constants.verified_emoji_id) self.verified_clients = set() self.auth_token = os.getenv("SOCKET_AUTH_TOKEN") self._socket_server = SocketCommunication.create_server() self.task = self.bot.loop.create_task(self.run_server(self._socket_server)) def cog_unload(self): logger.debug("Unloading socket comm, closing connections.") self.task.cancel() for client in self.verified_clients: try: client.close() except OSError: # Not supported on Windows pass try: self._socket_server.shutdown(socket.SHUT_RDWR) self._socket_server.close() except OSError: # Not supported on Windows pass logger.info("Socket com unloaded.") @commands.command() @commands.check(check_if_it_is_tortoise_guild) @commands.check(tortoise_bot_developer_only) async def show_endpoints(self, ctx): await ctx.send(" ,".join(_endpoints_mapping)) @staticmethod def create_server(): logger.info("Starting socket comm server...") server = socket.socket() server.bind(("0.0.0.0", int(os.getenv("SOCKET_SERVER_PORT")))) server.listen(3) 
server.setblocking(False) logger.info("Socket comm server started.") return server async def run_server(self, server: socket.socket): while True: client, _ = await self.bot.loop.sock_accept(server) client_name = client.getpeername() logger.info(f"{client_name} connected.") self.bot.loop.create_task(self.handle_client(client, client_name)) async def handle_client(self, client, client_name: str): while True: # keep receiving client requests until he closes/disconnects request = "" while True: # buffer client request in case of long message try: buffer = (await self.bot.loop.sock_recv(client, buffer_size)).decode("utf8") request += buffer except ConnectionResetError: # If the client disconnects without sending quit. logger.info(f"{client_name} disconnected.") return if len(buffer) < buffer_size: break elif len(request) > maximum_buffer: response = EndpointError(400, "Buffer size exceeded.").response await self.send_to_client(client, json.dumps(response)) client.close() return if not request: logger.info("Empty request, closing.") break try: request = json.loads(request) except json.JSONDecodeError: response = EndpointError(400, "Not a valid JSON formatted request.").response await self.send_to_client(client, json.dumps(response)) logger.debug(f"{client_name}:{response}:{request}") continue logger.debug(f"Server got:{request}") # TODO # temporal hardcoded fix to make ping endpoint public endpoint_key = request.get("endpoint") if client not in self.verified_clients and endpoint_key != "ping": token = request.get("auth") if token is not None and token == self.auth_token: self.verified_clients.add(client) response = EndpointSuccess().response await self.send_to_client(client, json.dumps(response)) logger.info(f"{client_name} successfully authorized.") continue else: response = EndpointError(401, "Verification unsuccessful, closing conn..").response await self.send_to_client(client, json.dumps(response)) logger.debug(f"{client_name}:{response}:{request}") break response = 
await self.process_request(request) logger.debug(f"Request processed, response:{response}") await self.send_to_client(client, json.dumps(response)) logger.info(f"Closing {client_name}") self.verified_clients.discard(client) client.close() async def send_to_client(self, client, msg: str): """ Send response message to specified client. """ try: await self.bot.loop.sock_sendall(client, bytes(msg.encode("unicode_escape"))) except BrokenPipeError: # If the client closes the connection too quickly or just does't even bother listening to response we'll # get this, so just ignore pass async def process_request(self, request: dict) -> dict: """ This should be called for each client request. Parses requests and deals with any errors and responses to client. :param request: dict which has to be formatted as follows: { "endpoint": "string which endpoint to use", "data": [optional] data to be used on endpoint function (list of member IDs etc) } Endpoint is available if it was decorated with @endpoint_register """ if not isinstance(request, dict): logger.critical("Error processing socket comm, request is not a dict.") return InternalServerError().response endpoint_key = request.get("endpoint") if not endpoint_key: return EndpointError(400, "No endpoint specified.").response elif not isinstance(endpoint_key, str): return EndpointError(400, "Endpoint name has to be a string.").response function = _endpoints_mapping.get(endpoint_key) if function is None: return EndpointNotFound().response endpoint_data = request.get("data") try: # Key data is optional if not endpoint_data: endpoint_returned_data = await function(self) else: endpoint_returned_data = await function(self, endpoint_data) except TypeError as e: logger.critical(f"Bad arguments for endpoint {endpoint_key} {endpoint_data} {e}") return EndpointBadArguments().response except EndpointError as e: # If endpoint function raises then return it's response return e.response except Exception as e: logger.critical(f"Error processing 
socket endpoint: {endpoint_key} , data:{endpoint_data} {e}") return InternalServerError().response # If we've come all the way here then no errors occurred and endpoint function executed correctly. server_response = EndpointSuccess().response # Endpoint return data is optional if endpoint_returned_data is None: return server_response else: server_response.update({"data": endpoint_returned_data}) return endpoint_returned_data @endpoint_register(endpoint_key="send") async def send(self, data: dict): """ Makes the bot send requested message channel or user or both. :param data: dict in format { "channel_id": 123, "user_id": 123, "message": "Test" } Where both channel_id and user_id are optional but at least one has to be passed. Message is the message to send. """ message = data.get("message") if message is None: raise EndpointBadArguments() channel_id = data.get("channel_id") user_id = data.get("user_id") if channel_id is None and user_id is None: raise EndpointBadArguments() channel = self.bot.get_channel(channel_id) user = self.bot.get_user(user_id) if channel is None and user is None: raise DiscordIDNotFound() if channel is not None: await channel.send(embed=thumbnail(message, self.bot.user)) if user is not None: try: await user.send(embed=thumbnail(message, self.bot.user, "A message just for you!")) except Forbidden: logger.info(f"Skipping send endpoint to {user} as he blocked DMs.") @endpoint_register(endpoint_key="member_activities") async def get_member_data(self, members: List[int]) -> dict: """ Gets activities and top role from all members passed in param members. :param members: list of member ids to get activity and top role from :return: dict in form: { 'status': 200, 'data': { 'member_id': {"activity": "bla_bla", "top_role": "role name"}, ... 
} } """ response_data = {} logger.debug(f"Processing members: {members}") for member_id in members: member = self.tortoise_guild.get_member(member_id) member_data = {"activity": "NOT FOUND", "top_role": "NOT FOUND"} if member is None: logger.debug(f"Member {member_id} not found.") response_data[member_id] = member_data continue activity = get_member_activity(member) if activity is None: activity = get_member_status(member) member_data["activity"] = activity member_data["top_role"] = member.top_role.name response_data[member_id] = member_data return_data = {"data": response_data} return return_data @endpoint_register(endpoint_key="verify") async def verify_member(self, member_id: str): """ Adds verified role to the member and also sends success messages. :param member_id: str member id to verify """ try: member_id = int(member_id) except ValueError: raise EndpointBadArguments() none_checks = ( self.tortoise_guild, self.verified_role, self.new_member_role, self.successful_verifications_channel, self.welcome_channel ) for check_none in none_checks: if check_none is None: logger.warning(f"One of necessary IDs was not found {none_checks}") raise DiscordIDNotFound() # Attempt to fix bug with verification where sometimes member is not found in cache even if they are in guild tortoise_guild = self.bot.get_guild(constants.tortoise_guild_id) member = tortoise_guild.get_member(member_id) if member is None: logger.critical(f"Can't verify, member is not found in guild {member} {member_id}") raise DiscordIDNotFound() await member.add_roles(self.verified_role, self.new_member_role, reason="Completed Oauth2 Verification") await self.successful_verifications_channel.send( embed=info(f"{member} is now verified.", member.guild.me, title="") ) msg = ( f"You are now verified {self.verified_emoji}\n\n" f"Make sure to read {self.welcome_channel.mention}" ) await self.general_channel.send( member.mention, embed=info(f"Say hi to our newest member {member.mention}", member.guild.me, 
title=""), delete_after=100 ) await member.send(embed=success(msg)) @endpoint_register() async def contact(self, data: dict): """ Sends request data to website log channel. :param data: dict data from the request """ guild = self.bot.get_guild(constants.tortoise_guild_id) website_log_channel = guild.get_channel(constants.website_log_channel_id) for check_none in (guild, website_log_channel): if check_none is None: raise DiscordIDNotFound() await website_log_channel.send(f"{data}") @endpoint_register() async def signal_update(self, signal: str): """ Signals the bot it should update something locally like cache by fetching it from database. :param signal: can be: 'rules' signals updating rules 'server_meta' signals updating server meta """ # Don not await here as API is waiting for response, (for some reason it sends signal and only updates db after # receiving any response). Use create_task instead. if signal == "rules": tortoise_server_cog = self.bot.get_cog("TortoiseServer") self.bot.loop.create_task(tortoise_server_cog.refresh_rules_helper()) elif signal == "server_meta": self.bot.loop.create_task(self.bot.reload_tortoise_meta_cache()) else: raise EndpointBadArguments() @endpoint_register() async def ping(self): if self.bot.is_closed(): raise EndpointError(503, "VPS online but Discord websocket closed.") def setup(bot): bot.add_cog(SocketCommunication(bot))
[ "bot.utils.embed_handler.info", "bot.utils.exceptions.EndpointSuccess", "discord.ext.commands.check", "socket.socket", "bot.utils.exceptions.EndpointError", "json.dumps", "bot.utils.embed_handler.success", "bot.utils.embed_handler.thumbnail", "discord.ext.commands.command", "asyncio.coroutine", "json.loads", "bot.utils.exceptions.EndpointNotFound", "asyncio.get_event_loop", "bot.utils.exceptions.EndpointBadArguments", "bot.utils.exceptions.InternalServerError", "bot.utils.members.get_member_activity", "os.getenv", "bot.utils.members.get_member_status", "bot.utils.exceptions.DiscordIDNotFound", "logging.getLogger" ]
[((562, 589), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (579, 589), False, 'import logging\n'), ((3921, 3939), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (3937, 3939), False, 'from discord.ext import commands\n'), ((3945, 3990), 'discord.ext.commands.check', 'commands.check', (['check_if_it_is_tortoise_guild'], {}), '(check_if_it_is_tortoise_guild)\n', (3959, 3990), False, 'from discord.ext import commands\n'), ((3996, 4039), 'discord.ext.commands.check', 'commands.check', (['tortoise_bot_developer_only'], {}), '(tortoise_bot_developer_only)\n', (4010, 4039), False, 'from discord.ext import commands\n'), ((3188, 3218), 'os.getenv', 'os.getenv', (['"""SOCKET_AUTH_TOKEN"""'], {}), "('SOCKET_AUTH_TOKEN')\n", (3197, 3218), False, 'import os\n'), ((4250, 4265), 'socket.socket', 'socket.socket', ([], {}), '()\n', (4263, 4265), False, 'import socket\n'), ((1988, 2015), 'asyncio.coroutine', 'asyncio.coroutine', (['function'], {}), '(function)\n', (2005, 2015), False, 'import asyncio\n'), ((2035, 2059), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (2057, 2059), False, 'import asyncio\n'), ((9983, 10000), 'bot.utils.exceptions.EndpointSuccess', 'EndpointSuccess', ([], {}), '()\n', (9998, 10000), False, 'from bot.utils.exceptions import EndpointNotFound, EndpointBadArguments, EndpointError, EndpointSuccess, InternalServerError, DiscordIDNotFound\n'), ((10782, 10804), 'bot.utils.exceptions.EndpointBadArguments', 'EndpointBadArguments', ([], {}), '()\n', (10802, 10804), False, 'from bot.utils.exceptions import EndpointNotFound, EndpointBadArguments, EndpointError, EndpointSuccess, InternalServerError, DiscordIDNotFound\n'), ((10958, 10980), 'bot.utils.exceptions.EndpointBadArguments', 'EndpointBadArguments', ([], {}), '()\n', (10978, 10980), False, 'from bot.utils.exceptions import EndpointNotFound, EndpointBadArguments, EndpointError, EndpointSuccess, InternalServerError, 
DiscordIDNotFound\n'), ((11139, 11158), 'bot.utils.exceptions.DiscordIDNotFound', 'DiscordIDNotFound', ([], {}), '()\n', (11156, 11158), False, 'from bot.utils.exceptions import EndpointNotFound, EndpointBadArguments, EndpointError, EndpointSuccess, InternalServerError, DiscordIDNotFound\n'), ((12484, 12511), 'bot.utils.members.get_member_activity', 'get_member_activity', (['member'], {}), '(member)\n', (12503, 12511), False, 'from bot.utils.members import get_member_activity, get_member_status\n'), ((13968, 13987), 'bot.utils.exceptions.DiscordIDNotFound', 'DiscordIDNotFound', ([], {}), '()\n', (13985, 13987), False, 'from bot.utils.exceptions import EndpointNotFound, EndpointBadArguments, EndpointError, EndpointSuccess, InternalServerError, DiscordIDNotFound\n'), ((16148, 16210), 'bot.utils.exceptions.EndpointError', 'EndpointError', (['(503)', '"""VPS online but Discord websocket closed."""'], {}), "(503, 'VPS online but Discord websocket closed.')\n", (16161, 16210), False, 'from bot.utils.exceptions import EndpointNotFound, EndpointBadArguments, EndpointError, EndpointSuccess, InternalServerError, DiscordIDNotFound\n'), ((5890, 5909), 'json.loads', 'json.loads', (['request'], {}), '(request)\n', (5900, 5909), False, 'import json\n'), ((8618, 8639), 'bot.utils.exceptions.InternalServerError', 'InternalServerError', ([], {}), '()\n', (8637, 8639), False, 'from bot.utils.exceptions import EndpointNotFound, EndpointBadArguments, EndpointError, EndpointSuccess, InternalServerError, DiscordIDNotFound\n'), ((8746, 8790), 'bot.utils.exceptions.EndpointError', 'EndpointError', (['(400)', '"""No endpoint specified."""'], {}), "(400, 'No endpoint specified.')\n", (8759, 8790), False, 'from bot.utils.exceptions import EndpointNotFound, EndpointBadArguments, EndpointError, EndpointSuccess, InternalServerError, DiscordIDNotFound\n'), ((9038, 9056), 'bot.utils.exceptions.EndpointNotFound', 'EndpointNotFound', ([], {}), '()\n', (9054, 9056), False, 'from bot.utils.exceptions 
import EndpointNotFound, EndpointBadArguments, EndpointError, EndpointSuccess, InternalServerError, DiscordIDNotFound\n'), ((12572, 12597), 'bot.utils.members.get_member_status', 'get_member_status', (['member'], {}), '(member)\n', (12589, 12597), False, 'from bot.utils.members import get_member_activity, get_member_status\n'), ((13174, 13196), 'bot.utils.exceptions.EndpointBadArguments', 'EndpointBadArguments', ([], {}), '()\n', (13194, 13196), False, 'from bot.utils.exceptions import EndpointNotFound, EndpointBadArguments, EndpointError, EndpointSuccess, InternalServerError, DiscordIDNotFound\n'), ((13560, 13579), 'bot.utils.exceptions.DiscordIDNotFound', 'DiscordIDNotFound', ([], {}), '()\n', (13577, 13579), False, 'from bot.utils.exceptions import EndpointNotFound, EndpointBadArguments, EndpointError, EndpointSuccess, InternalServerError, DiscordIDNotFound\n'), ((15094, 15113), 'bot.utils.exceptions.DiscordIDNotFound', 'DiscordIDNotFound', ([], {}), '()\n', (15111, 15113), False, 'from bot.utils.exceptions import EndpointNotFound, EndpointBadArguments, EndpointError, EndpointSuccess, InternalServerError, DiscordIDNotFound\n'), ((16022, 16044), 'bot.utils.exceptions.EndpointBadArguments', 'EndpointBadArguments', ([], {}), '()\n', (16042, 16044), False, 'from bot.utils.exceptions import EndpointNotFound, EndpointBadArguments, EndpointError, EndpointSuccess, InternalServerError, DiscordIDNotFound\n'), ((4302, 4333), 'os.getenv', 'os.getenv', (['"""SOCKET_SERVER_PORT"""'], {}), "('SOCKET_SERVER_PORT')\n", (4311, 4333), False, 'import os\n'), ((7355, 7375), 'json.dumps', 'json.dumps', (['response'], {}), '(response)\n', (7365, 7375), False, 'import json\n'), ((8867, 8922), 'bot.utils.exceptions.EndpointError', 'EndpointError', (['(400)', '"""Endpoint name has to be a string."""'], {}), "(400, 'Endpoint name has to be a string.')\n", (8880, 8922), False, 'from bot.utils.exceptions import EndpointNotFound, EndpointBadArguments, EndpointError, EndpointSuccess, 
InternalServerError, DiscordIDNotFound\n'), ((9495, 9517), 'bot.utils.exceptions.EndpointBadArguments', 'EndpointBadArguments', ([], {}), '()\n', (9515, 9517), False, 'from bot.utils.exceptions import EndpointNotFound, EndpointBadArguments, EndpointError, EndpointSuccess, InternalServerError, DiscordIDNotFound\n'), ((9818, 9839), 'bot.utils.exceptions.InternalServerError', 'InternalServerError', ([], {}), '()\n', (9837, 9839), False, 'from bot.utils.exceptions import EndpointNotFound, EndpointBadArguments, EndpointError, EndpointSuccess, InternalServerError, DiscordIDNotFound\n'), ((14178, 14239), 'bot.utils.embed_handler.info', 'info', (['f"""{member} is now verified."""', 'member.guild.me'], {'title': '""""""'}), "(f'{member} is now verified.', member.guild.me, title='')\n", (14182, 14239), False, 'from bot.utils.embed_handler import info, thumbnail, success\n'), ((14477, 14562), 'bot.utils.embed_handler.info', 'info', (['f"""Say hi to our newest member {member.mention}"""', 'member.guild.me'], {'title': '""""""'}), "(f'Say hi to our newest member {member.mention}', member.guild.me, title=''\n )\n", (14481, 14562), False, 'from bot.utils.embed_handler import info, thumbnail, success\n'), ((14630, 14642), 'bot.utils.embed_handler.success', 'success', (['msg'], {}), '(msg)\n', (14637, 14642), False, 'from bot.utils.embed_handler import info, thumbnail, success\n'), ((5978, 6035), 'bot.utils.exceptions.EndpointError', 'EndpointError', (['(400)', '"""Not a valid JSON formatted request."""'], {}), "(400, 'Not a valid JSON formatted request.')\n", (5991, 6035), False, 'from bot.utils.exceptions import EndpointNotFound, EndpointBadArguments, EndpointError, EndpointSuccess, InternalServerError, DiscordIDNotFound\n'), ((6674, 6691), 'bot.utils.exceptions.EndpointSuccess', 'EndpointSuccess', ([], {}), '()\n', (6689, 6691), False, 'from bot.utils.exceptions import EndpointNotFound, EndpointBadArguments, EndpointError, EndpointSuccess, InternalServerError, 
DiscordIDNotFound\n'), ((6934, 6997), 'bot.utils.exceptions.EndpointError', 'EndpointError', (['(401)', '"""Verification unsuccessful, closing conn.."""'], {}), "(401, 'Verification unsuccessful, closing conn..')\n", (6947, 6997), False, 'from bot.utils.exceptions import EndpointNotFound, EndpointBadArguments, EndpointError, EndpointSuccess, InternalServerError, DiscordIDNotFound\n'), ((11229, 11262), 'bot.utils.embed_handler.thumbnail', 'thumbnail', (['message', 'self.bot.user'], {}), '(message, self.bot.user)\n', (11238, 11262), False, 'from bot.utils.embed_handler import info, thumbnail, success\n'), ((5549, 5592), 'bot.utils.exceptions.EndpointError', 'EndpointError', (['(400)', '"""Buffer size exceeded."""'], {}), "(400, 'Buffer size exceeded.')\n", (5562, 5592), False, 'from bot.utils.exceptions import EndpointNotFound, EndpointBadArguments, EndpointError, EndpointSuccess, InternalServerError, DiscordIDNotFound\n'), ((6095, 6115), 'json.dumps', 'json.dumps', (['response'], {}), '(response)\n', (6105, 6115), False, 'import json\n'), ((6755, 6775), 'json.dumps', 'json.dumps', (['response'], {}), '(response)\n', (6765, 6775), False, 'import json\n'), ((7061, 7081), 'json.dumps', 'json.dumps', (['response'], {}), '(response)\n', (7071, 7081), False, 'import json\n'), ((11349, 11409), 'bot.utils.embed_handler.thumbnail', 'thumbnail', (['message', 'self.bot.user', '"""A message just for you!"""'], {}), "(message, self.bot.user, 'A message just for you!')\n", (11358, 11409), False, 'from bot.utils.embed_handler import info, thumbnail, success\n'), ((5656, 5676), 'json.dumps', 'json.dumps', (['response'], {}), '(response)\n', (5666, 5676), False, 'import json\n')]
# 引入 sqlite 套件 import sqlite3 import numpy as np import matplotlib.pyplot as plt # %matplotlib inline #定義資料庫位置 conn = sqlite3.connect('database.db') db_connection = conn.cursor() List_Ecg_Signal = [] ## 空列表 #t查詢數據 rows = db_connection.execute("SELECT serialno,time,length,date,ecg,qrs,beat,feature,measurement,marker,scale,parameter FROM Records;") for row in rows: # print ("serialno = ", row[0]) # print ("time = ", row[1]) # print ("length = ", row[2]) # print ("date = ", row[3]) # print ("ecg = ", np.frombuffer(row[4], dtype='<f4'),"\n") # print ("qrs = ", row[5]) # print ("beat = ", row[6]) # print ("feature = ", row[7].hex()) # print ("measurement = ", row[8].hex()) # print ("marker = ", row[9].hex()) # print ("scale = ", row[10],"\n") # print ("parameter = ", binascii.hexlify(row[11])) # print("parameter = ",float.fromhex(row[11].hex()),"\n" ) # print("parameter = ", np.frombuffer(row[11], dtype=np.float32),"\n" ) List_Ecg_Signal.append(np.frombuffer(row[4], dtype='<f4')) db_connection.close()
[ "numpy.frombuffer", "sqlite3.connect" ]
[((119, 149), 'sqlite3.connect', 'sqlite3.connect', (['"""database.db"""'], {}), "('database.db')\n", (134, 149), False, 'import sqlite3\n'), ((1028, 1062), 'numpy.frombuffer', 'np.frombuffer', (['row[4]'], {'dtype': '"""<f4"""'}), "(row[4], dtype='<f4')\n", (1041, 1062), True, 'import numpy as np\n')]
import copy import cv2 import glob import json import numpy as np import os from .box_utils import compute_box_3d, boxes_to_corners_3d, get_size from .rotation import convert_angle_axis_to_matrix3 from .taxonomy import class_names, ARKitDatasetConfig def TrajStringToMatrix(traj_str): """ convert traj_str into translation and rotation matrices Args: traj_str: A space-delimited file where each line represents a camera position at a particular timestamp. The file has seven columns: * Column 1: timestamp * Columns 2-4: rotation (axis-angle representation in radians) * Columns 5-7: translation (usually in meters) Returns: ts: translation matrix Rt: rotation matrix """ # line=[float(x) for x in traj_str.split()] # ts = line[0]; # R = cv2.Rodrigues(np.array(line[1:4]))[0]; # t = np.array(line[4:7]); # Rt = np.concatenate((np.concatenate((R, t[:,np.newaxis]), axis=1), [[0.0,0.0,0.0,1.0]]), axis=0) tokens = traj_str.split() assert len(tokens) == 7 ts = tokens[0] # Rotation in angle axis angle_axis = [float(tokens[1]), float(tokens[2]), float(tokens[3])] r_w_to_p = convert_angle_axis_to_matrix3(np.asarray(angle_axis)) # Translation t_w_to_p = np.asarray([float(tokens[4]), float(tokens[5]), float(tokens[6])]) extrinsics = np.eye(4, 4) extrinsics[:3, :3] = r_w_to_p extrinsics[:3, -1] = t_w_to_p Rt = np.linalg.inv(extrinsics) return (ts, Rt) def st2_camera_intrinsics(filename): w, h, fx, fy, hw, hh = np.loadtxt(filename) return np.asarray([[fx, 0, hw], [0, fy, hh], [0, 0, 1]]) def generate_point( rgb_image, depth_image, intrinsic, subsample=1, world_coordinate=True, pose=None, ): """Generate 3D point coordinates and related rgb feature Args: rgb_image: (h, w, 3) rgb depth_image: (h, w) depth intrinsic: (3, 3) subsample: int resize stride world_coordinate: bool pose: (4, 4) matrix transfer from camera to world coordindate Returns: points: (N, 3) point cloud coordinates in world-coordinates if world_coordinate==True else in camera coordinates rgb_feat: (N, 3) rgb feature of each 
point """ intrinsic_4x4 = np.identity(4) intrinsic_4x4[:3, :3] = intrinsic u, v = np.meshgrid( range(0, depth_image.shape[1], subsample), range(0, depth_image.shape[0], subsample), ) d = depth_image[v, u] d_filter = d != 0 mat = np.vstack( ( u[d_filter] * d[d_filter], v[d_filter] * d[d_filter], d[d_filter], np.ones_like(u[d_filter]), ) ) new_points_3d = np.dot(np.linalg.inv(intrinsic_4x4), mat)[:3] if world_coordinate: new_points_3d_padding = np.vstack( (new_points_3d, np.ones((1, new_points_3d.shape[1]))) ) world_coord_padding = np.dot(pose, new_points_3d_padding) new_points_3d = world_coord_padding[:3] rgb_feat = rgb_image[v, u][d_filter] return new_points_3d.T, rgb_feat def extract_gt(gt_fn): """extract original label data Args: gt_fn: str (file name of "annotation.json") after loading, we got a dict with keys 'data', 'stats', 'comment', 'confirm', 'skipped' ['data']: a list of dict for bboxes, each dict has keys: 'uid', 'label', 'modelId', 'children', 'objectId', 'segments', 'hierarchy', 'isInGroup', 'labelType', 'attributes' 'label': str 'segments': dict for boxes 'centroid': list of float (x, y, z)? 'axesLengths': list of float (x, y, z)? 'normalizedAxes': list of float len()=9 'uid' 'comments': 'stats': ... Returns: skipped: bool skipped or not boxes_corners: (n, 8, 3) box corners **world-coordinate** centers: (n, 3) **world-coordinate** sizes: (n, 3) full-sizes (no halving!) 
labels: list of str uids: list of str """ gt = json.load(open(gt_fn, "r")) skipped = gt['skipped'] if len(gt) == 0: boxes_corners = np.zeros((0, 8, 3)) centers = np.zeros((0, 3)) sizes = np.zeros((0, 3)) labels, uids = [], [] return skipped, boxes_corners, centers, sizes, labels, uids boxes_corners = [] centers = [] sizes = [] labels = [] uids = [] for data in gt['data']: l = data["label"] for delimiter in [" ", "-", "/"]: l = l.replace(delimiter, "_") if l not in class_names: print("unknown category: %s" % l) continue rotmat = np.array(data["segments"]["obbAligned"]["normalizedAxes"]).reshape( 3, 3 ) center = np.array(data["segments"]["obbAligned"]["centroid"]).reshape(-1, 3) size = np.array(data["segments"]["obbAligned"]["axesLengths"]).reshape(-1, 3) box3d = compute_box_3d(size.reshape(3).tolist(), center, rotmat) ''' Box corner order that we return is of the format below: 6 -------- 7 /| /| 5 -------- 4 . | | | | . 2 -------- 3 |/ |/ 1 -------- 0 ''' boxes_corners.append(box3d.reshape(1, 8, 3)) size = np.array(get_size(box3d)).reshape(1, 3) center = np.mean(box3d, axis=0).reshape(1, 3) # boxes_corners.append(box3d.reshape(1, 8, 3)) centers.append(center) sizes.append(size) # labels.append(l) labels.append(data["label"]) uids.append(data["uid"]) centers = np.concatenate(centers, axis=0) sizes = np.concatenate(sizes, axis=0) boxes_corners = np.concatenate(boxes_corners, axis=0) return skipped, boxes_corners, centers, sizes, labels, uids class TenFpsDataLoader(object): def __init__( self, dataset_cfg, class_names, root_path=None, gt_path=None, logger=None, frame_rate=1, with_color_image=True, subsample=2, world_coordinate=True, ): """ Args: dataset_cfg: EasyDict() with key POINT_CLOUD_RANGE POINT_FEATURE_ENCODING DATA_PROCESSOR class_names: list of str root_path: path with all info for a scene_id color, color_2det, depth, label, vote, ... 
gt_path: xxx.json just to get correct floor height an2d_root: path to scene_id.json or None logger: frame_rate: int subsample: int world_coordinate: bool """ self.root_path = root_path # pipeline does box residual coding here self.num_class = len(class_names) self.dc = ARKitDatasetConfig() depth_folder = os.path.join(self.root_path, "lowres_depth") if not os.path.exists(depth_folder): self.frame_ids = [] else: depth_images = sorted(glob.glob(os.path.join(depth_folder, "*.png"))) self.frame_ids = [os.path.basename(x) for x in depth_images] self.frame_ids = [x.split(".png")[0].split("_")[1] for x in self.frame_ids] self.video_id = depth_folder.split('/')[-3] self.frame_ids = [x for x in self.frame_ids] self.frame_ids.sort() self.intrinsics = {} traj_file = os.path.join(self.root_path, 'lowres_wide.traj') with open(traj_file) as f: self.traj = f.readlines() # convert traj to json dict poses_from_traj = {} for line in self.traj: traj_timestamp = line.split(" ")[0] poses_from_traj[f"{round(float(traj_timestamp), 3):.3f}"] = TrajStringToMatrix(line)[1].tolist() if os.path.exists(traj_file): # self.poses = json.load(open(traj_file)) self.poses = poses_from_traj else: self.poses = {} # get intrinsics for frame_id in self.frame_ids: intrinsic_fn = os.path.join(self.root_path, "lowres_wide_intrinsics", f"{self.video_id}_{frame_id}.pincam") if not os.path.exists(intrinsic_fn): intrinsic_fn = os.path.join(self.root_path, "lowres_wide_intrinsics", f"{self.video_id}_{float(frame_id) - 0.001:.3f}.pincam") if not os.path.exists(intrinsic_fn): intrinsic_fn = os.path.join(self.root_path, "lowres_wide_intrinsics", f"{self.video_id}_{float(frame_id) + 0.001:.3f}.pincam") if not os.path.exists(intrinsic_fn): print("frame_id", frame_id) print(intrinsic_fn) self.intrinsics[frame_id] = st2_camera_intrinsics(intrinsic_fn) # # intrinsic_fn = os.path.join(self.root_path, "camera.txt") # intrinsic_fn = os.path.join(self.root_path, "color.pincam") # if os.path.exists(intrinsic_fn): # 
self.intrinsics = st2_camera_intrinsics(intrinsic_fn) # else: # self.intrinsics = None self.frame_rate = frame_rate self.subsample = subsample self.with_color_image = with_color_image self.world_coordinate = world_coordinate if gt_path is not None and os.path.exists(gt_path): skipped, gt_corners, gt_centers, gt_sizes, _, _ = extract_gt(gt_path) self.gt_corners = gt_corners self.gt_centers = gt_centers self.gt_sizes = gt_sizes else: self.gt_corners = None self.gt_centers = None self.gt_sizes = None def __iter__(self): return self def __len__(self): return len(self.frame_ids) def __getitem__(self, idx): """ Returns: frame: a dict {frame_id}: str {depth}: (h, w) {image}: (h, w) {image_path}: str {intrinsics}: np.array 3x3 {pose}: np.array 4x4 {pcd}: np.array (n, 3) in world coordinate {color}: (n, 3) """ frame_id = self.frame_ids[idx] frame = {} frame["frame_id"] = frame_id fname = "{}_{}.png".format(self.video_id, frame_id) # fname = "{}.png".format(frame_id) depth_image_path = os.path.join(self.root_path, "lowres_depth", fname) if not os.path.exists(depth_image_path): print(depth_image_path) image_path = os.path.join(self.root_path, "lowres_wide", fname) if not os.path.exists(depth_image_path): print(depth_image_path, "does not exist") frame["depth"] = cv2.imread(depth_image_path, -1) frame["image"] = cv2.imread(image_path) frame["image_path"] = image_path depth_height, depth_width = frame["depth"].shape im_height, im_width, im_channels = frame["image"].shape frame["intrinsics"] = copy.deepcopy(self.intrinsics[frame_id]) if str(frame_id) in self.poses.keys(): frame_pose = np.array(self.poses[str(frame_id)]) else: for my_key in list(self.poses.keys()): if abs(float(frame_id) - float(my_key)) < 0.005: frame_pose = np.array(self.poses[str(my_key)]) frame["pose"] = copy.deepcopy(frame_pose) im_height_scale = np.float(depth_height) / im_height im_width_scale = np.float(depth_width) / im_width if depth_height != im_height: frame["image"] = np.zeros([depth_height, 
depth_width, 3]) # 288, 384, 3 frame["image"][48 : 48 + 192, 64 : 64 + 256, :] = cv2.imread(image_path) (m, n, _) = frame["image"].shape depth_image = frame["depth"] / 1000.0 rgb_image = frame["image"] / 255.0 pcd, rgb_feat = generate_point( rgb_image, depth_image, frame["intrinsics"], self.subsample, self.world_coordinate, frame_pose, ) frame["pcd"] = pcd frame["color"] = rgb_feat return frame
[ "copy.deepcopy", "numpy.ones_like", "os.path.basename", "numpy.asarray", "numpy.zeros", "numpy.identity", "os.path.exists", "numpy.float", "numpy.ones", "cv2.imread", "numpy.mean", "numpy.linalg.inv", "numpy.loadtxt", "numpy.array", "numpy.dot", "numpy.eye", "os.path.join", "numpy.concatenate" ]
[((1363, 1375), 'numpy.eye', 'np.eye', (['(4)', '(4)'], {}), '(4, 4)\n', (1369, 1375), True, 'import numpy as np\n'), ((1453, 1478), 'numpy.linalg.inv', 'np.linalg.inv', (['extrinsics'], {}), '(extrinsics)\n', (1466, 1478), True, 'import numpy as np\n'), ((1565, 1585), 'numpy.loadtxt', 'np.loadtxt', (['filename'], {}), '(filename)\n', (1575, 1585), True, 'import numpy as np\n'), ((1597, 1646), 'numpy.asarray', 'np.asarray', (['[[fx, 0, hw], [0, fy, hh], [0, 0, 1]]'], {}), '([[fx, 0, hw], [0, fy, hh], [0, 0, 1]])\n', (1607, 1646), True, 'import numpy as np\n'), ((2341, 2355), 'numpy.identity', 'np.identity', (['(4)'], {}), '(4)\n', (2352, 2355), True, 'import numpy as np\n'), ((5908, 5939), 'numpy.concatenate', 'np.concatenate', (['centers'], {'axis': '(0)'}), '(centers, axis=0)\n', (5922, 5939), True, 'import numpy as np\n'), ((5952, 5981), 'numpy.concatenate', 'np.concatenate', (['sizes'], {'axis': '(0)'}), '(sizes, axis=0)\n', (5966, 5981), True, 'import numpy as np\n'), ((6002, 6039), 'numpy.concatenate', 'np.concatenate', (['boxes_corners'], {'axis': '(0)'}), '(boxes_corners, axis=0)\n', (6016, 6039), True, 'import numpy as np\n'), ((1222, 1244), 'numpy.asarray', 'np.asarray', (['angle_axis'], {}), '(angle_axis)\n', (1232, 1244), True, 'import numpy as np\n'), ((3004, 3039), 'numpy.dot', 'np.dot', (['pose', 'new_points_3d_padding'], {}), '(pose, new_points_3d_padding)\n', (3010, 3039), True, 'import numpy as np\n'), ((4359, 4378), 'numpy.zeros', 'np.zeros', (['(0, 8, 3)'], {}), '((0, 8, 3))\n', (4367, 4378), True, 'import numpy as np\n'), ((4397, 4413), 'numpy.zeros', 'np.zeros', (['(0, 3)'], {}), '((0, 3))\n', (4405, 4413), True, 'import numpy as np\n'), ((4430, 4446), 'numpy.zeros', 'np.zeros', (['(0, 3)'], {}), '((0, 3))\n', (4438, 4446), True, 'import numpy as np\n'), ((7180, 7224), 'os.path.join', 'os.path.join', (['self.root_path', '"""lowres_depth"""'], {}), "(self.root_path, 'lowres_depth')\n", (7192, 7224), False, 'import os\n'), ((7760, 7808), 
'os.path.join', 'os.path.join', (['self.root_path', '"""lowres_wide.traj"""'], {}), "(self.root_path, 'lowres_wide.traj')\n", (7772, 7808), False, 'import os\n'), ((8147, 8172), 'os.path.exists', 'os.path.exists', (['traj_file'], {}), '(traj_file)\n', (8161, 8172), False, 'import os\n'), ((10782, 10833), 'os.path.join', 'os.path.join', (['self.root_path', '"""lowres_depth"""', 'fname'], {}), "(self.root_path, 'lowres_depth', fname)\n", (10794, 10833), False, 'import os\n'), ((10941, 10991), 'os.path.join', 'os.path.join', (['self.root_path', '"""lowres_wide"""', 'fname'], {}), "(self.root_path, 'lowres_wide', fname)\n", (10953, 10991), False, 'import os\n'), ((11121, 11153), 'cv2.imread', 'cv2.imread', (['depth_image_path', '(-1)'], {}), '(depth_image_path, -1)\n', (11131, 11153), False, 'import cv2\n'), ((11179, 11201), 'cv2.imread', 'cv2.imread', (['image_path'], {}), '(image_path)\n', (11189, 11201), False, 'import cv2\n'), ((11395, 11435), 'copy.deepcopy', 'copy.deepcopy', (['self.intrinsics[frame_id]'], {}), '(self.intrinsics[frame_id])\n', (11408, 11435), False, 'import copy\n'), ((11765, 11790), 'copy.deepcopy', 'copy.deepcopy', (['frame_pose'], {}), '(frame_pose)\n', (11778, 11790), False, 'import copy\n'), ((2721, 2746), 'numpy.ones_like', 'np.ones_like', (['u[d_filter]'], {}), '(u[d_filter])\n', (2733, 2746), True, 'import numpy as np\n'), ((2791, 2819), 'numpy.linalg.inv', 'np.linalg.inv', (['intrinsic_4x4'], {}), '(intrinsic_4x4)\n', (2804, 2819), True, 'import numpy as np\n'), ((7240, 7268), 'os.path.exists', 'os.path.exists', (['depth_folder'], {}), '(depth_folder)\n', (7254, 7268), False, 'import os\n'), ((8404, 8500), 'os.path.join', 'os.path.join', (['self.root_path', '"""lowres_wide_intrinsics"""', 'f"""{self.video_id}_{frame_id}.pincam"""'], {}), "(self.root_path, 'lowres_wide_intrinsics',\n f'{self.video_id}_{frame_id}.pincam')\n", (8416, 8500), False, 'import os\n'), ((9688, 9711), 'os.path.exists', 'os.path.exists', (['gt_path'], {}), 
'(gt_path)\n', (9702, 9711), False, 'import os\n'), ((10849, 10881), 'os.path.exists', 'os.path.exists', (['depth_image_path'], {}), '(depth_image_path)\n', (10863, 10881), False, 'import os\n'), ((11008, 11040), 'os.path.exists', 'os.path.exists', (['depth_image_path'], {}), '(depth_image_path)\n', (11022, 11040), False, 'import os\n'), ((11818, 11840), 'numpy.float', 'np.float', (['depth_height'], {}), '(depth_height)\n', (11826, 11840), True, 'import numpy as np\n'), ((11878, 11899), 'numpy.float', 'np.float', (['depth_width'], {}), '(depth_width)\n', (11886, 11899), True, 'import numpy as np\n'), ((11979, 12019), 'numpy.zeros', 'np.zeros', (['[depth_height, depth_width, 3]'], {}), '([depth_height, depth_width, 3])\n', (11987, 12019), True, 'import numpy as np\n'), ((12097, 12119), 'cv2.imread', 'cv2.imread', (['image_path'], {}), '(image_path)\n', (12107, 12119), False, 'import cv2\n'), ((2926, 2962), 'numpy.ones', 'np.ones', (['(1, new_points_3d.shape[1])'], {}), '((1, new_points_3d.shape[1]))\n', (2933, 2962), True, 'import numpy as np\n'), ((4887, 4945), 'numpy.array', 'np.array', (["data['segments']['obbAligned']['normalizedAxes']"], {}), "(data['segments']['obbAligned']['normalizedAxes'])\n", (4895, 4945), True, 'import numpy as np\n'), ((4999, 5051), 'numpy.array', 'np.array', (["data['segments']['obbAligned']['centroid']"], {}), "(data['segments']['obbAligned']['centroid'])\n", (5007, 5051), True, 'import numpy as np\n'), ((5082, 5137), 'numpy.array', 'np.array', (["data['segments']['obbAligned']['axesLengths']"], {}), "(data['segments']['obbAligned']['axesLengths'])\n", (5090, 5137), True, 'import numpy as np\n'), ((5646, 5668), 'numpy.mean', 'np.mean', (['box3d'], {'axis': '(0)'}), '(box3d, axis=0)\n', (5653, 5668), True, 'import numpy as np\n'), ((7428, 7447), 'os.path.basename', 'os.path.basename', (['x'], {}), '(x)\n', (7444, 7447), False, 'import os\n'), ((8516, 8544), 'os.path.exists', 'os.path.exists', (['intrinsic_fn'], {}), '(intrinsic_fn)\n', 
(8530, 8544), False, 'import os\n'), ((8752, 8780), 'os.path.exists', 'os.path.exists', (['intrinsic_fn'], {}), '(intrinsic_fn)\n', (8766, 8780), False, 'import os\n'), ((8988, 9016), 'os.path.exists', 'os.path.exists', (['intrinsic_fn'], {}), '(intrinsic_fn)\n', (9002, 9016), False, 'import os\n'), ((7360, 7395), 'os.path.join', 'os.path.join', (['depth_folder', '"""*.png"""'], {}), "(depth_folder, '*.png')\n", (7372, 7395), False, 'import os\n')]
# Basic imports for Ryu from ryu.base import app_manager from ryu.controller import ofp_event from ryu.controller.handler import CONFIG_DISPATCHER, MAIN_DISPATCHER from ryu.controller.handler import set_ev_cls import ryu.ofproto.ofproto_v1_3_parser as parser import ryu.ofproto.ofproto_v1_3 as ofproto from ryu.lib.packet import packet from ryu.lib.packet import ether_types from ryu.lib.packet import ethernet, arp, ipv4, ipv6 class CockpitApp(app_manager.RyuApp): def __init__(self, *args, **kwargs): super(CockpitApp, self).__init__(*args, **kwargs) def info(self, text): print("*" * (len(text) + 4)) print("* {:s} *".format(text)) print("*" * (len(text) + 4)) def program_flow(self, dp, match, actions, priority = 0, hard_timeout = 600, idle_timeout = 60 ): """ Programs a new flow into a switch. Programming a new flow with the exact same match of an existing one will replace the existing flow. """ flowmod = parser.OFPFlowMod( dp, match = match, instructions = [ parser.OFPInstructionActions( ofproto.OFPIT_APPLY_ACTIONS, actions ) ], priority = priority, hard_timeout = hard_timeout, idle_timeout = idle_timeout ) dp.send_msg(flowmod) def send_pkt(self, dp, data, port = ofproto.OFPP_FLOOD): """ Convenience method that instructs a switch to forward a packet from the controller. """ out = parser.OFPPacketOut( datapath = dp, actions = [parser.OFPActionOutput(port)], in_port = dp.ofproto.OFPP_CONTROLLER, data = data, buffer_id = ofproto.OFP_NO_BUFFER ) dp.send_msg(out) @set_ev_cls(ofp_event.EventOFPSwitchFeatures, CONFIG_DISPATCHER) # Make sure the name of the function does not collide with those # of classes, that inherit from this class. Otherwise this # function will not be invoked. 
def __cockpit_app_switch_features_handler(self, ev): dp = ev.msg.datapath print("switch with id {:d} connected".format(dp.id)) # Install default flow # I.e., forward all unmatched packets to controller self.program_flow( dp, parser.OFPMatch(), # match all packets [parser.OFPActionOutput(ofproto.OFPP_CONTROLLER)], hard_timeout = 0, # no timeout idle_timeout = 0 # no timeout ) # Prevent switches from truncating packets when forwarding # to controller dp.send_msg(dp.ofproto_parser.OFPSetConfig( dp, dp.ofproto.OFPC_FRAG_NORMAL, 0xffff ))
[ "ryu.ofproto.ofproto_v1_3_parser.OFPMatch", "ryu.controller.handler.set_ev_cls", "ryu.ofproto.ofproto_v1_3_parser.OFPActionOutput", "ryu.ofproto.ofproto_v1_3_parser.OFPInstructionActions" ]
[((1886, 1949), 'ryu.controller.handler.set_ev_cls', 'set_ev_cls', (['ofp_event.EventOFPSwitchFeatures', 'CONFIG_DISPATCHER'], {}), '(ofp_event.EventOFPSwitchFeatures, CONFIG_DISPATCHER)\n', (1896, 1949), False, 'from ryu.controller.handler import set_ev_cls\n'), ((2413, 2430), 'ryu.ofproto.ofproto_v1_3_parser.OFPMatch', 'parser.OFPMatch', ([], {}), '()\n', (2428, 2430), True, 'import ryu.ofproto.ofproto_v1_3_parser as parser\n'), ((2465, 2512), 'ryu.ofproto.ofproto_v1_3_parser.OFPActionOutput', 'parser.OFPActionOutput', (['ofproto.OFPP_CONTROLLER'], {}), '(ofproto.OFPP_CONTROLLER)\n', (2487, 2512), True, 'import ryu.ofproto.ofproto_v1_3_parser as parser\n'), ((1131, 1197), 'ryu.ofproto.ofproto_v1_3_parser.OFPInstructionActions', 'parser.OFPInstructionActions', (['ofproto.OFPIT_APPLY_ACTIONS', 'actions'], {}), '(ofproto.OFPIT_APPLY_ACTIONS, actions)\n', (1159, 1197), True, 'import ryu.ofproto.ofproto_v1_3_parser as parser\n'), ((1692, 1720), 'ryu.ofproto.ofproto_v1_3_parser.OFPActionOutput', 'parser.OFPActionOutput', (['port'], {}), '(port)\n', (1714, 1720), True, 'import ryu.ofproto.ofproto_v1_3_parser as parser\n')]
#!/usr/bin/env python # coding:utf-8 """ @Time : 2021/10/15 17:29 @Author : harvey @File : filters.py @Software: PyCharm @Desc: @Module """ import uuid import datetime from django.contrib.auth.models import AbstractUser from django.conf import settings from django.db import models from django.utils import timezone def default_date_expired(): return timezone.now() + timezone.timedelta(days=365 * 10) return datetime.datetime(2099, 10, 10, 10, 10, 10, 0, tzinfo=timezone.get_current_timezone()) class User(AbstractUser): SOURCE_ITEM = [('local', 'local'), ('ldap', 'ldap')] AVATAR = 'static/image/avatar/1.jpeg' id = models.UUIDField(default=uuid.uuid4, primary_key=True, verbose_name='主键') username = models.CharField(max_length=128, unique=True, verbose_name='用户名') realname = models.CharField(max_length=128, verbose_name='姓名') nickname = models.CharField(max_length=128, unique=True, verbose_name='昵称') email = models.EmailField(max_length=128, unique=True, verbose_name='邮箱') phone = models.CharField(max_length=20, blank=True, null=True, verbose_name='手机') avatar = models.ImageField(upload_to='static/image/avatar', default=AVATAR, verbose_name='头像') introduction = models.CharField(max_length=128, null=True, blank=True, verbose_name='简介') source = models.CharField(max_length=32, choices=SOURCE_ITEM, verbose_name='来源') # modules = models.ManyToManyField(AssetsModule,null=True,blank=True, verbose_name='关联模块') # sql_user = models.ManyToManyField(AssetsDbUser, verbose_name='授权SQL用户') role_id = models.UUIDField(null=True, blank=True, verbose_name="数据库权限角色ID") date_expired = models.DateTimeField( default=default_date_expired(), blank=True, null=True, db_index=True, verbose_name='Date expired' ) created_at = models.DateTimeField(null=True, blank=True, auto_now_add=True, verbose_name='创建时间') updated_at = models.DateTimeField(null=True, blank=True, auto_now=True, verbose_name='创建时间') created_by = models.ForeignKey('self', null=True, related_name='create_user', 
on_delete=models.CASCADE, verbose_name='创建者') updated_by = models.ForeignKey('self', null=True, related_name='updated_user', on_delete=models.CASCADE, verbose_name='更新者') # first_name = None # last_name = None # date_joined = None # groups = models.ManyToManyField( # 'users.UserGroup', related_name='users', # blank=True, verbose_name='User group' # ) # 重写该方法 def get_full_name(self): """ Return the first_name plus the last_name, with a space in between. """ return self.username def get_groups_name(self): group_set = self.groups.all() return [g.name for g in group_set] def get_avatar_url(self): return f"{settings.IMAGE_URL}/{self.avatar}" @property def is_expired(self): if self.date_expired and self.date_expired < timezone.now(): return True else: return False @property def is_valid(self): if self.is_active and not self.is_expired: return True return False
[ "django.utils.timezone.get_current_timezone", "django.db.models.CharField", "django.db.models.DateTimeField", "django.db.models.ForeignKey", "django.utils.timezone.now", "django.db.models.EmailField", "django.db.models.ImageField", "django.utils.timezone.timedelta", "django.db.models.UUIDField" ]
[((650, 723), 'django.db.models.UUIDField', 'models.UUIDField', ([], {'default': 'uuid.uuid4', 'primary_key': '(True)', 'verbose_name': '"""主键"""'}), "(default=uuid.uuid4, primary_key=True, verbose_name='主键')\n", (666, 723), False, 'from django.db import models\n'), ((739, 804), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(128)', 'unique': '(True)', 'verbose_name': '"""用户名"""'}), "(max_length=128, unique=True, verbose_name='用户名')\n", (755, 804), False, 'from django.db import models\n'), ((820, 871), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(128)', 'verbose_name': '"""姓名"""'}), "(max_length=128, verbose_name='姓名')\n", (836, 871), False, 'from django.db import models\n'), ((887, 951), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(128)', 'unique': '(True)', 'verbose_name': '"""昵称"""'}), "(max_length=128, unique=True, verbose_name='昵称')\n", (903, 951), False, 'from django.db import models\n'), ((964, 1029), 'django.db.models.EmailField', 'models.EmailField', ([], {'max_length': '(128)', 'unique': '(True)', 'verbose_name': '"""邮箱"""'}), "(max_length=128, unique=True, verbose_name='邮箱')\n", (981, 1029), False, 'from django.db import models\n'), ((1042, 1115), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(20)', 'blank': '(True)', 'null': '(True)', 'verbose_name': '"""手机"""'}), "(max_length=20, blank=True, null=True, verbose_name='手机')\n", (1058, 1115), False, 'from django.db import models\n'), ((1129, 1218), 'django.db.models.ImageField', 'models.ImageField', ([], {'upload_to': '"""static/image/avatar"""', 'default': 'AVATAR', 'verbose_name': '"""头像"""'}), "(upload_to='static/image/avatar', default=AVATAR,\n verbose_name='头像')\n", (1146, 1218), False, 'from django.db import models\n'), ((1234, 1308), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(128)', 'null': '(True)', 'blank': '(True)', 'verbose_name': '"""简介"""'}), "(max_length=128, 
null=True, blank=True, verbose_name='简介')\n", (1250, 1308), False, 'from django.db import models\n'), ((1322, 1393), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(32)', 'choices': 'SOURCE_ITEM', 'verbose_name': '"""来源"""'}), "(max_length=32, choices=SOURCE_ITEM, verbose_name='来源')\n", (1338, 1393), False, 'from django.db import models\n'), ((1581, 1646), 'django.db.models.UUIDField', 'models.UUIDField', ([], {'null': '(True)', 'blank': '(True)', 'verbose_name': '"""数据库权限角色ID"""'}), "(null=True, blank=True, verbose_name='数据库权限角色ID')\n", (1597, 1646), False, 'from django.db import models\n'), ((1825, 1913), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'null': '(True)', 'blank': '(True)', 'auto_now_add': '(True)', 'verbose_name': '"""创建时间"""'}), "(null=True, blank=True, auto_now_add=True, verbose_name\n ='创建时间')\n", (1845, 1913), False, 'from django.db import models\n'), ((1926, 2005), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'null': '(True)', 'blank': '(True)', 'auto_now': '(True)', 'verbose_name': '"""创建时间"""'}), "(null=True, blank=True, auto_now=True, verbose_name='创建时间')\n", (1946, 2005), False, 'from django.db import models\n'), ((2023, 2138), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""self"""'], {'null': '(True)', 'related_name': '"""create_user"""', 'on_delete': 'models.CASCADE', 'verbose_name': '"""创建者"""'}), "('self', null=True, related_name='create_user', on_delete=\n models.CASCADE, verbose_name='创建者')\n", (2040, 2138), False, 'from django.db import models\n'), ((2186, 2302), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""self"""'], {'null': '(True)', 'related_name': '"""updated_user"""', 'on_delete': 'models.CASCADE', 'verbose_name': '"""更新者"""'}), "('self', null=True, related_name='updated_user', on_delete\n =models.CASCADE, verbose_name='更新者')\n", (2203, 2302), False, 'from django.db import models\n'), ((364, 378), 'django.utils.timezone.now', 'timezone.now', 
([], {}), '()\n', (376, 378), False, 'from django.utils import timezone\n'), ((381, 414), 'django.utils.timezone.timedelta', 'timezone.timedelta', ([], {'days': '(365 * 10)'}), '(days=365 * 10)\n', (399, 414), False, 'from django.utils import timezone\n'), ((481, 512), 'django.utils.timezone.get_current_timezone', 'timezone.get_current_timezone', ([], {}), '()\n', (510, 512), False, 'from django.utils import timezone\n'), ((3015, 3029), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (3027, 3029), False, 'from django.utils import timezone\n')]
# coding: utf-8 from __future__ import absolute_import import datetime import re import importlib import six from huaweicloudsdkcore.client import Client, ClientBuilder from huaweicloudsdkcore.exceptions import exceptions from huaweicloudsdkcore.utils import http_utils from huaweicloudsdkcore.sdk_stream_request import SdkStreamRequest class KmsClient(Client): """ :param configuration: .Configuration object for this client :param pool_threads: The number of threads to use for async requests to the API. More threads means more concurrent API requests. """ PRIMITIVE_TYPES = (float, bool, bytes, six.text_type) + six.integer_types NATIVE_TYPES_MAPPING = { 'int': int, 'long': int if six.PY3 else long, 'float': float, 'str': str, 'bool': bool, 'date': datetime.date, 'datetime': datetime.datetime, 'object': object, } def __init__(self): super(KmsClient, self).__init__() self.model_package = importlib.import_module("huaweicloudsdkkms.v1.model") self.preset_headers = {'User-Agent': 'HuaweiCloud-SDK-Python'} @staticmethod def new_builder(clazz): return ClientBuilder(clazz) def batch_create_kms_tags(self, request): """批量添加删除密钥标签 - 功能介绍:批量添加删除密钥标签。 :param BatchCreateKmsTagsRequest request :return: BatchCreateKmsTagsResponse """ return self.batch_create_kms_tags_with_http_info(request) def batch_create_kms_tags_with_http_info(self, request): """批量添加删除密钥标签 - 功能介绍:批量添加删除密钥标签。 :param BatchCreateKmsTagsRequest request :return: BatchCreateKmsTagsResponse """ all_params = ['key_id', 'version_id', 'batch_create_kms_tags_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'key_id' in local_var_params: path_params['key_id'] = local_var_params['key_id'] if 'version_id' in local_var_params: path_params['version_id'] = local_var_params['version_id'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: 
body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json;charset=UTF-8']) auth_settings = [] return self.call_api( resource_path='/{version_id}/{project_id}/kms/{key_id}/tags/action', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='BatchCreateKmsTagsResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def cancel_grant(self, request): """撤销授权 - 功能介绍:撤销授权,授权用户撤销被授权用户操作密钥的权限。 - 说明: - 创建密钥的用户才能撤销该密钥授权。 :param CancelGrantRequest request :return: CancelGrantResponse """ return self.cancel_grant_with_http_info(request) def cancel_grant_with_http_info(self, request): """撤销授权 - 功能介绍:撤销授权,授权用户撤销被授权用户操作密钥的权限。 - 说明: - 创建密钥的用户才能撤销该密钥授权。 :param CancelGrantRequest request :return: CancelGrantResponse """ all_params = ['version_id', 'cancel_grant_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'version_id' in local_var_params: path_params['version_id'] = local_var_params['version_id'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json;charset=UTF-8']) auth_settings = [] return self.call_api( resource_path='/{version_id}/{project_id}/kms/revoke-grant', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, 
post_params=form_params, response_type='CancelGrantResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def cancel_key_deletion(self, request): """取消计划删除密钥 - 功能介绍:取消计划删除密钥。 - 说明:密钥处于“计划删除”状态才能取消计划删除密钥。 :param CancelKeyDeletionRequest request :return: CancelKeyDeletionResponse """ return self.cancel_key_deletion_with_http_info(request) def cancel_key_deletion_with_http_info(self, request): """取消计划删除密钥 - 功能介绍:取消计划删除密钥。 - 说明:密钥处于“计划删除”状态才能取消计划删除密钥。 :param CancelKeyDeletionRequest request :return: CancelKeyDeletionResponse """ all_params = ['version_id', 'cancel_key_deletion_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'version_id' in local_var_params: path_params['version_id'] = local_var_params['version_id'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json;charset=UTF-8']) auth_settings = [] return self.call_api( resource_path='/{version_id}/{project_id}/kms/cancel-key-deletion', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='CancelKeyDeletionResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def cancel_self_grant(self, request): """退役授权 - 功能介绍:退役授权,表示被授权用户不再具有授权密钥的操作权。 例如:用户A授权用户B可以操作密钥A/key,同时授权用户C可以撤销该授权, 那么用户A、B、C均可退役该授权,退役授权后,用户B不再可以使用A/key。 - 须知: 可执行退役授权的主体包括: - 创建授权的用户; - 授权中retiring_principal指向的用户; - 
当授权的操作列表中包含retire-grant时,grantee_principal指向的用户。 :param CancelSelfGrantRequest request :return: CancelSelfGrantResponse """ return self.cancel_self_grant_with_http_info(request) def cancel_self_grant_with_http_info(self, request): """退役授权 - 功能介绍:退役授权,表示被授权用户不再具有授权密钥的操作权。 例如:用户A授权用户B可以操作密钥A/key,同时授权用户C可以撤销该授权, 那么用户A、B、C均可退役该授权,退役授权后,用户B不再可以使用A/key。 - 须知: 可执行退役授权的主体包括: - 创建授权的用户; - 授权中retiring_principal指向的用户; - 当授权的操作列表中包含retire-grant时,grantee_principal指向的用户。 :param CancelSelfGrantRequest request :return: CancelSelfGrantResponse """ all_params = ['version_id', 'cancel_self_grant_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'version_id' in local_var_params: path_params['version_id'] = local_var_params['version_id'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json;charset=UTF-8']) auth_settings = [] return self.call_api( resource_path='/{version_id}/{project_id}/kms/retire-grant', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='CancelSelfGrantResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def create_datakey(self, request): """创建数据密钥 - 功能介绍:创建数据密钥,返回结果包含明文和密文。 :param CreateDatakeyRequest request :return: CreateDatakeyResponse """ return self.create_datakey_with_http_info(request) def create_datakey_with_http_info(self, request): """创建数据密钥 - 功能介绍:创建数据密钥,返回结果包含明文和密文。 :param CreateDatakeyRequest request :return: CreateDatakeyResponse """ 
all_params = ['version_id', 'create_datakey_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'version_id' in local_var_params: path_params['version_id'] = local_var_params['version_id'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json;charset=UTF-8']) auth_settings = [] return self.call_api( resource_path='/{version_id}/{project_id}/kms/create-datakey', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='CreateDatakeyResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def create_datakey_without_plaintext(self, request): """创建不含明文数据密钥 - 功能介绍:创建数据密钥,返回结果只包含密文。 :param CreateDatakeyWithoutPlaintextRequest request :return: CreateDatakeyWithoutPlaintextResponse """ return self.create_datakey_without_plaintext_with_http_info(request) def create_datakey_without_plaintext_with_http_info(self, request): """创建不含明文数据密钥 - 功能介绍:创建数据密钥,返回结果只包含密文。 :param CreateDatakeyWithoutPlaintextRequest request :return: CreateDatakeyWithoutPlaintextResponse """ all_params = ['version_id', 'create_datakey_without_plaintext_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'version_id' in local_var_params: path_params['version_id'] = local_var_params['version_id'] query_params = [] header_params = {} form_params = {} body_params = 
None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json;charset=UTF-8']) auth_settings = [] return self.call_api( resource_path='/{version_id}/{project_id}/kms/create-datakey-without-plaintext', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='CreateDatakeyWithoutPlaintextResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def create_grant(self, request): """创建授权 - 功能介绍:创建授权,被授权用户可以对授权密钥进行操作。 - 说明: - 服务默认主密钥(密钥别名后缀为“/default”)不可以授权。 :param CreateGrantRequest request :return: CreateGrantResponse """ return self.create_grant_with_http_info(request) def create_grant_with_http_info(self, request): """创建授权 - 功能介绍:创建授权,被授权用户可以对授权密钥进行操作。 - 说明: - 服务默认主密钥(密钥别名后缀为“/default”)不可以授权。 :param CreateGrantRequest request :return: CreateGrantResponse """ all_params = ['version_id', 'create_grant_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'version_id' in local_var_params: path_params['version_id'] = local_var_params['version_id'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json;charset=UTF-8']) auth_settings = [] return self.call_api( resource_path='/{version_id}/{project_id}/kms/create-grant', method='POST', path_params=path_params, 
query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='CreateGrantResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def create_key(self, request): """创建密钥 - 功能介绍:创建用户主密钥,可用来加密数据密钥。 - 说明: 别名“/default”为服务默认主密钥的后缀名,由服务自动创建。因此用户创建的主密钥别名不能与服务默认主密钥的别名相同,即后缀名不能为“/default”。对于开通企业项目的用户,服务默认主密钥属于且只能属于默认企业项目下,且不支持企业资源的迁入迁出。服务默认主密钥为用户提供基础的云上加密功能,满足合规要求。因此,在企业多项目下,其他非默认企业项目下的用户均可使用该密钥。若客户有企业管理资源诉求,请自行创建和使用密钥。 :param CreateKeyRequest request :return: CreateKeyResponse """ return self.create_key_with_http_info(request) def create_key_with_http_info(self, request): """创建密钥 - 功能介绍:创建用户主密钥,可用来加密数据密钥。 - 说明: 别名“/default”为服务默认主密钥的后缀名,由服务自动创建。因此用户创建的主密钥别名不能与服务默认主密钥的别名相同,即后缀名不能为“/default”。对于开通企业项目的用户,服务默认主密钥属于且只能属于默认企业项目下,且不支持企业资源的迁入迁出。服务默认主密钥为用户提供基础的云上加密功能,满足合规要求。因此,在企业多项目下,其他非默认企业项目下的用户均可使用该密钥。若客户有企业管理资源诉求,请自行创建和使用密钥。 :param CreateKeyRequest request :return: CreateKeyResponse """ all_params = ['version_id', 'create_key_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'version_id' in local_var_params: path_params['version_id'] = local_var_params['version_id'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json;charset=UTF-8']) auth_settings = [] return self.call_api( resource_path='/{version_id}/{project_id}/kms/create-key', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='CreateKeyResponse', 
response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def create_kms_tag(self, request): """添加密钥标签 - 功能介绍:添加密钥标签。 :param CreateKmsTagRequest request :return: CreateKmsTagResponse """ return self.create_kms_tag_with_http_info(request) def create_kms_tag_with_http_info(self, request): """添加密钥标签 - 功能介绍:添加密钥标签。 :param CreateKmsTagRequest request :return: CreateKmsTagResponse """ all_params = ['version_id', 'key_id', 'create_kms_tag_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'version_id' in local_var_params: path_params['version_id'] = local_var_params['version_id'] if 'key_id' in local_var_params: path_params['key_id'] = local_var_params['key_id'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json;charset=UTF-8']) auth_settings = [] return self.call_api( resource_path='/{version_id}/{project_id}/kms/{key_id}/tags', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='CreateKmsTagResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def create_parameters_for_import(self, request): """获取密钥导入参数 - 功能介绍:获取导入密钥的必要参数,包括密钥导入令牌和密钥加密公钥。 - 说明:返回的公钥类型默认为RSA_2048。 :param CreateParametersForImportRequest request :return: CreateParametersForImportResponse """ return self.create_parameters_for_import_with_http_info(request) def 
create_parameters_for_import_with_http_info(self, request): """获取密钥导入参数 - 功能介绍:获取导入密钥的必要参数,包括密钥导入令牌和密钥加密公钥。 - 说明:返回的公钥类型默认为RSA_2048。 :param CreateParametersForImportRequest request :return: CreateParametersForImportResponse """ all_params = ['version_id', 'create_parameters_for_import_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'version_id' in local_var_params: path_params['version_id'] = local_var_params['version_id'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json;charset=UTF-8']) auth_settings = [] return self.call_api( resource_path='/{version_id}/{project_id}/kms/get-parameters-for-import', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='CreateParametersForImportResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def create_random(self, request): """创建随机数 - 功能介绍: 生成8~8192bit范围内的随机数。 生成512bit的随机数。 :param CreateRandomRequest request :return: CreateRandomResponse """ return self.create_random_with_http_info(request) def create_random_with_http_info(self, request): """创建随机数 - 功能介绍: 生成8~8192bit范围内的随机数。 生成512bit的随机数。 :param CreateRandomRequest request :return: CreateRandomResponse """ all_params = ['version_id', 'create_random_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'version_id' in 
local_var_params: path_params['version_id'] = local_var_params['version_id'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json;charset=UTF-8']) auth_settings = [] return self.call_api( resource_path='/{version_id}/{project_id}/kms/gen-random', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='CreateRandomResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def decrypt_data(self, request): """解密数据 - 功能介绍:解密数据。 :param DecryptDataRequest request :return: DecryptDataResponse """ return self.decrypt_data_with_http_info(request) def decrypt_data_with_http_info(self, request): """解密数据 - 功能介绍:解密数据。 :param DecryptDataRequest request :return: DecryptDataResponse """ all_params = ['version_id', 'decrypt_data_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'version_id' in local_var_params: path_params['version_id'] = local_var_params['version_id'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json;charset=UTF-8']) auth_settings = [] return self.call_api( resource_path='/{version_id}/{project_id}/kms/decrypt-data', method='POST', path_params=path_params, 
query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='DecryptDataResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def decrypt_datakey(self, request): """解密数据密钥 - 功能介绍:解密数据密钥,用指定的主密钥解密数据密钥。 :param DecryptDatakeyRequest request :return: DecryptDatakeyResponse """ return self.decrypt_datakey_with_http_info(request) def decrypt_datakey_with_http_info(self, request): """解密数据密钥 - 功能介绍:解密数据密钥,用指定的主密钥解密数据密钥。 :param DecryptDatakeyRequest request :return: DecryptDatakeyResponse """ all_params = ['version_id', 'decrypt_datakey_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'version_id' in local_var_params: path_params['version_id'] = local_var_params['version_id'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json;charset=UTF-8']) auth_settings = [] return self.call_api( resource_path='/{version_id}/{project_id}/kms/decrypt-datakey', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='DecryptDatakeyResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def delete_imported_key_material(self, request): """删除密钥材料 - 功能介绍:删除密钥材料信息。 :param DeleteImportedKeyMaterialRequest request :return: DeleteImportedKeyMaterialResponse """ return self.delete_imported_key_material_with_http_info(request) def 
delete_imported_key_material_with_http_info(self, request): """删除密钥材料 - 功能介绍:删除密钥材料信息。 :param DeleteImportedKeyMaterialRequest request :return: DeleteImportedKeyMaterialResponse """ all_params = ['version_id', 'delete_imported_key_material_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'version_id' in local_var_params: path_params['version_id'] = local_var_params['version_id'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json;charset=UTF-8']) auth_settings = [] return self.call_api( resource_path='/{version_id}/{project_id}/kms/delete-imported-key-material', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='DeleteImportedKeyMaterialResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def delete_key(self, request): """计划删除密钥 - 功能介绍:计划多少天后删除密钥,可设置7天~1096天内删除密钥。 :param DeleteKeyRequest request :return: DeleteKeyResponse """ return self.delete_key_with_http_info(request) def delete_key_with_http_info(self, request): """计划删除密钥 - 功能介绍:计划多少天后删除密钥,可设置7天~1096天内删除密钥。 :param DeleteKeyRequest request :return: DeleteKeyResponse """ all_params = ['version_id', 'delete_key_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'version_id' in local_var_params: path_params['version_id'] = local_var_params['version_id'] 
query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json;charset=UTF-8']) auth_settings = [] return self.call_api( resource_path='/{version_id}/{project_id}/kms/schedule-key-deletion', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='DeleteKeyResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def delete_tag(self, request): """删除密钥标签 - 功能介绍:删除密钥标签。 :param DeleteTagRequest request :return: DeleteTagResponse """ return self.delete_tag_with_http_info(request) def delete_tag_with_http_info(self, request): """删除密钥标签 - 功能介绍:删除密钥标签。 :param DeleteTagRequest request :return: DeleteTagResponse """ all_params = ['key_id', 'key', 'version_id'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'key_id' in local_var_params: path_params['key_id'] = local_var_params['key_id'] if 'key' in local_var_params: path_params['key'] = local_var_params['key'] if 'version_id' in local_var_params: path_params['version_id'] = local_var_params['version_id'] query_params = [] header_params = {} form_params = {} body_params = None if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] auth_settings = [] return self.call_api( resource_path='/{version_id}/{project_id}/kms/{key_id}/tags/{key}', method='DELETE', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, 
response_type='DeleteTagResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def disable_key(self, request): """禁用密钥 - 功能介绍:禁用密钥,密钥禁用后不可以使用。 - 说明:密钥为启用状态才能禁用密钥。 :param DisableKeyRequest request :return: DisableKeyResponse """ return self.disable_key_with_http_info(request) def disable_key_with_http_info(self, request): """禁用密钥 - 功能介绍:禁用密钥,密钥禁用后不可以使用。 - 说明:密钥为启用状态才能禁用密钥。 :param DisableKeyRequest request :return: DisableKeyResponse """ all_params = ['version_id', 'disable_key_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'version_id' in local_var_params: path_params['version_id'] = local_var_params['version_id'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json;charset=UTF-8']) auth_settings = [] return self.call_api( resource_path='/{version_id}/{project_id}/kms/disable-key', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='DisableKeyResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def disable_key_rotation(self, request): """关闭密钥轮换 - 功能介绍:关闭用户主密钥轮换。 :param DisableKeyRotationRequest request :return: DisableKeyRotationResponse """ return self.disable_key_rotation_with_http_info(request) def disable_key_rotation_with_http_info(self, request): """关闭密钥轮换 - 功能介绍:关闭用户主密钥轮换。 :param DisableKeyRotationRequest request :return: 
DisableKeyRotationResponse """ all_params = ['version_id', 'disable_key_rotation_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'version_id' in local_var_params: path_params['version_id'] = local_var_params['version_id'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json;charset=UTF-8']) auth_settings = [] return self.call_api( resource_path='/{version_id}/{project_id}/kms/disable-key-rotation', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='DisableKeyRotationResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def enable_key(self, request): """启用密钥 - 功能介绍:启用密钥,密钥启用后才可以使用。 - 说明:密钥为禁用状态才能启用密钥。 :param EnableKeyRequest request :return: EnableKeyResponse """ return self.enable_key_with_http_info(request) def enable_key_with_http_info(self, request): """启用密钥 - 功能介绍:启用密钥,密钥启用后才可以使用。 - 说明:密钥为禁用状态才能启用密钥。 :param EnableKeyRequest request :return: EnableKeyResponse """ all_params = ['version_id', 'enable_key_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'version_id' in local_var_params: path_params['version_id'] = local_var_params['version_id'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, 
SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json;charset=UTF-8']) auth_settings = [] return self.call_api( resource_path='/{version_id}/{project_id}/kms/enable-key', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='EnableKeyResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def enable_key_rotation(self, request): """开启密钥轮换 - 功能介绍:开启用户主密钥轮换。 - 说明: - 开启密钥轮换后,默认轮询间隔时间为365天。 - 默认主密钥及外部导入密钥不支持轮换操作。 :param EnableKeyRotationRequest request :return: EnableKeyRotationResponse """ return self.enable_key_rotation_with_http_info(request) def enable_key_rotation_with_http_info(self, request): """开启密钥轮换 - 功能介绍:开启用户主密钥轮换。 - 说明: - 开启密钥轮换后,默认轮询间隔时间为365天。 - 默认主密钥及外部导入密钥不支持轮换操作。 :param EnableKeyRotationRequest request :return: EnableKeyRotationResponse """ all_params = ['version_id', 'enable_key_rotation_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'version_id' in local_var_params: path_params['version_id'] = local_var_params['version_id'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json;charset=UTF-8']) auth_settings = [] return self.call_api( resource_path='/{version_id}/{project_id}/kms/enable-key-rotation', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, 
post_params=form_params, response_type='EnableKeyRotationResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def encrypt_data(self, request): """加密数据 - 功能介绍:加密数据,用指定的用户主密钥加密数据。 :param EncryptDataRequest request :return: EncryptDataResponse """ return self.encrypt_data_with_http_info(request) def encrypt_data_with_http_info(self, request): """加密数据 - 功能介绍:加密数据,用指定的用户主密钥加密数据。 :param EncryptDataRequest request :return: EncryptDataResponse """ all_params = ['version_id', 'encrypt_data_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'version_id' in local_var_params: path_params['version_id'] = local_var_params['version_id'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json;charset=UTF-8']) auth_settings = [] return self.call_api( resource_path='/{version_id}/{project_id}/kms/encrypt-data', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='EncryptDataResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def encrypt_datakey(self, request): """加密数据密钥 - 功能介绍:加密数据密钥,用指定的主密钥加密数据密钥。 :param EncryptDatakeyRequest request :return: EncryptDatakeyResponse """ return self.encrypt_datakey_with_http_info(request) def encrypt_datakey_with_http_info(self, request): """加密数据密钥 - 功能介绍:加密数据密钥,用指定的主密钥加密数据密钥。 :param EncryptDatakeyRequest request :return: 
EncryptDatakeyResponse """ all_params = ['version_id', 'encrypt_datakey_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'version_id' in local_var_params: path_params['version_id'] = local_var_params['version_id'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json;charset=UTF-8']) auth_settings = [] return self.call_api( resource_path='/{version_id}/{project_id}/kms/encrypt-datakey', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='EncryptDatakeyResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def import_key_material(self, request): """导入密钥材料 - 功能介绍:导入密钥材料。 :param ImportKeyMaterialRequest request :return: ImportKeyMaterialResponse """ return self.import_key_material_with_http_info(request) def import_key_material_with_http_info(self, request): """导入密钥材料 - 功能介绍:导入密钥材料。 :param ImportKeyMaterialRequest request :return: ImportKeyMaterialResponse """ all_params = ['version_id', 'import_key_material_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'version_id' in local_var_params: path_params['version_id'] = local_var_params['version_id'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, 
SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json;charset=UTF-8']) auth_settings = [] return self.call_api( resource_path='/{version_id}/{project_id}/kms/import-key-material', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='ImportKeyMaterialResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def list_grants(self, request): """查询授权列表 - 功能介绍:查询密钥的授权列表。 :param ListGrantsRequest request :return: ListGrantsResponse """ return self.list_grants_with_http_info(request) def list_grants_with_http_info(self, request): """查询授权列表 - 功能介绍:查询密钥的授权列表。 :param ListGrantsRequest request :return: ListGrantsResponse """ all_params = ['version_id', 'list_grants_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'version_id' in local_var_params: path_params['version_id'] = local_var_params['version_id'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json;charset=UTF-8']) auth_settings = [] return self.call_api( resource_path='/{version_id}/{project_id}/kms/list-grants', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='ListGrantsResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, 
request_type=request.__class__.__name__) def list_key_detail(self, request): """查询密钥信息 - 功能介绍:查询密钥详细信息。 :param ListKeyDetailRequest request :return: ListKeyDetailResponse """ return self.list_key_detail_with_http_info(request) def list_key_detail_with_http_info(self, request): """查询密钥信息 - 功能介绍:查询密钥详细信息。 :param ListKeyDetailRequest request :return: ListKeyDetailResponse """ all_params = ['version_id', 'list_key_detail_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'version_id' in local_var_params: path_params['version_id'] = local_var_params['version_id'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json;charset=UTF-8']) auth_settings = [] return self.call_api( resource_path='/{version_id}/{project_id}/kms/describe-key', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='ListKeyDetailResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def list_keys(self, request): """查询密钥列表 - 功能介绍:查询用户所有密钥列表。 :param ListKeysRequest request :return: ListKeysResponse """ return self.list_keys_with_http_info(request) def list_keys_with_http_info(self, request): """查询密钥列表 - 功能介绍:查询用户所有密钥列表。 :param ListKeysRequest request :return: ListKeysResponse """ all_params = ['version_id', 'list_keys_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} 
path_params = {} if 'version_id' in local_var_params: path_params['version_id'] = local_var_params['version_id'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json;charset=UTF-8']) auth_settings = [] return self.call_api( resource_path='/{version_id}/{project_id}/kms/list-keys', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='ListKeysResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def list_kms_by_tags(self, request): """查询密钥实例 - 功能介绍:查询密钥实例。通过标签过滤,查询指定用户主密钥的详细信息。 :param ListKmsByTagsRequest request :return: ListKmsByTagsResponse """ return self.list_kms_by_tags_with_http_info(request) def list_kms_by_tags_with_http_info(self, request): """查询密钥实例 - 功能介绍:查询密钥实例。通过标签过滤,查询指定用户主密钥的详细信息。 :param ListKmsByTagsRequest request :return: ListKmsByTagsResponse """ all_params = ['resource_instances', 'version_id', 'list_kms_by_tags_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'resource_instances' in local_var_params: path_params['resource_instances'] = local_var_params['resource_instances'] if 'version_id' in local_var_params: path_params['version_id'] = local_var_params['version_id'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] 
header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json;charset=UTF-8']) auth_settings = [] return self.call_api( resource_path='/{version_id}/{project_id}/kms/{resource_instances}/action', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='ListKmsByTagsResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def list_kms_tags(self, request): """查询项目标签 - 功能介绍:查询用户在指定项目下的所有标签集合。 :param ListKmsTagsRequest request :return: ListKmsTagsResponse """ return self.list_kms_tags_with_http_info(request) def list_kms_tags_with_http_info(self, request): """查询项目标签 - 功能介绍:查询用户在指定项目下的所有标签集合。 :param ListKmsTagsRequest request :return: ListKmsTagsResponse """ all_params = ['version_id'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'version_id' in local_var_params: path_params['version_id'] = local_var_params['version_id'] query_params = [] header_params = {} form_params = {} body_params = None if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] auth_settings = [] return self.call_api( resource_path='/{version_id}/{project_id}/kms/tags', method='GET', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='ListKmsTagsResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def list_retirable_grants(self, request): """查询可退役授权列表 - 功能介绍:查询用户可以退役的授权列表。 :param ListRetirableGrantsRequest request :return: ListRetirableGrantsResponse """ return self.list_retirable_grants_with_http_info(request) def 
list_retirable_grants_with_http_info(self, request): """查询可退役授权列表 - 功能介绍:查询用户可以退役的授权列表。 :param ListRetirableGrantsRequest request :return: ListRetirableGrantsResponse """ all_params = ['version_id', 'list_retirable_grants_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'version_id' in local_var_params: path_params['version_id'] = local_var_params['version_id'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json;charset=UTF-8']) auth_settings = [] return self.call_api( resource_path='/{version_id}/{project_id}/kms/list-retirable-grants', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='ListRetirableGrantsResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def show_key_rotation_status(self, request): """查询密钥轮换状态 - 功能介绍:查询用户主密钥轮换状态。 :param ShowKeyRotationStatusRequest request :return: ShowKeyRotationStatusResponse """ return self.show_key_rotation_status_with_http_info(request) def show_key_rotation_status_with_http_info(self, request): """查询密钥轮换状态 - 功能介绍:查询用户主密钥轮换状态。 :param ShowKeyRotationStatusRequest request :return: ShowKeyRotationStatusResponse """ all_params = ['version_id', 'show_key_rotation_status_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'version_id' in local_var_params: 
path_params['version_id'] = local_var_params['version_id'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json;charset=UTF-8']) auth_settings = [] return self.call_api( resource_path='/{version_id}/{project_id}/kms/get-key-rotation-status', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='ShowKeyRotationStatusResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def show_kms_tags(self, request): """查询密钥标签 - 功能介绍:查询密钥标签。 :param ShowKmsTagsRequest request :return: ShowKmsTagsResponse """ return self.show_kms_tags_with_http_info(request) def show_kms_tags_with_http_info(self, request): """查询密钥标签 - 功能介绍:查询密钥标签。 :param ShowKmsTagsRequest request :return: ShowKmsTagsResponse """ all_params = ['version_id', 'key_id'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'version_id' in local_var_params: path_params['version_id'] = local_var_params['version_id'] if 'key_id' in local_var_params: path_params['key_id'] = local_var_params['key_id'] query_params = [] header_params = {} form_params = {} body_params = None if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] auth_settings = [] return self.call_api( resource_path='/{version_id}/{project_id}/kms/{key_id}/tags', method='GET', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, 
response_type='ShowKmsTagsResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def show_user_instances(self, request): """查询实例数 - 功能介绍:查询实例数,获取用户已经创建的用户主密钥数量。 :param ShowUserInstancesRequest request :return: ShowUserInstancesResponse """ return self.show_user_instances_with_http_info(request) def show_user_instances_with_http_info(self, request): """查询实例数 - 功能介绍:查询实例数,获取用户已经创建的用户主密钥数量。 :param ShowUserInstancesRequest request :return: ShowUserInstancesResponse """ all_params = ['version_id'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'version_id' in local_var_params: path_params['version_id'] = local_var_params['version_id'] query_params = [] header_params = {} form_params = {} body_params = None if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] auth_settings = [] return self.call_api( resource_path='/{version_id}/{project_id}/kms/user-instances', method='GET', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='ShowUserInstancesResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def show_user_quotas(self, request): """查询配额 - 功能介绍:查询配额,查询用户可以创建的用户主密钥配额总数及当前使用量信息。 :param ShowUserQuotasRequest request :return: ShowUserQuotasResponse """ return self.show_user_quotas_with_http_info(request) def show_user_quotas_with_http_info(self, request): """查询配额 - 功能介绍:查询配额,查询用户可以创建的用户主密钥配额总数及当前使用量信息。 :param ShowUserQuotasRequest request :return: ShowUserQuotasResponse """ all_params = ['version_id'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = 
getattr(request, attr) collection_formats = {} path_params = {} if 'version_id' in local_var_params: path_params['version_id'] = local_var_params['version_id'] query_params = [] header_params = {} form_params = {} body_params = None if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] auth_settings = [] return self.call_api( resource_path='/{version_id}/{project_id}/kms/user-quotas', method='GET', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='ShowUserQuotasResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def update_key_alias(self, request): """修改密钥别名 - 功能介绍:修改用户主密钥别名。 - 说明: - 服务默认主密钥(密钥别名后缀为“/default”)不可以修改。 - 密钥处于“计划删除”状态,密钥别名不可以修改。 :param UpdateKeyAliasRequest request :return: UpdateKeyAliasResponse """ return self.update_key_alias_with_http_info(request) def update_key_alias_with_http_info(self, request): """修改密钥别名 - 功能介绍:修改用户主密钥别名。 - 说明: - 服务默认主密钥(密钥别名后缀为“/default”)不可以修改。 - 密钥处于“计划删除”状态,密钥别名不可以修改。 :param UpdateKeyAliasRequest request :return: UpdateKeyAliasResponse """ all_params = ['version_id', 'update_key_alias_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'version_id' in local_var_params: path_params['version_id'] = local_var_params['version_id'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json;charset=UTF-8']) auth_settings = [] return self.call_api( 
resource_path='/{version_id}/{project_id}/kms/update-key-alias', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='UpdateKeyAliasResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def update_key_description(self, request): """修改密钥描述 - 功能介绍:修改用户主密钥描述信息。 - 说明: - 服务默认主密钥(密钥别名后缀为“/default”)不可以修改。 - 密钥处于“计划删除”状态,密钥描述不可以修改。 :param UpdateKeyDescriptionRequest request :return: UpdateKeyDescriptionResponse """ return self.update_key_description_with_http_info(request) def update_key_description_with_http_info(self, request): """修改密钥描述 - 功能介绍:修改用户主密钥描述信息。 - 说明: - 服务默认主密钥(密钥别名后缀为“/default”)不可以修改。 - 密钥处于“计划删除”状态,密钥描述不可以修改。 :param UpdateKeyDescriptionRequest request :return: UpdateKeyDescriptionResponse """ all_params = ['version_id', 'update_key_description_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'version_id' in local_var_params: path_params['version_id'] = local_var_params['version_id'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json;charset=UTF-8']) auth_settings = [] return self.call_api( resource_path='/{version_id}/{project_id}/kms/update-key-description', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='UpdateKeyDescriptionResponse', response_headers=response_headers, auth_settings=auth_settings, 
collection_formats=collection_formats, request_type=request.__class__.__name__) def update_key_rotation_interval(self, request): """修改密钥轮换周期 - 功能介绍:修改用户主密钥轮换周期。 :param UpdateKeyRotationIntervalRequest request :return: UpdateKeyRotationIntervalResponse """ return self.update_key_rotation_interval_with_http_info(request) def update_key_rotation_interval_with_http_info(self, request): """修改密钥轮换周期 - 功能介绍:修改用户主密钥轮换周期。 :param UpdateKeyRotationIntervalRequest request :return: UpdateKeyRotationIntervalResponse """ all_params = ['version_id', 'update_key_rotation_interval_request_body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'version_id' in local_var_params: path_params['version_id'] = local_var_params['version_id'] query_params = [] header_params = {} form_params = {} body_params = None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json;charset=UTF-8']) auth_settings = [] return self.call_api( resource_path='/{version_id}/{project_id}/kms/update-key-rotation-interval', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='UpdateKeyRotationIntervalResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def show_version(self, request): """查询指定版本信息 - 功能介绍:查指定API版本信息。 :param ShowVersionRequest request :return: ShowVersionResponse """ return self.show_version_with_http_info(request) def show_version_with_http_info(self, request): """查询指定版本信息 - 功能介绍:查指定API版本信息。 :param ShowVersionRequest request :return: ShowVersionResponse """ all_params = 
['version_id'] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} if 'version_id' in local_var_params: path_params['version_id'] = local_var_params['version_id'] query_params = [] header_params = {} form_params = {} body_params = None if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] auth_settings = [] return self.call_api( resource_path='/{version_id}', method='GET', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='ShowVersionResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def show_versions(self, request): """查询版本信息列表 - 功能介绍:查询API版本信息列表。 :param ShowVersionsRequest request :return: ShowVersionsResponse """ return self.show_versions_with_http_info(request) def show_versions_with_http_info(self, request): """查询版本信息列表 - 功能介绍:查询API版本信息列表。 :param ShowVersionsRequest request :return: ShowVersionsResponse """ all_params = [] local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {} query_params = [] header_params = {} form_params = {} body_params = None if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] auth_settings = [] return self.call_api( resource_path='/', method='GET', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='ShowVersionsResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def call_api(self, resource_path, method, path_params=None, 
query_params=None, header_params=None, body=None, post_params=None, response_type=None, response_headers=None, auth_settings=None, collection_formats=None, request_type=None): """Makes the HTTP request and returns deserialized data. :param resource_path: Path to method endpoint. :param method: Method to call. :param path_params: Path parameters in the url. :param query_params: Query parameters in the url. :param header_params: Header parameters to be placed in the request header. :param body: Request body. :param post_params dict: Request post form parameters, for `application/x-www-form-urlencoded`, `multipart/form-data`. :param auth_settings list: Auth Settings names for the request. :param response_type: Response data type. :param response_headers: Header should be added to response data. :param collection_formats: dict of collection formats for path, query, header, and post parameters. :param request_type: Request data type. :return: Return the response directly. """ return self.do_http_request( method=method, resource_path=resource_path, path_params=path_params, query_params=query_params, header_params=header_params, body=body, post_params=post_params, response_type=response_type, response_headers=response_headers, collection_formats=collection_formats, request_type=request_type)
[ "huaweicloudsdkcore.client.ClientBuilder", "huaweicloudsdkcore.utils.http_utils.select_header_content_type", "importlib.import_module" ]
[((1024, 1077), 'importlib.import_module', 'importlib.import_module', (['"""huaweicloudsdkkms.v1.model"""'], {}), "('huaweicloudsdkkms.v1.model')\n", (1047, 1077), False, 'import importlib\n'), ((1211, 1231), 'huaweicloudsdkcore.client.ClientBuilder', 'ClientBuilder', (['clazz'], {}), '(clazz)\n', (1224, 1231), False, 'from huaweicloudsdkcore.client import Client, ClientBuilder\n'), ((2632, 2705), 'huaweicloudsdkcore.utils.http_utils.select_header_content_type', 'http_utils.select_header_content_type', (["['application/json;charset=UTF-8']"], {}), "(['application/json;charset=UTF-8'])\n", (2669, 2705), False, 'from huaweicloudsdkcore.utils import http_utils\n'), ((4613, 4686), 'huaweicloudsdkcore.utils.http_utils.select_header_content_type', 'http_utils.select_header_content_type', (["['application/json;charset=UTF-8']"], {}), "(['application/json;charset=UTF-8'])\n", (4650, 4686), False, 'from huaweicloudsdkcore.utils import http_utils\n'), ((6611, 6684), 'huaweicloudsdkcore.utils.http_utils.select_header_content_type', 'http_utils.select_header_content_type', (["['application/json;charset=UTF-8']"], {}), "(['application/json;charset=UTF-8'])\n", (6648, 6684), False, 'from huaweicloudsdkcore.utils import http_utils\n'), ((8986, 9059), 'huaweicloudsdkcore.utils.http_utils.select_header_content_type', 'http_utils.select_header_content_type', (["['application/json;charset=UTF-8']"], {}), "(['application/json;charset=UTF-8'])\n", (9023, 9059), False, 'from huaweicloudsdkcore.utils import http_utils\n'), ((10908, 10981), 'huaweicloudsdkcore.utils.http_utils.select_header_content_type', 'http_utils.select_header_content_type', (["['application/json;charset=UTF-8']"], {}), "(['application/json;charset=UTF-8'])\n", (10945, 10981), False, 'from huaweicloudsdkcore.utils import http_utils\n'), ((12970, 13043), 'huaweicloudsdkcore.utils.http_utils.select_header_content_type', 'http_utils.select_header_content_type', (["['application/json;charset=UTF-8']"], {}), 
"(['application/json;charset=UTF-8'])\n", (13007, 13043), False, 'from huaweicloudsdkcore.utils import http_utils\n'), ((15000, 15073), 'huaweicloudsdkcore.utils.http_utils.select_header_content_type', 'http_utils.select_header_content_type', (["['application/json;charset=UTF-8']"], {}), "(['application/json;charset=UTF-8'])\n", (15037, 15073), False, 'from huaweicloudsdkcore.utils import http_utils\n'), ((17320, 17393), 'huaweicloudsdkcore.utils.http_utils.select_header_content_type', 'http_utils.select_header_content_type', (["['application/json;charset=UTF-8']"], {}), "(['application/json;charset=UTF-8'])\n", (17357, 17393), False, 'from huaweicloudsdkcore.utils import http_utils\n'), ((19320, 19393), 'huaweicloudsdkcore.utils.http_utils.select_header_content_type', 'http_utils.select_header_content_type', (["['application/json;charset=UTF-8']"], {}), "(['application/json;charset=UTF-8'])\n", (19357, 19393), False, 'from huaweicloudsdkcore.utils import http_utils\n'), ((21416, 21489), 'huaweicloudsdkcore.utils.http_utils.select_header_content_type', 'http_utils.select_header_content_type', (["['application/json;charset=UTF-8']"], {}), "(['application/json;charset=UTF-8'])\n", (21453, 21489), False, 'from huaweicloudsdkcore.utils import http_utils\n'), ((23389, 23462), 'huaweicloudsdkcore.utils.http_utils.select_header_content_type', 'http_utils.select_header_content_type', (["['application/json;charset=UTF-8']"], {}), "(['application/json;charset=UTF-8'])\n", (23426, 23462), False, 'from huaweicloudsdkcore.utils import http_utils\n'), ((25258, 25331), 'huaweicloudsdkcore.utils.http_utils.select_header_content_type', 'http_utils.select_header_content_type', (["['application/json;charset=UTF-8']"], {}), "(['application/json;charset=UTF-8'])\n", (25295, 25331), False, 'from huaweicloudsdkcore.utils import http_utils\n'), ((27188, 27261), 'huaweicloudsdkcore.utils.http_utils.select_header_content_type', 'http_utils.select_header_content_type', 
(["['application/json;charset=UTF-8']"], {}), "(['application/json;charset=UTF-8'])\n", (27225, 27261), False, 'from huaweicloudsdkcore.utils import http_utils\n'), ((29196, 29269), 'huaweicloudsdkcore.utils.http_utils.select_header_content_type', 'http_utils.select_header_content_type', (["['application/json;charset=UTF-8']"], {}), "(['application/json;charset=UTF-8'])\n", (29233, 29269), False, 'from huaweicloudsdkcore.utils import http_utils\n'), ((31130, 31203), 'huaweicloudsdkcore.utils.http_utils.select_header_content_type', 'http_utils.select_header_content_type', (["['application/json;charset=UTF-8']"], {}), "(['application/json;charset=UTF-8'])\n", (31167, 31203), False, 'from huaweicloudsdkcore.utils import http_utils\n'), ((34904, 34977), 'huaweicloudsdkcore.utils.http_utils.select_header_content_type', 'http_utils.select_header_content_type', (["['application/json;charset=UTF-8']"], {}), "(['application/json;charset=UTF-8'])\n", (34941, 34977), False, 'from huaweicloudsdkcore.utils import http_utils\n'), ((36846, 36919), 'huaweicloudsdkcore.utils.http_utils.select_header_content_type', 'http_utils.select_header_content_type', (["['application/json;charset=UTF-8']"], {}), "(['application/json;charset=UTF-8'])\n", (36883, 36919), False, 'from huaweicloudsdkcore.utils import http_utils\n'), ((38779, 38852), 'huaweicloudsdkcore.utils.http_utils.select_header_content_type', 'http_utils.select_header_content_type', (["['application/json;charset=UTF-8']"], {}), "(['application/json;charset=UTF-8'])\n", (38816, 38852), False, 'from huaweicloudsdkcore.utils import http_utils\n'), ((40827, 40900), 'huaweicloudsdkcore.utils.http_utils.select_header_content_type', 'http_utils.select_header_content_type', (["['application/json;charset=UTF-8']"], {}), "(['application/json;charset=UTF-8'])\n", (40864, 40900), False, 'from huaweicloudsdkcore.utils import http_utils\n'), ((42738, 42811), 'huaweicloudsdkcore.utils.http_utils.select_header_content_type', 
'http_utils.select_header_content_type', (["['application/json;charset=UTF-8']"], {}), "(['application/json;charset=UTF-8'])\n", (42775, 42811), False, 'from huaweicloudsdkcore.utils import http_utils\n'), ((44668, 44741), 'huaweicloudsdkcore.utils.http_utils.select_header_content_type', 'http_utils.select_header_content_type', (["['application/json;charset=UTF-8']"], {}), "(['application/json;charset=UTF-8'])\n", (44705, 44741), False, 'from huaweicloudsdkcore.utils import http_utils\n'), ((46604, 46677), 'huaweicloudsdkcore.utils.http_utils.select_header_content_type', 'http_utils.select_header_content_type', (["['application/json;charset=UTF-8']"], {}), "(['application/json;charset=UTF-8'])\n", (46641, 46677), False, 'from huaweicloudsdkcore.utils import http_utils\n'), ((48493, 48566), 'huaweicloudsdkcore.utils.http_utils.select_header_content_type', 'http_utils.select_header_content_type', (["['application/json;charset=UTF-8']"], {}), "(['application/json;charset=UTF-8'])\n", (48530, 48566), False, 'from huaweicloudsdkcore.utils import http_utils\n'), ((50393, 50466), 'huaweicloudsdkcore.utils.http_utils.select_header_content_type', 'http_utils.select_header_content_type', (["['application/json;charset=UTF-8']"], {}), "(['application/json;charset=UTF-8'])\n", (50430, 50466), False, 'from huaweicloudsdkcore.utils import http_utils\n'), ((52257, 52330), 'huaweicloudsdkcore.utils.http_utils.select_header_content_type', 'http_utils.select_header_content_type', (["['application/json;charset=UTF-8']"], {}), "(['application/json;charset=UTF-8'])\n", (52294, 52330), False, 'from huaweicloudsdkcore.utils import http_utils\n'), ((54359, 54432), 'huaweicloudsdkcore.utils.http_utils.select_header_content_type', 'http_utils.select_header_content_type', (["['application/json;charset=UTF-8']"], {}), "(['application/json;charset=UTF-8'])\n", (54396, 54432), False, 'from huaweicloudsdkcore.utils import http_utils\n'), ((57990, 58063), 
'huaweicloudsdkcore.utils.http_utils.select_header_content_type', 'http_utils.select_header_content_type', (["['application/json;charset=UTF-8']"], {}), "(['application/json;charset=UTF-8'])\n", (58027, 58063), False, 'from huaweicloudsdkcore.utils import http_utils\n'), ((59987, 60060), 'huaweicloudsdkcore.utils.http_utils.select_header_content_type', 'http_utils.select_header_content_type', (["['application/json;charset=UTF-8']"], {}), "(['application/json;charset=UTF-8'])\n", (60024, 60060), False, 'from huaweicloudsdkcore.utils import http_utils\n'), ((67232, 67305), 'huaweicloudsdkcore.utils.http_utils.select_header_content_type', 'http_utils.select_header_content_type', (["['application/json;charset=UTF-8']"], {}), "(['application/json;charset=UTF-8'])\n", (67269, 67305), False, 'from huaweicloudsdkcore.utils import http_utils\n'), ((69349, 69422), 'huaweicloudsdkcore.utils.http_utils.select_header_content_type', 'http_utils.select_header_content_type', (["['application/json;charset=UTF-8']"], {}), "(['application/json;charset=UTF-8'])\n", (69386, 69422), False, 'from huaweicloudsdkcore.utils import http_utils\n'), ((71380, 71453), 'huaweicloudsdkcore.utils.http_utils.select_header_content_type', 'http_utils.select_header_content_type', (["['application/json;charset=UTF-8']"], {}), "(['application/json;charset=UTF-8'])\n", (71417, 71453), False, 'from huaweicloudsdkcore.utils import http_utils\n')]
'''
Parse the MC_object database from the Habitat Stratus backup.

There are still lots of unknowns:

* Many objects have container 0x20202020. They appear to be unused, but it's
  unclear why.
* Some address strings have unprintable characters. It's unclear if this was
  intentional or garbage data.
* Matchbook (class 49): there are 3 objects of this type, but they appear to
  be overwritten or otherwise unused.
* When combined with MC_regions, we find lots of orphaned objects. This may
  be because of broken relationships. Some appear to be pockets of avatars.
'''
import json, struct, sys
from collections import OrderedDict

# Names for the fixed-size header fields of every MC_object row, in the same
# order the fields appear in FORMAT below.
STRUCT_ITEMS = (
    'id',
    'class',
    'container',
    'contype',
    'x_pos',
    'y_pos',
    'style',
    'gr_state',
    'orientation',
    'gr_width',
    'nitty_bits',
    'prop_length',
    'property_data',
)

# Big-endian row layout: 3 u32, 7 u16, 1 u32, 10 pad bytes, 1 u16, then an
# 86-byte class-specific property blob.  Every row is exactly 128 bytes.
FORMAT = '> 3I 7H I 10x H 86s'
assert struct.calcsize(FORMAT) == 128

# Per-class decoders for the 86-byte property blob:
#     class_id: (struct format, [field names])
# 'W' is a custom "fatword" code: a 16-bit value stored as 00 xx 00 yy
# (see decode_properties).  Classes 56/57 (signs) and the trailing arrays of
# classes 18/92/85 are handled separately in parse_properties.
PARSERS = {
    2: ('>HI', ['magic_type', 'magic_data']),
    129: ('>H', ['state']),
    6: ('>HW', ['open_flags', 'key']),
    130: ('>H', ['open_flags']),
    10: ('>HIH', ['current_page', 'text_id', 'last_page']),
    12: ('>H', ['filled']),
    13: ('>HW', ['open_flags', 'key']),
    131: ('>HH', ['width', 'length']),
    132: ('>xxxxxxi', ['connection']),
    158: ('>H', ['open_flags']),
    134: ('>H', ['open_flags']),
    135: ('>HW', ['open_flags', 'key']),
    136: ('>I', ['take']),
    137: ('>H', ['open_flags']),
    18: ('>HW', ['open_flags', 'key']),  # + whoput array
    20: ('>H', ['live']),
    21: ('>H', ['state']),
    22: ('>HWIH', ['open_flags', 'key', 'owner', 'locked']),
    23: ('>HWi', ['open_flags', 'key', 'connection']),
    25: ('>HH', ['count', 'effect']),
    28: ('>HI20s', ['state', 'take', 'address']),
    26: ('>H', ['charge']),
    27: ('>H', ['state']),
    29: ('>H', ['mass']),
    30: ('>H', ['on']),
    93: ('>H', ['flat_type']),
    139: ('>H', ['on']),
    140: ('>I', ['take']),
    141: ('>H', ['live']),
    5: ('>H', ['state']),
    32: ('>HW', ['open_flags', 'key']),
    33: ('>HI', ['magic_type', 'magic_data']),
    98: ('>HWHHHHHHHHHHHH', ['open_flags', 'key',
        'x_offset_1', 'y_offset_1', 'x_offset_2', 'y_offset_2',
        'x_offset_3', 'y_offset_3', 'x_offset_4', 'y_offset_4',
        'x_offset_5', 'y_offset_5', 'x_offset_6', 'y_offset_6']),
    35: ('>H', ['pinpulled']),
    38: ('>H', ['state']),
    88: ('>HW', ['open_flags', 'key']),
    40: ('>H', ['instant_what']),
    42: ('>W', ['key_number']),
    43: ('>H', ['is_magic']),
    45: ('>HHxxxxH', ['lamp_state', 'wisher', 'live']),
    46: ('>HI', ['magic_type', 'magic_data']),
    48: ('>HI', ['mail_arrived', 'owner']),
    # XXX can't find valid example to decode varstring properly
    #49: ('>84s', ['mtext']),
    52: ('>H', ['on']),
    54: ('>I', ['text_id']),
    96: ('>HW', ['open_flags', 'key']),
    152: ('>HH', ['mass', 'picture']),
    58: ('>H', ['mass']),
    55: ('>HIH', ['current_page', 'text_id', 'last_page']),
    60: ('>HI', ['magic_type', 'magic_data']),
    61: ('>H', ['mass']),
    149: ('>HH', ['base', 'pattern']),
    150: ('>HW', ['open_flags', 'key']),
    63: ('>H', ['on']),
    64: ('>H', ['scan_type']),
    #56: short sign, handled below
    #57: sign, handled below
    95: ('>H', ['charge']),
    70: ('>HH', ['on', 'tape']),
    153: ('>HH', ['width', 'height']),
    92: ('>HHHHHHHH', ['trapezoid_type', 'upper_left_x', 'upper_right_x',
        'lower_left_x', 'lower_right_x', 'height',
        'pattern_x_size', 'pattern_y_size']),  # + pattern array
    97: ('>HI', ['magic_type', 'magic_data']),
    155: ('>HW', ['open_flags', 'key']),
    74: ('>HI20s', ['state', 'take', 'address']),
    75: ('>H', ['event']),
    76: ('>W', ['denom']),
    87: ('>HHHHHH', ['trapezoid_type', 'upper_left_x', 'upper_right_x',
        'lower_left_x', 'lower_right_x', 'height']),
    85: ('>HWHH', ['open_flags', 'key', 'item_price', 'display_item']),  # + prices array
    86: ('>HW', ['open_flags', 'key']),
    # NOTE(review): class 80 lists 3 keys but '>HH' decodes only 2 values,
    # so zip() silently drops 'pattern'.  Possibly the format should be
    # '>HHH' -- confirm against real data before changing.
    80: ('>HH', ['length', 'height', 'pattern']),
    82: ('>H', ['wind_level']),
}


def decode_properties(buf, fmt, keys):
    '''
    Parse the properties from the given byte buffer, using the format string
    and names of keys for each item in the format string.

    fmt is a struct format string that may additionally contain the custom
    'W' ("fatword") code: a 16-bit value stored on the wire as 00 xx 00 yy.
    Returns an OrderedDict of name/value pairs for all keys.
    '''
    fat_words = []

    # Handle fatwords, which are 16-bits stored as 00 xx 00 yy.  Decode
    # each one as a plain u32 first and fix the value up afterwards.
    if 'W' in fmt:
        # Hack: the fatword handling maps each format character to a key by
        # position, which breaks if the format uses repeat counts.
        idx = fmt.index('W')
        if any(c.isdigit() for c in fmt[:idx]):
            raise ValueError('cant handle format strings with numbers')

        # Skip a leading byte-order character (e.g. '>') when mapping a
        # format position back to its key index.
        base = 1 if not fmt[0].isalpha() else 0
        fmt_chars = []
        for i, c in enumerate(fmt):
            if c == 'W':
                c = 'I'
                fat_words.append(keys[i - base])
            fmt_chars.append(c)
        fmt = ''.join(fmt_chars)

    data = OrderedDict(zip(
        keys, struct.unpack(fmt, buf[:struct.calcsize(fmt)])))

    # Replace each fat word with its actual value: 00 xx 00 yy -> xxyy
    for name in fat_words:
        data[name] = ((data[name] >> 8) & 0xff00) | (data[name] & 0xff)
    return data


def parse_array(buf, fmt, count):
    ''' Unpack `count` same-sized items of format `fmt` into a flat list. '''
    items = []
    item_size = struct.calcsize(fmt)
    for i in range(count):
        items += struct.unpack(fmt, buf[i * item_size:(i + 1) * item_size])
    return items


def decode_text(buf):
    '''
    Decode a word-packed string (00 x 00 y ...), which is similar to a
    fatword but is a string instead of int.  Returns a list of byte values.
    '''
    return [buf[i] for i in range(1, len(buf), 2)]


def parse_properties(cls, property_data):
    ''' Decode basic properties and then class-specific ones '''
    data = OrderedDict()
    args = PARSERS.get(cls)
    if args:
        data.update(decode_properties(property_data, *args))
        # Offset where any trailing per-class array starts ('W' occupies
        # 4 bytes on the wire, so size it as 'I').
        remainder_off = struct.calcsize(args[0].replace('W', 'I'))

    def trailing_array(fmt, n):
        # Slice exactly n items of `fmt` from the remainder of the blob.
        # (The original sliced n * 4 bytes even for 2-byte halfwords,
        # over-reading the 86-byte blob; parse_array only consumed what it
        # needed, so results are identical.)
        size = struct.calcsize(fmt)
        return parse_array(
            property_data[remainder_off:remainder_off + n * size], fmt, n)

    # Special class decoders for those not fully handled above
    if cls == 56:
        # short sign: 10 word-packed characters
        data['text'] = decode_text(property_data[:10 * 2])
    elif cls == 57:
        # sign: 40 word-packed characters
        data['text'] = decode_text(property_data[:40 * 2])
    elif cls == 18:
        # countertop: whoput = 5 ints
        data['whoput'] = trailing_array('>I', 5)
    elif cls == 92:
        # super trapezoid: pattern = 32 halfwords
        data['pattern'] = trailing_array('>H', 32)
    elif cls == 85:
        # vendo front: prices = 10 halfwords
        data['prices'] = trailing_array('>H', 10)
    return data


def decode_row(row):
    ''' Parse a single 128-byte row and return a dict of its fields. '''
    data = OrderedDict(zip(STRUCT_ITEMS, struct.unpack(FORMAT, row)))
    data.update(parse_properties(data['class'], data['property_data']))

    # These fields tend to be all padding (0x20 is ASCII space) for many
    # objects.  Maybe these were deleted or superseded?
    data['deleted'] = (data['container'] == 0x20202020 and
                       data['contype'] == 0x2020)

    # Always remove the raw property bytes, which we've decoded
    del data['property_data']

    # Clear text data if it's unprintable, otherwise expose it as str
    if 'address' in data:
        if any(c >= 0x80 for c in data['address']):
            data['address'] = ''
        else:
            data['address'] = data['address'].decode('ascii')
    return data


def main():
    '''
    Read each row from the database and decode it, dumping output to JSON.

    Usage: <script> <MC_object db file> <output json file>
    '''
    row_size = struct.calcsize(FORMAT)
    items = []
    with open(sys.argv[1], "rb") as fp:
        while True:
            row = fp.read(row_size)
            if not row:
                break
            if len(row) < row_size:
                # Truncated trailing row: stop instead of crashing in
                # struct.unpack.
                break
            items.append(decode_row(row))

    with open(sys.argv[2], 'w') as fp:
        json.dump(items, fp, indent=2)


if __name__ == '__main__':
    main()
[ "collections.OrderedDict", "json.dump", "struct.unpack", "struct.calcsize" ]
[((897, 920), 'struct.calcsize', 'struct.calcsize', (['FORMAT'], {}), '(FORMAT)\n', (912, 920), False, 'import json, struct, sys\n'), ((5375, 5395), 'struct.calcsize', 'struct.calcsize', (['fmt'], {}), '(fmt)\n', (5390, 5395), False, 'import json, struct, sys\n'), ((5850, 5863), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (5861, 5863), False, 'from collections import OrderedDict\n'), ((5440, 5498), 'struct.unpack', 'struct.unpack', (['fmt', 'buf[i * item_size:(i + 1) * item_size]'], {}), '(fmt, buf[i * item_size:(i + 1) * item_size])\n', (5453, 5498), False, 'import json, struct, sys\n'), ((8318, 8348), 'json.dump', 'json.dump', (['items', 'fp'], {'indent': '(2)'}), '(items, fp, indent=2)\n', (8327, 8348), False, 'import json, struct, sys\n'), ((7083, 7109), 'struct.unpack', 'struct.unpack', (['FORMAT', 'row'], {}), '(FORMAT, row)\n', (7096, 7109), False, 'import json, struct, sys\n'), ((8157, 8180), 'struct.calcsize', 'struct.calcsize', (['FORMAT'], {}), '(FORMAT)\n', (8172, 8180), False, 'import json, struct, sys\n'), ((5046, 5066), 'struct.calcsize', 'struct.calcsize', (['fmt'], {}), '(fmt)\n', (5061, 5066), False, 'import json, struct, sys\n')]
# # Copyright (c) 2021 Intel Corporation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from grpc import StatusCode from numpy import array, float64, int32, int8, float128, float32 from ovmsclient.tfs_compat.base.errors import ModelNotFoundError, InvalidInputError from config import CallCount, PATH_VALID # noqa from tensorflow.core.framework.tensor_pb2 import TensorProto from tensorflow.core.framework.tensor_shape_pb2 import TensorShapeProto from tensorflow_serving.apis.get_model_status_pb2 import ModelVersionStatus from tensorflow.core.framework.types_pb2 import DataType from tensorflow.core.protobuf.error_codes_pb2 import Code as ErrorCode from tensorflow_serving.apis.get_model_status_pb2 import GetModelStatusRequest from tensorflow_serving.apis.get_model_metadata_pb2 import GetModelMetadataRequest from tensorflow_serving.apis.predict_pb2 import PredictRequest from ovmsclient.tfs_compat.grpc.requests import (GrpcModelMetadataRequest, GrpcModelStatusRequest, GrpcPredictRequest) # responses_dict = { # model_version: { expected_status } # } MODEL_STATUS_RESPONSE_VALID = [ { 1: { "state": ModelVersionStatus.State.AVAILABLE, "error_code": ErrorCode.OK, "error_message": "OK" } }, { 2: { "state": ModelVersionStatus.State.END, "error_code": ErrorCode.OK, "error_message": "OK" }, 3: { "state": ModelVersionStatus.State.AVAILABLE, "error_code": ErrorCode.OK, "error_message": "" } }, { 1: { "state": ModelVersionStatus.State.START, "error_code": ErrorCode.OK, "error_message": "" }, 2: { 
"state": ModelVersionStatus.State.LOADING, "error_code": ErrorCode.UNKNOWN, "error_message": "Could not load CNN" }, 3: { "state": ModelVersionStatus.State.UNLOADING, "error_code": ErrorCode.OK, "error_message": "" } } ] # response_dict = { # 'version': model_version, # 'name': model_name, # 'inputs': inputs_dict, # 'outputs': outputs_dict # } MODEL_METADATA_RESPONSE_VALID = [ { 'version': 2, 'name': 'resnet', 'inputs': { '0': { 'shape': [1, 3, 244, 244], 'dtype': DataType.DT_FLOAT } }, 'outputs': { '1463': { 'shape': [1, 1000], 'dtype': DataType.DT_FLOAT } } }, { 'version': 1, 'name': 'model_name', 'inputs': { '0': { 'shape': [1, 3, 244, 244], 'dtype': DataType.DT_FLOAT }, '1': { 'shape': [0, 1, 3, 244, 244], 'dtype': DataType.DT_INT32 } }, 'outputs': { '1463': { 'shape': [1, 1000], 'dtype': DataType.DT_FLOAT }, 'second_output': { 'shape': [0, 1, 1000], 'dtype': DataType.DT_INT32 } } }, { 'version': 1, 'name': 'model_name', 'inputs': { 'input1': { 'shape': [1, 3, 1080, 1920], 'dtype': DataType.DT_QINT32 }, 'input2': { 'shape': [1, 3, 244, 244], 'dtype': DataType.DT_INT32 } }, 'outputs': { 'single_output': { 'shape': [1, 7, 200, 200], 'dtype': DataType.DT_FLOAT } } } ] # (inputs_dict, # model_name, model_version, expected_exception, expected_message) PREDICT_REQUEST_INVALID_INPUTS = [ ([], 'model_name', 0, TypeError, "inputs type should be dict, but is list"), (('input1', [1, 2, 3]), 'model_name', 0, TypeError, "inputs type should be dict, but is tuple"), ({ 1: [1, 2, 3], "input2": [1, 2] }, 'model_name', 0, TypeError, "inputs keys type should be str, but found int"), ({ "input1": [[1.0, 2.0], [1.0, 2.0, 3.0]] }, 'model_name', 0, ValueError, ("argument must be a dense tensor: [[1.0, 2.0], [1.0, 2.0, 3.0]] - " "got shape [2], but wanted [2, 2]")), ({ "input1": [[(1, 2, 3)], [(1, 2)], [(1, 2, 3)]] }, 'model_name', 0, TypeError, "provided values type is not valid"), ({ "input1": float128(2.5) }, 'model_name', 0, TypeError, "provided values type is not valid"), ({ 
"input1": (1, 2, 3) }, 'model_name', 0, TypeError, "values type should be (list, np.ndarray, scalar), but is tuple"), ({ "input1": [ [bytes([0x13, 0x00, 0x00, 0x00, 0x08, 0x00]), bytes([0x13, 0x00, 0x00, 0x00, 0x08, 0x00])], [bytes([0x13, 0x00, 0x00, 0x00, 0x08, 0x00]), bytes([0x13, 0x00, 0x00, 0x00, 0x08, 0x00])] ] }, 'model_name', 0, ValueError, "bytes values with dtype DT_STRING must be in shape [N]"), ] # (inputs_dict, # expected_proto_dict, # model_name, model_version) PREDICT_REQUEST_VALID = [ ({ "input1": [1, 2, 3], "input2": array([1.0, 2.0, 3.0]), "input3": [[int32(3), int32(1)], [int32(4), int32(16)]], }, { "input1": { "field": "tensor_content", "shape": TensorShapeProto(dim=[TensorShapeProto.Dim(size=3)]), "dtype": DataType.DT_INT32, 'value': array([1, 2, 3], dtype=int32).tobytes() }, "input2": { "field": "tensor_content", "shape": TensorShapeProto(dim=[TensorShapeProto.Dim(size=3)]), "dtype": DataType.DT_DOUBLE, 'value': array([1.0, 2.0, 3.0]).tobytes() }, "input3": { "field": "tensor_content", "shape": TensorShapeProto(dim=[TensorShapeProto.Dim(size=2), TensorShapeProto.Dim(size=2)]), "dtype": DataType.DT_INT32, 'value': array([[int32(3), int32(1)], [int32(4), int32(16)]]).tobytes() }, }, 'model_name', 0), ({ "input1": TensorProto(dtype=DataType.DT_INT8, tensor_shape=TensorShapeProto(dim=[TensorShapeProto.Dim(size=2), TensorShapeProto.Dim(size=3)]), tensor_content=array([1, 2, 3, 4, 5, 6]).tobytes()), "input2": 5.0, "input3": bytes([1, 2, 3]) }, { "input2": { "field": "float_val", "shape": TensorShapeProto(dim=[TensorShapeProto.Dim(size=1)]), "dtype": DataType.DT_FLOAT, 'value': array([5.0], dtype=float32) }, "input3": { "field": "string_val", "shape": TensorShapeProto(dim=[TensorShapeProto.Dim(size=1)]), "dtype": DataType.DT_STRING, 'value': [bytes([1, 2, 3])] } }, 'model_name', 0), ({ }, { }, 'model_name', 0) ] # (response_outputs_dict, model_name, model_version, expected_outputs) PREDICT_RESPONSE_VALID = [ ({ "1463": 
TensorProto(dtype=DataType.DT_INT8, tensor_shape=TensorShapeProto(dim=[TensorShapeProto.Dim(size=3)]), tensor_content=array([1, 2, 3], dtype=int8).tobytes()), }, "model_name", 0, array([1, 2, 3], dtype=int8) ), ({ "1463": TensorProto(dtype=DataType.DT_INT32, tensor_shape=TensorShapeProto(dim=[TensorShapeProto.Dim(size=2), TensorShapeProto.Dim(size=3)]), tensor_content=array([1, 2, 3, 4, 5, 6], dtype=int32).tobytes()), "2": TensorProto(dtype=DataType.DT_DOUBLE, tensor_shape=TensorShapeProto(dim=[TensorShapeProto.Dim(size=1)]), double_val=array([12.0], dtype=float64)), }, "model_name", 0, { "1463": array([[1, 2, 3], [4, 5, 6]], dtype=int32), "2": array([12.0], dtype=float64) }), ({ "1463": TensorProto(dtype=DataType.DT_STRING, tensor_shape=TensorShapeProto(dim=[TensorShapeProto.Dim(size=2)]), string_val=[bytes([1, 2, 3]), bytes([4, 5])]), "2": TensorProto(dtype=DataType.DT_STRING, tensor_shape=TensorShapeProto(dim=[TensorShapeProto.Dim(size=1)]), string_val=[bytes([1, 2, 3])]), }, "model_name", 0, { "1463": [bytes([1, 2, 3]), bytes([4, 5])], "2": [bytes([1, 2, 3])] }), ] # (response_outputs_dict, model_name, model_version, expected_exception, expected_message) PREDICT_RESPONSE_TENSOR_TYPE_INVALID = [ ({ "1463": TensorProto(), }, "model_name", 0, TypeError, "Unsupported tensor type: 0"), ({ "1463": TensorProto(dtype=DataType.DT_INVALID), }, "model_name", 0, TypeError, "Unsupported tensor type: 0"), ({ "1463": TensorProto(dtype=DataType.DT_RESOURCE), }, "model_name", 0, TypeError, "Unsupported tensor type: 20"), ] # ({"model_name": model_name, "model_version": model_version, # "raw_request_model_name": raw_request_model_name, "raw_request_model_version": raw_request_model_version})# noqa : E501 MODEL_STATUS_REQUEST_VALID = [ ({"model_name": "name", "model_version": 0, "raw_request_model_name": "name", "raw_request_model_version": 0}), ] # ({"model_name": model_name, "model_version": model_version, # "raw_request_model_name": raw_request_model_name, 
"raw_request_model_version": raw_request_model_version},# noqa : E501 # expected_exception, expected_message) MODEL_STATUS_REQUEST_INVALID_RAW_REQUEST = [ ({"model_name": "name", "model_version": 0, "raw_request_model_name": "other_name", "raw_request_model_version": 0}, ValueError, 'request is not valid GrpcModelStatusRequest'), ({"model_name": "other_name", "model_version": 0, "raw_request_model_name": "name", "raw_request_model_version": 0}, ValueError, 'request is not valid GrpcModelStatusRequest'), ({"model_name": "name", "model_version": 0, "raw_request_model_name": "name", "raw_request_model_version": 1}, ValueError, 'request is not valid GrpcModelStatusRequest'), ({"model_name": "name", "model_version": 1, "raw_request_model_name": "name", "raw_request_model_version": 0}, ValueError, 'request is not valid GrpcModelStatusRequest'), ] # (request, expeceted_exception, expected_message) MODEL_STATUS_REQUEST_INVALID_REQUEST_TYPE = [ (None, TypeError, "request type should be GrpcModelStatusRequest, but is NoneType"), (GetModelStatusRequest(), TypeError, "request type should be GrpcModelStatusRequest, but is GetModelStatusRequest"), (GrpcModelStatusRequest('model_name', 0, 'raw_request'), TypeError, "request is not valid GrpcModelStatusRequest") ] # (grpc_error_status_code, grpc_error_details, raised_error_type, raised_error_message) COMMON_INVALID_GRPC = [ (StatusCode.UNAVAILABLE, "failed to connect to all adresses", ConnectionError, "Error occurred during handling the request: " "failed to connect to all adresses"), (StatusCode.UNAVAILABLE, "Empty update", ConnectionError, "Error occurred during handling the request: Empty update"), (StatusCode.DEADLINE_EXCEEDED, "Deadline Exceeded", TimeoutError, "Error occurred during handling the request: " "Request handling exceeded timeout"), (StatusCode.NOT_FOUND, "Model with requested version is not found", ModelNotFoundError, "Error occurred during handling the request: " "Model with requested version is not found"), 
(StatusCode.NOT_FOUND, "Model with requested name is not found", ModelNotFoundError, "Error occurred during handling the request: " "Model with requested name is not found"), ] # ({"model_name": model_name, "model_version": model_version, # "raw_request_model_name": raw_request_model_name, "raw_request_model_version": raw_request_model_version,# noqa : E501 # "metadata_field_list": raw_request_metadata_fields}) MODEL_METADATA_REQUEST_VALID = [ ({"model_name": "name", "model_version": 0, "raw_request_model_name": "name", "raw_request_model_version": 0, "metadata_field_list": ["signature_def"]}), ] # ({"model_name": model_name, "model_version": model_version, # "raw_request_model_name": raw_request_model_name, "raw_request_model_version": raw_request_model_version,# noqa : E501 # "metadata_field_list": raw_request_metadata_fields}, # expected_exception, expected_message) MODEL_METADATA_REQUEST_INVALID_RAW_REQUEST = [ ({"model_name": "name", "model_version": 0, "raw_request_model_name": "other_name", "raw_request_model_version": 0, "metadata_field_list": ["signature_def"]}, ValueError, 'request is not valid GrpcModelMetadataRequest'), ({"model_name": "other_name", "model_version": 0, "raw_request_model_name": "name", "raw_request_model_version": 0, "metadata_field_list": ["signature_def"]}, ValueError, 'request is not valid GrpcModelMetadataRequest'), ({"model_name": "name", "model_version": 0, "raw_request_model_name": "name", "raw_request_model_version": 1, "metadata_field_list": ["signature_def"]}, ValueError, 'request is not valid GrpcModelMetadataRequest'), ({"model_name": "name", "model_version": 1, "raw_request_model_name": "name", "raw_request_model_version": 0, "metadata_field_list": ["signature_def"]}, ValueError, 'request is not valid GrpcModelMetadataRequest'), ({"model_name": "name", "model_version": 1, "raw_request_model_name": "name", "raw_request_model_version": 1, "metadata_field_list": ["invalid"]}, ValueError, 'request is not valid 
GrpcModelMetadataRequest'), ] # (request, expected_exception, expected_message) MODEL_METADATA_REQUEST_INVALID_REQUEST_TYPE = [ (None, TypeError, "request type should be GrpcModelMetadataRequest, but is NoneType"), (GetModelMetadataRequest(), TypeError, "request type should be GrpcModelMetadataRequest, but is GetModelMetadataRequest"), (GrpcModelMetadataRequest('model_name', 0, 'raw_request'), TypeError, "request is not valid GrpcModelMetadataRequest") ] # ({"model_name": model_name, "model_version": model_version, # "raw_request_model_name": raw_request_model_name, "raw_request_model_version": raw_request_model_version,# noqa : E501 # "inputs_dict": inputs_for_request, "raw_request_inputs_dict": inputs_for_raw_request}) PREDICT_REQUEST_VALID_SPEC = [ ({"model_name": "name", "model_version": 0, "raw_request_model_name": "name", "raw_request_model_version": 0, "inputs_dict": { "0": TensorProto(dtype=DataType.DT_INT8, tensor_shape=TensorShapeProto(dim=[TensorShapeProto.Dim(size=3)]), tensor_content=array([1, 2, 3]).tobytes()) }, "raw_request_inputs_dict": { "0": TensorProto(dtype=DataType.DT_INT8, tensor_shape=TensorShapeProto(dim=[TensorShapeProto.Dim(size=3)]), tensor_content=array([1, 2, 3]).tobytes()) }}), ] # ({"model_name": model_name, "model_version": model_version, # "raw_request_model_name": raw_request_model_name, "raw_request_model_version": raw_request_model_version,# noqa : E501 # "inputs_dict": inputs_for_request, "raw_request_inputs_dict": inputs_for_raw_request}, # expected_exception, expected_message) PREDICT_REQUEST_INVALID_SPEC_RAW_REQUEST = [ ({"model_name": "name", "model_version": 0, "raw_request_model_name": "other_name", "raw_request_model_version": 0, "inputs_dict": { "0": TensorProto() }, "raw_request_inputs_dict": { "0": TensorProto() }}, ValueError, 'request is not valid GrpcPredictRequest'), ({"model_name": "other_name", "model_version": 0, "raw_request_model_name": "name", "raw_request_model_version": 0, "inputs_dict": { "0": 
TensorProto() }, "raw_request_inputs_dict": { "0": TensorProto() }}, ValueError, 'request is not valid GrpcPredictRequest'), ({"model_name": "name", "model_version": 1, "raw_request_model_name": "name", "raw_request_model_version": 0, "inputs_dict": { "0": TensorProto() }, "raw_request_inputs_dict": { "0": TensorProto() }}, ValueError, 'request is not valid GrpcPredictRequest'), ({"model_name": "name", "model_version": 0, "raw_request_model_name": "name", "raw_request_model_version": 1, "inputs_dict": { "0": TensorProto() }, "raw_request_inputs_dict": { "0": TensorProto() }}, ValueError, 'request is not valid GrpcPredictRequest'), ({"model_name": "name", "model_version": 0, "raw_request_model_name": "name", "raw_request_model_version": 0, "inputs_dict": { "0": TensorProto() }, "raw_request_inputs_dict": { "1": TensorProto() }}, ValueError, 'request is not valid GrpcPredictRequest'), ] # (predict_request, expected_exception, expected_message) PREDICT_REQUEST_INVALID_SPEC_TYPE = [ (None, TypeError, 'request type should be GrpcPredictRequest, but is NoneType'), (PredictRequest(), TypeError, 'request type should be GrpcPredictRequest, but is PredictRequest'), (GrpcPredictRequest({}, "model_name", 0, "raw_request"), TypeError, 'request is not valid GrpcPredictRequest'), ] # (grpc_error_status_code, grpc_error_details, raised_error_type, raised_error_message) PREDICT_INVALID_GRPC = COMMON_INVALID_GRPC + [ (StatusCode.INVALID_ARGUMENT, "Invalid input precision - Expected: FP32; Actual: I64", InvalidInputError, "Error occurred during handling the request: " "Invalid input precision - Expected: FP32; Actual: I64"), (StatusCode.INVALID_ARGUMENT, "Invalid number of inputs - Expected: 1; Actual: 0", InvalidInputError, "Error occurred during handling the request: " "Invalid number of inputs - Expected: 1; Actual: 0"), (StatusCode.INVALID_ARGUMENT, "Missing input with specific name - Required input: 0", InvalidInputError, "Error occurred during handling the request: " "Missing 
input with specific name - Required input: 0"), (StatusCode.INVALID_ARGUMENT, "Invalid number of shape dimensions - " "Expected: (1,3,224,224); Actual: (3)", InvalidInputError, "Error occurred during handling the request: " "Invalid number of shape dimensions - Expected: (1,3,224,224); " "Actual: (3)"), ] # (config_dict, # method_call_count_dict= {"method_name": CallCount.NumberOfCalls}) BUILD_VALID = [ ( { "url": "localhost:9000" }, { "_check_url": CallCount.ONE, "_check_tls_config": CallCount.ZERO, "_prepare_certs": CallCount.ZERO } ), ( { "url": "172.16.17.32:1" }, { "_check_url": CallCount.ONE, "_check_tls_config": CallCount.ZERO, "_prepare_certs": CallCount.ZERO } ), ( { "url": f"cluster.cloud.iotg.intel.com:{2**16-1}" }, { "_check_url": CallCount.ONE, "_check_tls_config": CallCount.ZERO, "_prepare_certs": CallCount.ZERO } ), ( { "url": "localhost:9000", "tls_config": { "server_cert_path": "valid_path" } }, { "_check_url": CallCount.ONE, "_check_tls_config": CallCount.ONE, "_prepare_certs": CallCount.ONE } ), ( { "url": "localhost:9000", "tls_config": { "client_key_path": PATH_VALID, "client_cert_path": PATH_VALID, "server_cert_path": PATH_VALID } }, { "_check_url": CallCount.ONE, "_check_tls_config": CallCount.ONE, "_prepare_certs": CallCount.ONE } ) ] # (config_dict, # method_call_dict= {"method_name": (CallCount.NumberOfCalls, error_raised)}, # expected_exception, expected_message) BUILD_INVALID_CONFIG = [ ( { "url": "localhost" }, { "_check_url": (CallCount.ONE, ValueError("url must be a string " "in format <address>:<port>")), "_check_tls_config": (CallCount.ZERO, None), "_prepare_certs": (CallCount.ZERO, None) }, ValueError, "url must be a string in format <address>:<port>" ), ( { "url": 123 }, { "_check_url": (CallCount.ONE, TypeError("url must be a string " "in format <address>:<port>")), "_check_tls_config": (CallCount.ZERO, None), "_prepare_certs": (CallCount.ZERO, None) }, TypeError, "url must be a string in format <address>:<port>" ), ( { "url": 
"address:9000", }, { "_check_url": (CallCount.ONE, ValueError("address is not valid")), "_check_tls_config": (CallCount.ZERO, None), "_prepare_certs": (CallCount.ZERO, None) }, ValueError, "address is not valid" ), ( { "url": "localhost:port" }, { "_check_url": (CallCount.ONE, TypeError("port should be of type int")), "_check_tls_config": (CallCount.ZERO, None), "_prepare_certs": (CallCount.ZERO, None) }, TypeError, "port should be of type int" ), ( { "url": f"localhost:{2**16}" }, { "_check_url": (CallCount.ONE, ValueError(f"port should be in range <0, {2**16-1}>")), "_check_tls_config": (CallCount.ZERO, None), "_prepare_certs": (CallCount.ZERO, None) }, ValueError, f"port should be in range <0, {2**16-1}>" ), ( { "url": "localhost:9000", "tls_config": 123 }, { "_check_url": (CallCount.ONE, None), "_check_tls_config": (CallCount.ONE, TypeError("tls_config should be of type dict")), "_prepare_certs": (CallCount.ZERO, None) }, TypeError, "tls_config should be of type dict" ), ( { "url": "localhost:9000", "tls_config": { } }, { "_check_url": (CallCount.ONE, None), "_check_tls_config": (CallCount.ONE, ValueError("server_cert_path is not defined " "in tls_config")), "_prepare_certs": (CallCount.ZERO, None) }, ValueError, "server_cert_path is not defined in tls_config" ), ( { "url": "10.20.30.40:1000", "tls_config": { "server_cert_path": PATH_VALID, "client_key_path": PATH_VALID } }, { "_check_url": (CallCount.ONE, None), "_check_tls_config": (CallCount.ONE, ValueError("none or both client_key_path " "and client_cert_path are required " "in tls_config")), "_prepare_certs": (CallCount.ZERO, None) }, ValueError, "none or both client_key_path and client_cert_path are required in tls_config" ), ( { "url": "localhost:9000", "tls_config": { "server_cert_path": PATH_VALID, "client_key_path": PATH_VALID, "client_cert_path": PATH_VALID, "invalid_key_name": PATH_VALID } }, { "_check_url": (CallCount.ONE, None), "_check_tls_config": (CallCount.ONE, ValueError("invalid_key_name is 
" "not valid tls_config key")), "_prepare_certs": (CallCount.ZERO, None) }, ValueError, "invalid_key_name is not valid tls_config key" ), ( { "url": "localhost:9000", "tls_config": { "server_cert_path": PATH_VALID, "client_key_path": PATH_VALID, "client_cert_path": 123, } }, { "_check_url": (CallCount.ONE, None), "_check_tls_config": (CallCount.ONE, TypeError("client_cert_path type should be string " "but is type int")), "_prepare_certs": (CallCount.ZERO, None) }, TypeError, "client_cert_path type should be string but is type int" ), ( { "url": "localhost:9000", "tls_config": { "server_cert_path": PATH_VALID, "client_key_path": "invalid_path", "client_cert_path": PATH_VALID, } }, { "_check_url": (CallCount.ONE, None), "_check_tls_config": (CallCount.ONE, ValueError("invalid_path is not valid " "path to file")), "_prepare_certs": (CallCount.ZERO, None) }, ValueError, "invalid_path is not valid path to file" ), ] # (config_dict, # method_call_dict= {"method_name": (CallCount.NumberOfCalls, error_raised)}, # expected_exception, expected_message) BUILD_INVALID_CERTS = [ ( { "url": "localhost:9000", "tls_config": { "server_cert_path": PATH_VALID, "client_key_path": "path_to_invalid_private_key", "client_cert_path": PATH_VALID, } }, { "_check_url": (CallCount.ONE, None), "_check_tls_config": (CallCount.ONE, None), "_prepare_certs": (CallCount.ONE, ValueError("path_to_invalid_private_key file " "is not valid private key")) }, ValueError, "path_to_invalid_private_key file is not valid private key" ), ( { "url": "localhost:9000", "tls_config": { "server_cert_path": "path_to_invalid_server_certificate", "client_key_path": PATH_VALID, "client_cert_path": PATH_VALID, } }, { "_check_url": (CallCount.ONE, None), "_check_tls_config": (CallCount.ONE, None), "_prepare_certs": (CallCount.ONE, ValueError("path_to_invalid_server_certificate " "is not valid certificate")) }, ValueError, "path_to_invalid_server_certificate is not valid certificate" ), ( { "url": "localhost:9000", 
"tls_config": { "server_cert_path": PATH_VALID, "client_key_path": PATH_VALID, "client_cert_path": "path_to_invalid_client_certificate", } }, { "_check_url": (CallCount.ONE, None), "_check_tls_config": (CallCount.ONE, None), "_prepare_certs": (CallCount.ONE, ValueError("path_to_invalid_client_certificate " "is not valid certificate")) }, ValueError, "path_to_invalid_client_certificate is not valid certificate" ), ]
[ "tensorflow_serving.apis.get_model_metadata_pb2.GetModelMetadataRequest", "tensorflow_serving.apis.predict_pb2.PredictRequest", "tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto.Dim", "tensorflow.core.framework.tensor_pb2.TensorProto", "ovmsclient.tfs_compat.grpc.requests.GrpcModelStatusRequest", "tensorflow_serving.apis.get_model_status_pb2.GetModelStatusRequest", "numpy.array", "numpy.int32", "numpy.float128", "ovmsclient.tfs_compat.grpc.requests.GrpcModelMetadataRequest", "ovmsclient.tfs_compat.grpc.requests.GrpcPredictRequest" ]
[((8055, 8083), 'numpy.array', 'array', (['[1, 2, 3]'], {'dtype': 'int8'}), '([1, 2, 3], dtype=int8)\n', (8060, 8083), False, 'from numpy import array, float64, int32, int8, float128, float32\n'), ((11394, 11417), 'tensorflow_serving.apis.get_model_status_pb2.GetModelStatusRequest', 'GetModelStatusRequest', ([], {}), '()\n', (11415, 11417), False, 'from tensorflow_serving.apis.get_model_status_pb2 import GetModelStatusRequest\n'), ((11520, 11574), 'ovmsclient.tfs_compat.grpc.requests.GrpcModelStatusRequest', 'GrpcModelStatusRequest', (['"""model_name"""', '(0)', '"""raw_request"""'], {}), "('model_name', 0, 'raw_request')\n", (11542, 11574), False, 'from ovmsclient.tfs_compat.grpc.requests import GrpcModelMetadataRequest, GrpcModelStatusRequest, GrpcPredictRequest\n'), ((14844, 14869), 'tensorflow_serving.apis.get_model_metadata_pb2.GetModelMetadataRequest', 'GetModelMetadataRequest', ([], {}), '()\n', (14867, 14869), False, 'from tensorflow_serving.apis.get_model_metadata_pb2 import GetModelMetadataRequest\n'), ((14976, 15032), 'ovmsclient.tfs_compat.grpc.requests.GrpcModelMetadataRequest', 'GrpcModelMetadataRequest', (['"""model_name"""', '(0)', '"""raw_request"""'], {}), "('model_name', 0, 'raw_request')\n", (15000, 15032), False, 'from ovmsclient.tfs_compat.grpc.requests import GrpcModelMetadataRequest, GrpcModelStatusRequest, GrpcPredictRequest\n'), ((18158, 18174), 'tensorflow_serving.apis.predict_pb2.PredictRequest', 'PredictRequest', ([], {}), '()\n', (18172, 18174), False, 'from tensorflow_serving.apis.predict_pb2 import PredictRequest\n'), ((18266, 18320), 'ovmsclient.tfs_compat.grpc.requests.GrpcPredictRequest', 'GrpcPredictRequest', (['{}', '"""model_name"""', '(0)', '"""raw_request"""'], {}), "({}, 'model_name', 0, 'raw_request')\n", (18284, 18320), False, 'from ovmsclient.tfs_compat.grpc.requests import GrpcModelMetadataRequest, GrpcModelStatusRequest, GrpcPredictRequest\n'), ((5079, 5092), 'numpy.float128', 'float128', (['(2.5)'], {}), '(2.5)\n', 
(5087, 5092), False, 'from numpy import array, float64, int32, int8, float128, float32\n'), ((5824, 5846), 'numpy.array', 'array', (['[1.0, 2.0, 3.0]'], {}), '([1.0, 2.0, 3.0])\n', (5829, 5846), False, 'from numpy import array, float64, int32, int8, float128, float32\n'), ((8681, 8723), 'numpy.array', 'array', (['[[1, 2, 3], [4, 5, 6]]'], {'dtype': 'int32'}), '([[1, 2, 3], [4, 5, 6]], dtype=int32)\n', (8686, 8723), False, 'from numpy import array, float64, int32, int8, float128, float32\n'), ((8738, 8766), 'numpy.array', 'array', (['[12.0]'], {'dtype': 'float64'}), '([12.0], dtype=float64)\n', (8743, 8766), False, 'from numpy import array, float64, int32, int8, float128, float32\n'), ((9482, 9495), 'tensorflow.core.framework.tensor_pb2.TensorProto', 'TensorProto', ([], {}), '()\n', (9493, 9495), False, 'from tensorflow.core.framework.tensor_pb2 import TensorProto\n'), ((9586, 9624), 'tensorflow.core.framework.tensor_pb2.TensorProto', 'TensorProto', ([], {'dtype': 'DataType.DT_INVALID'}), '(dtype=DataType.DT_INVALID)\n', (9597, 9624), False, 'from tensorflow.core.framework.tensor_pb2 import TensorProto\n'), ((9715, 9754), 'tensorflow.core.framework.tensor_pb2.TensorProto', 'TensorProto', ([], {'dtype': 'DataType.DT_RESOURCE'}), '(dtype=DataType.DT_RESOURCE)\n', (9726, 9754), False, 'from tensorflow.core.framework.tensor_pb2 import TensorProto\n'), ((7364, 7391), 'numpy.array', 'array', (['[5.0]'], {'dtype': 'float32'}), '([5.0], dtype=float32)\n', (7369, 7391), False, 'from numpy import array, float64, int32, int8, float128, float32\n'), ((16561, 16574), 'tensorflow.core.framework.tensor_pb2.TensorProto', 'TensorProto', ([], {}), '()\n', (16572, 16574), False, 'from tensorflow.core.framework.tensor_pb2 import TensorProto\n'), ((16634, 16647), 'tensorflow.core.framework.tensor_pb2.TensorProto', 'TensorProto', ([], {}), '()\n', (16645, 16647), False, 'from tensorflow.core.framework.tensor_pb2 import TensorProto\n'), ((16878, 16891), 
'tensorflow.core.framework.tensor_pb2.TensorProto', 'TensorProto', ([], {}), '()\n', (16889, 16891), False, 'from tensorflow.core.framework.tensor_pb2 import TensorProto\n'), ((16951, 16964), 'tensorflow.core.framework.tensor_pb2.TensorProto', 'TensorProto', ([], {}), '()\n', (16962, 16964), False, 'from tensorflow.core.framework.tensor_pb2 import TensorProto\n'), ((17189, 17202), 'tensorflow.core.framework.tensor_pb2.TensorProto', 'TensorProto', ([], {}), '()\n', (17200, 17202), False, 'from tensorflow.core.framework.tensor_pb2 import TensorProto\n'), ((17262, 17275), 'tensorflow.core.framework.tensor_pb2.TensorProto', 'TensorProto', ([], {}), '()\n', (17273, 17275), False, 'from tensorflow.core.framework.tensor_pb2 import TensorProto\n'), ((17500, 17513), 'tensorflow.core.framework.tensor_pb2.TensorProto', 'TensorProto', ([], {}), '()\n', (17511, 17513), False, 'from tensorflow.core.framework.tensor_pb2 import TensorProto\n'), ((17573, 17586), 'tensorflow.core.framework.tensor_pb2.TensorProto', 'TensorProto', ([], {}), '()\n', (17584, 17586), False, 'from tensorflow.core.framework.tensor_pb2 import TensorProto\n'), ((17811, 17824), 'tensorflow.core.framework.tensor_pb2.TensorProto', 'TensorProto', ([], {}), '()\n', (17822, 17824), False, 'from tensorflow.core.framework.tensor_pb2 import TensorProto\n'), ((17884, 17897), 'tensorflow.core.framework.tensor_pb2.TensorProto', 'TensorProto', ([], {}), '()\n', (17895, 17897), False, 'from tensorflow.core.framework.tensor_pb2 import TensorProto\n'), ((5868, 5876), 'numpy.int32', 'int32', (['(3)'], {}), '(3)\n', (5873, 5876), False, 'from numpy import array, float64, int32, int8, float128, float32\n'), ((5878, 5886), 'numpy.int32', 'int32', (['(1)'], {}), '(1)\n', (5883, 5886), False, 'from numpy import array, float64, int32, int8, float128, float32\n'), ((5890, 5898), 'numpy.int32', 'int32', (['(4)'], {}), '(4)\n', (5895, 5898), False, 'from numpy import array, float64, int32, int8, float128, float32\n'), ((5900, 5909), 
'numpy.int32', 'int32', (['(16)'], {}), '(16)\n', (5905, 5909), False, 'from numpy import array, float64, int32, int8, float128, float32\n'), ((8608, 8636), 'numpy.array', 'array', (['[12.0]'], {'dtype': 'float64'}), '([12.0], dtype=float64)\n', (8613, 8636), False, 'from numpy import array, float64, int32, int8, float128, float32\n'), ((6117, 6146), 'numpy.array', 'array', (['[1, 2, 3]'], {'dtype': 'int32'}), '([1, 2, 3], dtype=int32)\n', (6122, 6146), False, 'from numpy import array, float64, int32, int8, float128, float32\n'), ((6364, 6386), 'numpy.array', 'array', (['[1.0, 2.0, 3.0]'], {}), '([1.0, 2.0, 3.0])\n', (6369, 6386), False, 'from numpy import array, float64, int32, int8, float128, float32\n'), ((6024, 6052), 'tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto.Dim', 'TensorShapeProto.Dim', ([], {'size': '(3)'}), '(size=3)\n', (6044, 6052), False, 'from tensorflow.core.framework.tensor_shape_pb2 import TensorShapeProto\n'), ((6270, 6298), 'tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto.Dim', 'TensorShapeProto.Dim', ([], {'size': '(3)'}), '(size=3)\n', (6290, 6298), False, 'from tensorflow.core.framework.tensor_shape_pb2 import TensorShapeProto\n'), ((6510, 6538), 'tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto.Dim', 'TensorShapeProto.Dim', ([], {'size': '(2)'}), '(size=2)\n', (6530, 6538), False, 'from tensorflow.core.framework.tensor_shape_pb2 import TensorShapeProto\n'), ((6583, 6611), 'tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto.Dim', 'TensorShapeProto.Dim', ([], {'size': '(2)'}), '(size=2)\n', (6603, 6611), False, 'from tensorflow.core.framework.tensor_shape_pb2 import TensorShapeProto\n'), ((7069, 7094), 'numpy.array', 'array', (['[1, 2, 3, 4, 5, 6]'], {}), '([1, 2, 3, 4, 5, 6])\n', (7074, 7094), False, 'from numpy import array, float64, int32, int8, float128, float32\n'), ((7271, 7299), 'tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto.Dim', 'TensorShapeProto.Dim', ([], {'size': 
'(1)'}), '(size=1)\n', (7291, 7299), False, 'from tensorflow.core.framework.tensor_shape_pb2 import TensorShapeProto\n'), ((7501, 7529), 'tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto.Dim', 'TensorShapeProto.Dim', ([], {'size': '(1)'}), '(size=1)\n', (7521, 7529), False, 'from tensorflow.core.framework.tensor_shape_pb2 import TensorShapeProto\n'), ((7990, 8018), 'numpy.array', 'array', (['[1, 2, 3]'], {'dtype': 'int8'}), '([1, 2, 3], dtype=int8)\n', (7995, 8018), False, 'from numpy import array, float64, int32, int8, float128, float32\n'), ((8378, 8416), 'numpy.array', 'array', (['[1, 2, 3, 4, 5, 6]'], {'dtype': 'int32'}), '([1, 2, 3, 4, 5, 6], dtype=int32)\n', (8383, 8416), False, 'from numpy import array, float64, int32, int8, float128, float32\n'), ((15737, 15753), 'numpy.array', 'array', (['[1, 2, 3]'], {}), '([1, 2, 3])\n', (15742, 15753), False, 'from numpy import array, float64, int32, int8, float128, float32\n'), ((15996, 16012), 'numpy.array', 'array', (['[1, 2, 3]'], {}), '([1, 2, 3])\n', (16001, 16012), False, 'from numpy import array, float64, int32, int8, float128, float32\n'), ((6902, 6930), 'tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto.Dim', 'TensorShapeProto.Dim', ([], {'size': '(2)'}), '(size=2)\n', (6922, 6930), False, 'from tensorflow.core.framework.tensor_shape_pb2 import TensorShapeProto\n'), ((6992, 7020), 'tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto.Dim', 'TensorShapeProto.Dim', ([], {'size': '(3)'}), '(size=3)\n', (7012, 7020), False, 'from tensorflow.core.framework.tensor_shape_pb2 import TensorShapeProto\n'), ((7915, 7943), 'tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto.Dim', 'TensorShapeProto.Dim', ([], {'size': '(3)'}), '(size=3)\n', (7935, 7943), False, 'from tensorflow.core.framework.tensor_shape_pb2 import TensorShapeProto\n'), ((8215, 8243), 'tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto.Dim', 'TensorShapeProto.Dim', ([], {'size': '(2)'}), '(size=2)\n', (8235, 
8243), False, 'from tensorflow.core.framework.tensor_shape_pb2 import TensorShapeProto\n'), ((8303, 8331), 'tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto.Dim', 'TensorShapeProto.Dim', ([], {'size': '(3)'}), '(size=3)\n', (8323, 8331), False, 'from tensorflow.core.framework.tensor_shape_pb2 import TensorShapeProto\n'), ((8540, 8568), 'tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto.Dim', 'TensorShapeProto.Dim', ([], {'size': '(1)'}), '(size=1)\n', (8560, 8568), False, 'from tensorflow.core.framework.tensor_shape_pb2 import TensorShapeProto\n'), ((8900, 8928), 'tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto.Dim', 'TensorShapeProto.Dim', ([], {'size': '(2)'}), '(size=2)\n', (8920, 8928), False, 'from tensorflow.core.framework.tensor_shape_pb2 import TensorShapeProto\n'), ((9118, 9146), 'tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto.Dim', 'TensorShapeProto.Dim', ([], {'size': '(1)'}), '(size=1)\n', (9138, 9146), False, 'from tensorflow.core.framework.tensor_shape_pb2 import TensorShapeProto\n'), ((15663, 15691), 'tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto.Dim', 'TensorShapeProto.Dim', ([], {'size': '(3)'}), '(size=3)\n', (15683, 15691), False, 'from tensorflow.core.framework.tensor_shape_pb2 import TensorShapeProto\n'), ((15922, 15950), 'tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto.Dim', 'TensorShapeProto.Dim', ([], {'size': '(3)'}), '(size=3)\n', (15942, 15950), False, 'from tensorflow.core.framework.tensor_shape_pb2 import TensorShapeProto\n'), ((6684, 6692), 'numpy.int32', 'int32', (['(3)'], {}), '(3)\n', (6689, 6692), False, 'from numpy import array, float64, int32, int8, float128, float32\n'), ((6694, 6702), 'numpy.int32', 'int32', (['(1)'], {}), '(1)\n', (6699, 6702), False, 'from numpy import array, float64, int32, int8, float128, float32\n'), ((6706, 6714), 'numpy.int32', 'int32', (['(4)'], {}), '(4)\n', (6711, 6714), False, 'from numpy import array, float64, int32, int8, 
float128, float32\n'), ((6716, 6725), 'numpy.int32', 'int32', (['(16)'], {}), '(16)\n', (6721, 6725), False, 'from numpy import array, float64, int32, int8, float128, float32\n')]
from uber_rides.session import Session
from uber_rides.client import UberRidesClient

# Add the Uber server token here before running.
session = Session(server_token='')


def getPriceEstimate(start_lat, start_long, end_lat, end_long):
    """Return Uber price estimates for a trip together with a cache key.

    Parameters are the latitude and longitude of the start and end points
    of the trip. Returns a tuple of (string form of the price list from
    the Uber API response, a "lat|long|lat|long" key identifying the trip).
    """
    rides_client = UberRidesClient(session)
    response = rides_client.get_price_estimates(
        start_lat, start_long, end_lat, end_long)
    cache_key = "|".join(
        str(part) for part in (start_lat, start_long, end_lat, end_long))
    return str(response.json.get('prices')), cache_key
[ "uber_rides.client.UberRidesClient", "uber_rides.session.Session" ]
[((112, 136), 'uber_rides.session.Session', 'Session', ([], {'server_token': '""""""'}), "(server_token='')\n", (119, 136), False, 'from uber_rides.session import Session\n'), ((210, 234), 'uber_rides.client.UberRidesClient', 'UberRidesClient', (['session'], {}), '(session)\n', (225, 234), False, 'from uber_rides.client import UberRidesClient\n')]
import json
import logging
import logging.handlers
import random
import time
import configparser
from datetime import datetime, timezone

# Template describing the shape of each emitted JSON record.
sample_data = {
    "timestamp": "",  # ISO Zulu date format
    "equip_name": "X-Machine",
    "feed_rate": 0.0,
    "shaft_speed": 0,
    "oil_temperature": 0.0,
    "voltage": 0
}


def generate_sample():
    """Fill the module-level ``sample_data`` template with random readings.

    Returns the updated dict. Keeping the record layout in one template
    means every emitted line has the same keys.
    """
    sample_data['timestamp'] = datetime.now(timezone.utc).isoformat()
    # randint bounds put the scaled values in realistic ranges:
    # feed_rate 20.00-30.00, oil_temperature 10.00-12.00.
    sample_data['feed_rate'] = random.randint(2000, 3000) / 100
    sample_data['shaft_speed'] = random.randint(20, 30)
    sample_data['oil_temperature'] = random.randint(1000, 1200) / 100
    sample_data['voltage'] = random.randint(425, 430)
    return sample_data


def main():
    """Periodically write randomised machine telemetry as JSON log lines.

    Reads the output location, rotation interval, backup count and sleep
    period from ./load-json-data.config, then loops forever emitting one
    JSON record per iteration.
    """
    print("Starting program...")

    config = configparser.ConfigParser()
    config.read('./load-json-data.config')
    backup_count = config.getint('Data', 'BackupCount')
    hourly_interval = config.getint('Data', 'HourlyInterval')
    output_file = config['Data']['WriteDirectory'] + '/' + config['Data']['WriteFile']
    sleep_in_seconds = config.getint('Data', 'SleepInSeconds')

    # format the log entries, making use of the log rotation functions
    # already written in python
    formatter = logging.Formatter('%(message)s')
    handler = logging.handlers.TimedRotatingFileHandler(
        output_file, when='H', interval=hourly_interval,
        backupCount=backup_count)
    handler.setFormatter(formatter)

    logger = logging.getLogger("data")
    logger.addHandler(handler)
    logger.setLevel(logging.INFO)

    # The original ``loop`` flag was never cleared anywhere, so the intent
    # is an endless producer loop; say so explicitly.
    while True:
        time.sleep(sleep_in_seconds)
        logger.info(json.dumps(generate_sample()))
        print('wrote data to disc')


if __name__ == "__main__":
    main()
[ "random.randint", "json.dumps", "time.sleep", "logging.Formatter", "logging.handlers.TimedRotatingFileHandler", "configparser.ConfigParser", "datetime.datetime.now", "logging.getLogger" ]
[((396, 423), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (421, 423), False, 'import configparser\n'), ((850, 882), 'logging.Formatter', 'logging.Formatter', (['"""%(message)s"""'], {}), "('%(message)s')\n", (867, 882), False, 'import logging\n'), ((897, 1018), 'logging.handlers.TimedRotatingFileHandler', 'logging.handlers.TimedRotatingFileHandler', (['output_file'], {'when': '"""H"""', 'interval': 'hourly_interval', 'backupCount': 'backup_count'}), "(output_file, when='H', interval=\n hourly_interval, backupCount=backup_count)\n", (938, 1018), False, 'import logging\n'), ((1073, 1098), 'logging.getLogger', 'logging.getLogger', (['"""data"""'], {}), "('data')\n", (1090, 1098), False, 'import logging\n'), ((1189, 1217), 'time.sleep', 'time.sleep', (['sleep_in_seconds'], {}), '(sleep_in_seconds)\n', (1199, 1217), False, 'import time\n'), ((1402, 1424), 'random.randint', 'random.randint', (['(20)', '(30)'], {}), '(20, 30)\n', (1416, 1424), False, 'import random\n'), ((1537, 1561), 'random.randint', 'random.randint', (['(425)', '(430)'], {}), '(425, 430)\n', (1551, 1561), False, 'import random\n'), ((1582, 1605), 'json.dumps', 'json.dumps', (['sample_data'], {}), '(sample_data)\n', (1592, 1605), False, 'import json\n'), ((1253, 1279), 'datetime.datetime.now', 'datetime.now', (['timezone.utc'], {}), '(timezone.utc)\n', (1265, 1279), False, 'from datetime import datetime, timezone\n'), ((1333, 1359), 'random.randint', 'random.randint', (['(2000)', '(3000)'], {}), '(2000, 3000)\n', (1347, 1359), False, 'import random\n'), ((1472, 1498), 'random.randint', 'random.randint', (['(1000)', '(1200)'], {}), '(1000, 1200)\n', (1486, 1498), False, 'import random\n')]
""" Module containing all general purpose functions shared by other modules. This module is not intended for the direct use by a User. Therefore, I will only docstring functions if I see fit to do so. LOG --- 11/07/18 Changed the way vector path is analysed. Now, the initial analysis is done with the geometrical formula for line-sphere intersection. Only the remaining vestors that do not intersect any van der Waals spheres are then analysed in the old way. 27/07/17 Fixed the cartesian coordinates -> fractional coordinates -> cartesian coordinates conversion related functions, creation of lattice array from unit cell parameters (triclinic system: so applicable to any) and conversion back to unit cell parameters. WORKS! inspiration from: http://www.ruppweb.org/Xray/tutorial/Coordinate%20system%20transformation.htm 26/07/17 Changed the way bonds are determined. Now, rather then fixed value a formula and covalent radii are used as explained in the Elemental_Radii spreadsheet (see tables module). TO DO LIST ---------- - Fix and validate calculating shape descriptors: asphericity, acylindricity and the realtive shape anisotropy. (Not working at the moment) - In the find_windows() function, maybe change the way the EPS value for the DBSCAN() is estimates. Need to look how the distances change with the increase in size of the sampling sphere. 
(validate this with the MongoDB) """ import numpy as np from copy import deepcopy from multiprocessing import Pool from scipy.optimize import brute, fmin, minimize from sklearn.cluster import DBSCAN from sklearn.metrics.pairwise import euclidean_distances from sklearn.neighbors import KDTree from .tables import ( atomic_mass, atomic_vdw_radius, opls_atom_keys, atomic_covalent_radius ) class _AtomKeyError(Exception): def __init__(self, message): self.message = message class _AtomKeyConflict(Exception): def __init__(self, message): self.message = message class _ForceFieldError(Exception): def __init__(self, message): self.message = message class _FunctionError(Exception): def __init__(self, message): self.message = message def is_number(number): """ Return True if an object is a number - can be converted into a float. Parameters ---------- number : any Returns ------- bool True if input is a float convertable (a number), False otherwise. """ try: float(number) return True except ValueError: return False def unique(input_list): """ Return a list of unique items (similar to set functionality). Parameters ---------- input_list : list A list containg some items that can occur more than once. Returns ------- list A list with only unique occurances of an item. """ output = [] for item in input_list: if item not in output: output.append(item) return output def to_list(obj): """ """ if isinstance(obj, np.ndarray): return obj.tolist() raise TypeError('Not serializable') def distance(a, b): """ Return the distance between two vectors (points) a and b. Parameters ---------- a : numpy.ndarray First vector. b : numpy.ndarray Second vector. Returns ------- numpy.float64 A distance between two vectors (points). """ return (np.sum((a - b)**2))**0.5 def molecular_weight(elements): """ Return molecular weight of a molecule. Parameters ---------- elements : numpy.ndarray An array of all elements (type: str) in a molecule. Returns ------- numpy.float64 A molecular weight of a molecule. 
""" return (np.array([atomic_mass[i.upper()] for i in elements]).sum()) def center_of_coor(coordinates): """ Return the centre of coordinates. Parameters ---------- coordinates : numpy.ndarray An array containing molecule's coordinates. Returns ------- numpy.ndarray An 1d array with coordinates of the centre of coordinates excluding elements' masses. """ return (np.sum(coordinates, axis=0) / coordinates.shape[0]) def center_of_mass(elements, coordinates): """ Return the centre of mass (COM). Parameters ---------- elements : numpy.ndarray An array of all elements (type: str) in a molecule. coordinates : numpy.ndarray An array containing molecule's coordinates. Returns ------- numpy.ndarray An 1d array with coordinates of the centre of mass including elements' masses. """ mass = molecular_weight(elements) mass_array = np.array([[atomic_mass[i.upper()]] * 3 for i in elements]) mass_coordinates = coordinates * mass_array return (np.sum(mass_coordinates, axis=0) / np.array([mass, mass, mass])) def compose_atom_list(*args): """ Return an `atom list` from elements and/or atom ids and coordinates. An `atom list` is a special object that some pywindowfunctions uses. It is a nested list of lists with each individual list containing: 1. [[element, coordinates (x, y, z)], ...] 2. [[element, atom key, coordinates (x, y, z)], ...] They work better for molecular re-building than two separate arrays for elements and coordinates do. Parameters ---------- elements : :class:`numpy.ndarray` An array of all elements (type: str) in a molecule. coordinates : :class:`numpy.ndarray` An array containing molecule's coordinates. atom_ids : :class:`numpy.ndarray`, optional An array of all forcfield dependent atom keys (type:str) in a molecule. Returns ------- list Version 1 or version 2 atom list depending on input parameters. Raises ------ _FunctionError : :class:`Exception` Raised when wrong number of parameters is passed to the function. 
""" if len(args) == 2: atom_list = [[ i[0], round(float(i[1]), 8), round(float(i[2]), 8), round(float(i[3]), 8), ] for i in np.concatenate( (args[0].reshape(-1, 1), args[1]), axis=1)] elif len(args) == 3: atom_list = [ [ i[0], i[1], round(float(i[2]), 8), round(float(i[3]), 8), round(float(i[4]), 8), ] for i in np.concatenate( (np.concatenate( (args[0].reshape(-1, 1), args[1].reshape(-1, 1) ), axis=1), args[2]), axis=1) ] else: raise _FunctionError( "The compose_atom_list() function accepts only 2 or 3 arguments.") return atom_list def decompose_atom_list(atom_list): """ Return elements and/or atom ids and coordinates from an `atom list`. Depending on input type of an atom list (version 1 or 2) 1. [[element, coordinates (x, y, z)], ...] 2. [[element, atom key, coordinates (x, y, z)], ...] the function reverses what pywindow.utilities.compose_atom_list() do. Parameters ---------- atom_list : list A nested list of lists (version 1 or 2) Returns ------- touple A touple of elements and coordinates arrays, or if input contained atom ideas, also atom ids array. 
""" transpose = list(zip(*atom_list)) if len(transpose) == 4: elements = np.array(transpose[0]) array_a = np.array(transpose[1]).reshape(-1, 1) array_b = np.array(transpose[2]).reshape(-1, 1) array_c = np.array(transpose[3]).reshape(-1, 1) array_ab = np.concatenate((array_a, array_b), axis=1) coordinates = np.concatenate((array_ab, array_c), axis=1) return elements, coordinates elif len(transpose) == 5: elements = np.array(transpose[0]) atom_ids = np.array(transpose[1]) array_a = np.array(transpose[2]).reshape(-1, 1) array_b = np.array(transpose[3]).reshape(-1, 1) array_c = np.array(transpose[4]).reshape(-1, 1) array_ab = np.concatenate((array_a, array_b), axis=1) coordinates = np.concatenate((array_ab, array_c), axis=1) return elements, atom_ids, coordinates else: raise _FunctionError( "The decompose_atom_list() function accepts only list of lists " " with only 4 or 5 items per sublist.") def dlf_notation(atom_key): """Return element for atom key using DL_F notation.""" split = list(atom_key) element = '' number = False count = 0 while number is False: element = "".join((element, split[count])) count += 1 if is_number(split[count]) is True: number = True # In case of for example Material Studio output, integers can also be # in the beginning of the string. As the dlf_notation decipher function # is very general in use, we have to make sure these integers are deleted. # In standard DL_F notation the string will never start with integer so it # will not affect the functionality towards it. # EDIT2: also the '?' atoms, you can delete them manually or somewhere else element = "".join(i for i in element if not is_number(i)) element = "".join(i for i in element if i != '?') return element def opls_notation(atom_key): """Return element for OPLS forcefield atom key.""" # warning for Ne, He, Na types overlap conflicts = ['ne', 'he', 'na'] if atom_key in conflicts: raise _AtomKeyConflict(( "One of the OPLS conflicting " "atom_keys has occured '{0}'. 
" "For how to solve this issue see the manual or " "MolecularSystem._atom_key_swap() doc string.").format(atom_key)) for element in opls_atom_keys: if atom_key in opls_atom_keys[element]: return element # In case if atom_key was not found in the OPLS keys dictionary raise _AtomKeyError(( "OPLS atom key {0} was not found in OPLS keys dictionary.").format( atom_key)) def decipher_atom_key(atom_key, forcefield): """ Return element for deciphered atom key. This functions checks if the forcfield specified by user is supported and passes the atom key to the appropriate function for deciphering. Parameters ---------- atom_key : str The atom key which is to be deciphered. forcefield : str The forcefield to which the atom key belongs to. Returns ------- str A string that is the periodic table element equvalent of forcefield atom key. """ load_funcs = { 'DLF': dlf_notation, 'DL_F': dlf_notation, 'OPLS': opls_notation, 'OPLSAA': opls_notation, 'OPLS2005': opls_notation, 'OPLS3': opls_notation, } if forcefield.upper() in load_funcs.keys(): return load_funcs[forcefield.upper()](atom_key) else: raise _ForceFieldError( ("Unfortunetely, '{0}' forcefield is not supported by pyWINDOW." " For list of supported forcefields see User's Manual or " "MolecularSystem._decipher_atom_keys() function doc string." ).format(forcefield)) def shift_com(elements, coordinates, com_adjust=np.zeros(3)): """ Return coordinates translated by some vector. Parameters ---------- elements : numpy.ndarray An array of all elements (type: str) in a molecule. coordinates : numpy.ndarray An array containing molecule's coordinates. com_adjust : numpy.ndarray (default = [0, 0, 0]) Returns ------- numpy.ndarray Translated array of molecule's coordinates. """ com = center_of_mass(elements, coordinates) com = np.array([com - com_adjust] * coordinates.shape[0]) return coordinates - com def max_dim(elements, coordinates): """ Return the maximum diameter of a molecule. 
Parameters ---------- elements : numpy.ndarray An array of all elements (type: str) in a molecule. coordinates : numpy.ndarray An array containing molecule's coordinates. Returns ------- """ atom_vdw_vertical = np.matrix( [[atomic_vdw_radius[i.upper()]] for i in elements]) atom_vdw_horizontal = np.matrix( [atomic_vdw_radius[i.upper()] for i in elements]) dist_matrix = euclidean_distances(coordinates, coordinates) vdw_matrix = atom_vdw_vertical + atom_vdw_horizontal re_dist_matrix = dist_matrix + vdw_matrix final_matrix = np.triu(re_dist_matrix) i1, i2 = np.unravel_index(final_matrix.argmax(), final_matrix.shape) maxdim = final_matrix[i1, i2] return i1, i2, maxdim def pore_diameter(elements, coordinates, com=None): """Return pore diameter of a molecule.""" if com is None: com = center_of_mass(elements, coordinates) atom_vdw = np.array([[atomic_vdw_radius[x.upper()]] for x in elements]) dist_matrix = euclidean_distances(coordinates, com.reshape(1, -1)) re_dist_matrix = dist_matrix - atom_vdw index = np.argmin(re_dist_matrix) pored = re_dist_matrix[index][0] * 2 return (pored, index) def correct_pore_diameter(com, *params): """Return negative of a pore diameter. 
(optimisation function).""" elements, coordinates = params return (-pore_diameter(elements, coordinates, com)[0]) def opt_pore_diameter(elements, coordinates, bounds=None, com=None, **kwargs): """Return optimised pore diameter and it's COM.""" args = elements, coordinates if com is not None: pass else: com = center_of_mass(elements, coordinates) if bounds is None: pore_r = pore_diameter(elements, coordinates, com=com)[0] / 2 bounds = ( (com[0]-pore_r, com[0]+pore_r), (com[1]-pore_r, com[1]+pore_r), (com[2]-pore_r, com[2]+pore_r) ) minimisation = minimize( correct_pore_diameter, x0=com, args=args, bounds=bounds) pored = pore_diameter(elements, coordinates, com=minimisation.x) return (pored[0], pored[1], minimisation.x) def sphere_volume(sphere_radius): """Return volume of a sphere.""" return (4 / 3 * np.pi * sphere_radius**3) def asphericity(S): return (S[0] - (S[1] + S[2]) / 2) def acylidricity(S): return (S[1] - S[2]) def relative_shape_anisotropy(S): return (1 - 3 * ( (S[0] * S[1] + S[0] * S[2] + S[1] * S[2]) / (np.sum(S))**2)) def get_tensor_eigenvalues(T, sort=False): if sort: return (sorted(np.linalg.eigvals(T), reverse=True)) else: return (np.linalg.eigvals(T)) def get_gyration_tensor(elements, coordinates): """ Return the gyration tensor of a molecule. The gyration tensor should be invariant to the molecule's position. The known formulas for the gyration tensor have the correction for the centre of mass of the molecule, therefore, the coordinates are first corrected for the centre of mass and essentially shifted to the origin. Parameters ---------- elements : numpy.ndarray The array containing the molecule's elemental data. coordinates : numpy.ndarray The array containing the Cartesian coordinates of the molecule. Returns ------- numpy.ndarray The gyration tensor of a molecule invariant to the molecule's position. """ # First calculate COM for correction. com = centre_of_mass(elements, coordinates) # Correct the coordinates for the COM. 
    coordinates = coordinates - com
    # Calculate diagonal and then other values of the matrix.
    diag = np.sum(coordinates**2, axis=0)
    xy = np.sum(coordinates[:, 0] * coordinates[:, 1])
    xz = np.sum(coordinates[:, 0] * coordinates[:, 2])
    yz = np.sum(coordinates[:, 1] * coordinates[:, 2])
    S = np.array([[diag[0], xy, xz],
                  [xy, diag[1], yz],
                  [xz, yz, diag[2]]]) / coordinates.shape[0]
    return (S)


def get_inertia_tensor(elements, coordinates):
    """
    Return the tensor of inertia of a molecule.

    Parameters
    ----------
    elements : numpy.ndarray
        The array containing the molecule's elemental data.

    coordinates : numpy.ndarray
        The array containing the Cartesian coordinates of the molecule.

    Returns
    -------
    numpy.ndarray
        The tensor of inertia of a molecule.

    """
    pow2 = coordinates**2
    # Per-atom masses as a column vector, so products broadcast per atom.
    molecular_weight = np.array(
        [[atomic_mass[e.upper()]] for e in elements])
    diag_1 = np.sum(molecular_weight * (pow2[:, 1] + pow2[:, 2]))
    diag_2 = np.sum(molecular_weight * (pow2[:, 0] + pow2[:, 2]))
    diag_3 = np.sum(molecular_weight * (pow2[:, 0] + pow2[:, 1]))
    mxy = np.sum(-molecular_weight * coordinates[:, 0] * coordinates[:, 1])
    mxz = np.sum(-molecular_weight * coordinates[:, 0] * coordinates[:, 2])
    myz = np.sum(-molecular_weight * coordinates[:, 1] * coordinates[:, 2])
    inertia_tensor = np.array([[diag_1, mxy, mxz],
                               [mxy, diag_2, myz],
                               [mxz, myz, diag_3]]) / coordinates.shape[0]
    return (inertia_tensor)


def principal_axes(elements, coordinates):
    """Return principal axes (inertia tensor eigenvectors, as rows)."""
    return (np.linalg.eig(get_inertia_tensor(elements, coordinates))[1].T)


def normalize_vector(vector):
    """
    Normalize a vector.

    A new vector is returned, the original vector is not modified.

    Parameters
    ----------
    vector : np.array
        The vector to be normalized.

    Returns
    -------
    np.array
        The normalized vector.

    """
    v = np.divide(vector, np.linalg.norm(vector))
    return np.round(v, decimals=4)


def rotation_matrix_arbitrary_axis(angle, axis):
    """
    Return a rotation matrix of `angle` radians about `axis`.

    Parameters
    ----------
    angle : int or float
        The size of the rotation in radians.

    axis : numpy.array
        A 3 element array which represents a vector. The vector is the
        axis about which the rotation is carried out.

    Returns
    -------
    numpy.array
        A 3x3 array representing a rotation matrix.

    """
    axis = normalize_vector(axis)
    # Quaternion components of the rotation (Euler-Rodrigues formula).
    a = np.cos(angle / 2)
    b, c, d = axis * np.sin(angle / 2)
    e11 = np.square(a) + np.square(b) - np.square(c) - np.square(d)
    e12 = 2 * (b * c - a * d)
    e13 = 2 * (b * d + a * c)
    e21 = 2 * (b * c + a * d)
    e22 = np.square(a) + np.square(c) - np.square(b) - np.square(d)
    e23 = 2 * (c * d - a * b)
    e31 = 2 * (b * d - a * c)
    e32 = 2 * (c * d + a * b)
    e33 = np.square(a) + np.square(d) - np.square(b) - np.square(c)
    return np.array([[e11, e12, e13],
                     [e21, e22, e23],
                     [e31, e32, e33]])


def align_principal_ax(elements, coordinates):
    """
    Rotate a molecule so its principal axes align with the Cartesian axes.

    Returns
    -------
    tuple
        The rotated coordinates and the list of rotation matrices
        applied (one per principal axis).

    """
    coor = deepcopy(coordinates)
    new_coor = []
    rot = []
    for i, j in zip([2, 1, 0], [[1, 0, 0], [0, 1, 0], [0, 0, 1]]):
        # NOTE(review): the principal axes are recomputed from the
        # original 'coordinates', not the partially rotated 'coor' --
        # confirm this is intentional.
        p_axes = principal_axes(elements, coordinates)
        r_vec = np.cross(p_axes[i], np.array(j))
        sin = np.linalg.norm(r_vec)
        cos = np.dot(p_axes[i], np.array(j))
        ang = np.arctan2(sin, cos)
        R_mat = np.matrix(rotation_matrix_arbitrary_axis(ang, r_vec))
        rot.append(R_mat)
        for i in coor:
            new_coord = R_mat * i.reshape(-1, 1)
            new_coor.append(np.array(new_coord.reshape(1, -1))[0])
        new_coor = np.array(new_coor)
        coor = new_coor
        new_coor = []
    return (coor, rot)


def calc_asphericity(elements, coordinates):
    """Return asphericity from the sorted inertia tensor eigenvalues."""
    inertia_tensor = get_inertia_tensor(elements, coordinates)
    tensor_eigenvalues = get_tensor_eigenvalues(inertia_tensor, sort=True)
    return asphericity(tensor_eigenvalues)


def calc_acylidricity(elements, coordinates):
    """Return acylindricity from the sorted inertia tensor eigenvalues."""
    inertia_tensor = get_inertia_tensor(elements, coordinates)
    tensor_eigenvalues = get_tensor_eigenvalues(inertia_tensor, sort=True)
    return acylidricity(tensor_eigenvalues)


def calc_relative_shape_anisotropy(elements, coordinates):
    """Return relative shape anisotropy from inertia tensor eigenvalues."""
    inertia_tensor = get_inertia_tensor(elements, coordinates)
    tensor_eigenvalues = get_tensor_eigenvalues(inertia_tensor, sort=True)
    return relative_shape_anisotropy(tensor_eigenvalues)


def
unit_cell_to_lattice_array(cryst):
    """Return parallelepiped unit cell lattice matrix."""
    a_, b_, c_, alpha, beta, gamma = cryst
    # Convert angles from degrees to radians.
    r_alpha = np.deg2rad(alpha)
    r_beta = np.deg2rad(beta)
    r_gamma = np.deg2rad(gamma)
    # Calculate unit cell volume that is neccessary.
    volume = a_ * b_ * c_ * (
        1 - np.cos(r_alpha)**2 - np.cos(r_beta)**2 - np.cos(r_gamma)**2 +
        2 * np.cos(r_alpha) * np.cos(r_beta) * np.cos(r_gamma))**0.5
    # Create the orthogonalisation Matrix (M^-1) - lattice matrix
    a_x = a_
    a_y = b_ * np.cos(r_gamma)
    a_z = c_ * np.cos(r_beta)
    b_x = 0
    b_y = b_ * np.sin(r_gamma)
    b_z = c_ * (
        np.cos(r_alpha) - np.cos(r_beta) * np.cos(r_gamma)) / np.sin(r_gamma)
    c_x = 0
    c_y = 0
    c_z = volume / (a_ * b_ * np.sin(r_gamma))
    lattice_array = np.array(
        [[a_x, a_y, a_z], [b_x, b_y, b_z], [c_x, c_y, c_z]])
    return lattice_array


def lattice_array_to_unit_cell(lattice_array):
    """Return crystallographic param. from unit cell lattice matrix."""
    cell_lengths = np.sqrt(np.sum(lattice_array**2, axis=0))
    gamma_r = np.arccos(lattice_array[0][1] / cell_lengths[1])
    beta_r = np.arccos(lattice_array[0][2] / cell_lengths[2])
    alpha_r = np.arccos(
        lattice_array[1][2] * np.sin(gamma_r) / cell_lengths[2] +
        np.cos(beta_r) * np.cos(gamma_r)
    )
    cell_angles = [
        np.rad2deg(alpha_r), np.rad2deg(beta_r), np.rad2deg(gamma_r)
    ]
    return np.append(cell_lengths, cell_angles)


def volume_from_lattice_array(lattice_array):
    """Return unit cell's volume from lattice matrix."""
    return np.linalg.det(lattice_array)


def volume_from_cell_parameters(cryst):
    """Return unit cell's volume from crystallographic parameters."""
    return volume_from_lattice_array(unit_cell_to_lattice_array(cryst))


def fractional_from_cartesian(coordinate, lattice_array):
    """Return a fractional coordinate from a cartesian one."""
    deorthogonalisation_M = np.matrix(np.linalg.inv(lattice_array))
    fractional = deorthogonalisation_M * coordinate.reshape(-1, 1)
    return np.array(fractional.reshape(1, -1))


def cartisian_from_fractional(coordinate, lattice_array):
    """Return cartesian coordinate from a fractional one."""
    orthogonalisation_M = np.matrix(lattice_array)
    orthogonal = orthogonalisation_M * coordinate.reshape(-1, 1)
    return np.array(orthogonal.reshape(1, -1))


def cart2frac_all(coordinates, lattice_array):
    """Convert all cartesian coordinates to fractional."""
    frac_coordinates = deepcopy(coordinates)
    for coord in range(frac_coordinates.shape[0]):
        frac_coordinates[coord] = fractional_from_cartesian(
            frac_coordinates[coord], lattice_array)
    return frac_coordinates


def frac2cart_all(frac_coordinates, lattice_array):
    """Convert all fractional coordinates to cartesian."""
    coordinates = deepcopy(frac_coordinates)
    for coord in range(coordinates.shape[0]):
        coordinates[coord] = cartisian_from_fractional(coordinates[coord],
                                                       lattice_array)
    return coordinates


# NOTE(review): mutable default argument 'supercell' -- it is only read,
# never mutated, so it is safe here, but a tuple default would be cleaner.
def create_supercell(system, supercell=[[-1, 1], [-1, 1], [-1, 1]]):
    """Create a supercell."""
    if 'lattice' not in system.keys():
        matrix = unit_cell_to_lattice_array(system['unit_cell'])
    else:
        matrix = system['lattice']
    coordinates = deepcopy(system['coordinates'])
    multiplication_matrices = []
    for a_ in range(supercell[0][0], supercell[0][1] + 1):
        for b_ in range(supercell[1][0], supercell[1][1] + 1):
            for c_ in range(supercell[2][0], supercell[2][1] + 1):
                mult_matrix = np.array([[a_, b_, c_]])
                mult_matrix = np.repeat(
                    mult_matrix, coordinates.shape[0], axis=0)
                multiplication_matrices.append(mult_matrix)
    frac_coordinates = cart2frac_all(coordinates, matrix)
    updated_coordinates = []
    for mat in multiplication_matrices:
        updated_coor = frac_coordinates + mat
        updated_coordinates.append(updated_coor)
    supercell_frac_coordinates = np.concatenate(updated_coordinates, axis=0)
    supercell_coordinates = frac2cart_all(supercell_frac_coordinates, matrix)
    # Now for each new cell in the supercell we need to repeat the
    # elements array so that it maches
    new_elements = deepcopy(system['elements'])
    new_ids = deepcopy(system['atom_ids'])
    for i in range(len(updated_coordinates) - 1):
        new_elements = np.concatenate((new_elements, system['elements']))
        new_ids = np.concatenate((new_ids, system['atom_ids']))
    cryst = lattice_array_to_unit_cell(matrix)
    supercell_system = {
        'elements': new_elements,
        'atom_ids': new_ids,
        'coordinates': supercell_coordinates,
        'unit_cell': cryst,
        'lattice': matrix,
    }
    return supercell_system


def is_inside_polyhedron(point, polyhedron):
    # Accepts either crystallographic parameters (shape (1, 6)) or a
    # 3x3 lattice matrix directly.
    if polyhedron.shape == (1, 6):
        matrix = unit_cell_to_lattice_array(polyhedron)
    if polyhedron.shape == (3, 3):
        matrix = polyhedron
    # NOTE(review): 'pw.utilities.fractional_from_cartesian' references an
    # external namespace although the same function exists in this module
    # -- confirm 'pw' is imported and resolves correctly.
    frac_coord = pw.utilities.fractional_from_cartesian(point, matrix)[0]
    if 0 <= frac_coord[0] <= 1.000 and 0 <= frac_coord[
            1] <= 1.000 and 0 <= frac_coord[2] <= 1.000:
        return True
    else:
        return False


def normal_vector(origin, vectors):
    """Return normal vector for two vectors with same origin."""
    return np.cross(vectors[0] - origin, vectors[1] - origin)


def discrete_molecules(system, rebuild=None, tol=0.4):
    """
    Decompose molecular system into individual discrete molecules.

    Note
    ----
    New formula for bonds: (26/07/17)
    The two atoms, x and y, are considered bonded if the distance between
    them, calculated with distance matrix, is within the ranges:
    .. :math:

        Rcov(x) + Rcov(y) - t < R(x,y) <  Rcov(x) + Rcov(y) + t

    where Rcov is the covalent radius and the tolarenace (t) is set to
    0.4 Angstrom.

    """
    # First we check which operation mode we use.
    #  1) Non-periodic MolecularSystem.
    #  2) Periodic MolecularSystem without rebuilding.
    #  3) Periodic Molecular system with rebuilding (supercell provided).
    if rebuild is not None:
        mode = 3
    else:
        if 'unit_cell' in system.keys():
            if system['unit_cell'].shape == (6,):
                mode = 2
            else:
                mode = 1
        elif 'lattice' in system.keys():
            if system['lattice'].shape == (3, 3):
                mode = 2
            else:
                mode = 1
        else:
            mode = 1
    # We create a list containing all atoms, theirs periodic elements and
    # coordinates. As this process is quite complicated, we need a list
    # which we will gradually be reducing.
    try:
        elements = system['elements']
        coordinates = system['coordinates']
    except KeyError:
        raise _FunctionError(
            "The 'elements' key is missing in the 'system' dictionary "
            "attribute of the MolecularSystem object. Which means, you need to"
            " decipher the forcefield based atom keys first (see manual)."
        )
    coordinates = system['coordinates']
    args = (elements, coordinates)
    adj = 0
    # If there are forcefield 'atom ids' as well we will retain them.
    if 'atom_ids' in system.keys():
        atom_ids = system['atom_ids']
        args = (elements, atom_ids, coordinates)
        # 'adj' shifts list indices by one when atom ids are present.
        adj = 1
    atom_list = compose_atom_list(*args)
    atom_coor = decompose_atom_list(atom_list)[1 + adj]
    # Scenario 1: We load a non-periodic MolecularSystem.
    # We will not have 'unit_cell' nor 'lattice' keywords in the dictionary
    # and also we do not do any re-building.
    # Scenario 2: We load a periodic MolecularSystem. We want to only Extract
    # complete molecules that do not have been affected by the periodic
    # boundary.
    # Scenario 3: We load a periodic Molecular System. We want it to be
    # rebuild therefore, we also provide a supercell.
    # Scenarios 2 and 3 require a lattice and also their origin is at origin.
    # Scenario 1 should have the origin at the center of mass of the system.
    # EDIT 09-04-18: All origins/pseudo_origin had to be skewed towards some
    # direction (x + 0.01) so that there would be no ambiguity in periodic
    # ang highly symmetric systems where the choice of the closest atom would
    # be random from a set of equally far choices - bug found in the testing
    # this way rebuild system should always look the same from the same input
    # and on different machines.
    if mode == 2 or mode == 3:
        # Scenarios 2 or 3.
        origin = np.array([0.01, 0., 0.])
        if 'lattice' not in system.keys():
            matrix = unit_cell_to_lattice_array(system['unit_cell'])
        else:
            matrix = system['lattice']
        pseudo_origin_frac = np.array([0.26, 0.25, 0.25])
        pseudo_origin = cartisian_from_fractional(pseudo_origin_frac, matrix)
        # If a supercell is also provided that encloses the unit cell for the
        # reconstruction of the molecules through the periodic boundary.
        if rebuild is not None:
            selements = rebuild['elements']
            sids = rebuild['atom_ids']
            scoordinates = rebuild['coordinates']
            satom_list = compose_atom_list(selements, sids, scoordinates)
            satom_coor = decompose_atom_list(satom_list)[1 + adj]
            # There is one more step. We need to sort out for all the
            # reconstructed molecules, which are the ones that belong to the
            # unit cell. As we did the reconstruction to every chunk in the
            # unit cell we have now some molecules that belong to
            # neighbouring cells. The screening is simple. If the COM of a
            # molecule translated to fractional coordinates (so that it works
            # for parallelpiped) is within the unit cell boundaries <0, 1>
            # then it's it. There is an exception, for the trajectories, very
            # often the unit cell is centered at origin. Therefore we need to
            # use <-0.5, 0.5> boundary. We will simply decide which is the
            # case by calculating the centre of mass of the whole system.
            system_com = center_of_mass(elements, coordinates)
            if np.allclose(system_com, origin, atol=1e-00):
                boundary = np.array([-0.5, 0.5])
            else:
                boundary = np.array([0., 1.])
    else:
        # Scenario 1.
        pseudo_origin = center_of_mass(
            elements, coordinates) + np.array([0.01, 0., 0.])
    # Here the final discrete molecules will be stored.
    molecules = []
    # Exceptions. Usually end-point atoms that create single bonds or
    # just a separate atoms in the system.
    exceptions = ['H', 'CL', 'BR', 'F', 'HE', 'AR', 'NE', 'KR', 'XE', 'RN']
    # The upper limit for distances analysed for bonds will be assigned for
    # a given system (to save time). We take set('elements') and then find
    # the largest R(cov) in the system and set the max_dist as a double
    # of it plus the 150% tolerance (tol).
    set_of_elements = set(system['elements'])
    max_r_cov = max([
        atomic_covalent_radius[i.upper()] for i in set_of_elements])
    max_dist = 2 * max_r_cov + tol
    # We continue untill all items in the list have been analysed and popped.
    while atom_list:
        inside_atoms_heavy = [
            i for i in atom_list if i[0].upper() not in exceptions
        ]
        if inside_atoms_heavy:
            # Now we create an array of atom coordinates. It does seem
            # somehow counter-intuitive as this is what we started with
            # and made it into a list. But, in my opinion it's the only
            # way to do it. It's hard to control and delete items in two
            # separate arrays that we started with and we don't want
            # atoms already assigned in our array for distance matrix.
            inside_atoms_coord_heavy = decompose_atom_list(inside_atoms_heavy)[
                1 + adj]
            dist_matrix = euclidean_distances(inside_atoms_coord_heavy,
                                              pseudo_origin.reshape(1, -1))
            atom_index_x, _ = np.unravel_index(dist_matrix.argmin(),
                                               dist_matrix.shape)
            # Added this so that lone atoms (even if heavy) close to the
            # periodic boundary are not analysed, as they surely have matching
            # symmetry equivalence that bind to a bigger atom cluster inside
            # the unit_cell.
            potential_starting_point = inside_atoms_heavy[atom_index_x]
            pot_arr = np.array(potential_starting_point[1 + adj:])
            dist_matrix = euclidean_distances(
                atom_coor, pot_arr.reshape(1, -1)
            )
            idx = (dist_matrix > 0.1) * (dist_matrix < max_dist)
            if len(idx) < 1:
                pass
            else:
                working_list = [potential_starting_point]
        else:
            # Safety check.
            break
        final_molecule = []
        while working_list:
            working_list_temp = []
            try:
                atom_coor = decompose_atom_list(atom_list)[1 + adj]
            except _FunctionError:
                atom_coor = None
            for i in working_list:
                if i[0].upper() not in exceptions:
                    # It's of GREATEST importance that the i_arr variable
                    # is assigned here before entering the atom_coor loop.!
                    # Otherwise it will not be re-asigned when the satom_list
                    # still iterates, but the atom_list is already empty...
                    i_arr = np.array(i[1 + adj:])
                    if atom_coor is not None:
                        dist_matrix = euclidean_distances(
                            atom_coor, i_arr.reshape(1, -1)
                        )
                        idx = (dist_matrix > 0.1) * (dist_matrix < max_dist)
                        neighbours_indexes = np.where(idx)[0]
                        for j in neighbours_indexes:
                            j_arr = np.array(atom_coor[j])
                            r_i_j = distance(i_arr, j_arr)
                            r_cov_i_j = atomic_covalent_radius[
                                i[0].upper()] + atomic_covalent_radius[
                                    atom_list[j][0].upper()]
                            # Covalent-radius bond criterion (see docstring).
                            if r_cov_i_j - tol < r_i_j < r_cov_i_j + tol:
                                working_list_temp.append(atom_list[j])
                    if rebuild is not None:
                        sdist_matrix = euclidean_distances(
                            satom_coor, i_arr.reshape(1, -1))
                        sidx = (sdist_matrix > 0.1) * (sdist_matrix < max_dist)
                        sneighbours_indexes = np.where(sidx)[0]
                        for j in sneighbours_indexes:
                            if satom_list[j] in atom_list:
                                pass
                            else:
                                j_arr = np.array(satom_coor[j])
                                r_i_j = distance(i_arr, j_arr)
                                r_cov_i_j = atomic_covalent_radius[
                                    i[0].upper()
                                ] + atomic_covalent_radius[
                                    satom_list[j][0].upper()]
                                if r_cov_i_j - tol < r_i_j < r_cov_i_j + tol:
                                    working_list_temp.append(satom_list[j])
                    final_molecule.append(i)
                else:
                    final_molecule.append(i)
            for i in working_list:
                try:
                    atom_list.remove(i)
                except ValueError:
                    pass
            # We empty the working list as all the items were analysed
            # and moved to the final_molecule list.
            working_list = []
            # We make sure there are no duplicates in the working_list_temp.
            working_list_temp = unique(working_list_temp)
            # Now we move the entries from the temporary working list
            # to the working list for looping analysys.
            for i in working_list_temp:
                # We make sure that only new and unassigned atoms are
                # being transfered.
                if i not in final_molecule:
                    working_list.append(i)
        final_molecule_dict = {}
        final_molecule_dict['elements'] = np.array(
            [x[0] for x in final_molecule], dtype='str')
        final_molecule_dict['coordinates'] = np.array(
            [[*xyz[1 + adj:]] for xyz in final_molecule])
        if adj == 1:
            final_molecule_dict['atom_ids'] = np.array(
                [x[1] for x in final_molecule], dtype='str')
        # In general we always want the molecule so the initial bool_ is True.
        bool_ = True
        # But, for periodic only if the molecule is in the initial unit cell.
        if rebuild is not None:
            com = center_of_mass(final_molecule_dict['elements'],
                                 final_molecule_dict['coordinates'])
            com_frac = fractional_from_cartesian(com, matrix)[0]
            # If we don't round the numerical errors will come up.
            com_frac_round = np.around(com_frac, decimals=8)
            bool_ = np.all(np.logical_and(com_frac_round >= boundary[0],
                                          com_frac_round < boundary[1]),
                           axis=0)
        if bool(bool_) is True:
            molecules.append(final_molecule_dict)
    return molecules


def angle_between_vectors(x, y):
    """Calculate the angle between two vectors x and y."""
    first_step = abs(x[0] * y[0] + x[1] * y[1] + x[2] * y[2]) / (
        np.sqrt(x[0]**2 + x[1]**2 + x[2]**2) *
        np.sqrt(y[0]**2 + y[1]**2 + y[2]**2))
    second_step = np.arccos(first_step)
    return (second_step)


def vector_analysis(vector, coordinates, elements_vdw, increment=1.0):
    """Analyse a sampling vector's path for window analysis purpose."""
    # Calculate number of chunks if vector length is divided by increment.
    chunks = int(np.linalg.norm(vector) // increment)
    # Create a single chunk.
    chunk = vector / chunks
    # Calculate set of points on vector's path every increment.
    vector_pathway = np.array([chunk * i for i in range(chunks + 1)])
    # Smallest vdW-corrected clearance to any atom at each path point.
    analysed_vector = np.array([
        np.amin(
            euclidean_distances(coordinates, i.reshape(1, -1)) - elements_vdw)
        for i in vector_pathway
    ])
    if all(i > 0 for i in analysed_vector):
        pos = np.argmin(analysed_vector)
        # As first argument we need to give the distance from the origin.
dist = np.linalg.norm(chunk * pos) return np.array( [dist, analysed_vector[pos] * 2, *chunk * pos, *vector]) def vector_preanalysis(vector, coordinates, elements_vdw, increment=1.0): norm_vec = vector/np.linalg.norm(vector) intersections = [] origin = center_of_coor(coordinates) L = coordinates - origin t_ca = np.dot(L, norm_vec) d = np.sqrt(np.einsum('ij,ij->i', L, L) - t_ca**2) under_sqrt = elements_vdw**2 - d**2 diag = under_sqrt.diagonal() positions = np.argwhere(diag > 0) for pos in positions: t_hc = np.sqrt(diag[pos[0]]) t_0 = t_ca[pos][0] - t_hc t_1 = t_ca[pos][0] + t_hc P_0 = origin + np.dot(t_0, norm_vec) P_1 = origin + np.dot(t_1, norm_vec) # print(np.linalg.norm(P_0), np.linalg.norm(P_1)) if np.linalg.norm(P_0) < np.linalg.norm(P_1): intersections.append(1) else: intersections.append(0) if sum(intersections) == 0: return vector_analysis(vector, coordinates, elements_vdw, increment) def optimise_xy(xy, *args): """Return negative pore diameter for x and y coordinates optimisation.""" z, elements, coordinates = args window_com = np.array([xy[0], xy[1], z]) return -pore_diameter(elements, coordinates, com=window_com)[0] def optimise_z(z, *args): """Return pore diameter for coordinates optimisation in z direction.""" x, y, elements, coordinates = args window_com = np.array([x, y, z]) return pore_diameter(elements, coordinates, com=window_com)[0] def window_analysis(window, elements, coordinates, elements_vdw, increment2=0.1, z_bounds=[None, None], lb_z=True, z_second_mini=False, **kwargs): """ Return window diameter and window's centre. Parameters ---------- widnow: list elements: numpy.array coordinates: numpy.array elements_vdw: numpy.array step: float """ # Copy the coordinates as we will manipulate them. coordinates = deepcopy(coordinates) # Find the vector with the largest window sampling diameter from the pool. 
vector_ = window[window.argmax(axis=0)[1]][5:8] vector_analysed = vector_analysis( vector_, coordinates, elements_vdw, increment=increment2) # A safety check, if the refined analysis give None we end the function. if vector_analysed is not None: pass else: return None vector = vector_analysed[5:8] # Unit vectors. vec_a = [1, 0, 0] vec_b = [0, 1, 0] vec_c = [0, 0, 1] # Angles needed for rotation (in radians) to rotate and translate the # molecule for the vector to become the Z-axis. angle_1 = angle_between_vectors(np.array([vector[0], vector[1], 0]), vec_a) angle_2 = angle_between_vectors(vector, vec_c) # Depending in which cartesian coordinate system area the vector is # We need a rotation into a different direction and by different value. if vector[0] >= 0 and vector[1] >= 0 and vector[2] >= 0: angle_1 = -angle_1 angle_2 = -angle_2 if vector[0] < 0 and vector[1] >= 0 and vector[2] >= 0: angle_1 = np.pi * 2 + angle_1 angle_2 = angle_2 if vector[0] >= 0 and vector[1] < 0 and vector[2] >= 0: angle_1 = angle_1 angle_2 = -angle_2 if vector[0] < 0 and vector[1] < 0 and vector[2] >= 0: angle_1 = np.pi * 2 - angle_1 if vector[0] >= 0 and vector[1] >= 0 and vector[2] < 0: angle_1 = -angle_1 angle_2 = np.pi + angle_2 if vector[0] < 0 and vector[1] >= 0 and vector[2] < 0: angle_2 = np.pi - angle_2 if vector[0] >= 0 and vector[1] < 0 and vector[2] < 0: angle_2 = angle_2 + np.pi if vector[0] < 0 and vector[1] < 0 and vector[2] < 0: angle_1 = -angle_1 angle_2 = np.pi - angle_2 # Rotation matrix for rotation around Z-axis with angle_1. rotation_around_z = np.array([[np.cos(angle_1), -np.sin(angle_1), 0], [np.sin(angle_1), np.cos(angle_1), 0], [0, 0, 1]]) # Rotate the whole molecule around with rotation_around_z. 
coordinates = np.array([np.dot(rotation_around_z, i) for i in coordinates]) # Rotation matrix for rotation around Y-axis with angle_2 rotation_around_y = np.array([[np.cos(angle_2), 0, np.sin(angle_2)], [0, 1, 0], [-np.sin(angle_2), 0, np.cos(angle_2)]]) # Rotate the whole molecule around with rotation_around_y. coordinates = np.array([np.dot(rotation_around_y, i) for i in coordinates]) # Third step is translation. We are now at [0, 0, -z]. # We shift the molecule so that center of the window is at the origin. # The `z` is from original vector analysis. It is the point on the vector # where the largest sampling sphere was (vector_analysed[0]). new_z = vector_analysed[0] # Translate the whole molecule to shift window's center to origin. coordinates = coordinates - np.array([[0, 0, new_z]] * coordinates.shape[0]) # !!!Here the window center (xy and z) optimisation take place!!! window_com = np.array([0, 0, 0], dtype=float) # The lb_z parameter is 'lower bound equal to z' which means, # that we set the lower bound for the z optimisation to be equal # to the -new_z as in some cages it's the COM - pore that is the # limiting diameter. But, no lower than new_z because we don't want to # move into the other direction. if lb_z: z_bounds[0] = -new_z window_diameter, _ = pore_diameter(elements, coordinates, com=window_com) # SciPy minimisation on z coordinate. z_args = (window_com[0], window_com[1], elements, coordinates) z_optimisation = minimize( optimise_z, x0=window_com[2], args=z_args, bounds=[z_bounds]) # Substitute the z coordinate for a minimised one. window_com[2] = z_optimisation.x[0] # SciPy brute optimisation on x and y coordinates in window plane. xy_args = (window_com[2], elements, coordinates) xy_bounds = ((-window_diameter / 2, window_diameter / 2), (-window_diameter / 2, window_diameter / 2)) xy_optimisation = brute( optimise_xy, xy_bounds, args=xy_args, full_output=True, finish=fmin) # Substitute the x and y coordinates for the optimised ones. 
window_com[0] = xy_optimisation[0][0] window_com[1] = xy_optimisation[0][1] # Additional SciPy minimisation on z coordinate. Added on 18 May 2017. # We can argue which aproach is best. Whether z opt and then xy opt # or like now z opt -> xy opt -> additional z opt etc. I have also tested # a loop of optimisations until some convergence and optimisation of # xyz coordinates at the same time by optimising these two optimisations. # In the end. I think this approach is best for cages. # Update 20 October 2017: I made this optional and turned off by default # In many cases that worsen the quality of the results and should be used # with caution. if z_second_mini is not False: z_args = (window_com[0], window_com[1], elements, coordinates) # The z_bounds should be passed in kwargs. z_optimisation = minimize( optimise_z, x0=window_com[2], args=z_args, bounds=[z_bounds]) # Substitute the z coordinate for a minimised one. window_com[2] = z_optimisation.x[0] # Calculate the new window diameter. window_diameter, _ = pore_diameter(elements, coordinates, com=window_com) # To get the window true centre of mass we need to revere the rotation and # translation operations on the window com. # Reverse the translation by substracting the new_z. window_com[2] = window_com[2] + new_z angle_2_1 = -angle_2 reverse_around_y = np.array([[np.cos(angle_2_1), 0, np.sin(angle_2_1)], [0, 1, 0], [-np.sin(angle_2_1), 0, np.cos(angle_2_1)]]) # Reversing the second rotation around Y-axis. window_com = np.dot(reverse_around_y, window_com) angle_1_1 = -angle_1 reverse_around_z = np.array([[np.cos(angle_1_1), -np.sin(angle_1_1), 0], [np.sin(angle_1_1), np.cos(angle_1_1), 0], [0, 0, 1]]) # Reversing the first rotation around Z-axis. 
window_com = np.dot(reverse_around_z, window_com) return (window_diameter, window_com) def find_windows(elements, coordinates, processes=None, mol_size=None, adjust=1, pore_opt=True, increment=1.0, **kwargs): """Return windows diameters and center of masses for a molecule.""" # Copy the coordinates as will perform many opertaions on them coordinates = deepcopy(coordinates) # Center of our cartesian system is always at origin origin = np.array([0, 0, 0]) # Initial center of mass to reverse translation at the end initial_com = center_of_mass(elements, coordinates) # Shift the cage to the origin using either the standard center of mass # or if pore_opt flag is True, the optimised pore center as center of mass if pore_opt is True: # Normally the pore is calculated from the COM of a molecule. # So, esentially the molecule's COM is the pore center. # To shift the molecule so that the center of the optimised pore # is at the origin of the system and not the center of the not # optimised one, we need to adjust the shift. We also have to update # the initial com. com_adjust = initial_com - opt_pore_diameter(elements, coordinates, ** kwargs)[2] initial_com = initial_com - com_adjust coordinates = shift_com(elements, coordinates, com_adjust=com_adjust) else: # Otherwise, we just shift the cage to the origin. coordinates = shift_com(elements, coordinates) # We create an array of vdw radii of elements. elements_vdw = np.array([[atomic_vdw_radius[x.upper()]] for x in elements]) # We calculate maximum diameter of a molecule to determine the radius # of a sampling sphere neccessary to enclose the whole molecule. shpere_radius = max_dim(elements, coordinates)[2] / 2 sphere_surface_area = 4 * np.pi * shpere_radius**2 # Here we determine the number of sampling points necessary for a fine # sampling. Smaller molecules require more finner density of sampling # points on the sampling sphere's surface, whereas largen require less. 
# This formula was created so that larger molecule do not take much longer # to analyse, as number_sampling_points*length_of_sampling_vectors # results in quadratic increase of sampling time. The 250 factor was # specificly determined to produce close to 1 sampling point /Angstrom^2 # for a sphere of radius ~ 24 Angstrom. We can adjust how fine is the # sampling by changing the adjust factor. number_of_points = int(np.log10(sphere_surface_area) * 250 * adjust) # Here I use code by <NAME> for spreading points on a sphere: # http://blog.marmakoide.org/?p=1 golden_angle = np.pi * (3 - np.sqrt(5)) theta = golden_angle * np.arange(number_of_points) z = np.linspace(1 - 1.0 / number_of_points, 1.0 / number_of_points - 1.0, number_of_points) radius = np.sqrt(1 - z * z) points = np.zeros((number_of_points, 3)) points[:, 0] = radius * np.cos(theta) * shpere_radius points[:, 1] = radius * np.sin(theta) * shpere_radius points[:, 2] = z * shpere_radius # Here we will compute the eps parameter for the sklearn.cluster.DBSCAN # (3-dimensional spatial clustering algorithm) which is the mean distance # to the closest point of all points. values = [] tree = KDTree(points) for i in points: dist, ind = tree.query(i.reshape(1, -1), k=10) values.extend(dist) mean_distance = np.mean(values) # The best eps is parametrized when adding the mean distance and it's root. eps = mean_distance + mean_distance**0.5 # Here we either run the sampling points vectors analysis in serial # or parallel. The vectors that go through molecular pores return # as analysed list with the increment at vector's path with largest # included sphere, coordinates for this narrow channel point. vectors # that find molecule on theirs path are return as NoneType object. # Parralel analysis on user's defined number of CPUs. 
if processes: pool = Pool(processes=processes) parallel = [ pool.apply_async( vector_preanalysis, args=( point, coordinates, elements_vdw, ), kwds={'increment': increment}) for point in points ] results = [p.get() for p in parallel if p.get() is not None] pool.terminate() # Dataset is an array of sampling points coordinates. dataset = np.array([x[5:8] for x in results]) else: results = [ vector_preanalysis( point, coordinates, elements_vdw, increment=increment) for point in points ] results = [x for x in results if x is not None] dataset = np.array([x[5:8] for x in results]) # If not a single vector was returned from the analysis it mean that # no molecular channels (what we call windows here) connects the # molecule's interior with the surroungsings (exterior space). # The number of windows in that case equals zero and zero is returned. # Otherwise we continue our search for windows. if len(results) == 0: return None else: # Perfomr DBSCAN to cluster the sampling points vectors. # the n_jobs will be developed later. # db = DBSCAN(eps=eps, n_jobs=_ncpus).fit(dataset) db = DBSCAN(eps=eps).fit(dataset) core_samples_mask = np.zeros_like(db.labels_, dtype=bool) core_samples_mask[db.core_sample_indices_] = True labels = set(db.labels_) # Assing cluster label to a sampling point. clusters = [[i, j] for i, j in zip(results, db.labels_)] clustered_results = {label: [] for label in labels} # Create a dictionary of clusters with points listed. [clustered_results[i[1]].append(i[0]) for i in clusters] # No for the sampling point vector in each cluster that had # the widest channel's 'neck' is assumed to pass the closest # to the window's center and therefore will be passed to # window analysis function. # We also pass user defined settings for window analysis. # Again either in serlia or in parallel. # Noisy points get a cluster label -1, therefore we have to exclude it. 
        # Measure each candidate window. For every cluster of sampling
        # vectors the detailed window analysis is run — in parallel when
        # `processes` is given, serially otherwise. DBSCAN labels noisy
        # points with cluster -1, therefore that label is excluded.
        if processes:
            pool = Pool(processes=processes)
            parallel = [
                pool.apply_async(
                    window_analysis,
                    args=(np.array(clustered_results[cluster]), elements,
                          coordinates, elements_vdw),
                    kwds=kwargs) for cluster in clustered_results
                if cluster != -1
            ]
            window_results = [p.get() for p in parallel if p.get() is not None]
            pool.terminate()
        else:
            window_results = [
                window_analysis(
                    np.array(clustered_results[cluster]), elements,
                    coordinates, elements_vdw, **kwargs)
                for cluster in clustered_results if cluster != -1
            ]
        # The function returns two numpy arrays: one with window diameters
        # in Angstrom, the second with the corresponding window centres
        # (shifted back by the molecule's initial centre of mass).
        windows = np.array(
            [result[0] for result in window_results if result is not None])
        windows_coms = np.array(
            [np.add(result[1], initial_com) for result in window_results
             if result is not None])
        # Safety measures: if one of the windows is None or has a negative
        # vdW-corrected diameter a warning should be raised. NOTE(review):
        # the messages are built but the prints are commented out, so these
        # warnings are currently silent.
        for result in window_results:
            if result is None:
                msg_ = " ".join(
                    ['Warning. One of the analysed windows has',
                     'returned as None. See manual.']
                )
                # print(msg_)
            elif result[0] < 0:
                msg_ = " ".join(
                    ['Warning. One of the analysed windows has a vdW',
                     'corrected diameter smaller than 0. See manual.']
                )
                # print(msg_)
        return (windows, windows_coms)


def window_shape(window,
                 elements,
                 coordinates,
                 increment2=0.1,
                 z_bounds=[None, None],
                 lb_z=True,
                 z_second_mini=False,
                 **kwargs):
    """
    Return the 2D cross-section (outline) of a window in the XY plane.

    The window's sampling vector with the largest value in column 1
    (presumably the sampled diameter — confirm against vector_analysis)
    is refined, the molecule is rotated and translated so that this
    vector becomes the Z axis with the window centre at the origin, the
    centre is optimised with SciPy, and the window's sampling vectors
    are then projected onto the XY plane.

    Parameters
    ----------
    window : numpy.array
        Sampling vectors belonging to one window cluster.
    elements : numpy.array
        Element symbols of the molecule's atoms.
    coordinates : numpy.array
        Cartesian coordinates of the molecule's atoms.
    increment2 : float
        Step used for the refined vector analysis.
    z_bounds : list
        Bounds for the z-coordinate optimisation.
        NOTE(review): mutable default argument, and it is mutated below
        when ``lb_z`` is True — the default list is shared across calls.
    lb_z : bool
        If True, set the lower z bound to minus the window-plane offset.
    z_second_mini : bool
        If True, perform an additional z minimisation after the xy one.

    Returns
    -------
    numpy.array or None
        2D points (x, y) outlining the window, or None if the refined
        vector analysis fails.
    """
    # Copy the coordinates as we will manipulate them.
    coordinates = deepcopy(coordinates)
    # We create an array of vdw radii of elements.
    elements_vdw = np.array([[atomic_vdw_radius[x.upper()]] for x in elements])
    # Find the vector with the largest window sampling diameter from the pool.
    vector_ = window[window.argmax(axis=0)[1]][5:8]
    vector_analysed = vector_analysis(
        vector_, coordinates, elements_vdw, increment=increment2)
    # A safety check, if the refined analysis gives None we end the function.
    if vector_analysed is not None:
        pass
    else:
        return None
    vector = vector_analysed[5:8]
    # Unit vectors.  (vec_b is currently unused.)
    vec_a = [1, 0, 0]
    vec_b = [0, 1, 0]
    vec_c = [0, 0, 1]
    # Angles needed for rotation (in radians) to rotate and translate the
    # molecule for the vector to become the Z-axis.
    angle_1 = angle_between_vectors(np.array([vector[0], vector[1], 0]), vec_a)
    angle_2 = angle_between_vectors(vector, vec_c)
    # Depending on which cartesian octant the vector lies in we need a
    # rotation in a different direction and by a different value.
    if vector[0] >= 0 and vector[1] >= 0 and vector[2] >= 0:
        angle_1 = -angle_1
        angle_2 = -angle_2
    if vector[0] < 0 and vector[1] >= 0 and vector[2] >= 0:
        angle_1 = np.pi * 2 + angle_1
        angle_2 = angle_2
    if vector[0] >= 0 and vector[1] < 0 and vector[2] >= 0:
        angle_1 = angle_1
        angle_2 = -angle_2
    if vector[0] < 0 and vector[1] < 0 and vector[2] >= 0:
        angle_1 = np.pi * 2 - angle_1
    if vector[0] >= 0 and vector[1] >= 0 and vector[2] < 0:
        angle_1 = -angle_1
        angle_2 = np.pi + angle_2
    if vector[0] < 0 and vector[1] >= 0 and vector[2] < 0:
        angle_2 = np.pi - angle_2
    if vector[0] >= 0 and vector[1] < 0 and vector[2] < 0:
        angle_2 = angle_2 + np.pi
    if vector[0] < 0 and vector[1] < 0 and vector[2] < 0:
        angle_1 = -angle_1
        angle_2 = np.pi - angle_2
    # Rotation matrix for rotation around Z-axis with angle_1.
    rotation_around_z = np.array([[np.cos(angle_1), -np.sin(angle_1), 0],
                                  [np.sin(angle_1), np.cos(angle_1), 0],
                                  [0, 0, 1]])
    # Rotate the whole molecule with rotation_around_z.
    coordinates = np.array([np.dot(rotation_around_z, i) for i in coordinates])
    # Rotation matrix for rotation around Y-axis with angle_2.
    rotation_around_y = np.array([[np.cos(angle_2), 0, np.sin(angle_2)],
                                  [0, 1, 0],
                                  [-np.sin(angle_2), 0, np.cos(angle_2)]])
    # Rotate the whole molecule with rotation_around_y.
    coordinates = np.array([np.dot(rotation_around_y, i) for i in coordinates])
    # Third step is translation. We are now at [0, 0, -z].
    # We shift the molecule so that the centre of the window is at the
    # origin. The `z` is from the original vector analysis: it is the point
    # on the vector where the largest sampling sphere was
    # (vector_analysed[0]).
    new_z = vector_analysed[0]
    # Translate the whole molecule to shift the window's centre to origin.
    coordinates = coordinates - np.array([[0, 0, new_z]] * coordinates.shape[0])
    # !!!Here the window centre (xy and z) optimisation takes place!!!
    window_com = np.array([0, 0, 0], dtype=float)
    # The lb_z parameter is 'lower bound equal to z' which means that we set
    # the lower bound for the z optimisation to be equal to -new_z, as in
    # some cages it's the COM-pore that is the limiting diameter. But no
    # lower than new_z, because we don't want to move into the other
    # direction.
    # NOTE(review): this mutates the default `z_bounds` list across calls.
    if lb_z:
        z_bounds[0] = -new_z
    window_diameter, _ = pore_diameter(elements, coordinates, com=window_com)
    # SciPy minimisation on the z coordinate.
    z_args = (window_com[0], window_com[1], elements, coordinates)
    z_optimisation = minimize(
        optimise_z, x0=window_com[2], args=z_args, bounds=[z_bounds])
    # Substitute the z coordinate for a minimised one.
    window_com[2] = z_optimisation.x[0]
    # SciPy brute optimisation on x and y coordinates in the window plane.
    xy_args = (window_com[2], elements, coordinates)
    xy_bounds = ((-window_diameter / 2, window_diameter / 2),
                 (-window_diameter / 2, window_diameter / 2))
    xy_optimisation = brute(
        optimise_xy, xy_bounds, args=xy_args, full_output=True, finish=fmin)
    # Substitute the x and y coordinates for the optimised ones.
    window_com[0] = xy_optimisation[0][0]
    window_com[1] = xy_optimisation[0][1]
    # Additional SciPy minimisation on the z coordinate. Added 18 May 2017.
    # We can argue which approach is best — whether z opt then xy opt, or
    # like now z opt -> xy opt -> additional z opt etc. A loop of
    # optimisations until convergence and a simultaneous xyz optimisation
    # were also tested; this approach was judged best for cages.
    # Update 20 October 2017: made optional and turned off by default. In
    # many cases it worsens the quality of the results and should be used
    # with caution.
    if z_second_mini is not False:
        z_args = (window_com[0], window_com[1], elements, coordinates)
        # The z_bounds should be passed in kwargs.
        z_optimisation = minimize(
            optimise_z, x0=window_com[2], args=z_args, bounds=[z_bounds])
        # Substitute the z coordinate for a minimised one.
        window_com[2] = z_optimisation.x[0]
    # Getting the 2D plane cross-section of a window in the XY plane
    # (10-04-18). First rotation around the Z axis.
    vectors_translated = [
        [
            np.dot(rotation_around_z, i[5:])[0],
            np.dot(rotation_around_z, i[5:])[1],
            np.dot(rotation_around_z, i[5:])[2],
        ]
        for i in window
    ]
    # Second rotation around the Y axis.
    vectors_translated = [
        [
            np.dot(rotation_around_y, i)[0],
            np.dot(rotation_around_y, i)[1],
            np.dot(rotation_around_y, i)[2]
        ]
        for i in vectors_translated
    ]
    ref_distance = (new_z - window_com[2]) / np.linalg.norm(vector)
    # Cutting the XY plane.
    XY_plane = np.array(
        [
            [i[0] * ref_distance, i[1] * ref_distance]
            for i in vectors_translated
        ]
    )
    return XY_plane


def find_windows_new(elements,
                     coordinates,
                     processes=None,
                     mol_size=None,
                     adjust=1,
                     pore_opt=True,
                     increment=1.0,
                     **kwargs):
    """
    Return clustered window sampling vectors for a molecule.

    Unlike the docstring of the original version suggested, this function
    does not return diameters/COMs itself: it samples vectors on a sphere,
    clusters the ones that pass through the molecule with DBSCAN and
    returns ``(clustered_results, elements, coordinates, initial_com)``
    for later per-window analysis (see calculate_window_diameter and
    get_window_com), or None when no window-piercing vector is found.
    """
    # Copy the coordinates as we will perform many operations on them.
    coordinates = deepcopy(coordinates)
    # Centre of our cartesian system is always at origin.  (unused here)
    origin = np.array([0, 0, 0])
    # Initial centre of mass to reverse the translation at the end.
    initial_com = center_of_mass(elements, coordinates)
    # Shift the cage to the origin using either the standard centre of mass
    # or, if the pore_opt flag is True, the optimised pore centre as COM.
    if pore_opt is True:
        # Normally the pore is calculated from the COM of a molecule, so
        # essentially the molecule's COM is the pore centre. To shift the
        # molecule so that the centre of the optimised pore is at the
        # origin (and not the centre of the unoptimised one) we need to
        # adjust the shift. We also have to update the initial COM.
        com_adjust = initial_com - opt_pore_diameter(elements, coordinates, **
                                                     kwargs)[2]
        initial_com = initial_com - com_adjust
        coordinates = shift_com(elements, coordinates, com_adjust=com_adjust)
    else:
        # Otherwise, we just shift the cage to the origin.
        coordinates = shift_com(elements, coordinates)
    # We create an array of vdw radii of elements.
    elements_vdw = np.array([[atomic_vdw_radius[x.upper()]] for x in elements])
    # We calculate the maximum diameter of the molecule to determine the
    # radius of a sampling sphere necessary to enclose the whole molecule.
    shpere_radius = max_dim(elements, coordinates)[2] / 2
    sphere_surface_area = 4 * np.pi * shpere_radius**2
    # Here we determine the number of sampling points necessary for a fine
    # sampling. Smaller molecules require a finer density of sampling
    # points on the sphere's surface, whereas larger ones require less.
    # This formula was created so that larger molecules do not take much
    # longer to analyse, as number_of_points * length_of_sampling_vectors
    # results in a quadratic increase of sampling time. The 250 factor was
    # specifically determined to produce close to 1 sampling point per
    # Angstrom^2 for a sphere of radius ~24 Angstrom. Sampling fineness can
    # be tuned with the adjust factor.
    number_of_points = int(np.log10(sphere_surface_area) * 250 * adjust)
    # Golden-spiral point distribution on a sphere:
    # http://blog.marmakoide.org/?p=1
    golden_angle = np.pi * (3 - np.sqrt(5))
    theta = golden_angle * np.arange(number_of_points)
    z = np.linspace(1 - 1.0 / number_of_points, 1.0 / number_of_points - 1.0,
                    number_of_points)
    radius = np.sqrt(1 - z * z)
    points = np.zeros((number_of_points, 3))
    points[:, 0] = radius * np.cos(theta) * shpere_radius
    points[:, 1] = radius * np.sin(theta) * shpere_radius
    points[:, 2] = z * shpere_radius
    # Compute the eps parameter for sklearn.cluster.DBSCAN (3-dimensional
    # spatial clustering algorithm): the mean distance to the 10 closest
    # points over all points.
    values = []
    tree = KDTree(points)
    for i in points:
        dist, ind = tree.query(i.reshape(1, -1), k=10)
        values.extend(dist)
    mean_distance = np.mean(values)
    # The best eps is parametrised by adding the mean distance and its root.
    eps = mean_distance + mean_distance**0.5
    # Run the sampling-vector analysis serially or in parallel. Vectors
    # that go through molecular pores return as analysed lists (increment
    # on the vector's path with the largest included sphere, coordinates of
    # this narrow channel point); vectors that hit the molecule return
    # None.
    if processes:
        # Parallel analysis on the user's defined number of CPUs.
        pool = Pool(processes=processes)
        parallel = [
            pool.apply_async(
                vector_preanalysis,
                args=(
                    point,
                    coordinates,
                    elements_vdw,
                ),
                kwds={'increment': increment}) for point in points
        ]
        results = [p.get() for p in parallel if p.get() is not None]
        pool.terminate()
        # Dataset is an array of sampling point coordinates.
        dataset = np.array([x[5:8] for x in results])
    else:
        results = [
            vector_preanalysis(
                point, coordinates, elements_vdw, increment=increment)
            for point in points
        ]
        results = [x for x in results if x is not None]
        dataset = np.array([x[5:8] for x in results])
    # If not a single vector was returned from the analysis it means that
    # no molecular channels (what we call windows here) connect the
    # molecule's interior with the surroundings (exterior space): the
    # number of windows equals zero and None is returned. Otherwise we
    # continue the search for windows.
    if len(results) == 0:
        return None
    else:
        # Perform DBSCAN to cluster the sampling-point vectors.
        # The n_jobs will be developed later.
        # db = DBSCAN(eps=eps, n_jobs=_ncpus).fit(dataset)
        db = DBSCAN(eps=eps).fit(dataset)
        core_samples_mask = np.zeros_like(db.labels_, dtype=bool)
        core_samples_mask[db.core_sample_indices_] = True
        labels = set(db.labels_)
        # Assign a cluster label to each sampling point.
        clusters = [[i, j] for i, j in zip(results, db.labels_)]
        clustered_results = {label: [] for label in labels}
        # Create a dictionary of clusters with points listed.
        [clustered_results[i[1]].append(i[0]) for i in clusters]
        return clustered_results, elements, coordinates, initial_com


def calculate_window_diameter(window, elements, coordinates, **kwargs):
    """Return the diameter (Angstrom) of one window cluster, or None."""
    elements_vdw = np.array(
        [[atomic_vdw_radius[x.upper()]] for x in elements]
    )
    window_results = window_analysis(
        np.array(window), elements, coordinates, elements_vdw, **kwargs
    )
    # window_analysis returns the diameter first, then the window's centre.
    if window_results:
        return window_results[0]
    else:
        return None


def get_window_com(window, elements, coordinates, initial_com, **kwargs):
    """Return the centre of mass of one window cluster, or None."""
    elements_vdw = np.array(
        [[atomic_vdw_radius[x.upper()]] for x in elements]
    )
    window_results = window_analysis(
        np.array(window), elements, coordinates, elements_vdw, **kwargs
    )
    # window_analysis returns the diameter first, then the window's centre.
    if window_results:
        # Correct the window COM for the initial COM of the cage.
        return np.add(window_results[1], initial_com)
    else:
        return None


def vector_analysis_reversed(vector, coordinates, elements_vdw):
    """
    Return the farthest atom-surface intersection along a vector.

    Casts a ray from the geometric centre along ``vector`` and intersects
    it with each atom's vdW sphere; returns ``[distance, point]`` for the
    farthest intersection, or None (implicitly) when there is none.
    """
    norm_vec = vector/np.linalg.norm(vector)
    intersections = []
    origin = center_of_coor(coordinates)
    L = coordinates - origin
    t_ca = np.dot(L, norm_vec)
    d = np.sqrt(np.einsum('ij,ij->i', L, L) - t_ca**2)
    under_sqrt = elements_vdw**2 - d**2
    diag = under_sqrt.diagonal()
    # Only spheres the ray actually pierces (positive discriminant).
    positions = np.argwhere(diag > 0)
    for pos in positions:
        t_hc = np.sqrt(diag[pos[0]])
        t_0 = t_ca[pos][0] - t_hc
        t_1 = t_ca[pos][0] + t_hc
        P_0 = origin + np.dot(t_0, norm_vec)
        P_1 = origin + np.dot(t_1, norm_vec)
        if np.linalg.norm(P_0) < np.linalg.norm(P_1):
            intersections.append([np.linalg.norm(P_1), P_1])
    if intersections:
        intersection = sorted(intersections, reverse=True)[0][1]
        dist_origin = np.linalg.norm(intersection)
        return [dist_origin, intersection]


def find_average_diameter(elements, coordinates, adjust=1, increment=0.1,
                          processes=None, **kwargs):
    """Return average diameter for a molecule.

    Note: ``increment`` is currently unused here.
    """
    # Copy the coordinates as we will perform many operations on them.
    coordinates = deepcopy(coordinates)
    # Centre of our cartesian system is always at origin.  (unused here)
    origin = np.array([0, 0, 0])
    # Initial centre of mass to reverse the translation at the end.
    initial_com = center_of_mass(elements, coordinates)
    # We just shift the cage to the origin.
    coordinates = shift_com(elements, coordinates)
    # We create an array of vdw radii of elements.
    elements_vdw = np.array([[atomic_vdw_radius[x.upper()]] for x in elements])
    # We calculate the maximum diameter of the molecule to determine the
    # radius of a sampling sphere necessary to enclose the whole molecule.
    # NOTE(review): unlike the other samplers in this file the radius is
    # NOT halved here — confirm this is intentional.
    shpere_radius = max_dim(elements, coordinates)[2]
    sphere_surface_area = 4 * np.pi * shpere_radius**2
    # Number of sampling points for a fine sampling; see find_windows_new
    # for the rationale behind the 250 factor and the adjust knob.
    number_of_points = int(np.log10(sphere_surface_area) * 250 * adjust)
    # Golden-spiral point distribution on a sphere:
    # http://blog.marmakoide.org/?p=1
    golden_angle = np.pi * (3 - np.sqrt(5))
    theta = golden_angle * np.arange(number_of_points)
    z = np.linspace(1 - 1.0 / number_of_points, 1.0 / number_of_points - 1.0,
                    number_of_points)
    radius = np.sqrt(1 - z * z)
    points = np.zeros((number_of_points, 3))
    points[:, 0] = radius * np.cos(theta) * shpere_radius
    points[:, 1] = radius * np.sin(theta) * shpere_radius
    points[:, 2] = z * shpere_radius
    # Analyse the vectors and retain the ones that create the molecule
    # outline.
    if processes:
        pool = Pool(processes=processes)
        parallel = [
            pool.apply_async(
                vector_analysis_reversed,
                args=(
                    point, coordinates, elements_vdw)
            ) for point in points
        ]
        results = [p.get() for p in parallel if p.get() is not None]
        pool.terminate()
    else:
        results = [
            vector_analysis_reversed(
                point, coordinates, elements_vdw)
            for point in points
        ]
    results_cleaned = [x[0] for x in results if x is not None]
    # Mean outline distance times two gives the average diameter.
    return np.mean(results_cleaned)*2


def vector_analysis_pore_shape(vector, coordinates, elements_vdw):
    """
    Return the nearest atom-surface intersection along a vector.

    Like vector_analysis_reversed, but returns the intersection point
    closest to the centre (the pore wall), or None (implicitly) when the
    ray pierces no atom sphere.
    """
    norm_vec = vector/np.linalg.norm(vector)
    intersections = []
    origin = center_of_coor(coordinates)
    L = coordinates - origin
    t_ca = np.dot(L, norm_vec)
    d = np.sqrt(np.einsum('ij,ij->i', L, L) - t_ca**2)
    under_sqrt = elements_vdw**2 - d**2
    diag = under_sqrt.diagonal()
    # Only spheres the ray actually pierces (positive discriminant).
    positions = np.argwhere(diag > 0)
    for pos in positions:
        t_hc = np.sqrt(diag[pos[0]])
        t_0 = t_ca[pos][0] - t_hc
        t_1 = t_ca[pos][0] + t_hc
        P_0 = origin + np.dot(t_0, norm_vec)
        P_1 = origin + np.dot(t_1, norm_vec)
        # print(np.linalg.norm(P_0), np.linalg.norm(P_1))
        if np.linalg.norm(P_0) < np.linalg.norm(P_1):
            intersections.append([np.linalg.norm(P_0), P_0])
    if intersections:
        return sorted(intersections)[0][1]


def calculate_pore_shape(elements, coordinates, adjust=1, increment=0.1,
                         **kwargs):
    """Return pore-shape sampling-point coordinates for a molecule.

    Despite the name similarity with find_average_diameter, this returns
    an array of intersection points outlining the pore, not a diameter.
    Note: ``increment`` and the computed ``eps`` are currently unused.
    """
    # Copy the coordinates as we will perform many operations on them.
    coordinates = deepcopy(coordinates)
    # Centre of our cartesian system is always at origin.  (unused here)
    origin = np.array([0, 0, 0])
    # Initial centre of mass to reverse the translation at the end.
    initial_com = center_of_mass(elements, coordinates)
    # We just shift the cage to the origin.
    coordinates = shift_com(elements, coordinates)
    # We create an array of vdw radii of elements.
    elements_vdw = np.array([[atomic_vdw_radius[x.upper()]] for x in elements])
    # We calculate the maximum diameter of the molecule to determine the
    # radius of a sampling sphere necessary to enclose the whole molecule.
    shpere_radius = max_dim(elements, coordinates)[2]/2
    sphere_surface_area = 4 * np.pi * shpere_radius**2
    # Number of sampling points for a fine sampling; see find_windows_new
    # for the rationale behind the 250 factor and the adjust knob.
    number_of_points = int(np.log10(sphere_surface_area) * 250 * adjust)
    # Golden-spiral point distribution on a sphere:
    # http://blog.marmakoide.org/?p=1
    golden_angle = np.pi * (3 - np.sqrt(5))
    theta = golden_angle * np.arange(number_of_points)
    z = np.linspace(1 - 1.0 / number_of_points, 1.0 / number_of_points - 1.0,
                    number_of_points)
    radius = np.sqrt(1 - z * z)
    points = np.zeros((number_of_points, 3))
    points[:, 0] = radius * np.cos(theta) * shpere_radius
    points[:, 1] = radius * np.sin(theta) * shpere_radius
    points[:, 2] = z * shpere_radius
    # Compute the eps parameter for sklearn.cluster.DBSCAN (3-dimensional
    # spatial clustering algorithm): the mean distance to the 10 closest
    # points over all points.
    values = []
    tree = KDTree(points)
    for i in points:
        dist, ind = tree.query(i.reshape(1, -1), k=10)
        values.extend(dist)
    mean_distance = np.mean(values)
    # The best eps is parametrised by adding the mean distance and its root.
    eps = mean_distance + mean_distance**0.5
    # Sampling-vector analysis (serial only). Vectors that go through
    # molecular voids return an analysed point; vectors that hit the
    # molecule return None.
    results = [
        vector_analysis_pore_shape(point, coordinates, elements_vdw)
        for point in points
    ]
    results_cleaned = [x for x in results if x is not None]
    ele = np.array(['X'] * len(results_cleaned))
    coor = np.array(results_cleaned)
    return coor


def circumcircle_window(coordinates, atom_set):
    """
    Return the vdW-corrected circumcircle radius and centre of an atom triad.

    Parameters
    ----------
    coordinates : numpy.array
        Cartesian coordinates of the molecule's atoms.
    atom_set : sequence
        Three indices into ``coordinates`` forming the window triad.
    """
    # Calculating the circumcircle.
    A = np.array(coordinates[int(atom_set[0])])
    B = np.array(coordinates[int(atom_set[1])])
    C = np.array(coordinates[int(atom_set[2])])
    a = np.linalg.norm(C - B)
    b = np.linalg.norm(C - A)
    c = np.linalg.norm(B - A)
    s = (a + b + c) / 2
    # Holden et al. method is intended to only work with triads of carbons,
    # therefore the vdW radius of a carbon (1.70) is subtracted.
    # This equation calculates the window's radius.
    R = a*b*c / 4 / np.sqrt(s * (s - a) * (s - b) * (s - c)) - 1.70
    # These steps calculate the window's COM as the barycentric-weighted
    # combination of the triad's vertices.
    b1 = a*a * (b*b + c*c - a*a)
    b2 = b*b * (a*a + c*c - b*b)
    b3 = c*c * (a*a + b*b - c*c)
    COM = np.column_stack((A, B, C)).dot(np.hstack((b1, b2, b3)))
    # The window's COM.
    COM /= b1 + b2 + b3
    return R, COM


def circumcircle(coordinates, atom_sets):
    """
    Return window diameters and centres for a list of atom triads.

    Calls circumcircle_window for each triad in ``atom_sets`` and collects
    the doubled radii (diameters) and the centres.
    """
    pld_diameter_list = []
    pld_com_list = []
    iter_ = 0
    while iter_ < len(atom_sets):
        R, COM = circumcircle_window(coordinates, atom_sets[iter_])
        pld_diameter_list.append(R*2)
        pld_com_list.append(COM)
        iter_ += 1
    return pld_diameter_list, pld_com_list
[ "numpy.linalg.eigvals", "numpy.triu", "numpy.sum", "numpy.arctan2", "numpy.allclose", "numpy.einsum", "numpy.argmin", "numpy.around", "numpy.mean", "numpy.linalg.norm", "numpy.sin", "numpy.arange", "numpy.round", "sklearn.cluster.DBSCAN", "scipy.optimize.minimize", "numpy.zeros_like", "sklearn.metrics.pairwise.euclidean_distances", "numpy.append", "numpy.linspace", "numpy.linalg.det", "numpy.add", "numpy.arccos", "numpy.log10", "numpy.repeat", "copy.deepcopy", "numpy.square", "numpy.cross", "numpy.hstack", "numpy.linalg.inv", "numpy.cos", "multiprocessing.Pool", "numpy.argwhere", "numpy.dot", "numpy.concatenate", "numpy.matrix", "numpy.logical_and", "numpy.deg2rad", "numpy.zeros", "numpy.rad2deg", "numpy.where", "numpy.array", "scipy.optimize.brute", "numpy.column_stack", "sklearn.neighbors.KDTree", "numpy.sqrt" ]
[((11504, 11515), 'numpy.zeros', 'np.zeros', (['(3)'], {}), '(3)\n', (11512, 11515), True, 'import numpy as np\n'), ((11997, 12048), 'numpy.array', 'np.array', (['([com - com_adjust] * coordinates.shape[0])'], {}), '([com - com_adjust] * coordinates.shape[0])\n', (12005, 12048), True, 'import numpy as np\n'), ((12618, 12663), 'sklearn.metrics.pairwise.euclidean_distances', 'euclidean_distances', (['coordinates', 'coordinates'], {}), '(coordinates, coordinates)\n', (12637, 12663), False, 'from sklearn.metrics.pairwise import euclidean_distances\n'), ((12786, 12809), 'numpy.triu', 'np.triu', (['re_dist_matrix'], {}), '(re_dist_matrix)\n', (12793, 12809), True, 'import numpy as np\n'), ((13318, 13343), 'numpy.argmin', 'np.argmin', (['re_dist_matrix'], {}), '(re_dist_matrix)\n', (13327, 13343), True, 'import numpy as np\n'), ((14159, 14224), 'scipy.optimize.minimize', 'minimize', (['correct_pore_diameter'], {'x0': 'com', 'args': 'args', 'bounds': 'bounds'}), '(correct_pore_diameter, x0=com, args=args, bounds=bounds)\n', (14167, 14224), False, 'from scipy.optimize import brute, fmin, minimize\n'), ((15872, 15904), 'numpy.sum', 'np.sum', (['(coordinates ** 2)'], {'axis': '(0)'}), '(coordinates ** 2, axis=0)\n', (15878, 15904), True, 'import numpy as np\n'), ((15912, 15957), 'numpy.sum', 'np.sum', (['(coordinates[:, 0] * coordinates[:, 1])'], {}), '(coordinates[:, 0] * coordinates[:, 1])\n', (15918, 15957), True, 'import numpy as np\n'), ((15967, 16012), 'numpy.sum', 'np.sum', (['(coordinates[:, 0] * coordinates[:, 2])'], {}), '(coordinates[:, 0] * coordinates[:, 2])\n', (15973, 16012), True, 'import numpy as np\n'), ((16022, 16067), 'numpy.sum', 'np.sum', (['(coordinates[:, 1] * coordinates[:, 2])'], {}), '(coordinates[:, 1] * coordinates[:, 2])\n', (16028, 16067), True, 'import numpy as np\n'), ((16752, 16804), 'numpy.sum', 'np.sum', (['(molecular_weight * (pow2[:, 1] + pow2[:, 2]))'], {}), '(molecular_weight * (pow2[:, 1] + pow2[:, 2]))\n', (16758, 16804), True, 
'import numpy as np\n'), ((16818, 16870), 'numpy.sum', 'np.sum', (['(molecular_weight * (pow2[:, 0] + pow2[:, 2]))'], {}), '(molecular_weight * (pow2[:, 0] + pow2[:, 2]))\n', (16824, 16870), True, 'import numpy as np\n'), ((16884, 16936), 'numpy.sum', 'np.sum', (['(molecular_weight * (pow2[:, 0] + pow2[:, 1]))'], {}), '(molecular_weight * (pow2[:, 0] + pow2[:, 1]))\n', (16890, 16936), True, 'import numpy as np\n'), ((16948, 17013), 'numpy.sum', 'np.sum', (['(-molecular_weight * coordinates[:, 0] * coordinates[:, 1])'], {}), '(-molecular_weight * coordinates[:, 0] * coordinates[:, 1])\n', (16954, 17013), True, 'import numpy as np\n'), ((17024, 17089), 'numpy.sum', 'np.sum', (['(-molecular_weight * coordinates[:, 0] * coordinates[:, 2])'], {}), '(-molecular_weight * coordinates[:, 0] * coordinates[:, 2])\n', (17030, 17089), True, 'import numpy as np\n'), ((17100, 17165), 'numpy.sum', 'np.sum', (['(-molecular_weight * coordinates[:, 1] * coordinates[:, 2])'], {}), '(-molecular_weight * coordinates[:, 1] * coordinates[:, 2])\n', (17106, 17165), True, 'import numpy as np\n'), ((17822, 17845), 'numpy.round', 'np.round', (['v'], {'decimals': '(4)'}), '(v, decimals=4)\n', (17830, 17845), True, 'import numpy as np\n'), ((18361, 18378), 'numpy.cos', 'np.cos', (['(angle / 2)'], {}), '(angle / 2)\n', (18367, 18378), True, 'import numpy as np\n'), ((18817, 18878), 'numpy.array', 'np.array', (['[[e11, e12, e13], [e21, e22, e23], [e31, e32, e33]]'], {}), '([[e11, e12, e13], [e21, e22, e23], [e31, e32, e33]])\n', (18825, 18878), True, 'import numpy as np\n'), ((18951, 18972), 'copy.deepcopy', 'deepcopy', (['coordinates'], {}), '(coordinates)\n', (18959, 18972), False, 'from copy import deepcopy\n'), ((20551, 20568), 'numpy.deg2rad', 'np.deg2rad', (['alpha'], {}), '(alpha)\n', (20561, 20568), True, 'import numpy as np\n'), ((20582, 20598), 'numpy.deg2rad', 'np.deg2rad', (['beta'], {}), '(beta)\n', (20592, 20598), True, 'import numpy as np\n'), ((20613, 20630), 'numpy.deg2rad', 
'np.deg2rad', (['gamma'], {}), '(gamma)\n', (20623, 20630), True, 'import numpy as np\n'), ((21226, 21287), 'numpy.array', 'np.array', (['[[a_x, a_y, a_z], [b_x, b_y, b_z], [c_x, c_y, c_z]]'], {}), '([[a_x, a_y, a_z], [b_x, b_y, b_z], [c_x, c_y, c_z]])\n', (21234, 21287), True, 'import numpy as np\n'), ((21518, 21566), 'numpy.arccos', 'np.arccos', (['(lattice_array[0][1] / cell_lengths[1])'], {}), '(lattice_array[0][1] / cell_lengths[1])\n', (21527, 21566), True, 'import numpy as np\n'), ((21580, 21628), 'numpy.arccos', 'np.arccos', (['(lattice_array[0][2] / cell_lengths[2])'], {}), '(lattice_array[0][2] / cell_lengths[2])\n', (21589, 21628), True, 'import numpy as np\n'), ((21881, 21917), 'numpy.append', 'np.append', (['cell_lengths', 'cell_angles'], {}), '(cell_lengths, cell_angles)\n', (21890, 21917), True, 'import numpy as np\n'), ((22034, 22062), 'numpy.linalg.det', 'np.linalg.det', (['lattice_array'], {}), '(lattice_array)\n', (22047, 22062), True, 'import numpy as np\n'), ((22699, 22723), 'numpy.matrix', 'np.matrix', (['lattice_array'], {}), '(lattice_array)\n', (22708, 22723), True, 'import numpy as np\n'), ((22967, 22988), 'copy.deepcopy', 'deepcopy', (['coordinates'], {}), '(coordinates)\n', (22975, 22988), False, 'from copy import deepcopy\n'), ((23312, 23338), 'copy.deepcopy', 'deepcopy', (['frac_coordinates'], {}), '(frac_coordinates)\n', (23320, 23338), False, 'from copy import deepcopy\n'), ((23821, 23852), 'copy.deepcopy', 'deepcopy', (["system['coordinates']"], {}), "(system['coordinates'])\n", (23829, 23852), False, 'from copy import deepcopy\n'), ((24549, 24592), 'numpy.concatenate', 'np.concatenate', (['updated_coordinates'], {'axis': '(0)'}), '(updated_coordinates, axis=0)\n', (24563, 24592), True, 'import numpy as np\n'), ((24796, 24824), 'copy.deepcopy', 'deepcopy', (["system['elements']"], {}), "(system['elements'])\n", (24804, 24824), False, 'from copy import deepcopy\n'), ((24839, 24867), 'copy.deepcopy', 'deepcopy', 
(["system['atom_ids']"], {}), "(system['atom_ids'])\n", (24847, 24867), False, 'from copy import deepcopy\n'), ((25881, 25931), 'numpy.cross', 'np.cross', (['(vectors[0] - origin)', '(vectors[1] - origin)'], {}), '(vectors[0] - origin, vectors[1] - origin)\n', (25889, 25931), True, 'import numpy as np\n'), ((38535, 38556), 'numpy.arccos', 'np.arccos', (['first_step'], {}), '(first_step)\n', (38544, 38556), True, 'import numpy as np\n'), ((39736, 39755), 'numpy.dot', 'np.dot', (['L', 'norm_vec'], {}), '(L, norm_vec)\n', (39742, 39755), True, 'import numpy as np\n'), ((39900, 39921), 'numpy.argwhere', 'np.argwhere', (['(diag > 0)'], {}), '(diag > 0)\n', (39911, 39921), True, 'import numpy as np\n'), ((40612, 40639), 'numpy.array', 'np.array', (['[xy[0], xy[1], z]'], {}), '([xy[0], xy[1], z])\n', (40620, 40639), True, 'import numpy as np\n'), ((40868, 40887), 'numpy.array', 'np.array', (['[x, y, z]'], {}), '([x, y, z])\n', (40876, 40887), True, 'import numpy as np\n'), ((41555, 41576), 'copy.deepcopy', 'deepcopy', (['coordinates'], {}), '(coordinates)\n', (41563, 41576), False, 'from copy import deepcopy\n'), ((44755, 44787), 'numpy.array', 'np.array', (['[0, 0, 0]'], {'dtype': 'float'}), '([0, 0, 0], dtype=float)\n', (44763, 44787), True, 'import numpy as np\n'), ((45354, 45424), 'scipy.optimize.minimize', 'minimize', (['optimise_z'], {'x0': 'window_com[2]', 'args': 'z_args', 'bounds': '[z_bounds]'}), '(optimise_z, x0=window_com[2], args=z_args, bounds=[z_bounds])\n', (45362, 45424), False, 'from scipy.optimize import brute, fmin, minimize\n'), ((45799, 45873), 'scipy.optimize.brute', 'brute', (['optimise_xy', 'xy_bounds'], {'args': 'xy_args', 'full_output': '(True)', 'finish': 'fmin'}), '(optimise_xy, xy_bounds, args=xy_args, full_output=True, finish=fmin)\n', (45804, 45873), False, 'from scipy.optimize import brute, fmin, minimize\n'), ((47647, 47683), 'numpy.dot', 'np.dot', (['reverse_around_y', 'window_com'], {}), '(reverse_around_y, window_com)\n', (47653, 
47683), True, 'import numpy as np\n'), ((47974, 48010), 'numpy.dot', 'np.dot', (['reverse_around_z', 'window_com'], {}), '(reverse_around_z, window_com)\n', (47980, 48010), True, 'import numpy as np\n'), ((48452, 48473), 'copy.deepcopy', 'deepcopy', (['coordinates'], {}), '(coordinates)\n', (48460, 48473), False, 'from copy import deepcopy\n'), ((48544, 48563), 'numpy.array', 'np.array', (['[0, 0, 0]'], {}), '([0, 0, 0])\n', (48552, 48563), True, 'import numpy as np\n'), ((50953, 51044), 'numpy.linspace', 'np.linspace', (['(1 - 1.0 / number_of_points)', '(1.0 / number_of_points - 1.0)', 'number_of_points'], {}), '(1 - 1.0 / number_of_points, 1.0 / number_of_points - 1.0,\n number_of_points)\n', (50964, 51044), True, 'import numpy as np\n'), ((51074, 51092), 'numpy.sqrt', 'np.sqrt', (['(1 - z * z)'], {}), '(1 - z * z)\n', (51081, 51092), True, 'import numpy as np\n'), ((51106, 51137), 'numpy.zeros', 'np.zeros', (['(number_of_points, 3)'], {}), '((number_of_points, 3))\n', (51114, 51137), True, 'import numpy as np\n'), ((51514, 51528), 'sklearn.neighbors.KDTree', 'KDTree', (['points'], {}), '(points)\n', (51520, 51528), False, 'from sklearn.neighbors import KDTree\n'), ((51653, 51668), 'numpy.mean', 'np.mean', (['values'], {}), '(values)\n', (51660, 51668), True, 'import numpy as np\n'), ((55503, 55575), 'numpy.array', 'np.array', (['[result[0] for result in window_results if result is not None]'], {}), '([result[0] for result in window_results if result is not None])\n', (55511, 55575), True, 'import numpy as np\n'), ((56879, 56900), 'copy.deepcopy', 'deepcopy', (['coordinates'], {}), '(coordinates)\n', (56887, 56900), False, 'from copy import deepcopy\n'), ((60210, 60242), 'numpy.array', 'np.array', (['[0, 0, 0]'], {'dtype': 'float'}), '([0, 0, 0], dtype=float)\n', (60218, 60242), True, 'import numpy as np\n'), ((60809, 60879), 'scipy.optimize.minimize', 'minimize', (['optimise_z'], {'x0': 'window_com[2]', 'args': 'z_args', 'bounds': '[z_bounds]'}), '(optimise_z, 
x0=window_com[2], args=z_args, bounds=[z_bounds])\n', (60817, 60879), False, 'from scipy.optimize import brute, fmin, minimize\n'), ((61254, 61328), 'scipy.optimize.brute', 'brute', (['optimise_xy', 'xy_bounds'], {'args': 'xy_args', 'full_output': '(True)', 'finish': 'fmin'}), '(optimise_xy, xy_bounds, args=xy_args, full_output=True, finish=fmin)\n', (61259, 61328), False, 'from scipy.optimize import brute, fmin, minimize\n'), ((63160, 63246), 'numpy.array', 'np.array', (['[[i[0] * ref_distance, i[1] * ref_distance] for i in vectors_translated]'], {}), '([[i[0] * ref_distance, i[1] * ref_distance] for i in\n vectors_translated])\n', (63168, 63246), True, 'import numpy as np\n'), ((63743, 63764), 'copy.deepcopy', 'deepcopy', (['coordinates'], {}), '(coordinates)\n', (63751, 63764), False, 'from copy import deepcopy\n'), ((63835, 63854), 'numpy.array', 'np.array', (['[0, 0, 0]'], {}), '([0, 0, 0])\n', (63843, 63854), True, 'import numpy as np\n'), ((66244, 66335), 'numpy.linspace', 'np.linspace', (['(1 - 1.0 / number_of_points)', '(1.0 / number_of_points - 1.0)', 'number_of_points'], {}), '(1 - 1.0 / number_of_points, 1.0 / number_of_points - 1.0,\n number_of_points)\n', (66255, 66335), True, 'import numpy as np\n'), ((66365, 66383), 'numpy.sqrt', 'np.sqrt', (['(1 - z * z)'], {}), '(1 - z * z)\n', (66372, 66383), True, 'import numpy as np\n'), ((66397, 66428), 'numpy.zeros', 'np.zeros', (['(number_of_points, 3)'], {}), '((number_of_points, 3))\n', (66405, 66428), True, 'import numpy as np\n'), ((66805, 66819), 'sklearn.neighbors.KDTree', 'KDTree', (['points'], {}), '(points)\n', (66811, 66819), False, 'from sklearn.neighbors import KDTree\n'), ((66944, 66959), 'numpy.mean', 'np.mean', (['values'], {}), '(values)\n', (66951, 66959), True, 'import numpy as np\n'), ((70815, 70834), 'numpy.dot', 'np.dot', (['L', 'norm_vec'], {}), '(L, norm_vec)\n', (70821, 70834), True, 'import numpy as np\n'), ((70979, 71000), 'numpy.argwhere', 'np.argwhere', (['(diag > 0)'], {}), 
'(diag > 0)\n', (70990, 71000), True, 'import numpy as np\n'), ((71784, 71805), 'copy.deepcopy', 'deepcopy', (['coordinates'], {}), '(coordinates)\n', (71792, 71805), False, 'from copy import deepcopy\n'), ((71876, 71895), 'numpy.array', 'np.array', (['[0, 0, 0]'], {}), '([0, 0, 0])\n', (71884, 71895), True, 'import numpy as np\n'), ((73422, 73513), 'numpy.linspace', 'np.linspace', (['(1 - 1.0 / number_of_points)', '(1.0 / number_of_points - 1.0)', 'number_of_points'], {}), '(1 - 1.0 / number_of_points, 1.0 / number_of_points - 1.0,\n number_of_points)\n', (73433, 73513), True, 'import numpy as np\n'), ((73543, 73561), 'numpy.sqrt', 'np.sqrt', (['(1 - z * z)'], {}), '(1 - z * z)\n', (73550, 73561), True, 'import numpy as np\n'), ((73575, 73606), 'numpy.zeros', 'np.zeros', (['(number_of_points, 3)'], {}), '((number_of_points, 3))\n', (73583, 73606), True, 'import numpy as np\n'), ((74704, 74723), 'numpy.dot', 'np.dot', (['L', 'norm_vec'], {}), '(L, norm_vec)\n', (74710, 74723), True, 'import numpy as np\n'), ((74868, 74889), 'numpy.argwhere', 'np.argwhere', (['(diag > 0)'], {}), '(diag > 0)\n', (74879, 74889), True, 'import numpy as np\n'), ((75596, 75617), 'copy.deepcopy', 'deepcopy', (['coordinates'], {}), '(coordinates)\n', (75604, 75617), False, 'from copy import deepcopy\n'), ((75688, 75707), 'numpy.array', 'np.array', (['[0, 0, 0]'], {}), '([0, 0, 0])\n', (75696, 75707), True, 'import numpy as np\n'), ((77236, 77327), 'numpy.linspace', 'np.linspace', (['(1 - 1.0 / number_of_points)', '(1.0 / number_of_points - 1.0)', 'number_of_points'], {}), '(1 - 1.0 / number_of_points, 1.0 / number_of_points - 1.0,\n number_of_points)\n', (77247, 77327), True, 'import numpy as np\n'), ((77357, 77375), 'numpy.sqrt', 'np.sqrt', (['(1 - z * z)'], {}), '(1 - z * z)\n', (77364, 77375), True, 'import numpy as np\n'), ((77389, 77420), 'numpy.zeros', 'np.zeros', (['(number_of_points, 3)'], {}), '((number_of_points, 3))\n', (77397, 77420), True, 'import numpy as np\n'), ((77797, 
77811), 'sklearn.neighbors.KDTree', 'KDTree', (['points'], {}), '(points)\n', (77803, 77811), False, 'from sklearn.neighbors import KDTree\n'), ((77936, 77951), 'numpy.mean', 'np.mean', (['values'], {}), '(values)\n', (77943, 77951), True, 'import numpy as np\n'), ((78675, 78700), 'numpy.array', 'np.array', (['results_cleaned'], {}), '(results_cleaned)\n', (78683, 78700), True, 'import numpy as np\n'), ((78950, 78971), 'numpy.linalg.norm', 'np.linalg.norm', (['(C - B)'], {}), '(C - B)\n', (78964, 78971), True, 'import numpy as np\n'), ((78980, 79001), 'numpy.linalg.norm', 'np.linalg.norm', (['(C - A)'], {}), '(C - A)\n', (78994, 79001), True, 'import numpy as np\n'), ((79010, 79031), 'numpy.linalg.norm', 'np.linalg.norm', (['(B - A)'], {}), '(B - A)\n', (79024, 79031), True, 'import numpy as np\n'), ((3485, 3505), 'numpy.sum', 'np.sum', (['((a - b) ** 2)'], {}), '((a - b) ** 2)\n', (3491, 3505), True, 'import numpy as np\n'), ((4243, 4270), 'numpy.sum', 'np.sum', (['coordinates'], {'axis': '(0)'}), '(coordinates, axis=0)\n', (4249, 4270), True, 'import numpy as np\n'), ((4911, 4943), 'numpy.sum', 'np.sum', (['mass_coordinates'], {'axis': '(0)'}), '(mass_coordinates, axis=0)\n', (4917, 4943), True, 'import numpy as np\n'), ((4946, 4974), 'numpy.array', 'np.array', (['[mass, mass, mass]'], {}), '([mass, mass, mass])\n', (4954, 4974), True, 'import numpy as np\n'), ((7629, 7651), 'numpy.array', 'np.array', (['transpose[0]'], {}), '(transpose[0])\n', (7637, 7651), True, 'import numpy as np\n'), ((7839, 7881), 'numpy.concatenate', 'np.concatenate', (['(array_a, array_b)'], {'axis': '(1)'}), '((array_a, array_b), axis=1)\n', (7853, 7881), True, 'import numpy as np\n'), ((7904, 7947), 'numpy.concatenate', 'np.concatenate', (['(array_ab, array_c)'], {'axis': '(1)'}), '((array_ab, array_c), axis=1)\n', (7918, 7947), True, 'import numpy as np\n'), ((14849, 14869), 'numpy.linalg.eigvals', 'np.linalg.eigvals', (['T'], {}), '(T)\n', (14866, 14869), True, 'import numpy as 
np\n'), ((16076, 16143), 'numpy.array', 'np.array', (['[[diag[0], xy, xz], [xy, diag[1], yz], [xz, yz, diag[2]]]'], {}), '([[diag[0], xy, xz], [xy, diag[1], yz], [xz, yz, diag[2]]])\n', (16084, 16143), True, 'import numpy as np\n'), ((17188, 17258), 'numpy.array', 'np.array', (['[[diag_1, mxy, mxz], [mxy, diag_2, myz], [mxz, myz, diag_3]]'], {}), '([[diag_1, mxy, mxz], [mxy, diag_2, myz], [mxz, myz, diag_3]])\n', (17196, 17258), True, 'import numpy as np\n'), ((17787, 17809), 'numpy.linalg.norm', 'np.linalg.norm', (['vector'], {}), '(vector)\n', (17801, 17809), True, 'import numpy as np\n'), ((18400, 18417), 'numpy.sin', 'np.sin', (['(angle / 2)'], {}), '(angle / 2)\n', (18406, 18417), True, 'import numpy as np\n'), ((18474, 18486), 'numpy.square', 'np.square', (['d'], {}), '(d)\n', (18483, 18486), True, 'import numpy as np\n'), ((18633, 18645), 'numpy.square', 'np.square', (['d'], {}), '(d)\n', (18642, 18645), True, 'import numpy as np\n'), ((18792, 18804), 'numpy.square', 'np.square', (['c'], {}), '(c)\n', (18801, 18804), True, 'import numpy as np\n'), ((19190, 19211), 'numpy.linalg.norm', 'np.linalg.norm', (['r_vec'], {}), '(r_vec)\n', (19204, 19211), True, 'import numpy as np\n'), ((19271, 19291), 'numpy.arctan2', 'np.arctan2', (['sin', 'cos'], {}), '(sin, cos)\n', (19281, 19291), True, 'import numpy as np\n'), ((19548, 19566), 'numpy.array', 'np.array', (['new_coor'], {}), '(new_coor)\n', (19556, 19566), True, 'import numpy as np\n'), ((20951, 20966), 'numpy.cos', 'np.cos', (['r_gamma'], {}), '(r_gamma)\n', (20957, 20966), True, 'import numpy as np\n'), ((20982, 20996), 'numpy.cos', 'np.cos', (['r_beta'], {}), '(r_beta)\n', (20988, 20996), True, 'import numpy as np\n'), ((21024, 21039), 'numpy.sin', 'np.sin', (['r_gamma'], {}), '(r_gamma)\n', (21030, 21039), True, 'import numpy as np\n'), ((21119, 21134), 'numpy.sin', 'np.sin', (['r_gamma'], {}), '(r_gamma)\n', (21125, 21134), True, 'import numpy as np\n'), ((21470, 21504), 'numpy.sum', 'np.sum', 
(['(lattice_array ** 2)'], {'axis': '(0)'}), '(lattice_array ** 2, axis=0)\n', (21476, 21504), True, 'import numpy as np\n'), ((21799, 21818), 'numpy.rad2deg', 'np.rad2deg', (['alpha_r'], {}), '(alpha_r)\n', (21809, 21818), True, 'import numpy as np\n'), ((21820, 21838), 'numpy.rad2deg', 'np.rad2deg', (['beta_r'], {}), '(beta_r)\n', (21830, 21838), True, 'import numpy as np\n'), ((21840, 21859), 'numpy.rad2deg', 'np.rad2deg', (['gamma_r'], {}), '(gamma_r)\n', (21850, 21859), True, 'import numpy as np\n'), ((22408, 22436), 'numpy.linalg.inv', 'np.linalg.inv', (['lattice_array'], {}), '(lattice_array)\n', (22421, 22436), True, 'import numpy as np\n'), ((24941, 24991), 'numpy.concatenate', 'np.concatenate', (["(new_elements, system['elements'])"], {}), "((new_elements, system['elements']))\n", (24955, 24991), True, 'import numpy as np\n'), ((25010, 25055), 'numpy.concatenate', 'np.concatenate', (["(new_ids, system['atom_ids'])"], {}), "((new_ids, system['atom_ids']))\n", (25024, 25055), True, 'import numpy as np\n'), ((29152, 29178), 'numpy.array', 'np.array', (['[0.01, 0.0, 0.0]'], {}), '([0.01, 0.0, 0.0])\n', (29160, 29178), True, 'import numpy as np\n'), ((29371, 29399), 'numpy.array', 'np.array', (['[0.26, 0.25, 0.25]'], {}), '([0.26, 0.25, 0.25])\n', (29379, 29399), True, 'import numpy as np\n'), ((30782, 30823), 'numpy.allclose', 'np.allclose', (['system_com', 'origin'], {'atol': '(1.0)'}), '(system_com, origin, atol=1.0)\n', (30793, 30823), True, 'import numpy as np\n'), ((37124, 37177), 'numpy.array', 'np.array', (['[x[0] for x in final_molecule]'], {'dtype': '"""str"""'}), "([x[0] for x in final_molecule], dtype='str')\n", (37132, 37177), True, 'import numpy as np\n'), ((37236, 37290), 'numpy.array', 'np.array', (['[[*xyz[1 + adj:]] for xyz in final_molecule]'], {}), '([[*xyz[1 + adj:]] for xyz in final_molecule])\n', (37244, 37290), True, 'import numpy as np\n'), ((39273, 39299), 'numpy.argmin', 'np.argmin', (['analysed_vector'], {}), '(analysed_vector)\n', 
(39282, 39299), True, 'import numpy as np\n'), ((39389, 39416), 'numpy.linalg.norm', 'np.linalg.norm', (['(chunk * pos)'], {}), '(chunk * pos)\n', (39403, 39416), True, 'import numpy as np\n'), ((39432, 39499), 'numpy.array', 'np.array', (['[dist, analysed_vector[pos] * 2, *(chunk * pos), *vector]'], {}), '([dist, analysed_vector[pos] * 2, *(chunk * pos), *vector])\n', (39440, 39499), True, 'import numpy as np\n'), ((39609, 39631), 'numpy.linalg.norm', 'np.linalg.norm', (['vector'], {}), '(vector)\n', (39623, 39631), True, 'import numpy as np\n'), ((39963, 39984), 'numpy.sqrt', 'np.sqrt', (['diag[pos[0]]'], {}), '(diag[pos[0]])\n', (39970, 39984), True, 'import numpy as np\n'), ((42251, 42286), 'numpy.array', 'np.array', (['[vector[0], vector[1], 0]'], {}), '([vector[0], vector[1], 0])\n', (42259, 42286), True, 'import numpy as np\n'), ((44578, 44626), 'numpy.array', 'np.array', (['([[0, 0, new_z]] * coordinates.shape[0])'], {}), '([[0, 0, new_z]] * coordinates.shape[0])\n', (44586, 44626), True, 'import numpy as np\n'), ((46824, 46894), 'scipy.optimize.minimize', 'minimize', (['optimise_z'], {'x0': 'window_com[2]', 'args': 'z_args', 'bounds': '[z_bounds]'}), '(optimise_z, x0=window_com[2], args=z_args, bounds=[z_bounds])\n', (46832, 46894), False, 'from scipy.optimize import brute, fmin, minimize\n'), ((50917, 50944), 'numpy.arange', 'np.arange', (['number_of_points'], {}), '(number_of_points)\n', (50926, 50944), True, 'import numpy as np\n'), ((52244, 52269), 'multiprocessing.Pool', 'Pool', ([], {'processes': 'processes'}), '(processes=processes)\n', (52248, 52269), False, 'from multiprocessing import Pool\n'), ((52728, 52763), 'numpy.array', 'np.array', (['[x[5:8] for x in results]'], {}), '([x[5:8] for x in results])\n', (52736, 52763), True, 'import numpy as np\n'), ((53013, 53048), 'numpy.array', 'np.array', (['[x[5:8] for x in results]'], {}), '([x[5:8] for x in results])\n', (53021, 53048), True, 'import numpy as np\n'), ((53681, 53718), 'numpy.zeros_like', 
'np.zeros_like', (['db.labels_'], {'dtype': 'bool'}), '(db.labels_, dtype=bool)\n', (53694, 53718), True, 'import numpy as np\n'), ((57706, 57741), 'numpy.array', 'np.array', (['[vector[0], vector[1], 0]'], {}), '([vector[0], vector[1], 0])\n', (57714, 57741), True, 'import numpy as np\n'), ((60033, 60081), 'numpy.array', 'np.array', (['([[0, 0, new_z]] * coordinates.shape[0])'], {}), '([[0, 0, new_z]] * coordinates.shape[0])\n', (60041, 60081), True, 'import numpy as np\n'), ((62279, 62349), 'scipy.optimize.minimize', 'minimize', (['optimise_z'], {'x0': 'window_com[2]', 'args': 'z_args', 'bounds': '[z_bounds]'}), '(optimise_z, x0=window_com[2], args=z_args, bounds=[z_bounds])\n', (62287, 62349), False, 'from scipy.optimize import brute, fmin, minimize\n'), ((63094, 63116), 'numpy.linalg.norm', 'np.linalg.norm', (['vector'], {}), '(vector)\n', (63108, 63116), True, 'import numpy as np\n'), ((66208, 66235), 'numpy.arange', 'np.arange', (['number_of_points'], {}), '(number_of_points)\n', (66217, 66235), True, 'import numpy as np\n'), ((67535, 67560), 'multiprocessing.Pool', 'Pool', ([], {'processes': 'processes'}), '(processes=processes)\n', (67539, 67560), False, 'from multiprocessing import Pool\n'), ((68019, 68054), 'numpy.array', 'np.array', (['[x[5:8] for x in results]'], {}), '([x[5:8] for x in results])\n', (68027, 68054), True, 'import numpy as np\n'), ((68304, 68339), 'numpy.array', 'np.array', (['[x[5:8] for x in results]'], {}), '([x[5:8] for x in results])\n', (68312, 68339), True, 'import numpy as np\n'), ((68972, 69009), 'numpy.zeros_like', 'np.zeros_like', (['db.labels_'], {'dtype': 'bool'}), '(db.labels_, dtype=bool)\n', (68985, 69009), True, 'import numpy as np\n'), ((69688, 69704), 'numpy.array', 'np.array', (['window'], {}), '(window)\n', (69696, 69704), True, 'import numpy as np\n'), ((70206, 70222), 'numpy.array', 'np.array', (['window'], {}), '(window)\n', (70214, 70222), True, 'import numpy as np\n'), ((70530, 70568), 'numpy.add', 'np.add', 
(['window_results[1]', 'initial_com'], {}), '(window_results[1], initial_com)\n', (70536, 70568), True, 'import numpy as np\n'), ((70688, 70710), 'numpy.linalg.norm', 'np.linalg.norm', (['vector'], {}), '(vector)\n', (70702, 70710), True, 'import numpy as np\n'), ((71042, 71063), 'numpy.sqrt', 'np.sqrt', (['diag[pos[0]]'], {}), '(diag[pos[0]])\n', (71049, 71063), True, 'import numpy as np\n'), ((71448, 71476), 'numpy.linalg.norm', 'np.linalg.norm', (['intersection'], {}), '(intersection)\n', (71462, 71476), True, 'import numpy as np\n'), ((73386, 73413), 'numpy.arange', 'np.arange', (['number_of_points'], {}), '(number_of_points)\n', (73395, 73413), True, 'import numpy as np\n'), ((73887, 73912), 'multiprocessing.Pool', 'Pool', ([], {'processes': 'processes'}), '(processes=processes)\n', (73891, 73912), False, 'from multiprocessing import Pool\n'), ((74459, 74483), 'numpy.mean', 'np.mean', (['results_cleaned'], {}), '(results_cleaned)\n', (74466, 74483), True, 'import numpy as np\n'), ((74577, 74599), 'numpy.linalg.norm', 'np.linalg.norm', (['vector'], {}), '(vector)\n', (74591, 74599), True, 'import numpy as np\n'), ((74931, 74952), 'numpy.sqrt', 'np.sqrt', (['diag[pos[0]]'], {}), '(diag[pos[0]])\n', (74938, 74952), True, 'import numpy as np\n'), ((77200, 77227), 'numpy.arange', 'np.arange', (['number_of_points'], {}), '(number_of_points)\n', (77209, 77227), True, 'import numpy as np\n'), ((79507, 79530), 'numpy.hstack', 'np.hstack', (['(b1, b2, b3)'], {}), '((b1, b2, b3))\n', (79516, 79530), True, 'import numpy as np\n'), ((8034, 8056), 'numpy.array', 'np.array', (['transpose[0]'], {}), '(transpose[0])\n', (8042, 8056), True, 'import numpy as np\n'), ((8076, 8098), 'numpy.array', 'np.array', (['transpose[1]'], {}), '(transpose[1])\n', (8084, 8098), True, 'import numpy as np\n'), ((8286, 8328), 'numpy.concatenate', 'np.concatenate', (['(array_a, array_b)'], {'axis': '(1)'}), '((array_a, array_b), axis=1)\n', (8300, 8328), True, 'import numpy as np\n'), ((8351, 
8394), 'numpy.concatenate', 'np.concatenate', (['(array_ab, array_c)'], {'axis': '(1)'}), '((array_ab, array_c), axis=1)\n', (8365, 8394), True, 'import numpy as np\n'), ((14786, 14806), 'numpy.linalg.eigvals', 'np.linalg.eigvals', (['T'], {}), '(T)\n', (14803, 14806), True, 'import numpy as np\n'), ((18459, 18471), 'numpy.square', 'np.square', (['c'], {}), '(c)\n', (18468, 18471), True, 'import numpy as np\n'), ((18618, 18630), 'numpy.square', 'np.square', (['b'], {}), '(b)\n', (18627, 18630), True, 'import numpy as np\n'), ((18777, 18789), 'numpy.square', 'np.square', (['b'], {}), '(b)\n', (18786, 18789), True, 'import numpy as np\n'), ((19163, 19174), 'numpy.array', 'np.array', (['j'], {}), '(j)\n', (19171, 19174), True, 'import numpy as np\n'), ((19244, 19255), 'numpy.array', 'np.array', (['j'], {}), '(j)\n', (19252, 19255), True, 'import numpy as np\n'), ((21189, 21204), 'numpy.sin', 'np.sin', (['r_gamma'], {}), '(r_gamma)\n', (21195, 21204), True, 'import numpy as np\n'), ((30850, 30871), 'numpy.array', 'np.array', (['[-0.5, 0.5]'], {}), '([-0.5, 0.5])\n', (30858, 30871), True, 'import numpy as np\n'), ((30909, 30929), 'numpy.array', 'np.array', (['[0.0, 1.0]'], {}), '([0.0, 1.0])\n', (30917, 30929), True, 'import numpy as np\n'), ((31037, 31063), 'numpy.array', 'np.array', (['[0.01, 0.0, 0.0]'], {}), '([0.01, 0.0, 0.0])\n', (31045, 31063), True, 'import numpy as np\n'), ((33170, 33214), 'numpy.array', 'np.array', (['potential_starting_point[1 + adj:]'], {}), '(potential_starting_point[1 + adj:])\n', (33178, 33214), True, 'import numpy as np\n'), ((37371, 37424), 'numpy.array', 'np.array', (['[x[1] for x in final_molecule]'], {'dtype': '"""str"""'}), "([x[1] for x in final_molecule], dtype='str')\n", (37379, 37424), True, 'import numpy as np\n'), ((37948, 37979), 'numpy.around', 'np.around', (['com_frac'], {'decimals': '(8)'}), '(com_frac, decimals=8)\n', (37957, 37979), True, 'import numpy as np\n'), ((38432, 38474), 'numpy.sqrt', 'np.sqrt', (['(x[0] ** 2 + 
x[1] ** 2 + x[2] ** 2)'], {}), '(x[0] ** 2 + x[1] ** 2 + x[2] ** 2)\n', (38439, 38474), True, 'import numpy as np\n'), ((38479, 38521), 'numpy.sqrt', 'np.sqrt', (['(y[0] ** 2 + y[1] ** 2 + y[2] ** 2)'], {}), '(y[0] ** 2 + y[1] ** 2 + y[2] ** 2)\n', (38486, 38521), True, 'import numpy as np\n'), ((38819, 38841), 'numpy.linalg.norm', 'np.linalg.norm', (['vector'], {}), '(vector)\n', (38833, 38841), True, 'import numpy as np\n'), ((39772, 39799), 'numpy.einsum', 'np.einsum', (['"""ij,ij->i"""', 'L', 'L'], {}), "('ij,ij->i', L, L)\n", (39781, 39799), True, 'import numpy as np\n'), ((40077, 40098), 'numpy.dot', 'np.dot', (['t_0', 'norm_vec'], {}), '(t_0, norm_vec)\n', (40083, 40098), True, 'import numpy as np\n'), ((40122, 40143), 'numpy.dot', 'np.dot', (['t_1', 'norm_vec'], {}), '(t_1, norm_vec)\n', (40128, 40143), True, 'import numpy as np\n'), ((40213, 40232), 'numpy.linalg.norm', 'np.linalg.norm', (['P_0'], {}), '(P_0)\n', (40227, 40232), True, 'import numpy as np\n'), ((40235, 40254), 'numpy.linalg.norm', 'np.linalg.norm', (['P_1'], {}), '(P_1)\n', (40249, 40254), True, 'import numpy as np\n'), ((43716, 43744), 'numpy.dot', 'np.dot', (['rotation_around_z', 'i'], {}), '(rotation_around_z, i)\n', (43722, 43744), True, 'import numpy as np\n'), ((44114, 44142), 'numpy.dot', 'np.dot', (['rotation_around_y', 'i'], {}), '(rotation_around_y, i)\n', (44120, 44142), True, 'import numpy as np\n'), ((50878, 50888), 'numpy.sqrt', 'np.sqrt', (['(5)'], {}), '(5)\n', (50885, 50888), True, 'import numpy as np\n'), ((51166, 51179), 'numpy.cos', 'np.cos', (['theta'], {}), '(theta)\n', (51172, 51179), True, 'import numpy as np\n'), ((51224, 51237), 'numpy.sin', 'np.sin', (['theta'], {}), '(theta)\n', (51230, 51237), True, 'import numpy as np\n'), ((54588, 54613), 'multiprocessing.Pool', 'Pool', ([], {'processes': 'processes'}), '(processes=processes)\n', (54592, 54613), False, 'from multiprocessing import Pool\n'), ((55638, 55668), 'numpy.add', 'np.add', (['result[1]', 'initial_com'], 
{}), '(result[1], initial_com)\n', (55644, 55668), True, 'import numpy as np\n'), ((59171, 59199), 'numpy.dot', 'np.dot', (['rotation_around_z', 'i'], {}), '(rotation_around_z, i)\n', (59177, 59199), True, 'import numpy as np\n'), ((59569, 59597), 'numpy.dot', 'np.dot', (['rotation_around_y', 'i'], {}), '(rotation_around_y, i)\n', (59575, 59597), True, 'import numpy as np\n'), ((66169, 66179), 'numpy.sqrt', 'np.sqrt', (['(5)'], {}), '(5)\n', (66176, 66179), True, 'import numpy as np\n'), ((66457, 66470), 'numpy.cos', 'np.cos', (['theta'], {}), '(theta)\n', (66463, 66470), True, 'import numpy as np\n'), ((66515, 66528), 'numpy.sin', 'np.sin', (['theta'], {}), '(theta)\n', (66521, 66528), True, 'import numpy as np\n'), ((70851, 70878), 'numpy.einsum', 'np.einsum', (['"""ij,ij->i"""', 'L', 'L'], {}), "('ij,ij->i', L, L)\n", (70860, 70878), True, 'import numpy as np\n'), ((71156, 71177), 'numpy.dot', 'np.dot', (['t_0', 'norm_vec'], {}), '(t_0, norm_vec)\n', (71162, 71177), True, 'import numpy as np\n'), ((71201, 71222), 'numpy.dot', 'np.dot', (['t_1', 'norm_vec'], {}), '(t_1, norm_vec)\n', (71207, 71222), True, 'import numpy as np\n'), ((71234, 71253), 'numpy.linalg.norm', 'np.linalg.norm', (['P_0'], {}), '(P_0)\n', (71248, 71253), True, 'import numpy as np\n'), ((71256, 71275), 'numpy.linalg.norm', 'np.linalg.norm', (['P_1'], {}), '(P_1)\n', (71270, 71275), True, 'import numpy as np\n'), ((73347, 73357), 'numpy.sqrt', 'np.sqrt', (['(5)'], {}), '(5)\n', (73354, 73357), True, 'import numpy as np\n'), ((73635, 73648), 'numpy.cos', 'np.cos', (['theta'], {}), '(theta)\n', (73641, 73648), True, 'import numpy as np\n'), ((73693, 73706), 'numpy.sin', 'np.sin', (['theta'], {}), '(theta)\n', (73699, 73706), True, 'import numpy as np\n'), ((74740, 74767), 'numpy.einsum', 'np.einsum', (['"""ij,ij->i"""', 'L', 'L'], {}), "('ij,ij->i', L, L)\n", (74749, 74767), True, 'import numpy as np\n'), ((75045, 75066), 'numpy.dot', 'np.dot', (['t_0', 'norm_vec'], {}), '(t_0, norm_vec)\n', 
(75051, 75066), True, 'import numpy as np\n'), ((75090, 75111), 'numpy.dot', 'np.dot', (['t_1', 'norm_vec'], {}), '(t_1, norm_vec)\n', (75096, 75111), True, 'import numpy as np\n'), ((75181, 75200), 'numpy.linalg.norm', 'np.linalg.norm', (['P_0'], {}), '(P_0)\n', (75195, 75200), True, 'import numpy as np\n'), ((75203, 75222), 'numpy.linalg.norm', 'np.linalg.norm', (['P_1'], {}), '(P_1)\n', (75217, 75222), True, 'import numpy as np\n'), ((77161, 77171), 'numpy.sqrt', 'np.sqrt', (['(5)'], {}), '(5)\n', (77168, 77171), True, 'import numpy as np\n'), ((77449, 77462), 'numpy.cos', 'np.cos', (['theta'], {}), '(theta)\n', (77455, 77462), True, 'import numpy as np\n'), ((77507, 77520), 'numpy.sin', 'np.sin', (['theta'], {}), '(theta)\n', (77513, 77520), True, 'import numpy as np\n'), ((79262, 79302), 'numpy.sqrt', 'np.sqrt', (['(s * (s - a) * (s - b) * (s - c))'], {}), '(s * (s - a) * (s - b) * (s - c))\n', (79269, 79302), True, 'import numpy as np\n'), ((79476, 79502), 'numpy.column_stack', 'np.column_stack', (['(A, B, C)'], {}), '((A, B, C))\n', (79491, 79502), True, 'import numpy as np\n'), ((7670, 7692), 'numpy.array', 'np.array', (['transpose[1]'], {}), '(transpose[1])\n', (7678, 7692), True, 'import numpy as np\n'), ((7726, 7748), 'numpy.array', 'np.array', (['transpose[2]'], {}), '(transpose[2])\n', (7734, 7748), True, 'import numpy as np\n'), ((7782, 7804), 'numpy.array', 'np.array', (['transpose[3]'], {}), '(transpose[3])\n', (7790, 7804), True, 'import numpy as np\n'), ((18429, 18441), 'numpy.square', 'np.square', (['a'], {}), '(a)\n', (18438, 18441), True, 'import numpy as np\n'), ((18444, 18456), 'numpy.square', 'np.square', (['b'], {}), '(b)\n', (18453, 18456), True, 'import numpy as np\n'), ((18588, 18600), 'numpy.square', 'np.square', (['a'], {}), '(a)\n', (18597, 18600), True, 'import numpy as np\n'), ((18603, 18615), 'numpy.square', 'np.square', (['c'], {}), '(c)\n', (18612, 18615), True, 'import numpy as np\n'), ((18747, 18759), 'numpy.square', 
'np.square', (['a'], {}), '(a)\n', (18756, 18759), True, 'import numpy as np\n'), ((18762, 18774), 'numpy.square', 'np.square', (['d'], {}), '(d)\n', (18771, 18774), True, 'import numpy as np\n'), ((21065, 21080), 'numpy.cos', 'np.cos', (['r_alpha'], {}), '(r_alpha)\n', (21071, 21080), True, 'import numpy as np\n'), ((21728, 21742), 'numpy.cos', 'np.cos', (['beta_r'], {}), '(beta_r)\n', (21734, 21742), True, 'import numpy as np\n'), ((21745, 21760), 'numpy.cos', 'np.cos', (['gamma_r'], {}), '(gamma_r)\n', (21751, 21760), True, 'import numpy as np\n'), ((24105, 24129), 'numpy.array', 'np.array', (['[[a_, b_, c_]]'], {}), '([[a_, b_, c_]])\n', (24113, 24129), True, 'import numpy as np\n'), ((24160, 24212), 'numpy.repeat', 'np.repeat', (['mult_matrix', 'coordinates.shape[0]'], {'axis': '(0)'}), '(mult_matrix, coordinates.shape[0], axis=0)\n', (24169, 24212), True, 'import numpy as np\n'), ((38007, 38082), 'numpy.logical_and', 'np.logical_and', (['(com_frac_round >= boundary[0])', '(com_frac_round < boundary[1])'], {}), '(com_frac_round >= boundary[0], com_frac_round < boundary[1])\n', (38021, 38082), True, 'import numpy as np\n'), ((43467, 43482), 'numpy.cos', 'np.cos', (['angle_1'], {}), '(angle_1)\n', (43473, 43482), True, 'import numpy as np\n'), ((43541, 43556), 'numpy.sin', 'np.sin', (['angle_1'], {}), '(angle_1)\n', (43547, 43556), True, 'import numpy as np\n'), ((43558, 43573), 'numpy.cos', 'np.cos', (['angle_1'], {}), '(angle_1)\n', (43564, 43573), True, 'import numpy as np\n'), ((43865, 43880), 'numpy.cos', 'np.cos', (['angle_2'], {}), '(angle_2)\n', (43871, 43880), True, 'import numpy as np\n'), ((43885, 43900), 'numpy.sin', 'np.sin', (['angle_2'], {}), '(angle_2)\n', (43891, 43900), True, 'import numpy as np\n'), ((44004, 44019), 'numpy.cos', 'np.cos', (['angle_2'], {}), '(angle_2)\n', (44010, 44019), True, 'import numpy as np\n'), ((47415, 47432), 'numpy.cos', 'np.cos', (['angle_2_1'], {}), '(angle_2_1)\n', (47421, 47432), True, 'import numpy as np\n'), 
((47437, 47454), 'numpy.sin', 'np.sin', (['angle_2_1'], {}), '(angle_2_1)\n', (47443, 47454), True, 'import numpy as np\n'), ((47558, 47575), 'numpy.cos', 'np.cos', (['angle_2_1'], {}), '(angle_2_1)\n', (47564, 47575), True, 'import numpy as np\n'), ((47743, 47760), 'numpy.cos', 'np.cos', (['angle_1_1'], {}), '(angle_1_1)\n', (47749, 47760), True, 'import numpy as np\n'), ((47820, 47837), 'numpy.sin', 'np.sin', (['angle_1_1'], {}), '(angle_1_1)\n', (47826, 47837), True, 'import numpy as np\n'), ((47839, 47856), 'numpy.cos', 'np.cos', (['angle_1_1'], {}), '(angle_1_1)\n', (47845, 47856), True, 'import numpy as np\n'), ((50696, 50725), 'numpy.log10', 'np.log10', (['sphere_surface_area'], {}), '(sphere_surface_area)\n', (50704, 50725), True, 'import numpy as np\n'), ((53624, 53639), 'sklearn.cluster.DBSCAN', 'DBSCAN', ([], {'eps': 'eps'}), '(eps=eps)\n', (53630, 53639), False, 'from sklearn.cluster import DBSCAN\n'), ((58922, 58937), 'numpy.cos', 'np.cos', (['angle_1'], {}), '(angle_1)\n', (58928, 58937), True, 'import numpy as np\n'), ((58996, 59011), 'numpy.sin', 'np.sin', (['angle_1'], {}), '(angle_1)\n', (59002, 59011), True, 'import numpy as np\n'), ((59013, 59028), 'numpy.cos', 'np.cos', (['angle_1'], {}), '(angle_1)\n', (59019, 59028), True, 'import numpy as np\n'), ((59320, 59335), 'numpy.cos', 'np.cos', (['angle_2'], {}), '(angle_2)\n', (59326, 59335), True, 'import numpy as np\n'), ((59340, 59355), 'numpy.sin', 'np.sin', (['angle_2'], {}), '(angle_2)\n', (59346, 59355), True, 'import numpy as np\n'), ((59459, 59474), 'numpy.cos', 'np.cos', (['angle_2'], {}), '(angle_2)\n', (59465, 59474), True, 'import numpy as np\n'), ((62630, 62662), 'numpy.dot', 'np.dot', (['rotation_around_z', 'i[5:]'], {}), '(rotation_around_z, i[5:])\n', (62636, 62662), True, 'import numpy as np\n'), ((62679, 62711), 'numpy.dot', 'np.dot', (['rotation_around_z', 'i[5:]'], {}), '(rotation_around_z, i[5:])\n', (62685, 62711), True, 'import numpy as np\n'), ((62728, 62760), 'numpy.dot', 
'np.dot', (['rotation_around_z', 'i[5:]'], {}), '(rotation_around_z, i[5:])\n', (62734, 62760), True, 'import numpy as np\n'), ((62883, 62911), 'numpy.dot', 'np.dot', (['rotation_around_y', 'i'], {}), '(rotation_around_y, i)\n', (62889, 62911), True, 'import numpy as np\n'), ((62928, 62956), 'numpy.dot', 'np.dot', (['rotation_around_y', 'i'], {}), '(rotation_around_y, i)\n', (62934, 62956), True, 'import numpy as np\n'), ((62973, 63001), 'numpy.dot', 'np.dot', (['rotation_around_y', 'i'], {}), '(rotation_around_y, i)\n', (62979, 63001), True, 'import numpy as np\n'), ((65987, 66016), 'numpy.log10', 'np.log10', (['sphere_surface_area'], {}), '(sphere_surface_area)\n', (65995, 66016), True, 'import numpy as np\n'), ((68915, 68930), 'sklearn.cluster.DBSCAN', 'DBSCAN', ([], {'eps': 'eps'}), '(eps=eps)\n', (68921, 68930), False, 'from sklearn.cluster import DBSCAN\n'), ((73165, 73194), 'numpy.log10', 'np.log10', (['sphere_surface_area'], {}), '(sphere_surface_area)\n', (73173, 73194), True, 'import numpy as np\n'), ((76979, 77008), 'numpy.log10', 'np.log10', (['sphere_surface_area'], {}), '(sphere_surface_area)\n', (76987, 77008), True, 'import numpy as np\n'), ((8117, 8139), 'numpy.array', 'np.array', (['transpose[2]'], {}), '(transpose[2])\n', (8125, 8139), True, 'import numpy as np\n'), ((8173, 8195), 'numpy.array', 'np.array', (['transpose[3]'], {}), '(transpose[3])\n', (8181, 8195), True, 'import numpy as np\n'), ((8229, 8251), 'numpy.array', 'np.array', (['transpose[4]'], {}), '(transpose[4])\n', (8237, 8251), True, 'import numpy as np\n'), ((14689, 14698), 'numpy.sum', 'np.sum', (['S'], {}), '(S)\n', (14695, 14698), True, 'import numpy as np\n'), ((20835, 20850), 'numpy.cos', 'np.cos', (['r_gamma'], {}), '(r_gamma)\n', (20841, 20850), True, 'import numpy as np\n'), ((21083, 21097), 'numpy.cos', 'np.cos', (['r_beta'], {}), '(r_beta)\n', (21089, 21097), True, 'import numpy as np\n'), ((21100, 21115), 'numpy.cos', 'np.cos', (['r_gamma'], {}), '(r_gamma)\n', (21106, 
21115), True, 'import numpy as np\n'), ((21684, 21699), 'numpy.sin', 'np.sin', (['gamma_r'], {}), '(gamma_r)\n', (21690, 21699), True, 'import numpy as np\n'), ((34243, 34264), 'numpy.array', 'np.array', (['i[1 + adj:]'], {}), '(i[1 + adj:])\n', (34251, 34264), True, 'import numpy as np\n'), ((43485, 43500), 'numpy.sin', 'np.sin', (['angle_1'], {}), '(angle_1)\n', (43491, 43500), True, 'import numpy as np\n'), ((43984, 43999), 'numpy.sin', 'np.sin', (['angle_2'], {}), '(angle_2)\n', (43990, 43999), True, 'import numpy as np\n'), ((47536, 47553), 'numpy.sin', 'np.sin', (['angle_2_1'], {}), '(angle_2_1)\n', (47542, 47553), True, 'import numpy as np\n'), ((47763, 47780), 'numpy.sin', 'np.sin', (['angle_1_1'], {}), '(angle_1_1)\n', (47769, 47780), True, 'import numpy as np\n'), ((55158, 55194), 'numpy.array', 'np.array', (['clustered_results[cluster]'], {}), '(clustered_results[cluster])\n', (55166, 55194), True, 'import numpy as np\n'), ((58940, 58955), 'numpy.sin', 'np.sin', (['angle_1'], {}), '(angle_1)\n', (58946, 58955), True, 'import numpy as np\n'), ((59439, 59454), 'numpy.sin', 'np.sin', (['angle_2'], {}), '(angle_2)\n', (59445, 59454), True, 'import numpy as np\n'), ((71311, 71330), 'numpy.linalg.norm', 'np.linalg.norm', (['P_1'], {}), '(P_1)\n', (71325, 71330), True, 'import numpy as np\n'), ((75258, 75277), 'numpy.linalg.norm', 'np.linalg.norm', (['P_0'], {}), '(P_0)\n', (75272, 75277), True, 'import numpy as np\n'), ((20767, 20782), 'numpy.cos', 'np.cos', (['r_gamma'], {}), '(r_gamma)\n', (20773, 20782), True, 'import numpy as np\n'), ((20818, 20832), 'numpy.cos', 'np.cos', (['r_beta'], {}), '(r_beta)\n', (20824, 20832), True, 'import numpy as np\n'), ((20747, 20761), 'numpy.cos', 'np.cos', (['r_beta'], {}), '(r_beta)\n', (20753, 20761), True, 'import numpy as np\n'), ((20800, 20815), 'numpy.cos', 'np.cos', (['r_alpha'], {}), '(r_alpha)\n', (20806, 20815), True, 'import numpy as np\n'), ((34582, 34595), 'numpy.where', 'np.where', (['idx'], {}), '(idx)\n', 
(34590, 34595), True, 'import numpy as np\n'), ((34688, 34710), 'numpy.array', 'np.array', (['atom_coor[j]'], {}), '(atom_coor[j])\n', (34696, 34710), True, 'import numpy as np\n'), ((35404, 35418), 'numpy.where', 'np.where', (['sidx'], {}), '(sidx)\n', (35412, 35418), True, 'import numpy as np\n'), ((54736, 54772), 'numpy.array', 'np.array', (['clustered_results[cluster]'], {}), '(clustered_results[cluster])\n', (54744, 54772), True, 'import numpy as np\n'), ((20726, 20741), 'numpy.cos', 'np.cos', (['r_alpha'], {}), '(r_alpha)\n', (20732, 20741), True, 'import numpy as np\n'), ((35646, 35669), 'numpy.array', 'np.array', (['satom_coor[j]'], {}), '(satom_coor[j])\n', (35654, 35669), True, 'import numpy as np\n')]
# -*- coding: utf-8 -*- # Generated by Django 1.10.1 on 2019-03-05 15:45 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('catalog', '0003_merge_20190305_1545'), ] operations = [ migrations.AlterField( model_name='image', name='imageurl', field=models.URLField(primary_key=True, serialize=False), ), migrations.AlterField( model_name='turker', name='turkerid', field=models.CharField(max_length=100, primary_key=True, serialize=False), ), ]
[ "django.db.models.CharField", "django.db.models.URLField" ]
[((403, 453), 'django.db.models.URLField', 'models.URLField', ([], {'primary_key': '(True)', 'serialize': '(False)'}), '(primary_key=True, serialize=False)\n', (418, 453), False, 'from django.db import migrations, models\n'), ((577, 644), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'primary_key': '(True)', 'serialize': '(False)'}), '(max_length=100, primary_key=True, serialize=False)\n', (593, 644), False, 'from django.db import migrations, models\n')]
"""gRPC client wrappers for the HeTr transformer service.

`RPCTransformerClient` manages the lifetime of a remote transformer
(build / close) and creation of computations; `RPCComputationClient`
feeds inputs to, and fetches results from, one remote computation.
"""
import grpc
from six import iteritems

from . import hetr_pb2
from . import hetr_pb2_grpc
from ngraph.op_graph.serde.serde import op_to_protobuf, tensor_to_protobuf,\
    pb_to_tensor, is_scalar_type, assign_scalar, protobuf_scalar_to_python

import logging

# Deadline (in seconds) applied to every RPC issued by these clients.
_TIMEOUT_SECONDS = 600
logger = logging.getLogger(__name__)


def is_channel_ready(channel):
    """Return True if `channel` is IDLE or READY.

    Uses the private `_channel` attribute of the grpc channel; passing
    True also asks the channel to try to connect.
    """
    status = channel._channel.check_connectivity_state(True)
    return ((status == 0) or (status == 2))  # 0: IDLE, 2: READY


class RPCComputationClient(object):
    """Client handle for a single computation hosted by the remote service."""

    def __init__(self, comp_id, stub):
        """Wrap remote computation `comp_id`, issuing calls through `stub`."""
        self.comp_id = comp_id
        self.RPC = stub
        # Pending future from the last feed_input(); consumed by get_results().
        self.feed_input_response_future = None

    def feed_input(self, values):
        """Asynchronously send `values` (scalars or tensors) as inputs.

        Must be followed by get_results(); the request is sent as a future
        so the caller can overlap work with the RPC.
        """
        logger.debug("client: feed input")
        pb_values = []
        for v in values:
            pb_val = hetr_pb2.Value()
            if is_scalar_type(v):
                assign_scalar(pb_val.scalar, v)
            else:
                pb_val.tensor.CopyFrom(tensor_to_protobuf(v))
            pb_values.append(pb_val)
        self.feed_input_response_future = self.RPC.FeedInput.future(
            hetr_pb2.FeedInputRequest(
                comp_id=self.comp_id,
                values=pb_values),
            _TIMEOUT_SECONDS)

    def get_results(self):
        """Block on the pending feed_input, then fetch and decode results.

        Returns a dict mapping op -> result value.

        Raises:
            RuntimeError: if feed_input() was not called first, or if either
                RPC reports failure.
        """
        logger.debug("client: get results")
        if self.feed_input_response_future is None:
            raise RuntimeError("call feed_input before get_results")
        response = self.feed_input_response_future.result()
        self.feed_input_response_future = None
        if not response.status:
            raise RuntimeError("RPC feed_input request failed: {}".format(response.message))
        response = self.RPC.GetResults(
            hetr_pb2.GetResultsRequest(comp_id=self.comp_id),
            _TIMEOUT_SECONDS)
        if not response.status:
            raise RuntimeError("RPC get_results request failed: {}".format(response.message))
        return_list = []
        for r in response.results:
            # Each result is either a scalar or a serialized tensor.
            if r.HasField('scalar'):
                return_list.append(protobuf_scalar_to_python(r.scalar))
            else:
                return_list.append(pb_to_tensor(r.tensor))
        # NOTE(review): `self.returns` is never assigned in __init__;
        # presumably the owner of this client sets it after construction --
        # confirm against the caller before relying on get_results().
        return_dict = {op: return_list[mypos]
                       for (op, mypos) in iteritems(self.returns)}
        return return_dict


class RPCTransformerClient(object):
    """Client handle for a remote HeTr transformer process."""

    def __init__(self, transformer_type, server_address='localhost'):
        """Record connection parameters; no RPC is made until
        build_transformer() is called."""
        logger.debug("client: init, transformer: %s, server_address: %s",
                     transformer_type, server_address)
        self.transformer_type = transformer_type
        self.server_address = server_address
        self.computations = dict()
        self.computation_builds = dict()
        self.comp_id_ctr = 0
        self.is_trans_built = False
        self.computation_response_future = None
        self.close_transformer_response_future = None

    def set_server_address(self, address):
        """Change the server address; ignored once the transformer is built."""
        if self.is_trans_built:
            logger.debug("client: set_server_address: transformer is already built, \
                skip server address")
            return
        self.server_address = address

    def build_transformer(self):
        """Open the gRPC channel and ask the server to build the transformer.

        Idempotent: returns immediately if already built.  Unlimited message
        sizes are configured because serialized graphs/tensors can be large.

        Raises:
            RuntimeError: if the channel never becomes ready, if a pending
                close failed, or if the server rejects the build.
        """
        logger.debug("client: build_transformer, server address: %s", self.server_address)
        if self.is_trans_built:
            logger.debug("client: build_transformer: transformer is already built")
            return
        # -1 lifts gRPC's default 4MB message-size cap.
        options = [('grpc.max_send_message_length', -1),
                   ('grpc.max_receive_message_length', -1)]
        channel = grpc.insecure_channel(self.server_address, options=options)
        if not is_channel_ready(channel):
            raise RuntimeError("gRPC channel is not ready...")
        self.RPC = hetr_pb2_grpc.HetrStub(channel)

        # Drain any close_transformer() still in flight before rebuilding.
        if self.close_transformer_response_future is not None:
            response = self.close_transformer_response_future.result()
            if not response.status:
                raise RuntimeError("RPC close_transformer request failed: {}"
                                   .format(response.message))
            self.is_trans_built = False
            self.close_transformer_response_future = None

        response = self.RPC.BuildTransformer(
            hetr_pb2.BuildTransformerRequest(transformer_type=self.transformer_type),
            _TIMEOUT_SECONDS)
        if response.status:
            self.is_trans_built = True
        else:
            self.is_trans_built = False
            raise RuntimeError("RPC build_transformer request failed: {}".format(response.message))

    def create_computation(self, pb_graph, returns, placeholders):
        """Stream the serialized graph to the server as a computation request.

        `pb_graph` yields (ops, edges) chunks; returns/placeholders are sent
        only with the first message of the stream.  The response future is
        consumed later by get_computation().
        """
        logger.debug("client: create_computation")

        def make_computation_request(pb_ops, pb_edges, pb_returns=None, pb_placeholders=None):
            # returns/placeholders are omitted on follow-up chunks.
            if pb_returns or pb_placeholders:
                return hetr_pb2.ComputationRequest(
                    ops=pb_ops,
                    edges=pb_edges,
                    returns=pb_returns,
                    placeholders=pb_placeholders)
            else:
                return hetr_pb2.ComputationRequest(
                    ops=pb_ops,
                    edges=pb_edges)

        def generate_messages():
            pb_returns = [op_to_protobuf(o) for o in returns]
            pb_placeholders = [op_to_protobuf(o) for o in placeholders]
            for pb_ops, pb_edges in pb_graph:
                msg = make_computation_request(
                    pb_ops, pb_edges, pb_returns, pb_placeholders)
                yield msg
                # Only the first streamed message carries returns/placeholders.
                pb_returns, pb_placeholders = [], []

        if not self.is_trans_built:
            raise RuntimeError("call build_transformer before create_computation")
        self.computation_response_future = self.RPC.Computation.future(
            generate_messages(), _TIMEOUT_SECONDS)

    def get_computation(self):
        """Wait for the pending create_computation and wrap its handle.

        Returns an RPCComputationClient for the new computation.

        Raises:
            RuntimeError: if create_computation() was not called first or
                the server reports failure (negative comp_id).
        """
        logger.debug("client: get_computation")
        if self.computation_response_future is None:
            raise RuntimeError("call create_computation before get_computation")
        response = self.computation_response_future.result()
        self.computation_response_future = None
        if response.comp_id >= 0:
            rpcComputationClient = RPCComputationClient(response.comp_id, self.RPC)
            return rpcComputationClient
        else:
            raise RuntimeError("RPC computation request failed: {}".format(response.message))

    def close_transformer(self):
        """Asynchronously ask the server to tear down the transformer."""
        logger.debug("client: close_transformer")
        if self.is_trans_built:
            self.close_transformer_response_future = self.RPC.CloseTransformer.future(
                hetr_pb2.CloseTransformerRequest(),
                _TIMEOUT_SECONDS)

    def close(self):
        """Finish any pending close_transformer, then ask the server to exit.

        The final Close RPC is best-effort: the server may already be gone,
        so failures are logged and swallowed rather than raised.
        """
        logger.debug("client: close")
        if self.close_transformer_response_future is not None:
            response = self.close_transformer_response_future.result()
            if not response.status:
                raise RuntimeError("RPC close_transformer request failed: {}"
                                   .format(response.message))
            self.is_trans_built = False
            self.close_transformer_response_future = None
        try:
            self.RPC.Close.future(
                hetr_pb2.CloseRequest(),
                _TIMEOUT_SECONDS)
        except Exception:
            # Fix: was a bare `except:` (also caught SystemExit/KeyboardInterrupt).
            # Best-effort shutdown stays non-fatal, but the failure is recorded.
            logger.debug("client: close request failed", exc_info=True)
[ "ngraph.op_graph.serde.serde.pb_to_tensor", "ngraph.op_graph.serde.serde.tensor_to_protobuf", "ngraph.op_graph.serde.serde.assign_scalar", "grpc.insecure_channel", "ngraph.op_graph.serde.serde.protobuf_scalar_to_python", "ngraph.op_graph.serde.serde.op_to_protobuf", "ngraph.op_graph.serde.serde.is_scalar_type", "six.iteritems", "logging.getLogger" ]
[((291, 318), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (308, 318), False, 'import logging\n'), ((3516, 3575), 'grpc.insecure_channel', 'grpc.insecure_channel', (['self.server_address'], {'options': 'options'}), '(self.server_address, options=options)\n', (3537, 3575), False, 'import grpc\n'), ((836, 853), 'ngraph.op_graph.serde.serde.is_scalar_type', 'is_scalar_type', (['v'], {}), '(v)\n', (850, 853), False, 'from ngraph.op_graph.serde.serde import op_to_protobuf, tensor_to_protobuf, pb_to_tensor, is_scalar_type, assign_scalar, protobuf_scalar_to_python\n'), ((871, 902), 'ngraph.op_graph.serde.serde.assign_scalar', 'assign_scalar', (['pb_val.scalar', 'v'], {}), '(pb_val.scalar, v)\n', (884, 902), False, 'from ngraph.op_graph.serde.serde import op_to_protobuf, tensor_to_protobuf, pb_to_tensor, is_scalar_type, assign_scalar, protobuf_scalar_to_python\n'), ((2248, 2271), 'six.iteritems', 'iteritems', (['self.returns'], {}), '(self.returns)\n', (2257, 2271), False, 'from six import iteritems\n'), ((5194, 5211), 'ngraph.op_graph.serde.serde.op_to_protobuf', 'op_to_protobuf', (['o'], {}), '(o)\n', (5208, 5211), False, 'from ngraph.op_graph.serde.serde import op_to_protobuf, tensor_to_protobuf, pb_to_tensor, is_scalar_type, assign_scalar, protobuf_scalar_to_python\n'), ((5261, 5278), 'ngraph.op_graph.serde.serde.op_to_protobuf', 'op_to_protobuf', (['o'], {}), '(o)\n', (5275, 5278), False, 'from ngraph.op_graph.serde.serde import op_to_protobuf, tensor_to_protobuf, pb_to_tensor, is_scalar_type, assign_scalar, protobuf_scalar_to_python\n'), ((960, 981), 'ngraph.op_graph.serde.serde.tensor_to_protobuf', 'tensor_to_protobuf', (['v'], {}), '(v)\n', (978, 981), False, 'from ngraph.op_graph.serde.serde import op_to_protobuf, tensor_to_protobuf, pb_to_tensor, is_scalar_type, assign_scalar, protobuf_scalar_to_python\n'), ((2046, 2081), 'ngraph.op_graph.serde.serde.protobuf_scalar_to_python', 'protobuf_scalar_to_python', (['r.scalar'], {}), 
'(r.scalar)\n', (2071, 2081), False, 'from ngraph.op_graph.serde.serde import op_to_protobuf, tensor_to_protobuf, pb_to_tensor, is_scalar_type, assign_scalar, protobuf_scalar_to_python\n'), ((2136, 2158), 'ngraph.op_graph.serde.serde.pb_to_tensor', 'pb_to_tensor', (['r.tensor'], {}), '(r.tensor)\n', (2148, 2158), False, 'from ngraph.op_graph.serde.serde import op_to_protobuf, tensor_to_protobuf, pb_to_tensor, is_scalar_type, assign_scalar, protobuf_scalar_to_python\n')]
# Demonstrates a custom ast.NodeVisitor that pretty-prints assignments while
# dumping every other node type verbatim, then checks the output against a
# golden string.
# NOTE(review): EXP encodes the legacy `Num(n=...)` dump format; on Python
# 3.8+ `ast.parse` produces `Constant` nodes and `ast.dump` prints them as
# such, so the final assert presumably fails there -- confirm the intended
# interpreter version.  (`import sys` is unused.)
import sys
import ast
import io


class Visitor(ast.NodeVisitor):
    def __init__(self, f):
        # f: any file-like object with write(); output is accumulated here.
        self.f = f

    def generic_visit(self, node):
        # Fallback: dump the raw node representation, then recurse.
        self.f.write(ast.dump(node))
        self.f.write("\n")
        super().generic_visit(node)

    def visit_Assign(self, node):
        # Render chained assignments as "a = b = value".
        for n in node.targets:
            self.visit(n)
            self.f.write(" = ")
        self.visit(node.value)
        self.f.write("\n")

    def visit_Name(self, node):
        self.f.write(node.id)

    def visit_Num(self, node):
        self.f.write(str(node.n))


SRC = """\
a = 1
a = b = 1
"""

EXP = """\
Module(body=[Assign(targets=[Name(id='a', ctx=Store())], value=Num(n=1)), Assign(targets=[Name(id='a', ctx=Store()), Name(id='b', ctx=Store())], value=Num(n=1))])
a = 1
a = b = 1
"""

t = ast.parse(SRC)
buf = io.StringIO()
visitor = Visitor(buf)
visitor.visit(t)
assert buf.getvalue() == EXP
[ "ast.dump", "ast.parse", "io.StringIO" ]
[((793, 807), 'ast.parse', 'ast.parse', (['SRC'], {}), '(SRC)\n', (802, 807), False, 'import ast\n'), ((815, 828), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (826, 828), False, 'import io\n'), ((170, 184), 'ast.dump', 'ast.dump', (['node'], {}), '(node)\n', (178, 184), False, 'import ast\n')]
# Stub module: presumably a Skulpt-style placeholder whose `_sk_fail._` call
# reports that "SocketServer" is unavailable in this environment -- TODO
# confirm `_sk_fail._`'s exact behavior (raise vs. warn).
import _sk_fail; _sk_fail._("SocketServer")
[ "_sk_fail._" ]
[((17, 43), '_sk_fail._', '_sk_fail._', (['"""SocketServer"""'], {}), "('SocketServer')\n", (27, 43), False, 'import _sk_fail\n')]
#!/usr/bin/env python3
"""Structured-perceptron weight training for a CTC beam-search decoder.

Builds the forced-alignment (CTC) path for a target transcription, runs
beam search over the acoustic log-probabilities, and applies a perceptron
update whenever the target falls out of the beam or can no longer be
finished in the remaining frames.

NOTE(review): several imports (copy, pprint, logsumexp, beta,
ExpandFunction) are unused here; also `scipy.misc.logsumexp` was removed
in modern SciPy (moved to `scipy.special`) -- confirm the pinned version.
"""
from typing import List

import numpy as np
import copy
import pprint as pp

from scipy.misc import logsumexp
from scipy.stats import beta

from neuralmonkey.vocabulary import Vocabulary

from n_gram_model import NGramModel
from hypothesis import Hypothesis, ExpandFunction
from beam_search import score_hypothesis, compute_feature, \
    log_softmax, expand_null, empty_hypothesis


def list_startswith(list1, list2):
    # True if list1 starts with list2 (also True when list2 is longer than
    # list1 only if all overlapping positions match -- zip stops at the
    # shorter list, so this is really "common prefix matches").
    return all([token1 == token2
                for token1, token2 in zip(list1, list2)])


def update_weights(violation_hyp: Hypothesis,
                   target_hyp: Hypothesis,
                   weights: dict,
                   states_cnt: int):
    # Perceptron update: move each feature weight toward the target
    # hypothesis and away from the violating one.  Mutates `weights`.
    LEARNING_RATE = 0.0005
    for key in weights.keys():
        weights[key] += LEARNING_RATE * (compute_feature(key, target_hyp, states_cnt)
                                          - compute_feature(key, violation_hyp, states_cnt))


def add_expanded_hyp(
        ctc_table: np.ndarray,
        weights: dict,
        row: int,
        col: int,
        candidate_hyp: Hypothesis,
        parent: (int, int)):
    # Insert candidate_hyp into the DP cell (row, col), keeping the
    # better-scoring hypothesis and recombining when the candidate wins.
    current_hyp = ctc_table[row, col]
    # NOTE(review): the `weights` parameter is immediately shadowed by an
    # all-zero dict, so cells are compared on base score only (feature
    # contributions disabled).  Looks deliberate for path construction, but
    # confirm -- otherwise the parameter is dead.
    weights = {
        "lm_score" : 0.0,
        "null_trailing" : 0.0,
        "null_token_ratio" : 0.0
    }
    if current_hyp:
        score_current = score_hypothesis(current_hyp[0], weights, 0)
        score_candidate = score_hypothesis(candidate_hyp, weights, 0)
        if score_candidate <= score_current:
            return
        candidate_hyp.recombine_with(current_hyp[0])
    ctc_table[row, col] = (candidate_hyp, parent)


def ctc_path(
        target: List,
        log_prob_table: np.ndarray,
        weights: dict,
        lm: NGramModel,
        vocabulary: Vocabulary) -> List[Hypothesis]:
    """Force-align `target` against the frame log-probabilities.

    Returns one Hypothesis per time step along the best CTC alignment, or
    None when the target cannot be decoded (fewer frames than tokens, or no
    complete path exists).  The table stores (hypothesis, parent_cell)
    tuples; the last vocabulary column is the CTC blank/null.
    """
    rows = len(target) + 1
    time_steps = len(log_prob_table)

    # error in data, target cannot be decoded
    if time_steps < len(target):
        return None

    ctc_table = np.empty(shape=(rows, time_steps), dtype=tuple)

    # fill the starting cell with the empty hypothesis
    ctc_table[0,0] = (empty_hypothesis(), None)
    for time in range(time_steps-1):
        # Last column of each frame holds the blank (null) log-probability.
        null_log_prob = log_prob_table[time, -1]

        # fill only the space around the diagonal
        min_row = max(0, rows - (time_steps-time))
        max_row = min(time + 1, len(target))
        for row in range(min_row, max_row):
            hyp = ctc_table[row, time][0]
            next_token = target[row]
            next_token_idx = vocabulary.word_to_index[next_token]

            # add eps
            expanded = expand_null(hyp, null_log_prob)
            add_expanded_hyp(ctc_table, weights, row, time+1,
                             candidate_hyp=expanded,
                             parent=(row, time))

            # add next token
            next_token_score = log_prob_table[time, next_token_idx]
            expanded = lm.expand_token(hyp, next_token, next_token_score)
            add_expanded_hyp(ctc_table, weights, row+1, time+1,
                             candidate_hyp=expanded,
                             parent=(row, time))

    # reconstruct path
    path = []
    hyp = ctc_table[rows-1, time_steps-1]

    # error in data
    if hyp is None:
        return None

    # Walk parent pointers back to the start cell, then reverse.
    while True:
        path.append(hyp[0])
        prev_idx = hyp[1]
        if prev_idx is None:
            break
        hyp = ctc_table[prev_idx]
    path.reverse()
    return path


def train_weights(
        logits_table: np.ndarray,
        beam_width: int,
        vocabulary: Vocabulary,
        target: list,
        weights: dict,
        lm: NGramModel) -> List[str]:
    """Run beam search over `logits_table` and update `weights` in place on
    the first search violation (target out of beam / unfinishable).

    Returns None; the side effect is the perceptron update via
    update_weights().  Exits silently if the target cannot be aligned.
    """
    assert beam_width >= 1
    log_prob_table = log_softmax(logits_table)
    hypotheses = [empty_hypothesis()]
    time_steps = log_prob_table.shape[0]

    target_hyp_path = ctc_path(target, log_prob_table, weights, lm, vocabulary)

    # error in data
    if target_hyp_path is None:
        return

    states_cnt = len(log_prob_table)
    for time in range(len(log_prob_table)-1):
        log_probs = log_prob_table[time]
        null_log_prob = log_probs[-1]
        token_log_probs = log_probs[:-1]

        # Start every beam entry with a blank extension; str_to_hyp maps the
        # token string to (hypothesis, index) for recombination.
        new_hypotheses = []
        str_to_hyp = {}
        for hyp in hypotheses:
            expanded = expand_null(hyp, null_log_prob)
            str_to_hyp[" ".join(expanded.tokens)] = (
                expanded, len(new_hypotheses))
            new_hypotheses.append(expanded)

        # Only the 2*beam_width most probable tokens are tried per frame.
        best_tokens = np.argpartition(
            -token_log_probs, 2 * beam_width)[:2 * beam_width]
        best_scores = token_log_probs[best_tokens]

        for hyp_index, hyp in enumerate(hypotheses):
            for token_index, score in zip(best_tokens, best_scores):
                token = vocabulary.index_to_word[token_index]
                expanded = lm.expand_token(hyp, token, score)
                score = score_hypothesis(expanded, weights, states_cnt)
                hyp_str = " ".join(expanded.tokens)
                if hyp_str in str_to_hyp:
                    # NOTE(review): this rebinds the outer loop variable
                    # `hyp_index`; harmless today (the enumerate value is
                    # otherwise unused) but fragile -- worth renaming.
                    orig_hyp, hyp_index = str_to_hyp[hyp_str]
                    expanded.recombine_with(orig_hyp)
                    new_hypotheses[hyp_index] = expanded
                    str_to_hyp[hyp_str] = (expanded, hyp_index)
                else:
                    str_to_hyp[hyp_str] = (expanded, len(new_hypotheses))
                    new_hypotheses.append(expanded)

        # Candidates that are still prefixes of the target transcription.
        target_candidates_indices = [i for i, h in enumerate(new_hypotheses)
                                     if list_startswith(target, h.tokens)]
        new_scores = np.array([score_hypothesis(h, weights, states_cnt)
                               for h in new_hypotheses])
        target_candidates = [new_hypotheses[i] for i in target_candidates_indices]
        target_candidates_tokens_cnt = np.array([len(h.tokens)
                                                  for h in target_candidates])
        best_hyp_indices = np.argsort(-new_scores)
        target_hyp_ranks = np.in1d(best_hyp_indices,
                                    target_candidates_indices).nonzero()[0]
        hypotheses = [new_hypotheses[i] for i in best_hyp_indices[:beam_width]]

        # hypotheses are out of the beam or no hypotheses can be finished in time
        if (all(target_hyp_ranks >= beam_width)
                or all(target_candidates_tokens_cnt + (time_steps - time) < len(target))):
            # Violation: update against every surviving beam entry, then stop.
            for i in range(beam_width):
                violation_hyp = hypotheses[i]
                target_hyp = target_hyp_path[time+1]
                update_weights(violation_hyp, target_hyp, weights, states_cnt)
            return
[ "beam_search.empty_hypothesis", "numpy.empty", "beam_search.compute_feature", "beam_search.score_hypothesis", "numpy.argsort", "numpy.argpartition", "beam_search.expand_null", "beam_search.log_softmax", "numpy.in1d" ]
[((1875, 1922), 'numpy.empty', 'np.empty', ([], {'shape': '(rows, time_steps)', 'dtype': 'tuple'}), '(shape=(rows, time_steps), dtype=tuple)\n', (1883, 1922), True, 'import numpy as np\n'), ((3544, 3569), 'beam_search.log_softmax', 'log_softmax', (['logits_table'], {}), '(logits_table)\n', (3555, 3569), False, 'from beam_search import score_hypothesis, compute_feature, log_softmax, expand_null, empty_hypothesis\n'), ((1255, 1299), 'beam_search.score_hypothesis', 'score_hypothesis', (['current_hyp[0]', 'weights', '(0)'], {}), '(current_hyp[0], weights, 0)\n', (1271, 1299), False, 'from beam_search import score_hypothesis, compute_feature, log_softmax, expand_null, empty_hypothesis\n'), ((1326, 1369), 'beam_search.score_hypothesis', 'score_hypothesis', (['candidate_hyp', 'weights', '(0)'], {}), '(candidate_hyp, weights, 0)\n', (1342, 1369), False, 'from beam_search import score_hypothesis, compute_feature, log_softmax, expand_null, empty_hypothesis\n'), ((2001, 2019), 'beam_search.empty_hypothesis', 'empty_hypothesis', ([], {}), '()\n', (2017, 2019), False, 'from beam_search import score_hypothesis, compute_feature, log_softmax, expand_null, empty_hypothesis\n'), ((3588, 3606), 'beam_search.empty_hypothesis', 'empty_hypothesis', ([], {}), '()\n', (3604, 3606), False, 'from beam_search import score_hypothesis, compute_feature, log_softmax, expand_null, empty_hypothesis\n'), ((5799, 5822), 'numpy.argsort', 'np.argsort', (['(-new_scores)'], {}), '(-new_scores)\n', (5809, 5822), True, 'import numpy as np\n'), ((2497, 2528), 'beam_search.expand_null', 'expand_null', (['hyp', 'null_log_prob'], {}), '(hyp, null_log_prob)\n', (2508, 2528), False, 'from beam_search import score_hypothesis, compute_feature, log_softmax, expand_null, empty_hypothesis\n'), ((4111, 4142), 'beam_search.expand_null', 'expand_null', (['hyp', 'null_log_prob'], {}), '(hyp, null_log_prob)\n', (4122, 4142), False, 'from beam_search import score_hypothesis, compute_feature, log_softmax, expand_null, 
empty_hypothesis\n'), ((4311, 4360), 'numpy.argpartition', 'np.argpartition', (['(-token_log_probs)', '(2 * beam_width)'], {}), '(-token_log_probs, 2 * beam_width)\n', (4326, 4360), True, 'import numpy as np\n'), ((765, 809), 'beam_search.compute_feature', 'compute_feature', (['key', 'target_hyp', 'states_cnt'], {}), '(key, target_hyp, states_cnt)\n', (780, 809), False, 'from beam_search import score_hypothesis, compute_feature, log_softmax, expand_null, empty_hypothesis\n'), ((854, 901), 'beam_search.compute_feature', 'compute_feature', (['key', 'violation_hyp', 'states_cnt'], {}), '(key, violation_hyp, states_cnt)\n', (869, 901), False, 'from beam_search import score_hypothesis, compute_feature, log_softmax, expand_null, empty_hypothesis\n'), ((4714, 4761), 'beam_search.score_hypothesis', 'score_hypothesis', (['expanded', 'weights', 'states_cnt'], {}), '(expanded, weights, states_cnt)\n', (4730, 4761), False, 'from beam_search import score_hypothesis, compute_feature, log_softmax, expand_null, empty_hypothesis\n'), ((5430, 5470), 'beam_search.score_hypothesis', 'score_hypothesis', (['h', 'weights', 'states_cnt'], {}), '(h, weights, states_cnt)\n', (5446, 5470), False, 'from beam_search import score_hypothesis, compute_feature, log_softmax, expand_null, empty_hypothesis\n'), ((5850, 5902), 'numpy.in1d', 'np.in1d', (['best_hyp_indices', 'target_candidates_indices'], {}), '(best_hyp_indices, target_candidates_indices)\n', (5857, 5902), True, 'import numpy as np\n')]
"""Vehicle Routing Problem solver using OR-tools with real road distances
fetched from the Google Distance Matrix API.  Reads a problem description
from stdin and prints the per-vehicle routes."""
from __future__ import print_function
from six.moves import xrange
from ortools.constraint_solver import pywrapcp
from ortools.constraint_solver import routing_enums_pb2
import googlemaps

# SECURITY NOTE: the API key is hard-coded here; move it to an environment
# variable or config file before committing real credentials.
gmaps = googlemaps.Client(key='******API_Key******') # Replace with the Google Distance Matrix API Key...


class DataProblem():
    """Stores the data for the problem"""

    def __init__(self, location, num_vehicles):
        """Initializes the data for the problem"""
        self._num_vehicles = num_vehicles
        # Keep only (lat, lng) pairs; index 0 serves as the depot.
        self._locations = [(loc[0], loc[1]) for loc in location]
        self._depot = 0

    @property
    def num_vehicles(self):
        """Gets number of vehicles"""
        return self._num_vehicles

    @property
    def locations(self):
        """Gets locations"""
        return self._locations

    @property
    def num_locations(self):
        """Gets number of locations"""
        return len(self.locations)

    @property
    def depot(self):
        """Gets depot location index"""
        return self._depot


def google_distanceNduration(pos1, pos2):
    """Return (distance_m, duration_s) between two points via the Google
    Distance Matrix API.  One network request per call."""
    dist = gmaps.distance_matrix(pos1, pos2)
    return dist['rows'][0]['elements'][0]['distance']['value'], dist['rows'][0]['elements'][0]['duration']['value']


class CreateDistanceEvaluator(object):
    """Creates callback to return distance between points."""

    def __init__(self, data):
        """Initializes the distance matrix.

        NOTE: issues one Google API request per ordered pair of locations
        (O(n^2) network calls), so construction is slow and billable for
        large problems.
        """
        self._distances = {}

        # Computing distance between location to have distance callback in O(1)
        for from_node in xrange(data.num_locations):
            self._distances[from_node] = {}
            for to_node in xrange(data.num_locations):
                if from_node == to_node:
                    self._distances[from_node][to_node] = 0
                else:
                    self._distances[from_node][to_node],_ = google_distanceNduration(
                            data.locations[from_node], data.locations[to_node])

    def distance_evaluator(self, from_node, to_node):
        """Returns the precomputed Google road distance (in meters) between
        the two nodes."""
        return self._distances[from_node][to_node]


def add_distance_dimension(routing, distance_evaluator, max_vehicle_distance):
    """Add Global Span constraint"""
    distance = "Distance"
    routing.AddDimension(
        distance_evaluator,
        0, # null slack
        max_vehicle_distance, # maximum distance per vehicle
        True, # start cumul to zero
        distance)
    distance_dimension = routing.GetDimensionOrDie(distance)
    # Large coefficient strongly penalizes the longest route, balancing work
    # across vehicles.
    distance_dimension.SetGlobalSpanCostCoefficient(100)


class ConsolePrinter():
    """Print solution to console"""

    def __init__(self, data, routing, assignment):
        """Initializes the printer"""
        self._data = data
        self._routing = routing
        self._assignment = assignment

    @property
    def data(self):
        """Gets problem data"""
        return self._data

    @property
    def routing(self):
        """Gets routing model"""
        return self._routing

    @property
    def assignment(self):
        """Gets the solver assignment (solution)"""
        return self._assignment

    def print(self):
        """Prints assignment on console"""
        # Inspect solution.
        total_dist = 0
        total_time = 0

        for vehicle_id in xrange(self.data.num_vehicles):
            index = self.routing.Start(vehicle_id)
            plan_output = 'Route for vehicle {0}:\n'.format(vehicle_id)
            route_dist = 0
            route_time = 0
            while not self.routing.IsEnd(index):
                node_index = self.routing.IndexToNode(index)
                next_node_index = self.routing.IndexToNode(
                    self.assignment.Value(self.routing.NextVar(index)))
                # NOTE: re-queries the Google API per leg instead of reusing
                # the precomputed matrix -- extra latency and billing.
                dist, time = google_distanceNduration(
                        self.data.locations[node_index],
                        self.data.locations[next_node_index])
                route_dist += dist
                route_time += time
                plan_output += ' {0} -> '.format(node_index)
                index = self.assignment.Value(self.routing.NextVar(index))

            node_index = self.routing.IndexToNode(index)
            total_dist += route_dist
            total_time += route_time
            plan_output += ' {0}\n'.format(node_index)
            plan_output += 'Distance of the route: {0}m\n'.format(route_dist)
            print(plan_output)
        print('Total Distance of all routes: {0}m'.format(total_dist))
        print('Total Time of all routes: {0}min'.format(total_time/60))


def main():
    """Entry point of the program"""
    # Input locations.
    # SECURITY NOTE: eval() on stdin executes arbitrary Python supplied by
    # the caller.  If the node.js producer can emit JSON, json.loads() (or
    # ast.literal_eval) would be the safe replacement.
    inp = eval(input())  # Input from node.js
    location = inp['location']
    num_vehicles = inp['num_vehicles']
    max_vehicle_distance = inp['max_vehicle_distance']

    # Instantiate the data problem.
    data = DataProblem(location, num_vehicles)

    # Create Routing Model
    routing = pywrapcp.RoutingModel(data.num_locations, data.num_vehicles, data.depot)
    # Define weight of each edge
    distance_evaluator = CreateDistanceEvaluator(data).distance_evaluator
    routing.SetArcCostEvaluatorOfAllVehicles(distance_evaluator)
    add_distance_dimension(routing, distance_evaluator, max_vehicle_distance)

    # Setting first solution heuristic (cheapest addition).
    search_parameters = pywrapcp.RoutingModel.DefaultSearchParameters()
    search_parameters.first_solution_strategy = (
        routing_enums_pb2.FirstSolutionStrategy.PATH_CHEAPEST_ARC)

    # Solve the problem.
    assignment = routing.SolveWithParameters(search_parameters)
    printer = ConsolePrinter(data, routing, assignment)
    printer.print()


if __name__ == '__main__':
    main()
[ "googlemaps.Client", "ortools.constraint_solver.pywrapcp.RoutingModel", "six.moves.xrange", "ortools.constraint_solver.pywrapcp.RoutingModel.DefaultSearchParameters" ]
[((203, 247), 'googlemaps.Client', 'googlemaps.Client', ([], {'key': '"""******API_Key******"""'}), "(key='******API_Key******')\n", (220, 247), False, 'import googlemaps\n'), ((5109, 5181), 'ortools.constraint_solver.pywrapcp.RoutingModel', 'pywrapcp.RoutingModel', (['data.num_locations', 'data.num_vehicles', 'data.depot'], {}), '(data.num_locations, data.num_vehicles, data.depot)\n', (5130, 5181), False, 'from ortools.constraint_solver import pywrapcp\n'), ((5530, 5577), 'ortools.constraint_solver.pywrapcp.RoutingModel.DefaultSearchParameters', 'pywrapcp.RoutingModel.DefaultSearchParameters', ([], {}), '()\n', (5575, 5577), False, 'from ortools.constraint_solver import pywrapcp\n'), ((1596, 1622), 'six.moves.xrange', 'xrange', (['data.num_locations'], {}), '(data.num_locations)\n', (1602, 1622), False, 'from six.moves import xrange\n'), ((3433, 3463), 'six.moves.xrange', 'xrange', (['self.data.num_vehicles'], {}), '(self.data.num_vehicles)\n', (3439, 3463), False, 'from six.moves import xrange\n'), ((1697, 1723), 'six.moves.xrange', 'xrange', (['data.num_locations'], {}), '(data.num_locations)\n', (1703, 1723), False, 'from six.moves import xrange\n')]
#!/usr/bin/env python # -*- coding: utf-8 -*- # @Time : 2018/10/18 1:13 PM # @Author : <NAME> # @File : urls.py # @Software: Pycharm professional from django.conf.urls import include, url from data import views urlpatterns = [ url(r'^large$', views.large_data), url(r'^mini$', views.mini_data), url(r'^error$', views.error_data) ]
[ "django.conf.urls.url" ]
[((240, 272), 'django.conf.urls.url', 'url', (['"""^large$"""', 'views.large_data'], {}), "('^large$', views.large_data)\n", (243, 272), False, 'from django.conf.urls import include, url\n'), ((279, 309), 'django.conf.urls.url', 'url', (['"""^mini$"""', 'views.mini_data'], {}), "('^mini$', views.mini_data)\n", (282, 309), False, 'from django.conf.urls import include, url\n'), ((316, 348), 'django.conf.urls.url', 'url', (['"""^error$"""', 'views.error_data'], {}), "('^error$', views.error_data)\n", (319, 348), False, 'from django.conf.urls import include, url\n')]
"""Exploratory plots for the final player dataset.

Reads final_data/final-data.csv, prints summary statistics and the
correlation of every column with `points`, and writes correlation heatmaps
plus a per-column histogram into the ``plots`` directory.
"""
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
import os

# Output directory for all figures.  os.makedirs(..., exist_ok=True)
# replaces the race-prone exists()+mkdir() pair, and `out_dir` no longer
# shadows the builtin `dir`.
out_dir = 'plots'
os.makedirs(out_dir, exist_ok=True)

df = pd.read_csv('final_data/final-data.csv', index_col='player-name')
# -1 is the dataset's missing-value sentinel; map it to NaN so it does not
# skew describe()/corr().
df = df.replace(-1, np.nan)

# describe
print('--- Description ---')
print(df.describe())

# correlation heatmap over all columns
plt.clf()
sns.heatmap(df.corr())
plt.savefig(os.path.join(out_dir, 'corr-all.png'))

# focused heatmap over the scoring-related columns
corr_cols = ['all-time-runs-scored', 'all-time-average', 'all-time-strike-rate',
             'all-time-wkts', 'ipl-last-n-runs-scored', 'ipl-last-n-wkts', 'ipl-last-n-points',
             'ipl-1-points', 'ipl-2-points', 'ipl-3-points', 'ipl-4-points', 'ipl-5-points', 'points']
plt.clf()
sns.heatmap(df[corr_cols].corr())
plt.savefig(os.path.join(out_dir, 'corr-some.png'))

print('--- Points correlation ---')
print(df.corr()['points'].sort_values(ascending=False))

# histogram for each column
n = len(df.columns)
for i, col in enumerate(df.columns):
    print(f'Plotting {col} ({i + 1}/{n})')
    plt.clf()
    sns.displot(x=df[col])
    plt.savefig(os.path.join(out_dir, f'{col}-hist.png'))

# violin plots comparing alltime to IPL
[ "os.mkdir", "seaborn.displot", "matplotlib.pyplot.clf", "pandas.read_csv", "os.path.exists", "os.path.join" ]
[((170, 235), 'pandas.read_csv', 'pd.read_csv', (['"""final_data/final-data.csv"""'], {'index_col': '"""player-name"""'}), "('final_data/final-data.csv', index_col='player-name')\n", (181, 235), True, 'import pandas as pd\n'), ((340, 349), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (347, 349), True, 'import matplotlib.pyplot as plt\n'), ((701, 710), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (708, 710), True, 'import matplotlib.pyplot as plt\n'), ((125, 144), 'os.path.exists', 'os.path.exists', (['dir'], {}), '(dir)\n', (139, 144), False, 'import os\n'), ((150, 163), 'os.mkdir', 'os.mkdir', (['dir'], {}), '(dir)\n', (158, 163), False, 'import os\n'), ((385, 418), 'os.path.join', 'os.path.join', (['dir', '"""corr-all.png"""'], {}), "(dir, 'corr-all.png')\n", (397, 418), False, 'import os\n'), ((757, 791), 'os.path.join', 'os.path.join', (['dir', '"""corr-some.png"""'], {}), "(dir, 'corr-some.png')\n", (769, 791), False, 'import os\n'), ((1011, 1020), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (1018, 1020), True, 'import matplotlib.pyplot as plt\n'), ((1025, 1047), 'seaborn.displot', 'sns.displot', ([], {'x': 'df[col]'}), '(x=df[col])\n', (1036, 1047), True, 'import seaborn as sns\n'), ((1064, 1100), 'os.path.join', 'os.path.join', (['dir', 'f"""{col}-hist.png"""'], {}), "(dir, f'{col}-hist.png')\n", (1076, 1100), False, 'import os\n')]
import json

from google.appengine.ext import ndb

from controllers.apiv3.api_base_controller import ApiBaseController
from controllers.apiv3.model_properties import filter_event_properties, filter_team_properties
from database.district_query import DistrictListQuery
from database.event_query import DistrictEventsQuery
from database.team_query import DistrictTeamsQuery


class ApiDistrictListController(ApiBaseController):
    """APIv3 endpoint: JSON list of districts for a given year."""
    CACHE_VERSION = 0
    CACHE_HEADER_LENGTH = 60 * 60 * 24  # cache-control max-age: 24 hours

    def _track_call(self, year):
        # Record analytics for this hit via the deferred tracking helper.
        self._track_call_defer('district/list', year)

    def _render(self, year):
        # fetch() also returns the last-modified timestamp, stored for the
        # caching headers set by the base controller.
        district_list, self._last_modified = DistrictListQuery(int(year)).fetch(dict_version=3, return_updated=True)

        return json.dumps(district_list, ensure_ascii=True, indent=True, sort_keys=True)


class ApiDistrictEventsController(ApiBaseController):
    """APIv3 endpoint: events in a district-year, optionally trimmed to a
    reduced model (e.g. 'simple'/'keys') via `model_type`."""
    CACHE_VERSION = 0
    CACHE_HEADER_LENGTH = 60 * 60 * 24  # cache-control max-age: 24 hours

    def _track_call(self, district_key, year, model_type=None):
        action = 'district/events'
        if model_type:
            action += '/{}'.format(model_type)
        self._track_call_defer(action, '{}/{}'.format(district_key, year))

    def _render(self, district_key, year, model_type=None):
        # District query keys are '<year><district_key>', e.g. '2016ne'.
        events, self._last_modified = DistrictEventsQuery('{}{}'.format(year, district_key)).fetch(dict_version=3, return_updated=True)
        if model_type is not None:
            events = filter_event_properties(events, model_type)
        return json.dumps(events, ensure_ascii=True, indent=True, sort_keys=True)


class ApiDistrictTeamsController(ApiBaseController):
    """APIv3 endpoint: teams in a district-year, optionally trimmed to a
    reduced model via `model_type`."""
    CACHE_VERSION = 0
    CACHE_HEADER_LENGTH = 60 * 60 * 24  # cache-control max-age: 24 hours

    def _track_call(self, district_key, year, model_type=None):
        action = 'district/teams'
        if model_type:
            action += '/{}'.format(model_type)
        self._track_call_defer(action, '{}/{}'.format(district_key, year))

    def _render(self, district_key, year, model_type=None):
        # District query keys are '<year><district_key>', e.g. '2016ne'.
        teams, self._last_modified = DistrictTeamsQuery('{}{}'.format(year, district_key)).fetch(dict_version=3, return_updated=True)
        if model_type is not None:
            teams = filter_team_properties(teams, model_type)
        return json.dumps(teams, ensure_ascii=True, indent=True, sort_keys=True)
[ "controllers.apiv3.model_properties.filter_event_properties", "controllers.apiv3.model_properties.filter_team_properties", "json.dumps" ]
[((737, 810), 'json.dumps', 'json.dumps', (['district_list'], {'ensure_ascii': '(True)', 'indent': '(True)', 'sort_keys': '(True)'}), '(district_list, ensure_ascii=True, indent=True, sort_keys=True)\n', (747, 810), False, 'import json\n'), ((1485, 1551), 'json.dumps', 'json.dumps', (['events'], {'ensure_ascii': '(True)', 'indent': '(True)', 'sort_keys': '(True)'}), '(events, ensure_ascii=True, indent=True, sort_keys=True)\n', (1495, 1551), False, 'import json\n'), ((2219, 2284), 'json.dumps', 'json.dumps', (['teams'], {'ensure_ascii': '(True)', 'indent': '(True)', 'sort_keys': '(True)'}), '(teams, ensure_ascii=True, indent=True, sort_keys=True)\n', (2229, 2284), False, 'import json\n'), ((1426, 1469), 'controllers.apiv3.model_properties.filter_event_properties', 'filter_event_properties', (['events', 'model_type'], {}), '(events, model_type)\n', (1449, 1469), False, 'from controllers.apiv3.model_properties import filter_event_properties, filter_team_properties\n'), ((2162, 2203), 'controllers.apiv3.model_properties.filter_team_properties', 'filter_team_properties', (['teams', 'model_type'], {}), '(teams, model_type)\n', (2184, 2203), False, 'from controllers.apiv3.model_properties import filter_event_properties, filter_team_properties\n')]
#!/usr/bin/env python3 """ Created on 24 Mar 2021 @author: <NAME> (<EMAIL>) """ from scs_host.sys.host import Host # -------------------------------------------------------------------------------------------------------------------- sim = Host.sim() print(sim)
[ "scs_host.sys.host.Host.sim" ]
[((246, 256), 'scs_host.sys.host.Host.sim', 'Host.sim', ([], {}), '()\n', (254, 256), False, 'from scs_host.sys.host import Host\n')]
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Mar 16 13:18:29 2021

@author: dpetrovykh

Minimal PyQt5 demo: a QStackedWidget-based main window that swaps a login
page for a "logged in" page when the login button is clicked.
"""
# Fix: the original line imported QtWidgets twice
# (`from PyQt5 import QtCore, QtWidgets, QtWidgets`).
from PyQt5 import QtCore, QtWidgets


class MainWindow(QtWidgets.QMainWindow):
    """Main window holding a QStackedWidget that switches between pages."""

    def __init__(self, parent=None):
        # Run initialization of the parent class
        super(MainWindow, self).__init__(parent)
        # Central stacked widget into which the page widgets are added
        self.central_widget = QtWidgets.QStackedWidget()
        # Formally register it as the window's central widget
        self.setCentralWidget(self.central_widget)
        # Create the login page, supplying self as the optional parent
        login_widget = LoginWidget(self)
        # Clicking the login button advances to the logged-in page
        login_widget.button.clicked.connect(self.login)
        # Add the login page to the stacked widget (becomes the first page)
        self.central_widget.addWidget(login_widget)

    def login(self):
        """Create the logged-in page and make it the visible page."""
        logged_in_widget = LoggedWidget(self)
        self.central_widget.addWidget(logged_in_widget)
        self.central_widget.setCurrentWidget(logged_in_widget)


class LoginWidget(QtWidgets.QWidget):
    """Login page: a single 'Login' button in a horizontal layout."""

    def __init__(self, parent=None):
        # Run initialization of the parent class
        super(LoginWidget, self).__init__(parent)
        layout = QtWidgets.QHBoxLayout()
        self.button = QtWidgets.QPushButton('Login')
        layout.addWidget(self.button)
        self.setLayout(layout)
        # you might want to do self.button.click.connect(self.parent().login) here


class LoggedWidget(QtWidgets.QWidget):
    """Post-login page: a single confirmation label."""

    def __init__(self, parent=None):
        super(LoggedWidget, self).__init__(parent)
        layout = QtWidgets.QHBoxLayout()
        self.label = QtWidgets.QLabel('logged in!')
        layout.addWidget(self.label)
        self.setLayout(layout)


if __name__ == '__main__':
    app = QtWidgets.QApplication([])
    window = MainWindow()
    window.show()
    app.exec_()
[ "PyQt5.QtWidgets.QLabel", "PyQt5.QtWidgets.QHBoxLayout", "PyQt5.QtWidgets.QPushButton", "PyQt5.QtWidgets.QApplication", "PyQt5.QtWidgets.QStackedWidget" ]
[((2228, 2254), 'PyQt5.QtWidgets.QApplication', 'QtWidgets.QApplication', (['[]'], {}), '([])\n', (2250, 2254), False, 'from PyQt5 import QtCore, QtWidgets, QtWidgets\n'), ((446, 472), 'PyQt5.QtWidgets.QStackedWidget', 'QtWidgets.QStackedWidget', ([], {}), '()\n', (470, 472), False, 'from PyQt5 import QtCore, QtWidgets, QtWidgets\n'), ((1561, 1584), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (1582, 1584), False, 'from PyQt5 import QtCore, QtWidgets, QtWidgets\n'), ((1633, 1663), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['"""Login"""'], {}), "('Login')\n", (1654, 1663), False, 'from PyQt5 import QtCore, QtWidgets, QtWidgets\n'), ((2044, 2067), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (2065, 2067), False, 'from PyQt5 import QtCore, QtWidgets, QtWidgets\n'), ((2089, 2119), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['"""logged in!"""'], {}), "('logged in!')\n", (2105, 2119), False, 'from PyQt5 import QtCore, QtWidgets, QtWidgets\n')]
import re def normalize_text(text): result = text.lower() #lower the text even unicode given result = re.sub(r'[^a-z0-9 -]', ' ', result, flags = re.IGNORECASE|re.MULTILINE) result = re.sub(r'( +)', ' ', result, flags = re.IGNORECASE|re.MULTILINE) return result.strip()
[ "re.sub" ]
[((111, 181), 're.sub', 're.sub', (['"""[^a-z0-9 -]"""', '""" """', 'result'], {'flags': '(re.IGNORECASE | re.MULTILINE)'}), "('[^a-z0-9 -]', ' ', result, flags=re.IGNORECASE | re.MULTILINE)\n", (117, 181), False, 'import re\n'), ((196, 259), 're.sub', 're.sub', (['"""( +)"""', '""" """', 'result'], {'flags': '(re.IGNORECASE | re.MULTILINE)'}), "('( +)', ' ', result, flags=re.IGNORECASE | re.MULTILINE)\n", (202, 259), False, 'import re\n')]
from __future__ import nested_scopes, generators, division, absolute_import, with_statement, \ print_function, unicode_literals from . import compatibility compatibility.backport() # noqa import builtins import os # noqa import sys # noqa from io import UnsupportedOperation # noqa from collections import OrderedDict # noqa from unicodedata import normalize # noqa import re # noqa import inspect # noqa from keyword import iskeyword # noqa # region Compatibility Conditionals # The following detects the presence of the typing library try: from typing import Union, Optional, Iterable, Tuple, Any, Callable, AnyStr # noqa except ImportError: Union = Optional = Iterable = Tuple = Any = Callable = AnyStr = None # Before `collections.abc` existed, the definitions we use from this module were in `collections` try: import collections.abc as collections_abc import collections except ImportError: import collections collections_abc = collections # Earlier versions of the `collections` library do not include the `Generator` class, so when this class is missing-- # we employ a workaround. 
if hasattr(collections_abc, 'Generator'): Generator = collections_abc.Generator else: Generator = type(n for n in (1, 2, 3)) # endregion try: from inspect import signature getargspec = None except ImportError: signature = None try: from inspect import getfullargspec except ImportError: from inspect import getargspec as getfullargspec _Module = type(re) def qualified_name(type_): # type: (Union[type, _Module]) -> str """ >>> print(qualified_name(qualified_name)) qualified_name >>> from serial import model >>> print(qualified_name(model.marshal)) serial.model.marshal """ if hasattr(type_, '__qualname__'): type_name = '.'.join(name_part for name_part in type_.__qualname__.split('.') if name_part[0] != '<') else: type_name = type_.__name__ if isinstance(type_, _Module): if type_name in ( 'builtins', '__builtin__', '__main__', '__init__' ): type_name = None else: if type_.__module__ not in ( 'builtins', '__builtin__', '__main__', '__init__' ): type_name = type_.__module__ + '.' + type_name return type_name def calling_function_qualified_name(depth=1): # type: (int) -> Optional[str] """ >>> def my_function(): return calling_function_qualified_name() >>> print(my_function()) my_function """ if not isinstance(depth, int): depth_representation = repr(depth) raise TypeError( 'The parameter `depth` for `serial.utilities.calling_function_qualified_name` must be an `int`, not' + ( (':\n%s' if '\n' in depth_representation else ' %s.') % depth_representation ) ) stack = inspect.stack() if len(stack) < (depth + 1): return None else: name_list = [] stack = inspect.stack() frame_info = stack[depth] # type: inspect.FrameInfo try: frame_function = frame_info.function except AttributeError: frame_function = frame_info[3] if frame_function != '<module>': try: frame = frame_info.frame except AttributeError: frame = frame_info[0] name_list.append(frame_function) arguments, _, _, frame_locals = inspect.getargvalues(frame) if arguments: argument = arguments[0] argument_value = 
frame_locals[argument] argument_value_type = type(argument_value) if ( hasattr(argument_value_type, '__name__') and hasattr(argument_value_type, '__module__') and ( (argument_value_type.__name__ not in dir(builtins)) or (getattr(builtins, argument_value_type.__name__) is not argument_value_type) ) ): name_list.append(qualified_name(argument_value_type)) if len(name_list) < 2: try: file_name = frame_info.filename except AttributeError: file_name = frame_info[1] module_name = inspect.getmodulename(file_name) if module_name not in sys.modules: path_parts = list(os.path.split(file_name)) path_parts.pop() while path_parts: parent = path_parts.pop() module_name = parent + '.' + module_name if module_name in sys.modules: break if module_name is None: raise ValueError('The path "%s" is not a python module' % file_name) else: if module_name in sys.modules: qualified_module_name = qualified_name(sys.modules[module_name]) name_list.append(qualified_module_name) return '.'.join(reversed(name_list)) def property_name(string): # type: (str) -> str """ Converts a "camelCased" attribute/property name, or a name which conflicts with a python keyword, to a pep8-compliant property name. 
>>> print(property_name('theBirdsAndTheBees')) the_birds_and_the_bees >>> print(property_name('FYIThisIsAnAcronym')) fyi_this_is_an_acronym >>> print(property_name('in')) in_ >>> print(property_name('id')) id_ """ pn = re.sub( r'__+', '_', re.sub( r'[^\w]+', '', re.sub( r'([a-zA-Z])([0-9])', r'\1_\2', re.sub( r'([0-9])([a-zA-Z])', r'\1_\2', re.sub( r'([A-Z])([A-Z])([a-z])', r'\1_\2\3', re.sub( r'([a-z])([A-Z])', r'\1_\2', re.sub( r'([^\x20-\x7F]|\s)+', '_', normalize('NFKD', string) ) ) ) ) ) ) ).lower() if iskeyword(pn) or (pn in dir(builtins)): pn += '_' return pn def class_name(string): """ >>> print(class_name('the birds and the bees')) TheBirdsAndTheBees >>> print(class_name('the-birds-and-the-bees')) TheBirdsAndTheBees >>> print(class_name('**the - birds - and - the - bees**')) TheBirdsAndTheBees >>> print(class_name('FYI is an acronym')) FYIIsAnAcronym >>> print(class_name('in-you-go')) InYouGo >>> print(class_name('False')) False_ >>> print(class_name('True')) True_ >>> print(class_name('ABC Acronym')) ABCAcronym """ return camel(string, capitalize=True) def camel(string, capitalize=False): # type: (str, bool) -> str """ >>> print(camel('the birds and the bees')) theBirdsAndTheBees >>> print(camel('the-birds-and-the-bees')) theBirdsAndTheBees >>> print(camel('**the - birds - and - the - bees**')) theBirdsAndTheBees >>> print(camel('FYI is an acronym')) fyiIsAnAcronym >>> print(camel('in-you-go')) inYouGo >>> print(camel('False')) false >>> print(camel('True')) true >>> print(camel('in')) in_ """ string = normalize('NFKD', string) characters = [] if not capitalize: string = string.lower() capitalize_next = capitalize for s in string: if s in 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789': if capitalize_next: if capitalize or characters: s = s.upper() characters.append(s) capitalize_next = False else: capitalize_next = True cn = ''.join(characters) if iskeyword(cn) or (cn in dir(builtins)): cn += '_' return cn def get_source(o): # type: 
(object) -> str if hasattr(o, '_source') and isinstance(o._source, str): result = o._source else: result = inspect.getsource(o) return result def camel_split(string): # test: (str) -> str """ >>> print('(%s)' % ', '.join("'%s'" % s for s in camel_split('theBirdsAndTheBees'))) ('the', 'Birds', 'And', 'The', 'Bees') >>> print('(%s)' % ', '.join("'%s'" % s for s in camel_split('theBirdsAndTheBees123'))) ('the', 'Birds', 'And', 'The', 'Bees', '123') >>> print('(%s)' % ', '.join("'%s'" % s for s in camel_split('theBirdsAndTheBeesABC123'))) ('the', 'Birds', 'And', 'The', 'Bees', 'ABC', '123') >>> print('(%s)' % ', '.join("'%s'" % s for s in camel_split('the-Birds-And-The-Bees-ABC--123'))) ('the', '-', 'Birds', '-', 'And', '-', 'The', '-', 'Bees', '-', 'ABC', '--', '123') >>> print('(%s)' % ', '.join("'%s'" % s for s in camel_split('THEBirdsAndTheBees'))) ('THE', 'Birds', 'And', 'The', 'Bees') """ words = [] character_type = None acronym = False for s in string: if s in '0123456789': if character_type == 0: words[-1].append(s) else: words.append([s]) character_type = 0 acronym = False elif s in 'abcdefghijklmnopqrstuvwxyz': if character_type == 1: words[-1].append(s) elif character_type == 2: if acronym: words.append([words[-1].pop()] + [s]) else: words[-1].append(s) else: words.append([s]) character_type = 1 acronym = False elif s in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ': if character_type == 2: words[-1].append(s) acronym = True else: words.append([s]) acronym = False character_type = 2 else: if character_type == 3: words[-1].append(s) else: words.append([s]) character_type = 3 return tuple( ''.join(w) for w in words ) def properties_values(o): # type: (object) -> Sequence[Tuple[AnyStr, Any]] for a in dir(o): if a[0] != '_': v = getattr(o, a) if not callable(v): yield a, v UNDEFINED = None class Undefined(object): def __init__(self): if UNDEFINED is not None: raise RuntimeError( '%s may only be defined once.' 
% repr(self) ) def __repr__(self): return ( 'UNDEFINED' if self.__module__ in ('__main__', 'builtins', '__builtin__', __name__) else '%s.UNDEFINED' % self.__module__ ) def __bool__(self): return False def __hash__(self): return 0 def __eq__(self, other): # type: (Any) -> bool return other is self UNDEFINED = Undefined() def parameters_defaults(function): # type: (Callable) -> OrderedDict """ Returns an ordered dictionary mapping a function's argument names to default values, or `UNDEFINED` in the case of positional arguments. >>> class X(object): ... ... def __init__(self, a, b, c, d=1, e=2, f=3): ... pass ... >>> print(list(parameters_defaults(X.__init__).items())) [('self', UNDEFINED), ('a', UNDEFINED), ('b', UNDEFINED), ('c', UNDEFINED), ('d', 1), ('e', 2), ('f', 3)] """ pd = OrderedDict() if signature is None: spec = getfullargspec(function) i = - 1 for a in spec.args: pd[a] = UNDEFINED for a in reversed(spec.args): try: pd[a] = spec.defaults[i] except IndexError: break i -= 1 else: for pn, p in signature(function).parameters.items(): if p.default is inspect.Parameter.empty: pd[pn] = UNDEFINED else: pd[pn] = p.default return pd def read(data): # type: (Union[str, IOBase, addbase]) -> Any if ( (hasattr(data, 'readall') and callable(data.readall)) or (hasattr(data, 'read') and callable(data.read)) ): if hasattr(data, 'seek') and callable(data.seek): try: data.seek(0) except UnsupportedOperation: pass if hasattr(data, 'readall') and callable(data.readall): try: data = data.readall() except UnsupportedOperation: data = data.read() else: data = data.read() return data else: raise TypeError( '%s is not a file-like object' % repr(data) ) if __name__ == '__main__': import doctest doctest.testmod()
[ "unicodedata.normalize", "inspect.getargvalues", "inspect.getargspec", "inspect.getmodulename", "inspect.signature", "collections.OrderedDict", "inspect.getsource", "os.path.split", "inspect.stack", "keyword.iskeyword", "doctest.testmod" ]
[((2961, 2976), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (2974, 2976), False, 'import inspect\n'), ((7729, 7754), 'unicodedata.normalize', 'normalize', (['"""NFKD"""', 'string'], {}), "('NFKD', string)\n", (7738, 7754), False, 'from unicodedata import normalize\n'), ((11900, 11913), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (11911, 11913), False, 'from collections import OrderedDict\n'), ((13232, 13249), 'doctest.testmod', 'doctest.testmod', ([], {}), '()\n', (13247, 13249), False, 'import doctest\n'), ((3083, 3098), 'inspect.stack', 'inspect.stack', ([], {}), '()\n', (3096, 3098), False, 'import inspect\n'), ((6518, 6531), 'keyword.iskeyword', 'iskeyword', (['pn'], {}), '(pn)\n', (6527, 6531), False, 'from keyword import iskeyword\n'), ((8231, 8244), 'keyword.iskeyword', 'iskeyword', (['cn'], {}), '(cn)\n', (8240, 8244), False, 'from keyword import iskeyword\n'), ((8467, 8487), 'inspect.getsource', 'inspect.getsource', (['o'], {}), '(o)\n', (8484, 8487), False, 'import inspect\n'), ((11955, 11979), 'inspect.getargspec', 'getfullargspec', (['function'], {}), '(function)\n', (11969, 11979), True, 'from inspect import getargspec as getfullargspec\n'), ((3561, 3588), 'inspect.getargvalues', 'inspect.getargvalues', (['frame'], {}), '(frame)\n', (3581, 3588), False, 'import inspect\n'), ((4445, 4477), 'inspect.getmodulename', 'inspect.getmodulename', (['file_name'], {}), '(file_name)\n', (4466, 4477), False, 'import inspect\n'), ((4561, 4585), 'os.path.split', 'os.path.split', (['file_name'], {}), '(file_name)\n', (4574, 4585), False, 'import os\n'), ((12253, 12272), 'inspect.signature', 'signature', (['function'], {}), '(function)\n', (12262, 12272), False, 'from inspect import signature\n'), ((6351, 6376), 'unicodedata.normalize', 'normalize', (['"""NFKD"""', 'string'], {}), "('NFKD', string)\n", (6360, 6376), False, 'from unicodedata import normalize\n')]
#!/usr/bin/env python3 # -*- coding: utf-8 -*- __author__ = 'cnheider' from pynput import keyboard # import keyboard import utilities as U COMBINATIONS = [ {keyboard.Key.shift, keyboard.Key.alt, keyboard.KeyCode(char='s')}, {keyboard.Key.shift, keyboard.Key.alt, keyboard.KeyCode(char='S')}, ] CALLBACKS = [] # The currently active modifiers current = set() def add_early_stopping_key_combination(callback, key='ctrl+shift+s'): # keyboard.add_hotkey(key, callback) CALLBACKS.append(callback) U.sprint(f'\n\nPress any of:\n{COMBINATIONS}\n for early stopping\n', color='red', bold=True, highlight=True) print('') return keyboard.Listener(on_press=on_press, on_release=on_release) def on_press(key): if any([key in COMBO for COMBO in COMBINATIONS]): current.add(key) if any(all(k in current for k in COMBO) for COMBO in COMBINATIONS): for callback in CALLBACKS: callback() def on_release(key): if any([key in COMBO for COMBO in COMBINATIONS]): current.remove(key)
[ "utilities.sprint", "pynput.keyboard.KeyCode", "pynput.keyboard.Listener" ]
[((510, 623), 'utilities.sprint', 'U.sprint', (['f"""\n\nPress any of:\n{COMBINATIONS}\n for early stopping\n"""'], {'color': '"""red"""', 'bold': '(True)', 'highlight': '(True)'}), '(f"""\n\nPress any of:\n{COMBINATIONS}\n for early stopping\n""", color=\n \'red\', bold=True, highlight=True)\n', (518, 623), True, 'import utilities as U\n'), ((652, 711), 'pynput.keyboard.Listener', 'keyboard.Listener', ([], {'on_press': 'on_press', 'on_release': 'on_release'}), '(on_press=on_press, on_release=on_release)\n', (669, 711), False, 'from pynput import keyboard\n'), ((200, 226), 'pynput.keyboard.KeyCode', 'keyboard.KeyCode', ([], {'char': '"""s"""'}), "(char='s')\n", (216, 226), False, 'from pynput import keyboard\n'), ((270, 296), 'pynput.keyboard.KeyCode', 'keyboard.KeyCode', ([], {'char': '"""S"""'}), "(char='S')\n", (286, 296), False, 'from pynput import keyboard\n')]
#!/usr/bin/env python3 import sys import json CHUNK_SIZE = 4*32*1024 def wrong_written_size(x): out = 0 while x >= 0: out += x x -= CHUNK_SIZE return out if __name__ == '__main__': with open(sys.argv[1], 'rb') as f: data = f.read() print(sys.argv[1]) try: json.loads(data) except json.JSONDecodeError as e: exc = e for margin in range(10): if len(data) == wrong_written_size(e.pos + margin): print(margin, exc, len(data), e.pos, data[e.pos:][:10], data[:10]) sys.exit(0) sys.exit(1)
[ "json.loads", "sys.exit" ]
[((603, 614), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (611, 614), False, 'import sys\n'), ((319, 335), 'json.loads', 'json.loads', (['data'], {}), '(data)\n', (329, 335), False, 'import json\n'), ((586, 597), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (594, 597), False, 'import sys\n')]
from malcolm.yamlutil import check_yaml_names, make_block_creator aravisGigE_driver_block = make_block_creator(__file__, "aravisGigE_driver_block.yaml") aravisGigE_runnable_block = make_block_creator( __file__, "aravisGigE_runnable_block.yaml" ) aravisGigE_manager_block = make_block_creator(__file__, "aravisGigE_manager_block.yaml") __all__ = check_yaml_names(globals())
[ "malcolm.yamlutil.make_block_creator" ]
[((93, 153), 'malcolm.yamlutil.make_block_creator', 'make_block_creator', (['__file__', '"""aravisGigE_driver_block.yaml"""'], {}), "(__file__, 'aravisGigE_driver_block.yaml')\n", (111, 153), False, 'from malcolm.yamlutil import check_yaml_names, make_block_creator\n'), ((182, 244), 'malcolm.yamlutil.make_block_creator', 'make_block_creator', (['__file__', '"""aravisGigE_runnable_block.yaml"""'], {}), "(__file__, 'aravisGigE_runnable_block.yaml')\n", (200, 244), False, 'from malcolm.yamlutil import check_yaml_names, make_block_creator\n'), ((278, 339), 'malcolm.yamlutil.make_block_creator', 'make_block_creator', (['__file__', '"""aravisGigE_manager_block.yaml"""'], {}), "(__file__, 'aravisGigE_manager_block.yaml')\n", (296, 339), False, 'from malcolm.yamlutil import check_yaml_names, make_block_creator\n')]
from werkzeug.exceptions import NotFound from . import bp_obj, views from project.core.models import User @bp_obj.route('/') def index(): return 'Welcome home.' @bp_obj.route('/users/<username>') def get_profile_info(username): user = User.query.filter(User.username == username).first() if user is None: raise NotFound('User not found.') return views.json_serialise_user(user)
[ "project.core.models.User.query.filter", "werkzeug.exceptions.NotFound" ]
[((339, 366), 'werkzeug.exceptions.NotFound', 'NotFound', (['"""User not found."""'], {}), "('User not found.')\n", (347, 366), False, 'from werkzeug.exceptions import NotFound\n'), ((250, 294), 'project.core.models.User.query.filter', 'User.query.filter', (['(User.username == username)'], {}), '(User.username == username)\n', (267, 294), False, 'from project.core.models import User\n')]
import time from animation import * from asteroidField import * from background import * from loader import * from physics import * from player import * from powerup import * import pygame from pygame.locals import * from rotatingMenu_img import * from spacemenu import * from starField import * # teclas dos jogadores default keyPresset1 = [K_LEFT,K_RIGHT,K_UP,K_DOWN, K_SPACE, K_m] keyPresset2 = [K_a, K_d, K_w, K_s, K_x, K_r] pygame.init() def game(numkills,nave1,nave2): SCREENSIZE = [1024,768] screen = pygame.display.set_mode(SCREENSIZE,pygame.FULLSCREEN) ## uncomment for debug #screen = pygame.display.set_mode(SCREENSIZE) pygame.mouse.set_visible(0) clock = pygame.time.Clock() #init background background = Background(screen,'galaxy.jpg') #init efeito campo estrelado e asteroids starfield = StarField(screen) asteroidField = AsteroidField(screen) #init musica rand = random.randrange(0,2) if rand == 0: load_music('After Burner.mp3') else: load_music('Spybreak.mp3') #load_music('Gundam.mp3') #init players player1 = Player((200,SCREENSIZE[1]/2),keyPresset1,1,nave1,numkills) playerSprite1 = pygame.sprite.RenderPlain((player1)) player1.spin(90,3) player2 = Player((SCREENSIZE[0]-200,SCREENSIZE[1]/2),keyPresset2,2,nave2,numkills) playerSprite2 = pygame.sprite.RenderPlain((player2)) player2.spin(90,1) #powerup stuff variables powerups_on_screen = False done = False retval = 0 powerup_available = 0 #vars apenas para animacao do rapaz no canto do ecra i = random.randrange(1,4) pickup_timer = 0 while not done: clock.tick(40) #se nao ha asteroides, respawn current_asteroids = len(asteroidField.asteroidSprites) if current_asteroids <= 0: current_asteroids = asteroidField.refresh(asteroidField.num_asteroids +1) if pickup_timer != 0: elapsed = round(time.clock()) ##desenhar informacoes do jogadores font = pygame.font.SysFont("consola", 20) ScorePanel1 ="Player 1 - Lives: "+str(player1.statistics[0])+" "+"Score: "+str(player1.statistics[3]) scorePlayer1 = font.render(ScorePanel1, 
True, (255,255,255)) if nave2 != 0: ScorePanel2 ="Player 2 - Lives: "+str(player2.statistics[0])+" Score: "+str(player2.statistics[3]) scorePlayer2 = font.render(ScorePanel2, True, (255,255,255)) # desenhar informacoes de powerups disponiveis font = pygame.font.SysFont("consola", 40) PowerupPanel = "" if powerups_on_screen == False: poweruppanel = font.render(PowerupPanel, True, (0,255,0)) ############################# ##MOVER JOGADORES #se esta so um jogador if nave2 == 0: for event in pygame.event.get(): if event.type == pygame.KEYDOWN: if event.key == pygame.K_ESCAPE: done = True elif event.key == keyPresset1[0]: player1.dx = -10 player1.spin(90,1) elif event.key == keyPresset1[1]: player1.dx = 10 player1.spin(90,3) elif event.key == keyPresset1[2]: player1.dy = -10 player1.spin(90,0) elif event.key == keyPresset1[3]: player1.dy = 10 player1.spin(90,2) elif event.type == KEYUP: if event.key == keyPresset1[0]: player1.dx = -3 elif event.key == keyPresset1[1]: player1.dx = 3 elif event.key == keyPresset1[2]: player1.dy = -3 elif event.key == keyPresset1[3]: player1.dy = 3 elif event.key == keyPresset1[5]: player1.changeWeapon() # ha dois jogadores a jogar, apanhar teclas todas else: for event in pygame.event.get(): if event.type == pygame.KEYDOWN: if event.key == pygame.K_ESCAPE: done = True elif event.key == keyPresset1[0]: player1.dx = -10 player1.spin(90,1) elif event.key == keyPresset1[1]: player1.dx = 10 player1.spin(90,3) elif event.key == keyPresset1[2]: player1.dy = -10 player1.spin(90,0) elif event.key == keyPresset1[3]: player1.dy = 10 player1.spin(90,2) elif event.key == keyPresset2[0]: player2.dx = -10 player2.spin(90,1) elif event.key == keyPresset2[1]: player2.dx = 10 player2.spin(90,3) elif event.key == keyPresset2[2]: player2.dy = -10 player2.spin(90,0) elif event.key == keyPresset2[3]: player2.dy = 10 player2.spin(90,2) elif event.type == KEYUP: if event.key == keyPresset1[0]: player1.dx = -3 elif event.key == keyPresset1[1]: player1.dx = 3 elif 
event.key == keyPresset1[2]: player1.dy = -3 elif event.key == keyPresset1[3]: player1.dy = 3 elif event.key == keyPresset1[5]: player1.changeWeapon() elif event.key == keyPresset2[0]: player2.dx = -3 elif event.key == keyPresset2[1]: player2.dx = 3 elif event.key == keyPresset2[2]: player2.dy = -3 elif event.key == keyPresset2[3]: player2.dy = 3 elif event.key == keyPresset2[5]: player2.changeWeapon() background.update() starfield.update() #calcular tempo de activacao de um powerup novo e o tipo #se estiver em single player so ha powerup de armas activate_powerups = random.randrange(0,200) if nave2 != 0: powerup_type = random.randrange(1,4) else: powerup_type = 2 if activate_powerups == 150: if powerups_on_screen == False: powerup_available = powerup_type if (powerup_type == 1): PowerupPanel = "Health Powerup Available!" poweruppanel = font.render(PowerupPanel, True, (0,255,0)) elif powerup_type == 2: PowerupPanel = "Weapon Powerup Available!" poweruppanel = font.render(PowerupPanel, True, (255,0,0)) else: PowerupPanel = "Mines Available!!" 
poweruppanel = font.render(PowerupPanel, True, (255,0,0)) powerup = Powerup(powerup_available,SCREENSIZE) powerupSprite = pygame.sprite.RenderPlain((powerup)) powerups_on_screen = True ## POWERUP JA ESTA NO ECRA ######################## #calculos de intersects #Calcular colisoes de lasers entre jogadores kill = lasers(player1,player2,playerSprite1,playerSprite2,asteroidField) #se matou algum jogador, sai if kill == 1: done = True kill = asteroids(player1,player2,playerSprite1,playerSprite2,asteroidField) #se matou algum jogador, sai if kill == 1: done = True #apanhar powerups if powerups_on_screen == True: retval = pickup_powerup(powerup,powerupSprite,player1,playerSprite1,powerup_available) if retval == 1: retval = 0 powerups_on_screen = False if powerup.tipo == 2 and powerup.damagefactor == 4: pickup_timer = round(time.clock()) elapsed = pickup_timer else: retval = pickup_powerup(powerup,powerupSprite,player2,playerSprite2,powerup_available) if retval == 1: retval = 0 powerups_on_screen = False if powerup.tipo == 2 and powerup.damagefactor == 4: pickup_timer = round(time.clock()) elapsed = pickup_timer ############################# # Desenhar #desenhar jogador 1 screen.blit(scorePlayer1, (10, 740)) playerSprite1.update(screen) playerSprite1.draw(screen) player1.draw_health(screen) player1.draw_stats(screen) #desenhar jogador 2 if nave2 != 0: screen.blit(scorePlayer2, (10, 750)) playerSprite2.update(screen) playerSprite2.draw(screen) player2.draw_health(screen) player2.draw_stats(screen) #powerups screen.blit(poweruppanel, (350, 10)) if powerups_on_screen == True: powerupSprite.draw(screen) #desenhar powerup_pickups for sprite in weapon_pickups: sprite.render(screen,False) for sprite in health_pickups: sprite.render(screen,False) #desenhar asteroides asteroidField.update() #desenhar explosoes for sprite in explosoes: sprite.render(screen,False) #desenhar humor pic if pickup_timer != 0: if (elapsed - pickup_timer) < 1.5: toasty_pic, toasty_rect = 
load_image("toasty"+str(i)+".PNG", -1) screen.blit(toasty_pic,(885,650)) else: pickup_timer = 0 #Alterei o random pois o grau de aleatoriedade eh baixo #desta forma aparecemos todos mais vezes :) listagem=[1,2,3,4] random.shuffle(listagem) random.shuffle(listagem) i = listagem[0] pygame.display.flip() ##FIM DO WHILE ##################################### stop_music() pygame.display.set_mode([800,600]) return player1,player2 def main(): pygame.init() SCREENSIZE = [800,600] screen = pygame.display.set_mode(SCREENSIZE) pygame.display.set_caption("Space War Evolved") pygame.mouse.set_visible(0) #init musica load_music('menu.mp3') clock = pygame.time.Clock() SP, rect = load_image("SP.png", -1) MP, rect2 = load_image("MP.png", -1) S, rect3 = load_image("S.png", -1) H, rect4 = load_image("H.png", -1) A, rect5 = load_image("A.png", -1) E, rect6 = load_image("E.png", -1) SP_red, rect = load_image("SP_red_35_433.png", -1) MP_red, rect = load_image("MP_red_93_433.png", -1) S_red, rect = load_image("S_red_151_478.png", -1) H_red, rect = load_image("H_red_93_478.png", -1) A_red, rect = load_image("A_red_151_433.png", -1) E_red, rect = load_image("E_red_35_478.png", -1) extra, rect = load_image("extra.png", -1) multi = [] multi_images = load_sliced_sprites(221,34,'multi_player_anim_221x34.png') single = [] single_images = load_sliced_sprites(243,34,'single_anim_243x34.png') help = [] help_images = load_sliced_sprites(74,35,'help_anim_74x35.png') about = [] about_images = load_sliced_sprites(112,29,'about_anim_112x29.png') exit = [] exit_images = load_sliced_sprites(74,28,'exit_anim_74x28.png') setkeys = [] setkeys_images = load_sliced_sprites(179,29,'setkeys_anim_179x29.png') jiproj = [] jiproj_images = load_sliced_sprites(128,160,'ji_proj_128x160.png') jiproj.append(AnimatedSprite(jiproj_images,129,31)) autores = [] autores_images = load_sliced_sprites(111,160,'autores.png') autores.append(AnimatedSprite(autores_images,129,217)) moverCursor = load_sound('moverCursor.wav') 
moverCursor.set_volume(0.2) clock = pygame.time.Clock() menu = RotatingMenu(x=520, y=295, radius=160, arc=pi, defaultAngle=pi/2.0) background = Background(screen,'Stargate_menu.png') menu.addItem(MenuItem(H)) menu.addItem(MenuItem(S)) menu.addItem(MenuItem(SP)) menu.addItem(MenuItem(MP)) menu.addItem(MenuItem(A)) menu.addItem(MenuItem(E)) menu.selectItem(2) #Loop while True: #Handle events events = pygame.event.get() for event in events: if event.type == pygame.QUIT: return False if event.type == pygame.KEYDOWN: if event.key == pygame.K_LEFT: moverCursor.play() menu.selectItem(menu.selectedItemNumber + 1) if event.key == pygame.K_RIGHT: moverCursor.play() menu.selectItem(menu.selectedItemNumber - 1) if event.key == pygame.K_RETURN: if menu.selectedItemNumber == 0: option2() elif menu.selectedItemNumber == 1: option4() elif menu.selectedItemNumber == 2: option0() elif menu.selectedItemNumber == 3: option1() elif menu.selectedItemNumber == 4: option3() elif menu.selectedItemNumber == 5: option5() return False #Update stuff background.update() menu.update() for sprite in jiproj: sprite.render(screen,True) for sprite in autores: sprite.render(screen,True) screen.blit(extra, (124,24)) if menu.selectedItemNumber == 0: single = [] multi = [] exit = [] about = [] setkeys = [] screen.blit(H_red, (93,478)) help.append(AnimatedSprite(help_images,490,280)) elif menu.selectedItemNumber == 1: single = [] help = [] exit = [] about = [] multi = [] screen.blit(S_red, (151,478)) setkeys.append(AnimatedSprite(setkeys_images,435,280)) elif menu.selectedItemNumber == 2: help = [] multi = [] exit = [] about = [] setkeys = [] screen.blit(SP_red, (35,433)) single.append(AnimatedSprite(single_images,403,280)) elif menu.selectedItemNumber == 3: single = [] help = [] exit = [] about = [] setkeys = [] screen.blit(MP_red, (93,433)) multi.append(AnimatedSprite(multi_images,410,280)) elif menu.selectedItemNumber == 4: single = [] multi = [] exit = [] help = [] setkeys = [] screen.blit(A_red, 
(151,433)) about.append(AnimatedSprite(about_images,470,280)) elif menu.selectedItemNumber == 5: single = [] multi = [] help = [] about = [] setkeys = [] screen.blit(E_red, (35,478)) exit.append(AnimatedSprite(exit_images,490,280)) for sprite in multi: sprite.render(screen,True) for sprite in single: sprite.render(screen,True) for sprite in about: sprite.render(screen,True) for sprite in exit: sprite.render(screen,True) for sprite in help: sprite.render(screen,True) for sprite in setkeys: sprite.render(screen,True) #Draw stuff #display.fill((0,0,0)) menu.draw(screen) pygame.display.flip() #Show the updated scene clock.tick(fpsLimit) #Wait a little if __name__ == "__main__": main()
[ "pygame.font.SysFont", "pygame.mouse.set_visible", "pygame.display.set_mode", "pygame.event.get", "pygame.init", "pygame.display.flip", "time.clock", "pygame.sprite.RenderPlain", "pygame.display.set_caption", "pygame.time.Clock" ]
[((432, 445), 'pygame.init', 'pygame.init', ([], {}), '()\n', (443, 445), False, 'import pygame\n'), ((532, 586), 'pygame.display.set_mode', 'pygame.display.set_mode', (['SCREENSIZE', 'pygame.FULLSCREEN'], {}), '(SCREENSIZE, pygame.FULLSCREEN)\n', (555, 586), False, 'import pygame\n'), ((667, 694), 'pygame.mouse.set_visible', 'pygame.mouse.set_visible', (['(0)'], {}), '(0)\n', (691, 694), False, 'import pygame\n'), ((707, 726), 'pygame.time.Clock', 'pygame.time.Clock', ([], {}), '()\n', (724, 726), False, 'import pygame\n'), ((1227, 1261), 'pygame.sprite.RenderPlain', 'pygame.sprite.RenderPlain', (['player1'], {}), '(player1)\n', (1252, 1261), False, 'import pygame\n'), ((1394, 1428), 'pygame.sprite.RenderPlain', 'pygame.sprite.RenderPlain', (['player2'], {}), '(player2)\n', (1419, 1428), False, 'import pygame\n'), ((10848, 10883), 'pygame.display.set_mode', 'pygame.display.set_mode', (['[800, 600]'], {}), '([800, 600])\n', (10871, 10883), False, 'import pygame\n'), ((10935, 10948), 'pygame.init', 'pygame.init', ([], {}), '()\n', (10946, 10948), False, 'import pygame\n'), ((10989, 11024), 'pygame.display.set_mode', 'pygame.display.set_mode', (['SCREENSIZE'], {}), '(SCREENSIZE)\n', (11012, 11024), False, 'import pygame\n'), ((11029, 11076), 'pygame.display.set_caption', 'pygame.display.set_caption', (['"""Space War Evolved"""'], {}), "('Space War Evolved')\n", (11055, 11076), False, 'import pygame\n'), ((11081, 11108), 'pygame.mouse.set_visible', 'pygame.mouse.set_visible', (['(0)'], {}), '(0)\n', (11105, 11108), False, 'import pygame\n'), ((11165, 11184), 'pygame.time.Clock', 'pygame.time.Clock', ([], {}), '()\n', (11182, 11184), False, 'import pygame\n'), ((12706, 12725), 'pygame.time.Clock', 'pygame.time.Clock', ([], {}), '()\n', (12723, 12725), False, 'import pygame\n'), ((2087, 2121), 'pygame.font.SysFont', 'pygame.font.SysFont', (['"""consola"""', '(20)'], {}), "('consola', 20)\n", (2106, 2121), False, 'import pygame\n'), ((2579, 2613), 'pygame.font.SysFont', 
'pygame.font.SysFont', (['"""consola"""', '(40)'], {}), "('consola', 40)\n", (2598, 2613), False, 'import pygame\n'), ((10734, 10755), 'pygame.display.flip', 'pygame.display.flip', ([], {}), '()\n', (10753, 10755), False, 'import pygame\n'), ((13132, 13150), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (13148, 13150), False, 'import pygame\n'), ((16684, 16705), 'pygame.display.flip', 'pygame.display.flip', ([], {}), '()\n', (16703, 16705), False, 'import pygame\n'), ((2895, 2913), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (2911, 2913), False, 'import pygame\n'), ((4269, 4287), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (4285, 4287), False, 'import pygame\n'), ((2013, 2025), 'time.clock', 'time.clock', ([], {}), '()\n', (2023, 2025), False, 'import time\n'), ((7632, 7666), 'pygame.sprite.RenderPlain', 'pygame.sprite.RenderPlain', (['powerup'], {}), '(powerup)\n', (7657, 7666), False, 'import pygame\n'), ((8596, 8608), 'time.clock', 'time.clock', ([], {}), '()\n', (8606, 8608), False, 'import time\n'), ((9022, 9034), 'time.clock', 'time.clock', ([], {}), '()\n', (9032, 9034), False, 'import time\n')]
# -*- coding: utf-8 -*- import ctypes import os from collections import namedtuple from ctypes import POINTER from six.moves import range from .xdo import libX11 as _libX11 from .xdo import libxdo as _libxdo from .xdo import ( # noqa CURRENTWINDOW, SEARCH_CLASS, SEARCH_CLASSNAME, SEARCH_DESKTOP, SEARCH_NAME, SEARCH_ONLYVISIBLE, SEARCH_PID, SEARCH_SCREEN, SEARCH_TITLE, Atom, Screen, XdoException, XErrorHandler, charcodemap_t, window_t, xdo_search_t) mouse_location = namedtuple('mouse_location', 'x,y,screen_num') mouse_location2 = namedtuple('mouse_location2', 'x,y,screen_num,window') window_location = namedtuple('window_location', 'x,y,screen') window_size = namedtuple('window_size', 'width,height') input_mask = namedtuple('input_mask', 'shift,lock,control,mod1,mod2,mod3,mod4,mod5') # noqa # Mouse button constants MOUSE_LEFT = 1 MOUSE_MIDDLE = 2 MOUSE_RIGHT = 3 MOUSE_WHEEL_UP = 4 MOUSE_WHEEL_DOWN = 5 # Keyboard modifiers MOD_Shift = 1 << 0 MOD_Lock = 1 << 1 MOD_Control = 1 << 2 MOD_Mod1 = 1 << 3 MOD_Mod2 = 1 << 4 MOD_Mod3 = 1 << 5 MOD_Mod4 = 1 << 6 MOD_Mod5 = 1 << 7 def _gen_input_mask(mask): """Generate input mask from bytemask""" return input_mask( shift=bool(mask & MOD_Shift), lock=bool(mask & MOD_Lock), control=bool(mask & MOD_Control), mod1=bool(mask & MOD_Mod1), mod2=bool(mask & MOD_Mod2), mod3=bool(mask & MOD_Mod3), mod4=bool(mask & MOD_Mod4), mod5=bool(mask & MOD_Mod5)) class XError(Exception): pass class Xdo(object): def __init__(self, display=None): if display is None: display = os.environ.get('DISPLAY', '') display = display.encode('utf-8') self._xdo = _libxdo.xdo_new(display) def _handle_x_error(evt): # todo: handle errors in a nicer way, eg. 
try getting error message raise XError('Event: {}'.format(evt)) self._error_handler = XErrorHandler(_handle_x_error) _libX11.XSetErrorHandler(self._error_handler) @classmethod def version(cls): return _libxdo.xdo_version() @classmethod def version_info(cls): return tuple(int(x) for x in cls.version().split(b'.')) def move_mouse(self, x, y, screen=0): """ Move the mouse to a specific location. :param x: the target X coordinate on the screen in pixels. :param y: the target Y coordinate on the screen in pixels. :param screen: the screen (number) you want to move on. """ # todo: apparently the "screen" argument is not behaving properly # and sometimes even making the interpreter crash.. # Figure out why (changed API / using wrong header?) # >>> xdo.move_mouse(3000,200,1) # X Error of failed request: BadWindow (invalid Window parameter) # Major opcode of failed request: 41 (X_WarpPointer) # Resource id in failed request: 0x2a4fca0 # Serial number of failed request: 25 # Current serial number in output stream: 26 # Just to be safe.. # screen = 0 x = ctypes.c_int(x) y = ctypes.c_int(y) screen = ctypes.c_int(screen) _libxdo.xdo_move_mouse(self._xdo, x, y, screen) def move_mouse_relative_to_window(self, window, x, y): """ Move the mouse to a specific location relative to the top-left corner of a window. :param x: the target X coordinate on the screen in pixels. :param y: the target Y coordinate on the screen in pixels. """ _libxdo.xdo_move_mouse_relative_to_window( self._xdo, ctypes.c_ulong(window), x, y) def move_mouse_relative(self, x, y): """ Move the mouse relative to it's current position. :param x: the distance in pixels to move on the X axis. :param y: the distance in pixels to move on the Y axis. """ _libxdo.xdo_move_mouse_relative(self._xdo, x, y) def mouse_down(self, window, button): """ Send a mouse press (aka mouse down) for a given button at the current mouse location. 
:param window: The window you want to send the event to or CURRENTWINDOW :param button: The mouse button. Generally, 1 is left, 2 is middle, 3 is right, 4 is wheel up, 5 is wheel down. """ _libxdo.xdo_mouse_down( self._xdo, ctypes.c_ulong(window), ctypes.c_int(button)) def mouse_up(self, window, button): """ Send a mouse release (aka mouse up) for a given button at the current mouse location. :param window: The window you want to send the event to or CURRENTWINDOW :param button: The mouse button. Generally, 1 is left, 2 is middle, 3 is right, 4 is wheel up, 5 is wheel down. """ _libxdo.xdo_mouse_up( self._xdo, ctypes.c_ulong(window), ctypes.c_int(button)) def get_mouse_location(self): """ Get the current mouse location (coordinates and screen number). :return: a namedtuple with ``x``, ``y`` and ``screen_num`` fields """ x = ctypes.c_int(0) y = ctypes.c_int(0) screen_num = ctypes.c_int(0) _libxdo.xdo_get_mouse_location( self._xdo, ctypes.byref(x), ctypes.byref(y), ctypes.byref(screen_num)) return mouse_location(x.value, y.value, screen_num.value) def get_window_at_mouse(self): """ Get the window the mouse is currently over """ window_ret = ctypes.c_ulong(0) _libxdo.xdo_get_window_at_mouse(self._xdo, ctypes.byref(window_ret)) return window_ret.value def get_mouse_location2(self): """ Get all mouse location-related data. :return: a namedtuple with ``x``, ``y``, ``screen_num`` and ``window`` fields """ x = ctypes.c_int(0) y = ctypes.c_int(0) screen_num_ret = ctypes.c_ulong(0) window_ret = ctypes.c_ulong(0) _libxdo.xdo_get_mouse_location2( self._xdo, ctypes.byref(x), ctypes.byref(y), ctypes.byref(screen_num_ret), ctypes.byref(window_ret)) return mouse_location2(x.value, y.value, screen_num_ret.value, window_ret.value) def wait_for_mouse_move_from(self, origin_x, origin_y): """ Wait for the mouse to move from a location. This function will block until the condition has been satisified. 
:param origin_x: the X position you expect the mouse to move from :param origin_y: the Y position you expect the mouse to move from """ _libxdo.xdo_wait_for_mouse_move_from(self._xdo, origin_x, origin_y) def wait_for_mouse_move_to(self, dest_x, dest_y): """ Wait for the mouse to move to a location. This function will block until the condition has been satisified. :param dest_x: the X position you expect the mouse to move to :param dest_y: the Y position you expect the mouse to move to """ _libxdo.xdo_wait_for_mouse_move_from(self._xdo, dest_x, dest_y) def click_window(self, window, button): """ Send a click for a specific mouse button at the current mouse location. :param window: The window you want to send the event to or CURRENTWINDOW :param button: The mouse button. Generally, 1 is left, 2 is middle, 3 is right, 4 is wheel up, 5 is wheel down. """ _libxdo.xdo_click_window(self._xdo, window, button) def click_window_multiple(self, window, button, repeat=2, delay=100000): """ Send a one or more clicks for a specific mouse button at the current mouse location. :param window: The window you want to send the event to or CURRENTWINDOW :param button: The mouse button. Generally, 1 is left, 2 is middle, 3 is right, 4 is wheel up, 5 is wheel down. :param repeat: number of repetitions (default: 2) :param delay: delay between clicks, in microseconds (default: 100k) """ _libxdo.xdo_click_window_multiple( self._xdo, window, button, repeat, delay) def enter_text_window(self, window, string, delay=12000): """ Type a string to the specified window. If you want to send a specific key or key sequence, such as "alt+l", you want instead ``send_keysequence_window(...)``. :param window: The window you want to send keystrokes to or CURRENTWINDOW :param string: The string to type, like "Hello world!" :param delay: The delay between keystrokes in microseconds. 12000 is a decent choice if you don't have other plans. 
""" return _libxdo.xdo_enter_text_window(self._xdo, window, string, delay) def send_keysequence_window(self, window, keysequence, delay=12000): """ Send a keysequence to the specified window. This allows you to send keysequences by symbol name. Any combination of X11 KeySym names separated by '+' are valid. Single KeySym names are valid, too. Examples: "l" "semicolon" "alt+Return" "Alt_L+Tab" If you want to type a string, such as "Hello world." you want to instead use xdo_enter_text_window. :param window: The window you want to send the keysequence to or CURRENTWINDOW :param keysequence: The string keysequence to send. :param delay: The delay between keystrokes in microseconds. """ _libxdo.xdo_send_keysequence_window( self._xdo, window, keysequence, delay) def send_keysequence_window_up(self, window, keysequence, delay=12000): """Send key release (up) events for the given key sequence""" _libxdo.xdo_send_keysequence_window_up( self._xdo, window, keysequence, ctypes.c_ulong(delay)) def send_keysequence_window_down(self, window, keysequence, delay=12000): """Send key press (down) events for the given key sequence""" _libxdo.xdo_send_keysequence_window_down( self._xdo, window, keysequence, ctypes.c_ulong(delay)) def send_keysequence_window_list_do( self, window, keys, pressed=1, modifier=None, delay=120000): """ Send a series of keystrokes. :param window: The window to send events to or CURRENTWINDOW :param keys: The array of charcodemap_t entities to send. :param pressed: 1 for key press, 0 for key release. :param modifier: Pointer to integer to record the modifiers activated by the keys being pressed. If NULL, we don't save the modifiers. :param delay: The delay between keystrokes in microseconds. """ # todo: how to properly use charcodes_t in a nice way? _libxdo.xdo_send_keysequence_window_list_do( self._xdo, window, keys, len(keys), pressed, modifier, delay) def get_active_keys_to_keycode_list(self): """Get a list of active keys. 
Uses XQueryKeymap""" try: _libxdo.xdo_get_active_keys_to_keycode_list except AttributeError: # Apparently, this was implemented in a later version.. raise NotImplementedError() keys = POINTER(charcodemap_t) nkeys = ctypes.c_int(0) _libxdo.xdo_get_active_keys_to_keycode_list( self._xdo, ctypes.byref(keys), ctypes.byref(nkeys)) # todo: make sure this returns a list of charcodemap_t! return keys.value def wait_for_window_map_state(self, window, state): """ Wait for a window to have a specific map state. State possibilities: IsUnmapped - window is not displayed. IsViewable - window is mapped and shown (though may be clipped by windows on top of it) IsUnviewable - window is mapped but a parent window is unmapped. :param window: the window you want to wait for. :param state: the state to wait for. """ _libxdo.xdo_wait_for_window_map_state(self._xdo, window, state) def wait_for_window_size(self, window, width, height, flags, to_or_from): _libxdo.xdo_wait_for_window_size(self._xdo) def wait_for_window_size_to(self, window, width, height, flags=0): return self.wait_for_window_size(window, width, height, flags, 0) def wait_for_window_size_from(self, window, width, height, flags=0): return self.wait_for_window_size(window, width, height, flags, 1) def move_window(self, window, x, y): """ Move a window to a specific location. The top left corner of the window will be moved to the x,y coordinate. :param wid: the window to move :param x: the X coordinate to move to. :param y: the Y coordinate to move to. """ _libxdo.xdo_move_window(self._xdo, window, x, y) def translate_window_with_sizehint(self, window, width, height): """ Apply a window's sizing hints (if any) to a given width and height. This function wraps XGetWMNormalHints() and applies any resize increment and base size to your given width and height values. 
:param window: the window to use :param width: the unit width you want to translate :param height: the unit height you want to translate :return: (width, height) """ width_ret = ctypes.c_uint(0) height_ret = ctypes.c_uint(0) _libxdo.xdo_translate_window_with_sizehint( self._xdo, window, width, height, ctypes.byref(width_ret), ctypes.byref(height_ret)) return width_ret.value, height_ret.value def set_window_size(self, window, w, h, flags=0): """ Change the window size. :param wid: the window to resize :param w: the new desired width :param h: the new desired height :param flags: if 0, use pixels for units. If SIZE_USEHINTS, then the units will be relative to the window size hints. """ _libxdo.xdo_set_window_size(self._xdo, window, w, h, flags) def set_window_property(self, window, name, value): """ Change a window property. Example properties you can change are WM_NAME, WM_ICON_NAME, etc. :param wid: The window to change a property of. :param name: the string name of the property. :param value: the string value of the property. """ _libxdo.xdo_set_window_property(self._xdo, window, name, value) def set_window_class(self, window, name, class_): """ Change the window's classname and or class. :param name: The new class name. If ``None``, no change. :param class_: The new class. If ``None``, no change. """ _libxdo.xdo_set_window_class(self._xdo, window, name, class_) def set_window_urgency(self, window, urgency): """Sets the urgency hint for a window""" _libxdo.xdo_set_window_urgency(self._xdo, window, urgency) def set_window_override_redirect(self, window, override_redirect): """ Set the override_redirect value for a window. This generally means whether or not a window manager will manage this window. If you set it to 1, the window manager will usually not draw borders on the window, etc. If you set it to 0, the window manager will see it like a normal application window. 
""" _libxdo.xdo_set_window_override_redirect( self._xdo, window, override_redirect) def focus_window(self, window): """ Focus a window. :see: xdo_activate_window :param wid: the window to focus. """ _libxdo.xdo_focus_window(self._xdo, window) def raise_window(self, window): """ Raise a window to the top of the window stack. This is also sometimes termed as bringing the window forward. :param wid: The window to raise. """ _libxdo.xdo_raise_window(self._xdo, window) def get_focused_window(self): """ Get the window currently having focus. :param window_ret: Pointer to a window where the currently-focused window will be stored. """ window_ret = window_t(0) _libxdo.xdo_get_focused_window(self._xdo, ctypes.byref(window_ret)) return window_ret.value def wait_for_window_focus(self, window, want_focus): """ Wait for a window to have or lose focus. :param window: The window to wait on :param want_focus: If 1, wait for focus. If 0, wait for loss of focus. """ _libxdo.xdo_wait_for_window_focus(self._xdo, window, want_focus) def get_pid_window(self, window): """ Get the PID owning a window. Not all applications support this. It looks at the ``_NET_WM_PID`` property of the window. :param window: the window to query. :return: the process id or 0 if no pid found. """ # todo: if the pid is 0, it means "not found" -> exception? return _libxdo.xdo_get_pid_window(self._xdo, window) def get_focused_window_sane(self): """ Like xdo_get_focused_window, but return the first ancestor-or-self window * having a property of WM_CLASS. This allows you to get the "real" or top-level-ish window having focus rather than something you may not expect to be the window having focused. :param window_ret: Pointer to a window where the currently-focused window will be stored. """ window_ret = window_t(0) _libxdo.xdo_get_focused_window_sane( self._xdo, ctypes.byref(window_ret)) return window_ret.value def activate_window(self, window): """ Activate a window. 
This is generally a better choice than xdo_focus_window for a variety of reasons, but it requires window manager support: - If the window is on another desktop, that desktop is switched to. - It moves the window forward rather than simply focusing it Requires your window manager to support this. Uses _NET_ACTIVE_WINDOW from the EWMH spec. :param wid: the window to activate """ _libxdo.xdo_activate_window(self._xdo, window) def wait_for_window_active(self, window, active=1): """ Wait for a window to be active or not active. Requires your window manager to support this. Uses _NET_ACTIVE_WINDOW from the EWMH spec. :param window: the window to wait on :param active: If 1, wait for active. If 0, wait for inactive. """ _libxdo.xdo_wait_for_window_active(self._xdo, window, active) def map_window(self, window): """ Map a window. This mostly means to make the window visible if it is not currently mapped. :param wid: the window to map. """ _libxdo.xdo_map_window(self._xdo, window) def unmap_window(self, window): """ Unmap a window :param wid: the window to unmap """ _libxdo.xdo_unmap_window(self._xdo, window) def minimize_window(self, window): """Minimize a window""" _libxdo.xdo_minimize_window(self._xdo, window) def reparent_window(self, window_source, window_target): """ Reparents a window :param wid_source: the window to reparent :param wid_target: the new parent window """ _libxdo.xdo_reparent_window(self._xdo, window_source, window_target) def get_window_location(self, window): """ Get a window's location. """ screen_ret = Screen() x_ret = ctypes.c_int(0) y_ret = ctypes.c_int(0) _libxdo.xdo_get_window_location( self._xdo, window, ctypes.byref(x_ret), ctypes.byref(y_ret), ctypes.byref(screen_ret)) return window_location(x_ret.value, y_ret.value, screen_ret) def get_window_size(self, window): """ Get a window's size. 
""" w_ret = ctypes.c_uint(0) h_ret = ctypes.c_uint(0) _libxdo.xdo_get_window_size(self._xdo, window, ctypes.byref(w_ret), ctypes.byref(h_ret)) return window_size(w_ret.value, h_ret.value) def get_active_window(self): """ Get the currently-active window. Requires your window manager to support this. Uses ``_NET_ACTIVE_WINDOW`` from the EWMH spec. """ window_ret = window_t(0) _libxdo.xdo_get_active_window(self._xdo, ctypes.byref(window_ret)) return window_ret.value def select_window_with_click(self): """ Get a window ID by clicking on it. This function blocks until a selection is made. """ window_ret = window_t(0) _libxdo.xdo_select_window_with_click( self._xdo, ctypes.byref(window_ret)) return window_ret.value def set_number_of_desktops(self, ndesktops): """ Set the number of desktops. Uses ``_NET_NUMBER_OF_DESKTOPS`` of the EWMH spec. :param ndesktops: the new number of desktops to set. """ _libxdo.xdo_set_number_of_desktops(self._xdo, ndesktops) def get_number_of_desktops(self): """ Get the current number of desktops. Uses ``_NET_NUMBER_OF_DESKTOPS`` of the EWMH spec. :param ndesktops: pointer to long where the current number of desktops is stored """ ndesktops = ctypes.c_long(0) _libxdo.xdo_get_number_of_desktops(self._xdo, ctypes.byref(ndesktops)) return ndesktops.value def set_current_desktop(self, desktop): """ Switch to another desktop. Uses ``_NET_CURRENT_DESKTOP`` of the EWMH spec. :param desktop: The desktop number to switch to. """ _libxdo.xdo_set_current_desktop(self._xdo, desktop) def get_current_desktop(self): """ Get the current desktop. Uses ``_NET_CURRENT_DESKTOP`` of the EWMH spec. """ desktop = ctypes.c_long(0) _libxdo.xdo_get_current_desktop(self._xdo, ctypes.byref(desktop)) return desktop.value def set_desktop_for_window(self, window, desktop): """ Move a window to another desktop Uses _NET_WM_DESKTOP of the EWMH spec. 
:param wid: the window to move :param desktop: the desktop destination for the window """ _libxdo.xdo_set_desktop_for_window(self._xdo, window, desktop) def get_desktop_for_window(self, window): """ Get the desktop a window is on. Uses _NET_WM_DESKTOP of the EWMH spec. If your desktop does not support ``_NET_WM_DESKTOP``, then '*desktop' remains unmodified. :param wid: the window to query """ desktop = ctypes.c_long(0) _libxdo.xdo_get_desktop_for_window( self._xdo, window, ctypes.byref(desktop)) return desktop.value def search_windows( self, winname=None, winclass=None, winclassname=None, pid=None, only_visible=False, screen=None, require=False, searchmask=0, desktop=None, limit=0, max_depth=-1): """ Search for windows. :param winname: Regexp to be matched against window name :param winclass: Regexp to be matched against window class :param winclassname: Regexp to be matched against window class name :param pid: Only return windows from this PID :param only_visible: If True, only return visible windows :param screen: Search only windows on this screen :param require: If True, will match ALL conditions. Otherwise, windows matching ANY condition will be returned. :param searchmask: Search mask, for advanced usage. Leave this alone if you don't kwnow what you are doing. :param limit: Maximum number of windows to list. Zero means no limit. :param max_depth: Maximum depth to return. Defaults to -1, meaning "no limit". :return: A list of window ids matching query. 
""" windowlist_ret = ctypes.pointer(window_t(0)) nwindows_ret = ctypes.c_uint(0) search = xdo_search_t(searchmask=searchmask) if winname is not None: search.winname = winname search.searchmask |= SEARCH_NAME if winclass is not None: search.winclass = winclass search.searchmask |= SEARCH_CLASS if winclassname is not None: search.winclassname = winclassname search.searchmask |= SEARCH_CLASSNAME if pid is not None: search.pid = pid search.searchmask |= SEARCH_PID if only_visible: search.only_visible = True search.searchmask |= SEARCH_ONLYVISIBLE if screen is not None: search.screen = screen search.searchmask |= SEARCH_SCREEN if screen is not None: search.screen = desktop search.searchmask |= SEARCH_DESKTOP search.limit = limit search.max_depth = max_depth _libxdo.xdo_search_windows( self._xdo, search, ctypes.byref(windowlist_ret), ctypes.byref(nwindows_ret)) return [windowlist_ret[i] for i in range(nwindows_ret.value)] def get_window_property_by_atom(self, window, atom): # todo: figure out what exactly this method does, and implement it raise NotImplemented( "get_window_property_by_atom() is not implemented (yet)") def get_window_property(self, window, name): value = ctypes.c_char_p() # unsigned char **value nitems = ctypes.c_long() type_ = Atom() size = ctypes.c_int(0) _libxdo.xdo_get_window_property( self._xdo, window, name, ctypes.byref(value), ctypes.byref(nitems), ctypes.byref(type_), ctypes.byref(size)) # todo: we need to convert atoms into their actual type.. values = [] for i in range(nitems): i_val = value[i] # i_type = type_[i] values.append(i_val) # todo: perform type conversion for "Atom"s of this type? # todo: how does the "Atom" thing work? return values def get_input_state(self): """ Get the current input state. 
:return: a namedtuple with the following (boolean) fields: shift, lock, control, mod1, mod2, mod3, mod4, mod5 """ mask = _libxdo.xdo_get_input_state(self._xdo) return _gen_input_mask(mask) def get_symbol_map(self): """ If you need the symbol map, use this method. The symbol map is an array of string pairs mapping common tokens to X Keysym strings, such as "alt" to "Alt_L" :return: array of strings. """ # todo: make sure we return a list of strings! sm = _libxdo.xdo_get_symbol_map() # Return value is like: # ['alt', 'Alt_L', ..., None, None, None, ...] # We want to return only values up to the first None. # todo: any better solution than this? i = 0 ret = [] while True: c = sm[i] if c is None: return ret ret.append(c) i += 1 def get_active_modifiers(self): """ Get a list of active keys. Uses XQueryKeymap. :return: list of charcodemap_t instances """ keys = ctypes.pointer(charcodemap_t()) nkeys = ctypes.c_int(0) _libxdo.xdo_get_active_modifiers( self._xdo, ctypes.byref(keys), ctypes.byref(nkeys)) return [keys[i] for i in range(nkeys.value)] def clear_active_modifiers(self, window, mods=None): """ Send any events necesary to clear the the active modifiers. For example, if you are holding 'alt' when xdo_get_active_modifiers is called, then this method will send a key-up for 'alt' """ raise NotImplementedError() def set_active_modifiers(self, window, mods=None): """ Send any events necessary to make these modifiers active. This is useful if you just cleared the active modifiers and then wish to restore them after. """ raise NotImplementedError() def get_desktop_viewport(self): """ Get the position of the current viewport. This is only relevant if your window manager supports ``_NET_DESKTOP_VIEWPORT``. """ raise NotImplementedError() def set_desktop_viewport(self, x, y): """ Set the position of the current viewport. 
This is only relevant if your window manager supports ``_NET_DESKTOP_VIEWPORT`` """ raise NotImplementedError() def kill_window(self): """ Kill a window and the client owning it. """ raise NotImplementedError() XDO_FIND_PARENTS = 0 XDO_FIND_CHILDREN = 1 def find_window_client(self): """ Find a client window (child) in a given window. Useful if you get the window manager's decorator window rather than the client window. """ raise NotImplementedError() def get_window_name(self, win_id): """ Get a window's name, if any. """ window = window_t(win_id) name_ptr = ctypes.c_char_p() name_len = ctypes.c_int(0) name_type = ctypes.c_int(0) _libxdo.xdo_get_window_name( self._xdo, window, ctypes.byref(name_ptr), ctypes.byref(name_len), ctypes.byref(name_type)) name = name_ptr.value _libX11.XFree(name_ptr) # Free the string allocated by Xlib return name def enable_feature(self): """ Enable an xdo feature. This function is mainly used by libxdo itself, however, you may find it useful in your own applications. :see: XDO_FEATURES """ raise NotImplementedError() def has_feature(self): """ Check if a feature is enabled. This function is mainly used by libxdo itself, however, you may find it useful in your own applications. :see: XDO_FEATURES """ raise NotImplementedError() def get_viewport_dimensions(self): """ Query the viewport (your display) dimensions If Xinerama is active and supported, that api internally is used. If Xineram is disabled, we will report the root window's dimensions for the given screen. """ raise NotImplementedError() def __del__(self): _libxdo.xdo_free(self._xdo)
[ "ctypes.c_char_p", "ctypes.c_int", "six.moves.range", "ctypes.byref", "ctypes.c_ulong", "os.environ.get", "collections.namedtuple", "ctypes.c_long", "ctypes.c_uint", "ctypes.POINTER" ]
[((488, 534), 'collections.namedtuple', 'namedtuple', (['"""mouse_location"""', '"""x,y,screen_num"""'], {}), "('mouse_location', 'x,y,screen_num')\n", (498, 534), False, 'from collections import namedtuple\n'), ((553, 607), 'collections.namedtuple', 'namedtuple', (['"""mouse_location2"""', '"""x,y,screen_num,window"""'], {}), "('mouse_location2', 'x,y,screen_num,window')\n", (563, 607), False, 'from collections import namedtuple\n'), ((626, 669), 'collections.namedtuple', 'namedtuple', (['"""window_location"""', '"""x,y,screen"""'], {}), "('window_location', 'x,y,screen')\n", (636, 669), False, 'from collections import namedtuple\n'), ((684, 725), 'collections.namedtuple', 'namedtuple', (['"""window_size"""', '"""width,height"""'], {}), "('window_size', 'width,height')\n", (694, 725), False, 'from collections import namedtuple\n'), ((739, 810), 'collections.namedtuple', 'namedtuple', (['"""input_mask"""', '"""shift,lock,control,mod1,mod2,mod3,mod4,mod5"""'], {}), "('input_mask', 'shift,lock,control,mod1,mod2,mod3,mod4,mod5')\n", (749, 810), False, 'from collections import namedtuple\n'), ((3150, 3165), 'ctypes.c_int', 'ctypes.c_int', (['x'], {}), '(x)\n', (3162, 3165), False, 'import ctypes\n'), ((3178, 3193), 'ctypes.c_int', 'ctypes.c_int', (['y'], {}), '(y)\n', (3190, 3193), False, 'import ctypes\n'), ((3211, 3231), 'ctypes.c_int', 'ctypes.c_int', (['screen'], {}), '(screen)\n', (3223, 3231), False, 'import ctypes\n'), ((5251, 5266), 'ctypes.c_int', 'ctypes.c_int', (['(0)'], {}), '(0)\n', (5263, 5266), False, 'import ctypes\n'), ((5279, 5294), 'ctypes.c_int', 'ctypes.c_int', (['(0)'], {}), '(0)\n', (5291, 5294), False, 'import ctypes\n'), ((5316, 5331), 'ctypes.c_int', 'ctypes.c_int', (['(0)'], {}), '(0)\n', (5328, 5331), False, 'import ctypes\n'), ((5665, 5682), 'ctypes.c_ulong', 'ctypes.c_ulong', (['(0)'], {}), '(0)\n', (5679, 5682), False, 'import ctypes\n'), ((6008, 6023), 'ctypes.c_int', 'ctypes.c_int', (['(0)'], {}), '(0)\n', (6020, 6023), False, 'import 
ctypes\n'), ((6036, 6051), 'ctypes.c_int', 'ctypes.c_int', (['(0)'], {}), '(0)\n', (6048, 6051), False, 'import ctypes\n'), ((6077, 6094), 'ctypes.c_ulong', 'ctypes.c_ulong', (['(0)'], {}), '(0)\n', (6091, 6094), False, 'import ctypes\n'), ((6116, 6133), 'ctypes.c_ulong', 'ctypes.c_ulong', (['(0)'], {}), '(0)\n', (6130, 6133), False, 'import ctypes\n'), ((11607, 11629), 'ctypes.POINTER', 'POINTER', (['charcodemap_t'], {}), '(charcodemap_t)\n', (11614, 11629), False, 'from ctypes import POINTER\n'), ((11646, 11661), 'ctypes.c_int', 'ctypes.c_int', (['(0)'], {}), '(0)\n', (11658, 11661), False, 'import ctypes\n'), ((13782, 13798), 'ctypes.c_uint', 'ctypes.c_uint', (['(0)'], {}), '(0)\n', (13795, 13798), False, 'import ctypes\n'), ((13820, 13836), 'ctypes.c_uint', 'ctypes.c_uint', (['(0)'], {}), '(0)\n', (13833, 13836), False, 'import ctypes\n'), ((20208, 20223), 'ctypes.c_int', 'ctypes.c_int', (['(0)'], {}), '(0)\n', (20220, 20223), False, 'import ctypes\n'), ((20240, 20255), 'ctypes.c_int', 'ctypes.c_int', (['(0)'], {}), '(0)\n', (20252, 20255), False, 'import ctypes\n'), ((20586, 20602), 'ctypes.c_uint', 'ctypes.c_uint', (['(0)'], {}), '(0)\n', (20599, 20602), False, 'import ctypes\n'), ((20619, 20635), 'ctypes.c_uint', 'ctypes.c_uint', (['(0)'], {}), '(0)\n', (20632, 20635), False, 'import ctypes\n'), ((22079, 22095), 'ctypes.c_long', 'ctypes.c_long', (['(0)'], {}), '(0)\n', (22092, 22095), False, 'import ctypes\n'), ((22651, 22667), 'ctypes.c_long', 'ctypes.c_long', (['(0)'], {}), '(0)\n', (22664, 22667), False, 'import ctypes\n'), ((23437, 23453), 'ctypes.c_long', 'ctypes.c_long', (['(0)'], {}), '(0)\n', (23450, 23453), False, 'import ctypes\n'), ((24932, 24948), 'ctypes.c_uint', 'ctypes.c_uint', (['(0)'], {}), '(0)\n', (24945, 24948), False, 'import ctypes\n'), ((26408, 26425), 'ctypes.c_char_p', 'ctypes.c_char_p', ([], {}), '()\n', (26423, 26425), False, 'import ctypes\n'), ((26468, 26483), 'ctypes.c_long', 'ctypes.c_long', ([], {}), '()\n', (26481, 26483), 
False, 'import ctypes\n'), ((26522, 26537), 'ctypes.c_int', 'ctypes.c_int', (['(0)'], {}), '(0)\n', (26534, 26537), False, 'import ctypes\n'), ((26817, 26830), 'six.moves.range', 'range', (['nitems'], {}), '(nitems)\n', (26822, 26830), False, 'from six.moves import range\n'), ((28363, 28378), 'ctypes.c_int', 'ctypes.c_int', (['(0)'], {}), '(0)\n', (28375, 28378), False, 'import ctypes\n'), ((30240, 30257), 'ctypes.c_char_p', 'ctypes.c_char_p', ([], {}), '()\n', (30255, 30257), False, 'import ctypes\n'), ((30277, 30292), 'ctypes.c_int', 'ctypes.c_int', (['(0)'], {}), '(0)\n', (30289, 30292), False, 'import ctypes\n'), ((30313, 30328), 'ctypes.c_int', 'ctypes.c_int', (['(0)'], {}), '(0)\n', (30325, 30328), False, 'import ctypes\n'), ((1641, 1670), 'os.environ.get', 'os.environ.get', (['"""DISPLAY"""', '""""""'], {}), "('DISPLAY', '')\n", (1655, 1670), False, 'import os\n'), ((3681, 3703), 'ctypes.c_ulong', 'ctypes.c_ulong', (['window'], {}), '(window)\n', (3695, 3703), False, 'import ctypes\n'), ((4483, 4505), 'ctypes.c_ulong', 'ctypes.c_ulong', (['window'], {}), '(window)\n', (4497, 4505), False, 'import ctypes\n'), ((4507, 4527), 'ctypes.c_int', 'ctypes.c_int', (['button'], {}), '(button)\n', (4519, 4527), False, 'import ctypes\n'), ((4987, 5009), 'ctypes.c_ulong', 'ctypes.c_ulong', (['window'], {}), '(window)\n', (5001, 5009), False, 'import ctypes\n'), ((5011, 5031), 'ctypes.c_int', 'ctypes.c_int', (['button'], {}), '(button)\n', (5023, 5031), False, 'import ctypes\n'), ((5395, 5410), 'ctypes.byref', 'ctypes.byref', (['x'], {}), '(x)\n', (5407, 5410), False, 'import ctypes\n'), ((5412, 5427), 'ctypes.byref', 'ctypes.byref', (['y'], {}), '(y)\n', (5424, 5427), False, 'import ctypes\n'), ((5441, 5465), 'ctypes.byref', 'ctypes.byref', (['screen_num'], {}), '(screen_num)\n', (5453, 5465), False, 'import ctypes\n'), ((5734, 5758), 'ctypes.byref', 'ctypes.byref', (['window_ret'], {}), '(window_ret)\n', (5746, 5758), False, 'import ctypes\n'), ((6198, 6213), 
'ctypes.byref', 'ctypes.byref', (['x'], {}), '(x)\n', (6210, 6213), False, 'import ctypes\n'), ((6215, 6230), 'ctypes.byref', 'ctypes.byref', (['y'], {}), '(y)\n', (6227, 6230), False, 'import ctypes\n'), ((6244, 6272), 'ctypes.byref', 'ctypes.byref', (['screen_num_ret'], {}), '(screen_num_ret)\n', (6256, 6272), False, 'import ctypes\n'), ((6274, 6298), 'ctypes.byref', 'ctypes.byref', (['window_ret'], {}), '(window_ret)\n', (6286, 6298), False, 'import ctypes\n'), ((10165, 10186), 'ctypes.c_ulong', 'ctypes.c_ulong', (['delay'], {}), '(delay)\n', (10179, 10186), False, 'import ctypes\n'), ((10431, 10452), 'ctypes.c_ulong', 'ctypes.c_ulong', (['delay'], {}), '(delay)\n', (10445, 10452), False, 'import ctypes\n'), ((11738, 11756), 'ctypes.byref', 'ctypes.byref', (['keys'], {}), '(keys)\n', (11750, 11756), False, 'import ctypes\n'), ((11758, 11777), 'ctypes.byref', 'ctypes.byref', (['nkeys'], {}), '(nkeys)\n', (11770, 11777), False, 'import ctypes\n'), ((13947, 13970), 'ctypes.byref', 'ctypes.byref', (['width_ret'], {}), '(width_ret)\n', (13959, 13970), False, 'import ctypes\n'), ((13984, 14008), 'ctypes.byref', 'ctypes.byref', (['height_ret'], {}), '(height_ret)\n', (13996, 14008), False, 'import ctypes\n'), ((16758, 16782), 'ctypes.byref', 'ctypes.byref', (['window_ret'], {}), '(window_ret)\n', (16770, 16782), False, 'import ctypes\n'), ((18144, 18168), 'ctypes.byref', 'ctypes.byref', (['window_ret'], {}), '(window_ret)\n', (18156, 18168), False, 'import ctypes\n'), ((20328, 20347), 'ctypes.byref', 'ctypes.byref', (['x_ret'], {}), '(x_ret)\n', (20340, 20347), False, 'import ctypes\n'), ((20349, 20368), 'ctypes.byref', 'ctypes.byref', (['y_ret'], {}), '(y_ret)\n', (20361, 20368), False, 'import ctypes\n'), ((20382, 20406), 'ctypes.byref', 'ctypes.byref', (['screen_ret'], {}), '(screen_ret)\n', (20394, 20406), False, 'import ctypes\n'), ((20691, 20710), 'ctypes.byref', 'ctypes.byref', (['w_ret'], {}), '(w_ret)\n', (20703, 20710), False, 'import ctypes\n'), ((20748, 
20767), 'ctypes.byref', 'ctypes.byref', (['h_ret'], {}), '(h_ret)\n', (20760, 20767), False, 'import ctypes\n'), ((21113, 21137), 'ctypes.byref', 'ctypes.byref', (['window_ret'], {}), '(window_ret)\n', (21125, 21137), False, 'import ctypes\n'), ((21437, 21461), 'ctypes.byref', 'ctypes.byref', (['window_ret'], {}), '(window_ret)\n', (21449, 21461), False, 'import ctypes\n'), ((22150, 22173), 'ctypes.byref', 'ctypes.byref', (['ndesktops'], {}), '(ndesktops)\n', (22162, 22173), False, 'import ctypes\n'), ((22719, 22740), 'ctypes.byref', 'ctypes.byref', (['desktop'], {}), '(desktop)\n', (22731, 22740), False, 'import ctypes\n'), ((23529, 23550), 'ctypes.byref', 'ctypes.byref', (['desktop'], {}), '(desktop)\n', (23541, 23550), False, 'import ctypes\n'), ((25968, 25996), 'ctypes.byref', 'ctypes.byref', (['windowlist_ret'], {}), '(windowlist_ret)\n', (25980, 25996), False, 'import ctypes\n'), ((26010, 26036), 'ctypes.byref', 'ctypes.byref', (['nwindows_ret'], {}), '(nwindows_ret)\n', (26022, 26036), False, 'import ctypes\n'), ((26617, 26636), 'ctypes.byref', 'ctypes.byref', (['value'], {}), '(value)\n', (26629, 26636), False, 'import ctypes\n'), ((26638, 26658), 'ctypes.byref', 'ctypes.byref', (['nitems'], {}), '(nitems)\n', (26650, 26658), False, 'import ctypes\n'), ((26672, 26691), 'ctypes.byref', 'ctypes.byref', (['type_'], {}), '(type_)\n', (26684, 26691), False, 'import ctypes\n'), ((26693, 26711), 'ctypes.byref', 'ctypes.byref', (['size'], {}), '(size)\n', (26705, 26711), False, 'import ctypes\n'), ((28445, 28463), 'ctypes.byref', 'ctypes.byref', (['keys'], {}), '(keys)\n', (28457, 28463), False, 'import ctypes\n'), ((28465, 28484), 'ctypes.byref', 'ctypes.byref', (['nkeys'], {}), '(nkeys)\n', (28477, 28484), False, 'import ctypes\n'), ((30397, 30419), 'ctypes.byref', 'ctypes.byref', (['name_ptr'], {}), '(name_ptr)\n', (30409, 30419), False, 'import ctypes\n'), ((30433, 30455), 'ctypes.byref', 'ctypes.byref', (['name_len'], {}), '(name_len)\n', (30445, 30455), 
False, 'import ctypes\n'), ((30457, 30480), 'ctypes.byref', 'ctypes.byref', (['name_type'], {}), '(name_type)\n', (30469, 30480), False, 'import ctypes\n'), ((26082, 26107), 'six.moves.range', 'range', (['nwindows_ret.value'], {}), '(nwindows_ret.value)\n', (26087, 26107), False, 'from six.moves import range\n'), ((28519, 28537), 'six.moves.range', 'range', (['nkeys.value'], {}), '(nkeys.value)\n', (28524, 28537), False, 'from six.moves import range\n')]
import django_filters from django import forms from django.conf import settings from django.db import models from extras.models import Tag def multivalue_field_factory(field_class): """ Given a form field class, return a subclass capable of accepting multiple values. This allows us to OR on multiple filter values while maintaining the field's built-in validation. Example: GET /api/dcim/devices/?name=foo&name=bar """ class NewField(field_class): widget = forms.SelectMultiple def to_python(self, value): if not value: return [] return [ # Only append non-empty values (this avoids e.g. trying to cast '' as an integer) super(field_class, self).to_python(v) for v in value if v ] return type('MultiValue{}'.format(field_class.__name__), (NewField,), dict()) # # Filters # class MultiValueCharFilter(django_filters.MultipleChoiceFilter): field_class = multivalue_field_factory(forms.CharField) class MultiValueDateFilter(django_filters.MultipleChoiceFilter): field_class = multivalue_field_factory(forms.DateField) class MultiValueDateTimeFilter(django_filters.MultipleChoiceFilter): field_class = multivalue_field_factory(forms.DateTimeField) class MultiValueNumberFilter(django_filters.MultipleChoiceFilter): field_class = multivalue_field_factory(forms.IntegerField) class MultiValueTimeFilter(django_filters.MultipleChoiceFilter): field_class = multivalue_field_factory(forms.TimeField) class TreeNodeMultipleChoiceFilter(django_filters.ModelMultipleChoiceFilter): """ Filters for a set of Models, including all descendant models within a Tree. Example: [<Region: R1>,<Region: R2>] """ def filter(self, qs, value): value = [node.get_descendants(include_self=True) for node in value] return super().filter(qs, value) class NumericInFilter(django_filters.BaseInFilter, django_filters.NumberFilter): """ Filters for a set of numeric values. 
Example: id__in=100,200,300 """ pass class NullableCharFieldFilter(django_filters.CharFilter): """ Allow matching on null field values by passing a special string used to signify NULL. """ def filter(self, qs, value): if value != settings.FILTERS_NULL_CHOICE_VALUE: return super().filter(qs, value) qs = self.get_method(qs)(**{'{}__isnull'.format(self.field_name): True}) return qs.distinct() if self.distinct else qs class TagFilter(django_filters.ModelMultipleChoiceFilter): """ Match on one or more assigned tags. If multiple tags are specified (e.g. ?tag=foo&tag=bar), the queryset is filtered to objects matching all tags. """ def __init__(self, *args, **kwargs): kwargs.setdefault('field_name', 'tags__slug') kwargs.setdefault('to_field_name', 'slug') kwargs.setdefault('conjoined', True) kwargs.setdefault('queryset', Tag.objects.all()) super().__init__(*args, **kwargs) # # FilterSets # class NameSlugSearchFilterSet(django_filters.FilterSet): """ A base class for adding the search method to models which only expose the `name` and `slug` fields """ q = django_filters.CharFilter( method='search', label='Search', ) def search(self, queryset, name, value): if not value.strip(): return queryset return queryset.filter( models.Q(name__icontains=value) | models.Q(slug__icontains=value) ) # # Update default filters # FILTER_DEFAULTS = django_filters.filterset.FILTER_FOR_DBFIELD_DEFAULTS FILTER_DEFAULTS.update({ models.AutoField: { 'filter_class': MultiValueNumberFilter }, models.CharField: { 'filter_class': MultiValueCharFilter }, models.DateField: { 'filter_class': MultiValueDateFilter }, models.DateTimeField: { 'filter_class': MultiValueDateTimeFilter }, models.DecimalField: { 'filter_class': MultiValueNumberFilter }, models.EmailField: { 'filter_class': MultiValueCharFilter }, models.FloatField: { 'filter_class': MultiValueNumberFilter }, models.IntegerField: { 'filter_class': MultiValueNumberFilter }, models.PositiveIntegerField: { 'filter_class': MultiValueNumberFilter }, 
models.PositiveSmallIntegerField: { 'filter_class': MultiValueNumberFilter }, models.SlugField: { 'filter_class': MultiValueCharFilter }, models.SmallIntegerField: { 'filter_class': MultiValueNumberFilter }, models.TimeField: { 'filter_class': MultiValueTimeFilter }, models.URLField: { 'filter_class': MultiValueCharFilter }, })
[ "django.db.models.Q", "django_filters.CharFilter", "extras.models.Tag.objects.all" ]
[((3257, 3315), 'django_filters.CharFilter', 'django_filters.CharFilter', ([], {'method': '"""search"""', 'label': '"""Search"""'}), "(method='search', label='Search')\n", (3282, 3315), False, 'import django_filters\n'), ((2991, 3008), 'extras.models.Tag.objects.all', 'Tag.objects.all', ([], {}), '()\n', (3006, 3008), False, 'from extras.models import Tag\n'), ((3487, 3518), 'django.db.models.Q', 'models.Q', ([], {'name__icontains': 'value'}), '(name__icontains=value)\n', (3495, 3518), False, 'from django.db import models\n'), ((3533, 3564), 'django.db.models.Q', 'models.Q', ([], {'slug__icontains': 'value'}), '(slug__icontains=value)\n', (3541, 3564), False, 'from django.db import models\n')]
# -*- coding: utf-8 -*- """ Created on Sun May 23 16:38:11 2021 @author: Jaroslav """ # -*- coding: utf-8 -*- def f1(x): return x+1 x2=f1(1) print("vvedite vo skolko uvelichet functziy:") n=int(input()) def doublern (f): def g(n): return n*f return g #print(x2) g=doublern(x2) print(g(n)) print("vvedite vo skolko uvelichet functziy:") n=int(input()) def f1(x): return x+n x2=f1(n) def doublern (f): def g(n): return n*f return g #print(x2) g=doublern(x2) print(g(n)) p='hello' print(p) #print(g(p)) text_file = open('C:/F#/exp2/file1.txt','r',encoding='utf8') print(text_file) line_list = text_file.readlines(); for line in line_list: print(line) text_file.close() import shutil shutil.copy('C:/F#/exp2/file1.txt', 'C:/F#/exp2/file3.txt') import os os.getcwd() print(os.listdir('C:/F#/exp2/')) #print(shutil.rmtree.avoids_symlink_attacks) import shutil for i in range(10): shutil.copy2('C:/F#/exp2/file1.txt', 'C:/F#/exp2/file1{}.txt'.format(i))
[ "os.getcwd", "os.listdir", "shutil.copy" ]
[((818, 877), 'shutil.copy', 'shutil.copy', (['"""C:/F#/exp2/file1.txt"""', '"""C:/F#/exp2/file3.txt"""'], {}), "('C:/F#/exp2/file1.txt', 'C:/F#/exp2/file3.txt')\n", (829, 877), False, 'import shutil\n'), ((892, 903), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (901, 903), False, 'import os\n'), ((913, 938), 'os.listdir', 'os.listdir', (['"""C:/F#/exp2/"""'], {}), "('C:/F#/exp2/')\n", (923, 938), False, 'import os\n')]
#@@---------------------------@@ # Author: <NAME> # Date: 5/18/17 # Description: #@@---------------------------@@ from mininet.log import setLogLevel, info, lg import sys import logging import subprocess class Logger(object): def __init__(self, terminal, filename): self.terminal = terminal self.log = filename with open(self.log, 'w') as f: f.write("****CONSOLE OUTPUT****" + '\n\n\n') def write(self, message): self.terminal.write(message) with open(self.log, 'a') as f: f.write(message + '\n') def flush(self): #this flush method is needed for python 3 compatibility. #this handles the flush command by doing nothing. #you might want to specify some extra behavior here. pass def log_to_file(file_name): with open(file_name, 'w') as f: f.write("****LOG OUTPUT****" + '\n\n\n') fh = logging.FileHandler(file_name) setLogLevel('info') lg.addHandler(fh) def start_logging(): ##Copy config file subprocess.call(['cp' , './configs.json', './data/PARAMS/']) ##Redirect python logger to a file log_to_file('data/PARAMS/output.log') ##Redirect Stout to a file sys.stdout = Logger(sys.stdout, "data/PARAMS/console.log")
[ "subprocess.call", "mininet.log.lg.addHandler", "logging.FileHandler", "mininet.log.setLogLevel" ]
[((919, 949), 'logging.FileHandler', 'logging.FileHandler', (['file_name'], {}), '(file_name)\n', (938, 949), False, 'import logging\n'), ((954, 973), 'mininet.log.setLogLevel', 'setLogLevel', (['"""info"""'], {}), "('info')\n", (965, 973), False, 'from mininet.log import setLogLevel, info, lg\n'), ((978, 995), 'mininet.log.lg.addHandler', 'lg.addHandler', (['fh'], {}), '(fh)\n', (991, 995), False, 'from mininet.log import setLogLevel, info, lg\n'), ((1045, 1104), 'subprocess.call', 'subprocess.call', (["['cp', './configs.json', './data/PARAMS/']"], {}), "(['cp', './configs.json', './data/PARAMS/'])\n", (1060, 1104), False, 'import subprocess\n')]
from ipykernel.kernelbase import Kernel from MDSplus import Data class MdstclKernel(Kernel): implementation = 'Mdstcl' implementation_version = '1.0' language = 'no-op' language_version = '0.1' language_info = { 'name': 'mdstcl commands', 'mimetype': 'text/plain', 'file_extension': '.tcl', } banner = "MDSplus Mdstcl kernel - Tree Command Language interpreter" def do_execute(self, code, silent, store_history=True, user_expressions=None, allow_stdin=False): if not silent: try: lines=code.split('\n') for line in lines: ans = Data.execute('_status=tcl($1,_out),_out',line) status = int(Data.execute('_status')) if status & 1: stream_content = {'name': 'stdout', 'text': str(ans)} else: stream_content = {'name':'stderr','text': '\n'.join([line,str(ans)])} self.send_response(self.iopub_socket,'stream',stream_content) except Exception as e: stream_content = {'name': 'stderr', 'text': str(e)} self.send_response(self.iopub_socket, 'stream', stream_content) return {'status': 'ok', # The base class increments the execution count 'execution_count': self.execution_count, 'payload': [], 'user_expressions': {}, }
[ "MDSplus.Data.execute" ]
[((675, 722), 'MDSplus.Data.execute', 'Data.execute', (['"""_status=tcl($1,_out),_out"""', 'line'], {}), "('_status=tcl($1,_out),_out', line)\n", (687, 722), False, 'from MDSplus import Data\n'), ((753, 776), 'MDSplus.Data.execute', 'Data.execute', (['"""_status"""'], {}), "('_status')\n", (765, 776), False, 'from MDSplus import Data\n')]
# --- # jupyter: # jupytext: # formats: ipynb,py # text_representation: # extension: .py # format_name: light # format_version: '1.5' # jupytext_version: 1.9.1+dev # kernelspec: # display_name: Python [conda env:generic_expression] * # language: python # name: conda-env-generic_expression-py # --- # # Compare generic genes # # The goal of this notebook is to compare the generic genes found using the same template experiment run two times and 2 different recount2 template experiments. # + # %load_ext autoreload # %autoreload 2 import os from scipy import stats import seaborn as sns import pandas as pd import numpy as np import matplotlib.pyplot as plt from sklearn.preprocessing import MinMaxScaler from ponyo import utils # + # Read in config variables base_dir = os.path.abspath(os.path.join(os.getcwd(), "../")) config_filename = os.path.abspath( os.path.join(base_dir, "configs", "config_human_general.tsv") ) params = utils.read_config(config_filename) local_dir = params["local_dir"] project_id1 = "SRP012656" project_id2 = "SRP061689" # + # Get data directory containing gene summary data data_dir = os.path.join(base_dir, "human_general_analysis") # Get gene ranking files gene_ranking_filename1 = os.path.join( data_dir, f"generic_gene_summary_{project_id1}.tsv" ) gene_ranking_filename1_run2 = os.path.join( data_dir, f"generic_gene_summary_{project_id1}_run2.tsv" ) gene_ranking_filename2 = os.path.join( data_dir, f"generic_gene_summary_{project_id2}.tsv" ) # Get template data template_filename1 = os.path.join( data_dir, "data", f"processed_recount2_template_{project_id1}.tsv" ) template_filename2 = os.path.join( data_dir, "data", f"processed_recount2_template_{project_id2}.tsv" ) # - # ## Correlation between rankings between same experiment # # Here we compare gene ranking after running SOPHIE 2 times using the same template experiment but different seeds. 
# Load gene ranking gene_ranking_summary1 = pd.read_csv( gene_ranking_filename1, sep="\t", index_col=0, header=0 ) gene_ranking_summary1_run2 = pd.read_csv( gene_ranking_filename1_run2, sep="\t", index_col=0, header=0 ) # Get simulated ranking gene_ranking1 = ( gene_ranking_summary1["Rank (simulated)"].rename("Rank 1").to_frame("Rank 1") ) gene_ranking1_run2 = ( gene_ranking_summary1_run2["Rank (simulated)"].rename("Rank 2").to_frame("Rank 2") ) # + # Scale ranking to percentile (0,100) scaler = MinMaxScaler(feature_range=(0, 100)) gene_ranking1["Percentile 1"] = scaler.fit_transform( np.array(gene_ranking1["Rank 1"]).reshape(-1, 1) ) gene_ranking1_run2["Percentile 2"] = scaler.fit_transform( np.array(gene_ranking1_run2["Rank 2"]).reshape(-1, 1) ) gene_ranking1_run2.head() # - # Combine ranking gene_ranking_same_combined = pd.concat( [gene_ranking1["Percentile 1"], gene_ranking1_run2["Percentile 2"]], axis=1 ) print(gene_ranking_same_combined.shape) gene_ranking_same_combined.head() # Check for NAs gene_ranking_same_combined[pd.isnull(gene_ranking_same_combined).any(axis=1)] # + # Plot correlation between ranking r, p = stats.spearmanr( gene_ranking_same_combined["Percentile 1"], gene_ranking_same_combined["Percentile 2"], ) print(r, p) fig = sns.jointplot( data=gene_ranking_same_combined, x="Percentile 1", y="Percentile 2", kind="hex", marginal_kws={"color": "white", "edgecolor": "white"}, ) fig.set_axis_labels( f"Percentile in {project_id1}", f"Percentile in {project_id1} different runs", fontsize=14, fontname="Verdana", ) cbar_ax = fig.fig.add_axes([0.9, 0.25, 0.05, 0.4]) # x, y, width, height cb = plt.colorbar(cax=cbar_ax) cb.set_label("Number of genes") output_figure_filename = "concordance_between_same_recount2_templates.svg" fig.savefig( output_figure_filename, format="svg", bbox_inches="tight", transparent=True, pad_inches=0, dpi=300, ) # - # **Takeaway:** # * Running SOPHIE twice using the same template experiment will generate 2 different sets of 
simulated experiments. # * Since the template experiment is the same, these 2 sets of simulated experiments will have the same experimental design structure/biological context # * As expected, the concordance is very high especially for high ranked and low ranked genes. The genes in the middle rank are more sensitive to changes so you don't get as clear of a signal compared to the extreme ranked genes. # ## Correlation between rankings between 2 different experiments # # Here we compare gene ranking generated by SOPHIE using 2 different template experiments. # Load gene ranking gene_ranking_summary2 = pd.read_csv( gene_ranking_filename2, sep="\t", index_col=0, header=0 ) # Get simulated ranking gene_ranking1 = ( gene_ranking_summary1["Rank (simulated)"].rename("Rank 1").to_frame("Rank 1") ) gene_ranking2 = ( gene_ranking_summary2["Rank (simulated)"].rename("Rank 2").to_frame("Rank 2") ) # + # Scale ranking to percentile (0,100) scaler = MinMaxScaler(feature_range=(0, 100)) gene_ranking1["Percentile 1"] = scaler.fit_transform( np.array(gene_ranking1["Rank 1"]).reshape(-1, 1) ) gene_ranking2["Percentile 2"] = scaler.fit_transform( np.array(gene_ranking2["Rank 2"]).reshape(-1, 1) ) gene_ranking2.head() # - # Combine ranking gene_ranking_diff_combined = pd.concat( [gene_ranking1["Percentile 1"], gene_ranking2["Percentile 2"]], axis=1 ) print(gene_ranking_diff_combined.shape) gene_ranking_diff_combined.head() # Check for NAs gene_ranking_diff_combined[pd.isnull(gene_ranking_diff_combined).any(axis=1)] # + # Plot correlation between ranking r, p = stats.spearmanr( gene_ranking_diff_combined["Percentile 1"], gene_ranking_diff_combined["Percentile 2"], ) print(r, p) fig = sns.jointplot( data=gene_ranking_diff_combined, x="Percentile 1", y="Percentile 2", kind="hex", marginal_kws={"color": "white", "edgecolor": "white"}, ) fig.set_axis_labels( f"Percentile in {project_id1}", f"Percentile in {project_id2}", fontsize=14, fontname="Verdana", ) cbar_ax = fig.fig.add_axes([0.9, 
0.25, 0.05, 0.4]) # x, y, width, height cb = plt.colorbar(cax=cbar_ax) cb.set_label("Number of genes") output_figure_filename = "concordance_between_diff_recount2_templates.svg" fig.savefig( output_figure_filename, format="svg", bbox_inches="tight", transparent=True, pad_inches=0, dpi=300, ) # - # **Takeaway:** # # * Looks like there is good concordance between highly ranked genes (i.e. generic genes) # * By comparison if we run SOPHIE using two different template experiments, there are genes in the off-diagonal regions that might indicate that there are generic within the given context of the specific experiment. # * In general, the genes in the middle rank are more sensitive to changes so you don't get as clear of a signal compared to the highest rank genes. # ## Examine gene expression data # Read expression data template_1 = pd.read_csv(template_filename1, sep="\t", index_col=0, header=0) template_2 = pd.read_csv(template_filename2, sep="\t", index_col=0, header=0) # + # Get concordance genes concordant_genes = list( gene_ranking_diff_combined[ (gene_ranking_diff_combined["Percentile 1"] > 80) & (gene_ranking_diff_combined["Percentile 2"] > 80) ].index ) # Get disconcordant genes discordant_genes = set(gene_ranking_diff_combined.index).difference(concordant_genes) # + # Distribution of concordant genes in template experiment 1 template1_mean = template_1.mean() print( "Percent concordant genes with 0 expression in template 1:", len(template1_mean[concordant_genes].loc[template1_mean[concordant_genes] == 0]) / len(template1_mean[concordant_genes]), ) print( "Percent nonzero concordant genes in template 1:", len( template1_mean[concordant_genes].loc[ (template1_mean[concordant_genes] > 0) & (template1_mean[concordant_genes] < 1000) ] ) / len(template1_mean[concordant_genes]), ) f1 = sns.distplot(template_1.mean()[concordant_genes], kde=False) f1.set_title(f"Expression of concordant genes in {project_id1}") f1.set_xlabel("log(gene expression)") 
f1.set_ylabel("log(count)") f1.set(xscale="log", yscale="log") # + # Distribution of concordant genes in template experiment 2 template2_mean = template_2.mean() print( "Percent concordant genes with 0 expression in template 2:", len(template2_mean[concordant_genes].loc[template2_mean[concordant_genes] == 0]) / len(template2_mean[concordant_genes]), ) print( "Percent nonzero concordant genes in template 2:", len( template2_mean[concordant_genes].loc[ (template2_mean[concordant_genes] > 0) & (template2_mean[concordant_genes] < 1000) ] ) / len(template2_mean[concordant_genes]), ) # There are more 0 expressed genes in this template experiment f2 = sns.distplot(template_2.mean()[concordant_genes], kde=False) f2.set_title(f"Expression of concordant genes in {project_id2}") f2.set_xlabel("log(gene expression)") f2.set_ylabel("log(count)") f2.set(xscale="log", yscale="log") # + # Distribution of discordant gense in template experiment 1 template1_mean = template_1.mean() print( "Percent discordant genes with 0 expression in template 1:", len(template1_mean[discordant_genes].loc[template1_mean[discordant_genes] == 0]) / len(template1_mean[discordant_genes]), ) print( "Percent nonzero discordant genes in template 1:", len( template1_mean[discordant_genes].loc[ (template1_mean[discordant_genes] > 0) & (template1_mean[discordant_genes] < 1000) ] ) / len(template1_mean[discordant_genes]), ) print( len(template1_mean[discordant_genes].loc[template1_mean[discordant_genes] > 0]) / len(template1_mean[discordant_genes]) ) f3 = sns.distplot(template_1.mean()[discordant_genes], kde=False) f3.set_title(f"Expression of discordant genes in {project_id1}") f3.set_xlabel("log(gene expression)") f3.set_ylabel("log(count)") f3.set(xscale="log", yscale="log") # + # Distribution of discordant genes in template experiment 2 template2_mean = template_2.mean() print( "Percent discordant genes with 0 expression in template 2:", 
len(template2_mean[discordant_genes].loc[template2_mean[discordant_genes] == 0]) / len(template2_mean[discordant_genes]), ) print( "Percent nonzero discordant genes in template 2:", len( template2_mean[discordant_genes].loc[ (template2_mean[discordant_genes] > 0) & (template2_mean[discordant_genes] < 1000) ] ) / len(template2_mean[discordant_genes]), ) f4 = sns.distplot(template_2.mean()[discordant_genes], kde=False) f4.set_title(f"Expression of discordant genes in {project_id2}") f4.set_xlabel("log(gene expression)") f4.set_ylabel("log(count)") f4.set(xscale="log", yscale="log") # - # **Takeaway:** # # Doesn't appear to be much of a difference between the distribution of average gene expression values for these two experiments. # # Theoretically, I would expect the scenario where a gene is lowly expressed in the context of template experiment 1 and therefore not found to be generic. But this same gene could be found to be generic in the context of template experiment 2 if it is more expressed. Its possible that differences in gene expression distribution can change which genes are found to be generic given that the simulation is producing experiments with a similar context. # # In this case, despite having similar gene expression distributions there are still many differences in gene ranking. This suggests to me that level of gene expression activity doesn't matter as much as the overall patterns perhaps. # # Overall we observe a slight shift showing that concordant genes are more lowly expressed compared to discordant genes, but most genes are still predominantly lowly gene expression. If most genes have expression levels very close to 0, then small fluctuations in the expression of some genes could lead to large changes in rank without changing the overall expression distribution.
[ "pandas.read_csv", "os.getcwd", "sklearn.preprocessing.MinMaxScaler", "scipy.stats.spearmanr", "ponyo.utils.read_config", "pandas.isnull", "matplotlib.pyplot.colorbar", "numpy.array", "seaborn.jointplot", "os.path.join", "pandas.concat" ]
[((987, 1021), 'ponyo.utils.read_config', 'utils.read_config', (['config_filename'], {}), '(config_filename)\n', (1004, 1021), False, 'from ponyo import utils\n'), ((1174, 1222), 'os.path.join', 'os.path.join', (['base_dir', '"""human_general_analysis"""'], {}), "(base_dir, 'human_general_analysis')\n", (1186, 1222), False, 'import os\n'), ((1274, 1339), 'os.path.join', 'os.path.join', (['data_dir', 'f"""generic_gene_summary_{project_id1}.tsv"""'], {}), "(data_dir, f'generic_gene_summary_{project_id1}.tsv')\n", (1286, 1339), False, 'import os\n'), ((1376, 1446), 'os.path.join', 'os.path.join', (['data_dir', 'f"""generic_gene_summary_{project_id1}_run2.tsv"""'], {}), "(data_dir, f'generic_gene_summary_{project_id1}_run2.tsv')\n", (1388, 1446), False, 'import os\n'), ((1478, 1543), 'os.path.join', 'os.path.join', (['data_dir', 'f"""generic_gene_summary_{project_id2}.tsv"""'], {}), "(data_dir, f'generic_gene_summary_{project_id2}.tsv')\n", (1490, 1543), False, 'import os\n'), ((1592, 1677), 'os.path.join', 'os.path.join', (['data_dir', '"""data"""', 'f"""processed_recount2_template_{project_id1}.tsv"""'], {}), "(data_dir, 'data', f'processed_recount2_template_{project_id1}.tsv'\n )\n", (1604, 1677), False, 'import os\n'), ((1700, 1785), 'os.path.join', 'os.path.join', (['data_dir', '"""data"""', 'f"""processed_recount2_template_{project_id2}.tsv"""'], {}), "(data_dir, 'data', f'processed_recount2_template_{project_id2}.tsv'\n )\n", (1712, 1785), False, 'import os\n'), ((2013, 2081), 'pandas.read_csv', 'pd.read_csv', (['gene_ranking_filename1'], {'sep': '"""\t"""', 'index_col': '(0)', 'header': '(0)'}), "(gene_ranking_filename1, sep='\\t', index_col=0, header=0)\n", (2024, 2081), True, 'import pandas as pd\n'), ((2117, 2190), 'pandas.read_csv', 'pd.read_csv', (['gene_ranking_filename1_run2'], {'sep': '"""\t"""', 'index_col': '(0)', 'header': '(0)'}), "(gene_ranking_filename1_run2, sep='\\t', index_col=0, header=0)\n", (2128, 2190), True, 'import pandas as pd\n'), 
((2488, 2524), 'sklearn.preprocessing.MinMaxScaler', 'MinMaxScaler', ([], {'feature_range': '(0, 100)'}), '(feature_range=(0, 100))\n', (2500, 2524), False, 'from sklearn.preprocessing import MinMaxScaler\n'), ((2834, 2925), 'pandas.concat', 'pd.concat', (["[gene_ranking1['Percentile 1'], gene_ranking1_run2['Percentile 2']]"], {'axis': '(1)'}), "([gene_ranking1['Percentile 1'], gene_ranking1_run2['Percentile 2'\n ]], axis=1)\n", (2843, 2925), True, 'import pandas as pd\n'), ((3144, 3251), 'scipy.stats.spearmanr', 'stats.spearmanr', (["gene_ranking_same_combined['Percentile 1']", "gene_ranking_same_combined['Percentile 2']"], {}), "(gene_ranking_same_combined['Percentile 1'],\n gene_ranking_same_combined['Percentile 2'])\n", (3159, 3251), False, 'from scipy import stats\n'), ((3278, 3436), 'seaborn.jointplot', 'sns.jointplot', ([], {'data': 'gene_ranking_same_combined', 'x': '"""Percentile 1"""', 'y': '"""Percentile 2"""', 'kind': '"""hex"""', 'marginal_kws': "{'color': 'white', 'edgecolor': 'white'}"}), "(data=gene_ranking_same_combined, x='Percentile 1', y=\n 'Percentile 2', kind='hex', marginal_kws={'color': 'white', 'edgecolor':\n 'white'})\n", (3291, 3436), True, 'import seaborn as sns\n'), ((3682, 3707), 'matplotlib.pyplot.colorbar', 'plt.colorbar', ([], {'cax': 'cbar_ax'}), '(cax=cbar_ax)\n', (3694, 3707), True, 'import matplotlib.pyplot as plt\n'), ((4681, 4749), 'pandas.read_csv', 'pd.read_csv', (['gene_ranking_filename2'], {'sep': '"""\t"""', 'index_col': '(0)', 'header': '(0)'}), "(gene_ranking_filename2, sep='\\t', index_col=0, header=0)\n", (4692, 4749), True, 'import pandas as pd\n'), ((5037, 5073), 'sklearn.preprocessing.MinMaxScaler', 'MinMaxScaler', ([], {'feature_range': '(0, 100)'}), '(feature_range=(0, 100))\n', (5049, 5073), False, 'from sklearn.preprocessing import MinMaxScaler\n'), ((5368, 5453), 'pandas.concat', 'pd.concat', (["[gene_ranking1['Percentile 1'], gene_ranking2['Percentile 2']]"], {'axis': '(1)'}), "([gene_ranking1['Percentile 
1'], gene_ranking2['Percentile 2']],\n axis=1)\n", (5377, 5453), True, 'import pandas as pd\n'), ((5673, 5780), 'scipy.stats.spearmanr', 'stats.spearmanr', (["gene_ranking_diff_combined['Percentile 1']", "gene_ranking_diff_combined['Percentile 2']"], {}), "(gene_ranking_diff_combined['Percentile 1'],\n gene_ranking_diff_combined['Percentile 2'])\n", (5688, 5780), False, 'from scipy import stats\n'), ((5807, 5965), 'seaborn.jointplot', 'sns.jointplot', ([], {'data': 'gene_ranking_diff_combined', 'x': '"""Percentile 1"""', 'y': '"""Percentile 2"""', 'kind': '"""hex"""', 'marginal_kws': "{'color': 'white', 'edgecolor': 'white'}"}), "(data=gene_ranking_diff_combined, x='Percentile 1', y=\n 'Percentile 2', kind='hex', marginal_kws={'color': 'white', 'edgecolor':\n 'white'})\n", (5820, 5965), True, 'import seaborn as sns\n'), ((6197, 6222), 'matplotlib.pyplot.colorbar', 'plt.colorbar', ([], {'cax': 'cbar_ax'}), '(cax=cbar_ax)\n', (6209, 6222), True, 'import matplotlib.pyplot as plt\n'), ((7021, 7085), 'pandas.read_csv', 'pd.read_csv', (['template_filename1'], {'sep': '"""\t"""', 'index_col': '(0)', 'header': '(0)'}), "(template_filename1, sep='\\t', index_col=0, header=0)\n", (7032, 7085), True, 'import pandas as pd\n'), ((7099, 7163), 'pandas.read_csv', 'pd.read_csv', (['template_filename2'], {'sep': '"""\t"""', 'index_col': '(0)', 'header': '(0)'}), "(template_filename2, sep='\\t', index_col=0, header=0)\n", (7110, 7163), True, 'import pandas as pd\n'), ((913, 974), 'os.path.join', 'os.path.join', (['base_dir', '"""configs"""', '"""config_human_general.tsv"""'], {}), "(base_dir, 'configs', 'config_human_general.tsv')\n", (925, 974), False, 'import os\n'), ((853, 864), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (862, 864), False, 'import os\n'), ((2584, 2617), 'numpy.array', 'np.array', (["gene_ranking1['Rank 1']"], {}), "(gene_ranking1['Rank 1'])\n", (2592, 2617), True, 'import numpy as np\n'), ((2699, 2737), 'numpy.array', 'np.array', (["gene_ranking1_run2['Rank 
2']"], {}), "(gene_ranking1_run2['Rank 2'])\n", (2707, 2737), True, 'import numpy as np\n'), ((3046, 3083), 'pandas.isnull', 'pd.isnull', (['gene_ranking_same_combined'], {}), '(gene_ranking_same_combined)\n', (3055, 3083), True, 'import pandas as pd\n'), ((5133, 5166), 'numpy.array', 'np.array', (["gene_ranking1['Rank 1']"], {}), "(gene_ranking1['Rank 1'])\n", (5141, 5166), True, 'import numpy as np\n'), ((5243, 5276), 'numpy.array', 'np.array', (["gene_ranking2['Rank 2']"], {}), "(gene_ranking2['Rank 2'])\n", (5251, 5276), True, 'import numpy as np\n'), ((5575, 5612), 'pandas.isnull', 'pd.isnull', (['gene_ranking_diff_combined'], {}), '(gene_ranking_diff_combined)\n', (5584, 5612), True, 'import pandas as pd\n')]
from fastapi import FastAPI from sqlalchemy import Column, Float, Integer, String from sqlalchemy import create_engine from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import sessionmaker from sqlalchemy_utils import create_database, database_exists, drop_database from fastapi_crudrouter import SQLAlchemyCRUDRouter from tests import ( Carrot, CarrotCreate, CarrotUpdate, CustomPotato, PAGINATION_SIZE, Potato, PotatoType, CUSTOM_TAGS, ) SQLALCHEMY_DATABASE_URL = "sqlite:///./test.db" def _setup_base_app(): if database_exists(SQLALCHEMY_DATABASE_URL): drop_database(SQLALCHEMY_DATABASE_URL) create_database(SQLALCHEMY_DATABASE_URL) app = FastAPI() engine = create_engine( SQLALCHEMY_DATABASE_URL, connect_args={"check_same_thread": False} ) SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) Base = declarative_base() def session(): session = SessionLocal() try: yield session session.commit() finally: session.close() return app, engine, Base, session def sqlalchemy_implementation(): app, engine, Base, session = _setup_base_app() class PotatoModel(Base): __tablename__ = "potatoes" id = Column(Integer, primary_key=True, index=True) thickness = Column(Float) mass = Column(Float) color = Column(String) type = Column(String) class CarrotModel(Base): __tablename__ = "carrots" id = Column(Integer, primary_key=True, index=True) length = Column(Float) color = Column(String) Base.metadata.create_all(bind=engine) app.include_router( SQLAlchemyCRUDRouter( schema=Potato, db_model=PotatoModel, db=session, prefix="potato", paginate=PAGINATION_SIZE, ) ) app.include_router( SQLAlchemyCRUDRouter( schema=Carrot, db_model=CarrotModel, db=session, create_schema=CarrotCreate, update_schema=CarrotUpdate, prefix="carrot", tags=CUSTOM_TAGS, ) ) return app # noinspection DuplicatedCode def sqlalchemy_implementation_custom_ids(): app, engine, Base, session = _setup_base_app() class PotatoModel(Base): __tablename__ = "potatoes" potato_id = 
Column(Integer, primary_key=True, index=True) thickness = Column(Float) mass = Column(Float) color = Column(String) type = Column(String) Base.metadata.create_all(bind=engine) app.include_router( SQLAlchemyCRUDRouter(schema=CustomPotato, db_model=PotatoModel, db=session) ) return app def sqlalchemy_implementation_string_pk(): app, engine, Base, session = _setup_base_app() class PotatoTypeModel(Base): __tablename__ = "potato_type" name = Column(String, primary_key=True, index=True) origin = Column(String) Base.metadata.create_all(bind=engine) app.include_router( SQLAlchemyCRUDRouter( schema=PotatoType, create_schema=PotatoType, db_model=PotatoTypeModel, db=session, prefix="potato_type", ) ) return app def sqlalchemy_implementation_integrity_errors(): app, engine, Base, session = _setup_base_app() class PotatoModel(Base): __tablename__ = "potatoes" id = Column(Integer, primary_key=True, index=True) thickness = Column(Float) mass = Column(Float) color = Column(String, unique=True) type = Column(String) class CarrotModel(Base): __tablename__ = "carrots" id = Column(Integer, primary_key=True, index=True) length = Column(Float) color = Column(String) Base.metadata.create_all(bind=engine) app.include_router( SQLAlchemyCRUDRouter( schema=Potato, db_model=PotatoModel, db=session, create_schema=Potato, prefix="potatoes", ) ) app.include_router( SQLAlchemyCRUDRouter( schema=Carrot, db_model=CarrotModel, db=session, update_schema=CarrotUpdate, prefix="carrots", ) ) return app
[ "sqlalchemy_utils.create_database", "sqlalchemy_utils.drop_database", "sqlalchemy_utils.database_exists", "fastapi_crudrouter.SQLAlchemyCRUDRouter", "sqlalchemy.ext.declarative.declarative_base", "sqlalchemy.Column", "sqlalchemy.create_engine", "sqlalchemy.orm.sessionmaker", "fastapi.FastAPI" ]
[((580, 620), 'sqlalchemy_utils.database_exists', 'database_exists', (['SQLALCHEMY_DATABASE_URL'], {}), '(SQLALCHEMY_DATABASE_URL)\n', (595, 620), False, 'from sqlalchemy_utils import create_database, database_exists, drop_database\n'), ((674, 714), 'sqlalchemy_utils.create_database', 'create_database', (['SQLALCHEMY_DATABASE_URL'], {}), '(SQLALCHEMY_DATABASE_URL)\n', (689, 714), False, 'from sqlalchemy_utils import create_database, database_exists, drop_database\n'), ((726, 735), 'fastapi.FastAPI', 'FastAPI', ([], {}), '()\n', (733, 735), False, 'from fastapi import FastAPI\n'), ((750, 836), 'sqlalchemy.create_engine', 'create_engine', (['SQLALCHEMY_DATABASE_URL'], {'connect_args': "{'check_same_thread': False}"}), "(SQLALCHEMY_DATABASE_URL, connect_args={'check_same_thread': \n False})\n", (763, 836), False, 'from sqlalchemy import create_engine\n'), ((865, 925), 'sqlalchemy.orm.sessionmaker', 'sessionmaker', ([], {'autocommit': '(False)', 'autoflush': '(False)', 'bind': 'engine'}), '(autocommit=False, autoflush=False, bind=engine)\n', (877, 925), False, 'from sqlalchemy.orm import sessionmaker\n'), ((937, 955), 'sqlalchemy.ext.declarative.declarative_base', 'declarative_base', ([], {}), '()\n', (953, 955), False, 'from sqlalchemy.ext.declarative import declarative_base\n'), ((630, 668), 'sqlalchemy_utils.drop_database', 'drop_database', (['SQLALCHEMY_DATABASE_URL'], {}), '(SQLALCHEMY_DATABASE_URL)\n', (643, 668), False, 'from sqlalchemy_utils import create_database, database_exists, drop_database\n'), ((1325, 1370), 'sqlalchemy.Column', 'Column', (['Integer'], {'primary_key': '(True)', 'index': '(True)'}), '(Integer, primary_key=True, index=True)\n', (1331, 1370), False, 'from sqlalchemy import Column, Float, Integer, String\n'), ((1391, 1404), 'sqlalchemy.Column', 'Column', (['Float'], {}), '(Float)\n', (1397, 1404), False, 'from sqlalchemy import Column, Float, Integer, String\n'), ((1420, 1433), 'sqlalchemy.Column', 'Column', (['Float'], {}), '(Float)\n', 
(1426, 1433), False, 'from sqlalchemy import Column, Float, Integer, String\n'), ((1450, 1464), 'sqlalchemy.Column', 'Column', (['String'], {}), '(String)\n', (1456, 1464), False, 'from sqlalchemy import Column, Float, Integer, String\n'), ((1480, 1494), 'sqlalchemy.Column', 'Column', (['String'], {}), '(String)\n', (1486, 1494), False, 'from sqlalchemy import Column, Float, Integer, String\n'), ((1572, 1617), 'sqlalchemy.Column', 'Column', (['Integer'], {'primary_key': '(True)', 'index': '(True)'}), '(Integer, primary_key=True, index=True)\n', (1578, 1617), False, 'from sqlalchemy import Column, Float, Integer, String\n'), ((1635, 1648), 'sqlalchemy.Column', 'Column', (['Float'], {}), '(Float)\n', (1641, 1648), False, 'from sqlalchemy import Column, Float, Integer, String\n'), ((1665, 1679), 'sqlalchemy.Column', 'Column', (['String'], {}), '(String)\n', (1671, 1679), False, 'from sqlalchemy import Column, Float, Integer, String\n'), ((1755, 1871), 'fastapi_crudrouter.SQLAlchemyCRUDRouter', 'SQLAlchemyCRUDRouter', ([], {'schema': 'Potato', 'db_model': 'PotatoModel', 'db': 'session', 'prefix': '"""potato"""', 'paginate': 'PAGINATION_SIZE'}), "(schema=Potato, db_model=PotatoModel, db=session,\n prefix='potato', paginate=PAGINATION_SIZE)\n", (1775, 1871), False, 'from fastapi_crudrouter import SQLAlchemyCRUDRouter\n'), ((1977, 2145), 'fastapi_crudrouter.SQLAlchemyCRUDRouter', 'SQLAlchemyCRUDRouter', ([], {'schema': 'Carrot', 'db_model': 'CarrotModel', 'db': 'session', 'create_schema': 'CarrotCreate', 'update_schema': 'CarrotUpdate', 'prefix': '"""carrot"""', 'tags': 'CUSTOM_TAGS'}), "(schema=Carrot, db_model=CarrotModel, db=session,\n create_schema=CarrotCreate, update_schema=CarrotUpdate, prefix='carrot',\n tags=CUSTOM_TAGS)\n", (1997, 2145), False, 'from fastapi_crudrouter import SQLAlchemyCRUDRouter\n'), ((2467, 2512), 'sqlalchemy.Column', 'Column', (['Integer'], {'primary_key': '(True)', 'index': '(True)'}), '(Integer, primary_key=True, index=True)\n', (2473, 
2512), False, 'from sqlalchemy import Column, Float, Integer, String\n'), ((2533, 2546), 'sqlalchemy.Column', 'Column', (['Float'], {}), '(Float)\n', (2539, 2546), False, 'from sqlalchemy import Column, Float, Integer, String\n'), ((2562, 2575), 'sqlalchemy.Column', 'Column', (['Float'], {}), '(Float)\n', (2568, 2575), False, 'from sqlalchemy import Column, Float, Integer, String\n'), ((2592, 2606), 'sqlalchemy.Column', 'Column', (['String'], {}), '(String)\n', (2598, 2606), False, 'from sqlalchemy import Column, Float, Integer, String\n'), ((2622, 2636), 'sqlalchemy.Column', 'Column', (['String'], {}), '(String)\n', (2628, 2636), False, 'from sqlalchemy import Column, Float, Integer, String\n'), ((2712, 2787), 'fastapi_crudrouter.SQLAlchemyCRUDRouter', 'SQLAlchemyCRUDRouter', ([], {'schema': 'CustomPotato', 'db_model': 'PotatoModel', 'db': 'session'}), '(schema=CustomPotato, db_model=PotatoModel, db=session)\n', (2732, 2787), False, 'from fastapi_crudrouter import SQLAlchemyCRUDRouter\n'), ((2993, 3037), 'sqlalchemy.Column', 'Column', (['String'], {'primary_key': '(True)', 'index': '(True)'}), '(String, primary_key=True, index=True)\n', (2999, 3037), False, 'from sqlalchemy import Column, Float, Integer, String\n'), ((3055, 3069), 'sqlalchemy.Column', 'Column', (['String'], {}), '(String)\n', (3061, 3069), False, 'from sqlalchemy import Column, Float, Integer, String\n'), ((3145, 3275), 'fastapi_crudrouter.SQLAlchemyCRUDRouter', 'SQLAlchemyCRUDRouter', ([], {'schema': 'PotatoType', 'create_schema': 'PotatoType', 'db_model': 'PotatoTypeModel', 'db': 'session', 'prefix': '"""potato_type"""'}), "(schema=PotatoType, create_schema=PotatoType, db_model=\n PotatoTypeModel, db=session, prefix='potato_type')\n", (3165, 3275), False, 'from fastapi_crudrouter import SQLAlchemyCRUDRouter\n'), ((3545, 3590), 'sqlalchemy.Column', 'Column', (['Integer'], {'primary_key': '(True)', 'index': '(True)'}), '(Integer, primary_key=True, index=True)\n', (3551, 3590), False, 'from 
sqlalchemy import Column, Float, Integer, String\n'), ((3611, 3624), 'sqlalchemy.Column', 'Column', (['Float'], {}), '(Float)\n', (3617, 3624), False, 'from sqlalchemy import Column, Float, Integer, String\n'), ((3640, 3653), 'sqlalchemy.Column', 'Column', (['Float'], {}), '(Float)\n', (3646, 3653), False, 'from sqlalchemy import Column, Float, Integer, String\n'), ((3670, 3697), 'sqlalchemy.Column', 'Column', (['String'], {'unique': '(True)'}), '(String, unique=True)\n', (3676, 3697), False, 'from sqlalchemy import Column, Float, Integer, String\n'), ((3713, 3727), 'sqlalchemy.Column', 'Column', (['String'], {}), '(String)\n', (3719, 3727), False, 'from sqlalchemy import Column, Float, Integer, String\n'), ((3805, 3850), 'sqlalchemy.Column', 'Column', (['Integer'], {'primary_key': '(True)', 'index': '(True)'}), '(Integer, primary_key=True, index=True)\n', (3811, 3850), False, 'from sqlalchemy import Column, Float, Integer, String\n'), ((3868, 3881), 'sqlalchemy.Column', 'Column', (['Float'], {}), '(Float)\n', (3874, 3881), False, 'from sqlalchemy import Column, Float, Integer, String\n'), ((3898, 3912), 'sqlalchemy.Column', 'Column', (['String'], {}), '(String)\n', (3904, 3912), False, 'from sqlalchemy import Column, Float, Integer, String\n'), ((3988, 4102), 'fastapi_crudrouter.SQLAlchemyCRUDRouter', 'SQLAlchemyCRUDRouter', ([], {'schema': 'Potato', 'db_model': 'PotatoModel', 'db': 'session', 'create_schema': 'Potato', 'prefix': '"""potatoes"""'}), "(schema=Potato, db_model=PotatoModel, db=session,\n create_schema=Potato, prefix='potatoes')\n", (4008, 4102), False, 'from fastapi_crudrouter import SQLAlchemyCRUDRouter\n'), ((4208, 4327), 'fastapi_crudrouter.SQLAlchemyCRUDRouter', 'SQLAlchemyCRUDRouter', ([], {'schema': 'Carrot', 'db_model': 'CarrotModel', 'db': 'session', 'update_schema': 'CarrotUpdate', 'prefix': '"""carrots"""'}), "(schema=Carrot, db_model=CarrotModel, db=session,\n update_schema=CarrotUpdate, prefix='carrots')\n", (4228, 4327), False, 'from 
fastapi_crudrouter import SQLAlchemyCRUDRouter\n')]
""" Steps: -get the summer.ai.pem key -login to the env.host machine and add your public key to the machine's authorized keys http://www.perrygeo.com/running-python-with-compiled-code-on-aws-lambda.html """ from fabric.api import local, sudo, run, warn_only, env, lcd, cd import yaml with open("serapis/config/default.yaml") as f: config = yaml.load(f) # the user to use for the remote commands env.user = 'ec2-user' # the servers where the commands are executed env.hosts = [config['ec2_ip']] gitfile = 'serapis.git.zip' lambdafile = 'serapis.lambda.zip' lambda_bucket = 'ai.summer.1mwords.test' lambdafunction = config['lambda_function_name'] corpora = [ 'nltk_data/taggers/averaged_perceptron_tagger/averaged_perceptron_tagger.pickle', 'nltk_data/tokenizers/punkt/english.pickle' ] def pack(): # Make sure machine and dev tools are up to date sudo('sudo yum -y update') sudo('sudo yum -y upgrade') sudo('yum install -y atlas-devel atlas-sse3-devel blas-devel gcc gcc-c++ lapack-devel python27-devel --enablerepo=epel') sudo('pip install -U pip') with warn_only(): run('rm ~/wordnik.zip') sudo('dd if=/dev/zero of=/swapfile bs=1024 count=1500000') sudo('mkswap /swapfile') sudo('chmod 0600 /swapfile') sudo('swapon /swapfile') run('/usr/bin/virtualenv --python /usr/bin/python build --always-copy --no-site-packages') run('source build/bin/activate') # Order is important here, so let's make sure we've got these right run('pip install -U pip') run('pip install --use-wheel numpy') run('pip install --use-wheel scipy') run('pip install --use-wheel sklearn') run('pip install --use-wheel pandas') with open('requirements.txt') as f: for req in f.read().splitlines(): if req.split("=")[0].lower() not in ('numpy', 'scipy', 'scikit-learn', 'sklearn', 'pandas'): run('pip install --use-wheel {}'.format(req)) for lib in ('lib', 'lib64'): # Strip SO files run('find "$VIRTUAL_ENV/{}/python2.7/site-packages/" -name "*.so" | xargs strip'.format(lib)) with 
cd('$VIRTUAL_ENV/{}/python2.7/site-packages/'.format(lib)): run('zip -r -9 -q ~/wordnik.zip *') # Get the file back onto our local machine local('scp %s@%s:~/wordnik.zip %s' % (env.user, env.hosts[0], lambdafile)) update() def install_corpora(): local("python -m nltk.downloader -d nltk_data {}".format(" ".join(config['nltk_corpora']))) def update(): # Run tests # local("py.test serapis/tests/") # Updates code in zip file with current Master without going to EC2 first. local('git archive --format=zip HEAD -o %s' % gitfile, capture=False) local('unzip -d git_tmp -o -u %s' % gitfile) with lcd('git_tmp'): local('zip -9r ../%s .' % lambdafile) local('zip -9 %s serapis/config/credentials.yaml' % lambdafile) for corpus in corpora: local('zip -9r {} {}'.format(lambdafile, corpus)) local('rm -r git_tmp') def qu(): local('zip -9 %s lambda_handler.py' % lambdafile) local('zip -9r %s serapis/' % lambdafile) local('zip -9r %s temp_models/' % lambdafile) def deploy(): # If this says that the function is not found, create it first: # aws lambda create-function --region us-east-1 --function-name WordTask --zip-file fileb://wordnik.lambda.zip --handler lambda_handler.handler --runtime python2.7 --timeout 10 --memory-size 512 --role arn:aws:iam::054978852993:role/lambda_basic_execution local('aws s3 cp {} s3://{}/{} --profile wordnik'.format(lambdafile, lambda_bucket, lambdafile)) local('aws lambda update-function-code --region us-east-1 --function-name {} --s3-bucket {} --s3-key {} --profile wordnik'.format(lambdafunction, lambda_bucket, lambdafile))
[ "yaml.load", "fabric.api.sudo", "fabric.api.local", "fabric.api.lcd", "fabric.api.run", "fabric.api.warn_only" ]
[((347, 359), 'yaml.load', 'yaml.load', (['f'], {}), '(f)\n', (356, 359), False, 'import yaml\n'), ((873, 899), 'fabric.api.sudo', 'sudo', (['"""sudo yum -y update"""'], {}), "('sudo yum -y update')\n", (877, 899), False, 'from fabric.api import local, sudo, run, warn_only, env, lcd, cd\n'), ((904, 931), 'fabric.api.sudo', 'sudo', (['"""sudo yum -y upgrade"""'], {}), "('sudo yum -y upgrade')\n", (908, 931), False, 'from fabric.api import local, sudo, run, warn_only, env, lcd, cd\n'), ((936, 1066), 'fabric.api.sudo', 'sudo', (['"""yum install -y atlas-devel atlas-sse3-devel blas-devel gcc gcc-c++ lapack-devel python27-devel --enablerepo=epel"""'], {}), "(\n 'yum install -y atlas-devel atlas-sse3-devel blas-devel gcc gcc-c++ lapack-devel python27-devel --enablerepo=epel'\n )\n", (940, 1066), False, 'from fabric.api import local, sudo, run, warn_only, env, lcd, cd\n'), ((1061, 1087), 'fabric.api.sudo', 'sudo', (['"""pip install -U pip"""'], {}), "('pip install -U pip')\n", (1065, 1087), False, 'from fabric.api import local, sudo, run, warn_only, env, lcd, cd\n'), ((1148, 1206), 'fabric.api.sudo', 'sudo', (['"""dd if=/dev/zero of=/swapfile bs=1024 count=1500000"""'], {}), "('dd if=/dev/zero of=/swapfile bs=1024 count=1500000')\n", (1152, 1206), False, 'from fabric.api import local, sudo, run, warn_only, env, lcd, cd\n'), ((1211, 1235), 'fabric.api.sudo', 'sudo', (['"""mkswap /swapfile"""'], {}), "('mkswap /swapfile')\n", (1215, 1235), False, 'from fabric.api import local, sudo, run, warn_only, env, lcd, cd\n'), ((1240, 1268), 'fabric.api.sudo', 'sudo', (['"""chmod 0600 /swapfile"""'], {}), "('chmod 0600 /swapfile')\n", (1244, 1268), False, 'from fabric.api import local, sudo, run, warn_only, env, lcd, cd\n'), ((1273, 1297), 'fabric.api.sudo', 'sudo', (['"""swapon /swapfile"""'], {}), "('swapon /swapfile')\n", (1277, 1297), False, 'from fabric.api import local, sudo, run, warn_only, env, lcd, cd\n'), ((1303, 1398), 'fabric.api.run', 'run', (['"""/usr/bin/virtualenv 
--python /usr/bin/python build --always-copy --no-site-packages"""'], {}), "('/usr/bin/virtualenv --python /usr/bin/python build --always-copy --no-site-packages'\n )\n", (1306, 1398), False, 'from fabric.api import local, sudo, run, warn_only, env, lcd, cd\n'), ((1398, 1430), 'fabric.api.run', 'run', (['"""source build/bin/activate"""'], {}), "('source build/bin/activate')\n", (1401, 1430), False, 'from fabric.api import local, sudo, run, warn_only, env, lcd, cd\n'), ((1508, 1533), 'fabric.api.run', 'run', (['"""pip install -U pip"""'], {}), "('pip install -U pip')\n", (1511, 1533), False, 'from fabric.api import local, sudo, run, warn_only, env, lcd, cd\n'), ((1538, 1574), 'fabric.api.run', 'run', (['"""pip install --use-wheel numpy"""'], {}), "('pip install --use-wheel numpy')\n", (1541, 1574), False, 'from fabric.api import local, sudo, run, warn_only, env, lcd, cd\n'), ((1579, 1615), 'fabric.api.run', 'run', (['"""pip install --use-wheel scipy"""'], {}), "('pip install --use-wheel scipy')\n", (1582, 1615), False, 'from fabric.api import local, sudo, run, warn_only, env, lcd, cd\n'), ((1620, 1658), 'fabric.api.run', 'run', (['"""pip install --use-wheel sklearn"""'], {}), "('pip install --use-wheel sklearn')\n", (1623, 1658), False, 'from fabric.api import local, sudo, run, warn_only, env, lcd, cd\n'), ((1663, 1700), 'fabric.api.run', 'run', (['"""pip install --use-wheel pandas"""'], {}), "('pip install --use-wheel pandas')\n", (1666, 1700), False, 'from fabric.api import local, sudo, run, warn_only, env, lcd, cd\n'), ((2285, 2359), 'fabric.api.local', 'local', (["('scp %s@%s:~/wordnik.zip %s' % (env.user, env.hosts[0], lambdafile))"], {}), "('scp %s@%s:~/wordnik.zip %s' % (env.user, env.hosts[0], lambdafile))\n", (2290, 2359), False, 'from fabric.api import local, sudo, run, warn_only, env, lcd, cd\n'), ((2650, 2719), 'fabric.api.local', 'local', (["('git archive --format=zip HEAD -o %s' % gitfile)"], {'capture': '(False)'}), "('git archive --format=zip HEAD -o 
%s' % gitfile, capture=False)\n", (2655, 2719), False, 'from fabric.api import local, sudo, run, warn_only, env, lcd, cd\n'), ((2724, 2768), 'fabric.api.local', 'local', (["('unzip -d git_tmp -o -u %s' % gitfile)"], {}), "('unzip -d git_tmp -o -u %s' % gitfile)\n", (2729, 2768), False, 'from fabric.api import local, sudo, run, warn_only, env, lcd, cd\n'), ((2844, 2907), 'fabric.api.local', 'local', (["('zip -9 %s serapis/config/credentials.yaml' % lambdafile)"], {}), "('zip -9 %s serapis/config/credentials.yaml' % lambdafile)\n", (2849, 2907), False, 'from fabric.api import local, sudo, run, warn_only, env, lcd, cd\n'), ((2998, 3020), 'fabric.api.local', 'local', (['"""rm -r git_tmp"""'], {}), "('rm -r git_tmp')\n", (3003, 3020), False, 'from fabric.api import local, sudo, run, warn_only, env, lcd, cd\n'), ((3037, 3086), 'fabric.api.local', 'local', (["('zip -9 %s lambda_handler.py' % lambdafile)"], {}), "('zip -9 %s lambda_handler.py' % lambdafile)\n", (3042, 3086), False, 'from fabric.api import local, sudo, run, warn_only, env, lcd, cd\n'), ((3091, 3132), 'fabric.api.local', 'local', (["('zip -9r %s serapis/' % lambdafile)"], {}), "('zip -9r %s serapis/' % lambdafile)\n", (3096, 3132), False, 'from fabric.api import local, sudo, run, warn_only, env, lcd, cd\n'), ((3137, 3182), 'fabric.api.local', 'local', (["('zip -9r %s temp_models/' % lambdafile)"], {}), "('zip -9r %s temp_models/' % lambdafile)\n", (3142, 3182), False, 'from fabric.api import local, sudo, run, warn_only, env, lcd, cd\n'), ((1098, 1109), 'fabric.api.warn_only', 'warn_only', ([], {}), '()\n', (1107, 1109), False, 'from fabric.api import local, sudo, run, warn_only, env, lcd, cd\n'), ((1119, 1142), 'fabric.api.run', 'run', (['"""rm ~/wordnik.zip"""'], {}), "('rm ~/wordnik.zip')\n", (1122, 1142), False, 'from fabric.api import local, sudo, run, warn_only, env, lcd, cd\n'), ((2778, 2792), 'fabric.api.lcd', 'lcd', (['"""git_tmp"""'], {}), "('git_tmp')\n", (2781, 2792), False, 'from fabric.api 
import local, sudo, run, warn_only, env, lcd, cd\n'), ((2802, 2839), 'fabric.api.local', 'local', (["('zip -9r ../%s .' % lambdafile)"], {}), "('zip -9r ../%s .' % lambdafile)\n", (2807, 2839), False, 'from fabric.api import local, sudo, run, warn_only, env, lcd, cd\n'), ((2197, 2232), 'fabric.api.run', 'run', (['"""zip -r -9 -q ~/wordnik.zip *"""'], {}), "('zip -r -9 -q ~/wordnik.zip *')\n", (2200, 2232), False, 'from fabric.api import local, sudo, run, warn_only, env, lcd, cd\n')]
# -*- coding: utf-8 -*- # Generated by Django 1.11.3 on 2017-11-07 08:01 from __future__ import unicode_literals from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('project', '0037_auto_20171031_0715'), ] operations = [ migrations.RenameField( model_name='application', old_name='did_accept_date', new_name='decision_date', ), ]
[ "django.db.migrations.RenameField" ]
[((292, 398), 'django.db.migrations.RenameField', 'migrations.RenameField', ([], {'model_name': '"""application"""', 'old_name': '"""did_accept_date"""', 'new_name': '"""decision_date"""'}), "(model_name='application', old_name='did_accept_date',\n new_name='decision_date')\n", (314, 398), False, 'from django.db import migrations\n')]
import tensorflow as tf from layers.utils import CustomLayer class SELayer(CustomLayer): def __init__(self): super().__init__() def build(self, input_shape): B, H, W, C = input_shape self.squeeze = tf.keras.layers.GlobalAveragePooling2D() self.excitation = tf.keras.Sequential([ tf.keras.layers.Dense(C//16), tf.keras.layers.Dense(C, activation='sigmoid') ]) self.multi = tf.keras.layers.Multiply() def call(self, inputs): scale = inputs scale = self.squeeze(scale) scale = self.excitation(scale) outputs = self.multi([inputs, scale]) return outputs
[ "tensorflow.keras.layers.GlobalAveragePooling2D", "tensorflow.keras.layers.Multiply", "tensorflow.keras.layers.Dense" ]
[((234, 274), 'tensorflow.keras.layers.GlobalAveragePooling2D', 'tf.keras.layers.GlobalAveragePooling2D', ([], {}), '()\n', (272, 274), True, 'import tensorflow as tf\n'), ((456, 482), 'tensorflow.keras.layers.Multiply', 'tf.keras.layers.Multiply', ([], {}), '()\n', (480, 482), True, 'import tensorflow as tf\n'), ((335, 365), 'tensorflow.keras.layers.Dense', 'tf.keras.layers.Dense', (['(C // 16)'], {}), '(C // 16)\n', (356, 365), True, 'import tensorflow as tf\n'), ((377, 423), 'tensorflow.keras.layers.Dense', 'tf.keras.layers.Dense', (['C'], {'activation': '"""sigmoid"""'}), "(C, activation='sigmoid')\n", (398, 423), True, 'import tensorflow as tf\n')]
import matplotlib import matplotlib.pyplot as plt import numpy as np import os import warnings from matplotlib import colors matplotlib.rc("font",family='AR PL SungtiL GB') warnings.filterwarnings('ignore') def vis_national(national, native, null, x): fig = plt.figure() ax = fig.add_subplot(111) plt.grid() plt.title('2018-2020年度各区县馆编目数据占比') years = [ '国图编目数据库', '本地编目数据库', '无编目数据库' ] total_width, n = 0.8, len(years) width = total_width / n xx = np.arange(len(x)) xx = xx - (total_width - width) / 2 plt.bar(xx, national, width=width, label=years[0]) plt.bar(xx + width, native, width=width, label=years[1]) plt.bar(xx + 2 * width, null, width=width, label=years[2]) plt.xticks(range(len(x)), x, rotation = 45) plt.ylabel('百分比(%)') plt.xlabel('区县图书馆') plt.legend(loc='best') plt.axhline(50, color = 'r') name = 'data' plt.savefig(name) return def book_purchase(p18, p19, p20, x): fig = plt.figure() ax = fig.add_subplot(111) plt.grid() plt.title('2018-2020年度各区县馆中文图书入藏量') years = [ '2018', '2019', '2020' ] total_width, n = 0.8, len(years) width = total_width / n xx = np.arange(len(x)) xx = xx - (total_width - width) / 2 plt.bar(xx, p18, width=width, label=years[0]) plt.bar(xx + width, p19, width=width, label=years[1]) plt.bar(xx + 2 * width, p20, width=width, label=years[2]) plt.xticks(range(len(x)), x, rotation = 45) plt.ylabel('中文图书入藏量(册)') plt.xlabel('区县图书馆') plt.legend(loc='best') plt.axhline(20000, color = 'r') max18 = np.argmax(np.array(p18)) max19 = np.argmax(np.array(p19)) max20 = np.argmax(np.array(p20)) plt.annotate( x[max18] + str(p18[max18]), xy = (max18, p18[max18]), bbox = dict(fc=(1,0.9,0.9)) ) plt.annotate( x[max19] + str(p19[max19]), xy = (max19, p19[max19]), bbox = dict(fc=(1,0.9,0.9)) ) plt.annotate( x[max20] + str(p20[max20]), xy = (max20, p20[max20]), bbox = dict(fc=(1,0.9,0.9)) ) name = 'purchase' plt.savefig(name) return def delay_time(delay, x): fig = plt.figure() ax = fig.add_subplot(111) plt.grid() plt.title('2018-2020年度各区县馆无编目数据图书平均滞后周期') 
plt.xticks(range(len(x)), x, rotation = 45) plt.ylabel('无编目数据图书平均滞后周期(月)') plt.xlabel('区县图书馆') # plt.legend(loc='best') xx = range(len(x)) plt.plot(xx, delay, marker = 'o', markersize = 2) plt.axhline(3, color = 'r') for xxy in zip(xx, delay): plt.annotate( str(xxy[1]), xy = xxy, bbox = dict(fc=(1,0.9,0.9)) ) name = 'delay' plt.savefig(name) return if __name__ == '__main__': national = [ 5, 70, 85, 20, 80, 50, 85, 40, 40, 40, 30, 100, ] native = [ 94, 10, 14, 79, 0, 50, 14, 50, 40, 40, 40, 0, ] null = [ 1, 20, 1, 1, 20, 1, 1, 10, 20, 20, 30, 0, ] p18 = [ 22299, 76721, 50234, 50369, 682, 275, 16381, 56261, 16934, 0, 41631, 33474, ] p19 = [ 24528, 28174, 21897, 15263, 1500, 2588, 19133, 23960, 26385, 0, 58390, 23005, ] p20 = [ 0, 14875, 12365, 27157, 0, 3186, 43311, 33822, 38390, 44433, 20068, 18744, ] delay = [ 2, 3, 4, 2, 3, 6, 4, 3, 24, 18, 9, 0.5, ] x = [ '和平馆', '河西馆', '河东馆', '红桥馆', '河北馆', '津南馆', '西青馆', '北辰馆', '东丽馆', '宁河馆', '宝坻馆', '蓟州馆', ] vis_national(national, native, null, x) book_purchase(p18, p19, p20, x) delay_time(delay, x)
[ "matplotlib.pyplot.title", "matplotlib.pyplot.axhline", "matplotlib.rc", "matplotlib.pyplot.plot", "warnings.filterwarnings", "matplotlib.pyplot.bar", "matplotlib.pyplot.legend", "matplotlib.pyplot.figure", "numpy.array", "matplotlib.pyplot.ylabel", "matplotlib.pyplot.xlabel", "matplotlib.pyplot.grid", "matplotlib.pyplot.savefig" ]
[((126, 174), 'matplotlib.rc', 'matplotlib.rc', (['"""font"""'], {'family': '"""AR PL SungtiL GB"""'}), "('font', family='AR PL SungtiL GB')\n", (139, 174), False, 'import matplotlib\n'), ((175, 208), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (198, 208), False, 'import warnings\n'), ((266, 278), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (276, 278), True, 'import matplotlib.pyplot as plt\n'), ((313, 323), 'matplotlib.pyplot.grid', 'plt.grid', ([], {}), '()\n', (321, 323), True, 'import matplotlib.pyplot as plt\n'), ((328, 362), 'matplotlib.pyplot.title', 'plt.title', (['"""2018-2020年度各区县馆编目数据占比"""'], {}), "('2018-2020年度各区县馆编目数据占比')\n", (337, 362), True, 'import matplotlib.pyplot as plt\n'), ((575, 625), 'matplotlib.pyplot.bar', 'plt.bar', (['xx', 'national'], {'width': 'width', 'label': 'years[0]'}), '(xx, national, width=width, label=years[0])\n', (582, 625), True, 'import matplotlib.pyplot as plt\n'), ((630, 686), 'matplotlib.pyplot.bar', 'plt.bar', (['(xx + width)', 'native'], {'width': 'width', 'label': 'years[1]'}), '(xx + width, native, width=width, label=years[1])\n', (637, 686), True, 'import matplotlib.pyplot as plt\n'), ((691, 749), 'matplotlib.pyplot.bar', 'plt.bar', (['(xx + 2 * width)', 'null'], {'width': 'width', 'label': 'years[2]'}), '(xx + 2 * width, null, width=width, label=years[2])\n', (698, 749), True, 'import matplotlib.pyplot as plt\n'), ((803, 823), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""百分比(%)"""'], {}), "('百分比(%)')\n", (813, 823), True, 'import matplotlib.pyplot as plt\n'), ((828, 847), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""区县图书馆"""'], {}), "('区县图书馆')\n", (838, 847), True, 'import matplotlib.pyplot as plt\n'), ((852, 874), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""best"""'}), "(loc='best')\n", (862, 874), True, 'import matplotlib.pyplot as plt\n'), ((879, 905), 'matplotlib.pyplot.axhline', 'plt.axhline', (['(50)'], {'color': 
'"""r"""'}), "(50, color='r')\n", (890, 905), True, 'import matplotlib.pyplot as plt\n'), ((930, 947), 'matplotlib.pyplot.savefig', 'plt.savefig', (['name'], {}), '(name)\n', (941, 947), True, 'import matplotlib.pyplot as plt\n'), ((1009, 1021), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (1019, 1021), True, 'import matplotlib.pyplot as plt\n'), ((1056, 1066), 'matplotlib.pyplot.grid', 'plt.grid', ([], {}), '()\n', (1064, 1066), True, 'import matplotlib.pyplot as plt\n'), ((1071, 1106), 'matplotlib.pyplot.title', 'plt.title', (['"""2018-2020年度各区县馆中文图书入藏量"""'], {}), "('2018-2020年度各区县馆中文图书入藏量')\n", (1080, 1106), True, 'import matplotlib.pyplot as plt\n'), ((1311, 1356), 'matplotlib.pyplot.bar', 'plt.bar', (['xx', 'p18'], {'width': 'width', 'label': 'years[0]'}), '(xx, p18, width=width, label=years[0])\n', (1318, 1356), True, 'import matplotlib.pyplot as plt\n'), ((1361, 1414), 'matplotlib.pyplot.bar', 'plt.bar', (['(xx + width)', 'p19'], {'width': 'width', 'label': 'years[1]'}), '(xx + width, p19, width=width, label=years[1])\n', (1368, 1414), True, 'import matplotlib.pyplot as plt\n'), ((1419, 1476), 'matplotlib.pyplot.bar', 'plt.bar', (['(xx + 2 * width)', 'p20'], {'width': 'width', 'label': 'years[2]'}), '(xx + 2 * width, p20, width=width, label=years[2])\n', (1426, 1476), True, 'import matplotlib.pyplot as plt\n'), ((1530, 1554), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""中文图书入藏量(册)"""'], {}), "('中文图书入藏量(册)')\n", (1540, 1554), True, 'import matplotlib.pyplot as plt\n'), ((1559, 1578), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""区县图书馆"""'], {}), "('区县图书馆')\n", (1569, 1578), True, 'import matplotlib.pyplot as plt\n'), ((1583, 1605), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""best"""'}), "(loc='best')\n", (1593, 1605), True, 'import matplotlib.pyplot as plt\n'), ((1610, 1639), 'matplotlib.pyplot.axhline', 'plt.axhline', (['(20000)'], {'color': '"""r"""'}), "(20000, color='r')\n", (1621, 1639), True, 'import matplotlib.pyplot 
as plt\n'), ((2176, 2193), 'matplotlib.pyplot.savefig', 'plt.savefig', (['name'], {}), '(name)\n', (2187, 2193), True, 'import matplotlib.pyplot as plt\n'), ((2244, 2256), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (2254, 2256), True, 'import matplotlib.pyplot as plt\n'), ((2291, 2301), 'matplotlib.pyplot.grid', 'plt.grid', ([], {}), '()\n', (2299, 2301), True, 'import matplotlib.pyplot as plt\n'), ((2306, 2347), 'matplotlib.pyplot.title', 'plt.title', (['"""2018-2020年度各区县馆无编目数据图书平均滞后周期"""'], {}), "('2018-2020年度各区县馆无编目数据图书平均滞后周期')\n", (2315, 2347), True, 'import matplotlib.pyplot as plt\n'), ((2400, 2430), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""无编目数据图书平均滞后周期(月)"""'], {}), "('无编目数据图书平均滞后周期(月)')\n", (2410, 2430), True, 'import matplotlib.pyplot as plt\n'), ((2435, 2454), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""区县图书馆"""'], {}), "('区县图书馆')\n", (2445, 2454), True, 'import matplotlib.pyplot as plt\n'), ((2511, 2556), 'matplotlib.pyplot.plot', 'plt.plot', (['xx', 'delay'], {'marker': '"""o"""', 'markersize': '(2)'}), "(xx, delay, marker='o', markersize=2)\n", (2519, 2556), True, 'import matplotlib.pyplot as plt\n'), ((2565, 2590), 'matplotlib.pyplot.axhline', 'plt.axhline', (['(3)'], {'color': '"""r"""'}), "(3, color='r')\n", (2576, 2590), True, 'import matplotlib.pyplot as plt\n'), ((2766, 2783), 'matplotlib.pyplot.savefig', 'plt.savefig', (['name'], {}), '(name)\n', (2777, 2783), True, 'import matplotlib.pyplot as plt\n'), ((1665, 1678), 'numpy.array', 'np.array', (['p18'], {}), '(p18)\n', (1673, 1678), True, 'import numpy as np\n'), ((1702, 1715), 'numpy.array', 'np.array', (['p19'], {}), '(p19)\n', (1710, 1715), True, 'import numpy as np\n'), ((1739, 1752), 'numpy.array', 'np.array', (['p20'], {}), '(p20)\n', (1747, 1752), True, 'import numpy as np\n')]
#!/usr/bin/env bash #!/bin/bash #!/bin/sh #!/bin/sh - from vk_api.utils import get_random_id from vk_api.bot_longpoll import VkBotLongPoll from vk_api import VkUpload import requests import vk_api import time import bot_functions import bot_variable if bot_variable.flag_repository: start_path = "" else: start_path = '/root/bot_herobot_chat/' if bot_variable.flag_smile: smile = bot_variable.smile_1 else: smile = bot_variable.smile_2 f = open('{}token.txt'.format(start_path), 'r') token = f.read() f.close() session = requests.Session() vk_session = vk_api.VkApi(token=token) longpoll = VkBotLongPoll(vk_session, '178949259') vk = vk_session.get_api() upload = VkUpload(vk_session) def main(): file_stat = open('{}logs_chat.txt'.format(start_path), 'r', encoding="utf-8") sl = {} for line in file_stat: line_1 = line line = line.split('*_*') if len(line) > 1 and line[3] == ' 1 ': number_words_in_message = 0 for i in line[4].split(' '): if i != " " and i != "\n" and i != "": number_words_in_message += 1 if line[2][1:-1] not in sl: last_people = line[2][1:-1] sl[line[2][1:-1]] = number_words_in_message else: last_people = line[2][1:-1] sl[line[2][1:-1]] += number_words_in_message elif len(line) == 1 and line != "\n": number_words_in_message = 0 for i in line_1.split(' '): if i != " " and i != "\n" and i != "": number_words_in_message += 1 sl[last_people] += number_words_in_message file_stat.close() file_word_in_week = open('{}word_in_week.txt'.format(start_path), 'r', encoding='utf-8') sl_1 = {} for line in file_word_in_week: sl_1[line.split('*_*')[0]] = int(line.split('*_*')[1][:-1:]) file_word_in_week.close() week_result = {} for i in sl: week_result[i] = int(sl[i]) - int(sl_1[i]) file_word_in_week = open('{}word_in_week.txt'.format(start_path), 'w', encoding='utf-8') for i in sl: file_word_in_week.write(i+'*_*'+str(sl[i])+'\n') file_word_in_week.close() stats = [] kolp = [] for i in week_result: kolp.append(week_result[i]) kolp.sort() kolp.reverse() jstr = [] for i in 
kolp: for j in week_result: if j == '' or j == '\n': continue if str(week_result[j]) == str(i) and j not in jstr: jstr.append(j) number_2 = '' for k in str(week_result[j]): number_2 += smile[k] fio_1 = requests.get("https://api.vk.com/method/users.get?user_ids=" + str(j) + "&fields=bdate&access_token=" + token + "&v=5.92").json() first_name_1 = fio_1["response"][0]["first_name"] last_name_1 = fio_1["response"][0]["last_name"] stats.append(first_name_1 + ' ' + last_name_1 + ': ' + str( number_2) + ' слов(а).\n') for i in range(0, len(stats)): stats[i] = str(i + 1) + ") " + stats[i] pizdabol = stats[0][stats[0].index(' ')+1:stats[0].index(':'):] stats = ''.join(stats) stats = "🔝 ТОП слов в беседе за прошедшую неделю:\n\n" + stats for i in bot_variable.spisok_chata: if bot_variable.spisok_chata[i] == pizdabol: pizdabol_id = i vk.messages.send( user_id=195310233, random_id=get_random_id(), message='Пиздабол недели обновлен в фоновом режиме, это [id' + str(pizdabol_id) + '|' + pizdabol + "]" ) attachments = [] image_url = 'https://sun9-47.userapi.com/XOoZN_1DA7BZKe_QiWyPiKvCriZUFNKltkOe1A/nYo9ZMZUegw.jpg' image = session.get(image_url, stream=True) photo = upload.photo_messages(photos=image.raw)[0] attachments.append('photo{}_{}'.format(photo['owner_id'], photo['id'])) vk.messages.send( chat_id=1, random_id=get_random_id(), attachment=','.join(attachments), message='Пиздабол недели [id' + str(pizdabol_id) + '|' + pizdabol + "]"+"!" ) time.sleep(5) vk.messages.send( chat_id=1, random_id=get_random_id(), message=stats ) if __name__ == "__main__": main()
[ "vk_api.utils.get_random_id", "requests.Session", "vk_api.VkUpload", "time.sleep", "vk_api.VkApi", "vk_api.bot_longpoll.VkBotLongPoll" ]
[((542, 560), 'requests.Session', 'requests.Session', ([], {}), '()\n', (558, 560), False, 'import requests\n'), ((574, 599), 'vk_api.VkApi', 'vk_api.VkApi', ([], {'token': 'token'}), '(token=token)\n', (586, 599), False, 'import vk_api\n'), ((611, 649), 'vk_api.bot_longpoll.VkBotLongPoll', 'VkBotLongPoll', (['vk_session', '"""178949259"""'], {}), "(vk_session, '178949259')\n", (624, 649), False, 'from vk_api.bot_longpoll import VkBotLongPoll\n'), ((685, 705), 'vk_api.VkUpload', 'VkUpload', (['vk_session'], {}), '(vk_session)\n', (693, 705), False, 'from vk_api import VkUpload\n'), ((4224, 4237), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (4234, 4237), False, 'import time\n'), ((3575, 3590), 'vk_api.utils.get_random_id', 'get_random_id', ([], {}), '()\n', (3588, 3590), False, 'from vk_api.utils import get_random_id\n'), ((4071, 4086), 'vk_api.utils.get_random_id', 'get_random_id', ([], {}), '()\n', (4084, 4086), False, 'from vk_api.utils import get_random_id\n'), ((4297, 4312), 'vk_api.utils.get_random_id', 'get_random_id', ([], {}), '()\n', (4310, 4312), False, 'from vk_api.utils import get_random_id\n')]
from sympy import (symbols, FunctionMatrix, MatrixExpr, Lambda, Matrix) def test_funcmatrix(): i, j = symbols('i,j') X = FunctionMatrix(3, 3, Lambda((i, j), i - j)) assert X[1, 1] == 0 assert X[1, 2] == -1 assert X.shape == (3, 3) assert X.rows == X.cols == 3 assert Matrix(X) == Matrix(3, 3, lambda i, j: i - j) assert isinstance(X*X + X, MatrixExpr)
[ "sympy.symbols", "sympy.Lambda", "sympy.Matrix" ]
[((108, 122), 'sympy.symbols', 'symbols', (['"""i,j"""'], {}), "('i,j')\n", (115, 122), False, 'from sympy import symbols, FunctionMatrix, MatrixExpr, Lambda, Matrix\n'), ((152, 173), 'sympy.Lambda', 'Lambda', (['(i, j)', '(i - j)'], {}), '((i, j), i - j)\n', (158, 173), False, 'from sympy import symbols, FunctionMatrix, MatrixExpr, Lambda, Matrix\n'), ((297, 306), 'sympy.Matrix', 'Matrix', (['X'], {}), '(X)\n', (303, 306), False, 'from sympy import symbols, FunctionMatrix, MatrixExpr, Lambda, Matrix\n'), ((310, 342), 'sympy.Matrix', 'Matrix', (['(3)', '(3)', '(lambda i, j: i - j)'], {}), '(3, 3, lambda i, j: i - j)\n', (316, 342), False, 'from sympy import symbols, FunctionMatrix, MatrixExpr, Lambda, Matrix\n')]
from webdriver_manager.chrome import ChromeDriverManager from bs4 import BeautifulSoup as bs from selenium import webdriver import json browser = webdriver.Chrome(ChromeDriverManager().install()) browser.get("https://www.asdc.asi.it/bzcat/") page = browser.execute_script("setPageSizeValue(0); setHead(Head, 1, 0); writeBottom(); setHead(Head, 1, 2); writeBottom(); setHead(Head, 1, 2); writeBottom(); setHead(Head, 1, 3); writeBottom(); setHead(Head, 1, 6); writeBottom(); return document.body.innerHTML") soup = bs(page, "html.parser") bottom = soup.find("div", {"id": "bottom"}) lines = [] table = bottom.find("table", {"class":"table_catalog"}) tbody = table.find("tbody") for tr in tbody.findAll("tr", {"id":lambda x: x == "second_line" or x == "first_line"}): lines.append([i.text.strip() for i in tr.findAll("td")][3:7]) f = open("../data/blazar2.json", "w") final = [{"a": a, "ra": ra, "de": de, "z": z} for a, ra, de, z in lines] f.write(json.dumps( final, sort_keys=True, indent=4, separators=(",", ": ") ) ) f.close() print("finished")
[ "bs4.BeautifulSoup", "webdriver_manager.chrome.ChromeDriverManager", "json.dumps" ]
[((527, 550), 'bs4.BeautifulSoup', 'bs', (['page', '"""html.parser"""'], {}), "(page, 'html.parser')\n", (529, 550), True, 'from bs4 import BeautifulSoup as bs\n'), ((985, 1052), 'json.dumps', 'json.dumps', (['final'], {'sort_keys': '(True)', 'indent': '(4)', 'separators': "(',', ': ')"}), "(final, sort_keys=True, indent=4, separators=(',', ': '))\n", (995, 1052), False, 'import json\n'), ((169, 190), 'webdriver_manager.chrome.ChromeDriverManager', 'ChromeDriverManager', ([], {}), '()\n', (188, 190), False, 'from webdriver_manager.chrome import ChromeDriverManager\n')]
# Copyright 2019 Graphcore Ltd. import tensorflow as tf import os import time import argparse import numpy as np import random from tensorflow.python.ipu.scopes import ipu_scope from tensorflow.python.ipu import ipu_compiler from seq2seq_edits import AttentionWrapperNoAssert, dynamic_decode, TrainingHelperNoCond, GreedyEmbeddingHelperNoCond from data_gen.reader import Data, Vocabulary from tensorflow.python.ipu import utils import util try: import __builtin__ input = getattr(__builtin__, 'raw_input') except (ImportError, AttributeError): pass tf.logging.set_verbosity(tf.logging.ERROR) time_major = True DTYPE = tf.float16 forget_bias = 1.0 max_gradient_norm = 1 learning_rate = 1 CHECKPOINT_FILE = './weights/' def print_data(src, src_vocab, tgt, tgt_vocab): for i, s in enumerate(src.T): t = tgt.T[i] src_end_idx = list(s).index(src_vocab.end_id()) try: tgt_end_idx = list(t).index(tgt_vocab.end_id()) except ValueError: tgt_end_idx = len(t) - 1 print("{} -> {}".format( ''.join(src_vocab.int_to_string(s[:src_end_idx])), ''.join(tgt_vocab.int_to_string(t[:tgt_end_idx])), )) class Nmt(object): def __init__(self, opts): self.opts = opts self.src_length = opts.sequence_length self.tgt_length = 11 # YYYY-MM-DD<eot> def _build_generator(self, data, vocab): instance_id = range(len(data.inputs)) priming = True if priming and self.opts.infer: priming = False batch_ids = random.sample(instance_id, self.opts.batch_size) src = np.array(data.inputs[batch_ids], dtype=np.int32) yield {self.placeholders['source']: src.T, } while True: batch_ids = random.sample(instance_id, self.opts.batch_size) src = np.array(data.inputs[batch_ids], dtype=np.int32) if self.opts.infer: if self.opts.interact: src = np.array([vocab[0].string_to_int(input("Enter a human date: ").strip())]) yield { self.placeholders['source']: src.T, } else: tgt = np.roll(np.array(data.targets[batch_ids], dtype=np.int32), 1) tgt[:, 0] = self.start_id lbl = np.array(data.targets[batch_ids], dtype=np.int32) mask = 
np.zeros(lbl.shape) for i, label in enumerate(lbl): end_idx = list(label).index(self.end_id) mask[i][:end_idx+1] = 1 yield { self.placeholders['source']: src.T, self.placeholders['target']: tgt.T, self.placeholders['label']: lbl.T, self.placeholders['mask']: mask.T } def _build_inputs(self): input_vocab = Vocabulary('./data/human_vocab.json', padding=self.src_length) output_vocab = Vocabulary('./data/machine_vocab.json', padding=self.tgt_length) self.src_vocab_size = input_vocab.size() self.tgt_vocab_size = output_vocab.size() self.start_id = output_vocab.start_id() self.end_id = output_vocab.end_id() data_file = './data/validation.csv' if self.opts.infer else './data/training.csv' data = Data(data_file, input_vocab, output_vocab) data.load() data.transform() self.placeholders = { 'source': tf.placeholder(tf.int32, shape=[self.src_length, self.opts.batch_size], name="source"), 'target': tf.placeholder(tf.int32, shape=[self.tgt_length, self.opts.batch_size], name="target"), 'label': tf.placeholder(tf.int32, shape=[self.tgt_length, self.opts.batch_size], name="label"), 'mask': tf.placeholder_with_default( tf.constant(1, shape=[self.tgt_length, self.opts.batch_size], dtype=tf.float16), [self.tgt_length, self.opts.batch_size], name="mask") } vocab = (input_vocab, output_vocab) generator = self._build_generator(data, vocab) return generator, vocab def infer(self): def build_infer(): embedding = Nmt._build_embedding(self.src_vocab_size, self.opts.embedding_size, name="source_embedding") input_, encoder_outputs, encoder_state = self._build_encoder(embedding) embedding = Nmt._build_embedding(self.tgt_vocab_size, self.opts.embedding_size, name="tgt_embedding") samples, logits = self._build_decoder(encoder_outputs, encoder_state, embedding, train=False) return samples, logits with ipu_scope('/device:IPU:0'): data, vocab = self._build_inputs() batch = ipu_compiler.compile(build_infer, []) # Create a restoring object saver = tf.train.Saver() ipu_options = 
util.get_config(report_n=0) utils.configure_ipu_system(ipu_options) session = tf.Session() checkpoint = CHECKPOINT_FILE + 'ckpt' saver.restore(session, checkpoint) # Run a dummy value to force the graph compilation session.run(batch, feed_dict=next(data)) while True: feed_dict = next(data) predictions, _ = session.run(batch, feed_dict=feed_dict) print_data(feed_dict[self.placeholders['source']], vocab[0], predictions, vocab[1]) if not self.opts.interact: break def train(self): def build_train(): embedding = Nmt._build_embedding(self.src_vocab_size, self.opts.embedding_size, name="source_embedding") input_, encoder_outputs, encoder_state = self._build_encoder(embedding) embedding = Nmt._build_embedding(self.tgt_vocab_size, self.opts.embedding_size, name="tgt_embedding") samples, logits = self._build_decoder(encoder_outputs, encoder_state, embedding, train=True) loss, update = self._build_optimiser(logits) return loss, samples, logits, update with ipu_scope('/device:IPU:0'): data, _ = self._build_inputs() batch = ipu_compiler.compile(build_train, []) # Create a restoring object saver = tf.train.Saver() if self.opts.save_graph: # Dump the graph to a logdir writer = tf.summary.FileWriter(os.path.join('./logs', 'NMT', time.strftime('%Y%m%d_%H%M%S_%Z'))) writer.add_graph(tf.get_default_graph()) ipu_options = util.get_config(report_n=0) utils.configure_ipu_system(ipu_options) session = tf.Session() checkpoint = CHECKPOINT_FILE + 'ckpt' if self.opts.ckpt: saver.restore(session, checkpoint) else: utils.move_variable_initialization_to_cpu() session.run(tf.global_variables_initializer()) print("Init done.") session.run(batch, feed_dict=next(data)) # Warmup duration = 0 avg_loss = 0 best_loss = float('Inf') for e in range(1, 1 + self.opts.steps): start = time.time() l, _, _ = session.run(batch, feed_dict=next(data)) duration += time.time() - start avg_loss += l if (e <= 1000 and not e % 100) or not e % 1000: duration /= 100 if e <= 1000 else 1000 avg_loss /= 100 if e <= 1000 
else 1000 print("Step: {:>5}. Average Loss {:.3}. Items/sec {:.4}. Tokens/sec {}".format( e, avg_loss, self.opts.batch_size / duration, self.opts.batch_size * (self.src_length + self.tgt_length) / duration)) if avg_loss < best_loss: best_loss = avg_loss saver.save(session, checkpoint) duration = 0 avg_loss = 0 @staticmethod def _build_embedding(vocab_size, embedding_size, name="embedding"): with tf.variable_scope("embedding", dtype=DTYPE, use_resource=True) as scope: # Random embedding embedding = tf.get_variable( name, [vocab_size, embedding_size], scope.dtype, initializer=tf.initializers.random_uniform(maxval=1.0, dtype=scope.dtype), trainable=False) return embedding @staticmethod def _build_cell(num_units, num_layers): if num_layers is 1: return tf.contrib.rnn.BasicLSTMCell(num_units, forget_bias=forget_bias, state_is_tuple=False) cell_list = [] for i in range(num_layers): cell_list.append(tf.contrib.rnn.BasicLSTMCell( num_units, forget_bias=forget_bias, state_is_tuple=False)) return tf.contrib.rnn.MultiRNNCell(cell_list) def _build_encoder(self, embedding): with tf.variable_scope("input", dtype=DTYPE, use_resource=True) as scope: source = self.placeholders['source'] encoder_emb_inp = tf.nn.embedding_lookup( embedding, source) with tf.variable_scope("encoder", dtype=DTYPE, use_resource=True) as scope: # use resource dtype = scope.dtype cell = Nmt._build_cell(self.opts.num_units, self.opts.num_layers) if self.opts.bi: outputs, states = tf.nn.bidirectional_dynamic_rnn( cell, Nmt._build_cell(self.opts.num_units, self.opts.num_layers), encoder_emb_inp, dtype=dtype, time_major=time_major, swap_memory=False) encoder_outputs = tf.add_n(outputs) encoder_state = states[0] + states[1] else: encoder_outputs, encoder_state = tf.nn.dynamic_rnn( cell, encoder_emb_inp, dtype=dtype, time_major=time_major, swap_memory=False) return source, encoder_outputs, encoder_state def _build_attention(self, encoder_outputs, decoder_cell): with tf.variable_scope("attention", dtype=DTYPE, 
use_resource=True) as scope: # Attention is batch major inputs = tf.transpose(encoder_outputs, [1, 0, 2]) if self.opts.attention == "luong": attention_mechanism = tf.contrib.seq2seq.LuongAttention( self.opts.num_units, inputs, dtype=scope.dtype, ) else: attention_mechanism = tf.contrib.seq2seq.BahdanauAttention( self.opts.num_units, inputs, dtype=scope.dtype, ) return AttentionWrapperNoAssert( decoder_cell, attention_mechanism) def _build_decoder(self, encoder_outputs, encoder_state, embedding, train=False): with tf.variable_scope("decoder", dtype=DTYPE, use_resource=True) as decoder_scope: dtype = decoder_scope.dtype tgt_length = self.src_length * 2 decoder_num_units = self.opts.num_units atten_num_units = self.opts.num_units # RNN Cell cell = Nmt._build_cell(decoder_num_units, self.opts.num_layers) initial_state = encoder_state # Attention wrapper if self.opts.attention: cell = self._build_attention(encoder_outputs, cell) initial_state = tf.contrib.seq2seq.AttentionWrapperState( cell_state=encoder_state, attention=tf.zeros([self.opts.batch_size, atten_num_units], dtype), time=tf.constant(0, tf.int32), alignments=tf.zeros([self.opts.batch_size, self.src_length], dtype), alignment_history=(), attention_state=tf.zeros([self.opts.batch_size, self.src_length], dtype) ) # Projection Layer projection_layer = tf.layers.Dense(units=self.tgt_vocab_size, use_bias=False, name="projection") if train: tgt_length = self.tgt_length target = self.placeholders['target'] decoder_emb_inp = tf.nn.embedding_lookup( embedding, target) helper = TrainingHelperNoCond( decoder_emb_inp, np.full([self.opts.batch_size], tgt_length, dtype=np.int32), time_major=time_major) else: # Inference tgt_sos_id = self.start_id tgt_eos_id = self.end_id start_tokens = np.full([self.opts.batch_size], tgt_sos_id, dtype=np.int32) end_token = tgt_eos_id helper = GreedyEmbeddingHelperNoCond( embedding, start_tokens, end_token) decoder = tf.contrib.seq2seq.BasicDecoder( cell, helper, initial_state=initial_state, 
output_layer=projection_layer if not train else None # applied per timestep ) # Dynamic decoding outputs, final_context_state, _ = dynamic_decode( # Contains the XLA check decoder, maximum_iterations=tgt_length, # Required for static TensorArrays output_time_major=time_major, swap_memory=False, scope=decoder_scope) if train: # Specify dynamic shapes to avoid Assert logits = outputs.rnn_output logits.set_shape([tgt_length, self.opts.batch_size, atten_num_units]) logits = projection_layer(logits) return outputs.sample_id, logits else: return outputs.sample_id, outputs.rnn_output def _build_optimiser(self, logits): with tf.variable_scope("loss", use_resource=True): labels = self.placeholders['label'] mask = self.placeholders['mask'] # Logits is dynamic so an Assert is added to check shapes crossent = tf.nn.sparse_softmax_cross_entropy_with_logits( labels=labels, logits=logits) train_loss = (tf.reduce_sum(crossent*mask) / self.opts.batch_size) # Calculate and clip gradients params = tf.trainable_variables() gradients = tf.gradients(train_loss, params) clipped_gradients = [tf.clip_by_norm(grad, max_gradient_norm) for grad in gradients] optimizer = tf.train.GradientDescentOptimizer(learning_rate) update_step = optimizer.apply_gradients( zip(clipped_gradients, params)) return train_loss, update_step if __name__ == '__main__': parser = argparse.ArgumentParser(description='NMT model in TensorFlow to run on the IPU') parser.add_argument('--infer', action="store_true", help="Inference Only") parser.add_argument('--bi', action="store_true", help="Use bidirectional layer in encoder (with outputs summed)") parser.add_argument('--attention', choices=['luong', 'bahdanau'], default='luong', help="Add an attention model") parser.add_argument('--batch-size', type=int, default=1, help="Set batch-size") parser.add_argument('--num-units', type=int, default=512, help="Number of units in each LSTM cell") parser.add_argument('--num-layers', type=int, default=1, help="Size of LSTM stack in 
the encoder and decoder") parser.add_argument('--embedding-size', type=int, default=32, help="Size of source and target embedding") parser.add_argument('--sequence-length', type=int, default=20, help="Size of input length (by padding or truncating)") parser.add_argument('--ckpt', action="store_true", help="load weights from latest checkpoint") parser.add_argument('--seed', type=int, default=1984, help="Random seed") parser.add_argument('--interact', action="store_true", help="Perform inference on values entered from the command line") parser.add_argument('--save-graph', action="store_true", help="Save the graph to './logs' to be viewed by TensorBoard") parser.add_argument('--steps', type=int, default=50000, help="Number of steps to complete in training") args = parser.parse_args() random.seed(args.seed) if args.interact: args.batch_size = 1 args.infer = True print("NMT {}.\n Batch size: {}. Hidden units: {}. Layers: {}.".format( "Inference" if args.infer else "Training", args.batch_size, args.num_units, args.num_layers)) n = Nmt(args) if args.infer: n.infer() else: n.train()
[ "tensorflow.contrib.seq2seq.BahdanauAttention", "util.get_config", "tensorflow.reduce_sum", "tensorflow.contrib.seq2seq.LuongAttention", "argparse.ArgumentParser", "tensorflow.trainable_variables", "seq2seq_edits.GreedyEmbeddingHelperNoCond", "random.sample", "time.strftime", "tensorflow.logging.set_verbosity", "tensorflow.contrib.seq2seq.BasicDecoder", "tensorflow.get_default_graph", "data_gen.reader.Data", "tensorflow.layers.Dense", "numpy.full", "tensorflow.add_n", "tensorflow.variable_scope", "tensorflow.python.ipu.ipu_compiler.compile", "tensorflow.placeholder", "seq2seq_edits.AttentionWrapperNoAssert", "random.seed", "tensorflow.gradients", "tensorflow.clip_by_norm", "tensorflow.contrib.rnn.MultiRNNCell", "seq2seq_edits.dynamic_decode", "tensorflow.python.ipu.utils.configure_ipu_system", "tensorflow.initializers.random_uniform", "tensorflow.train.Saver", "tensorflow.nn.embedding_lookup", "tensorflow.global_variables_initializer", "tensorflow.Session", "tensorflow.constant", "tensorflow.transpose", "tensorflow.contrib.rnn.BasicLSTMCell", "data_gen.reader.Vocabulary", "tensorflow.train.GradientDescentOptimizer", "tensorflow.python.ipu.scopes.ipu_scope", "tensorflow.nn.dynamic_rnn", "numpy.zeros", "time.time", "tensorflow.zeros", "numpy.array", "tensorflow.python.ipu.utils.move_variable_initialization_to_cpu", "tensorflow.nn.sparse_softmax_cross_entropy_with_logits" ]
[((566, 608), 'tensorflow.logging.set_verbosity', 'tf.logging.set_verbosity', (['tf.logging.ERROR'], {}), '(tf.logging.ERROR)\n', (590, 608), True, 'import tensorflow as tf\n'), ((14999, 15084), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""NMT model in TensorFlow to run on the IPU"""'}), "(description='NMT model in TensorFlow to run on the IPU'\n )\n", (15022, 15084), False, 'import argparse\n'), ((16813, 16835), 'random.seed', 'random.seed', (['args.seed'], {}), '(args.seed)\n', (16824, 16835), False, 'import random\n'), ((2917, 2979), 'data_gen.reader.Vocabulary', 'Vocabulary', (['"""./data/human_vocab.json"""'], {'padding': 'self.src_length'}), "('./data/human_vocab.json', padding=self.src_length)\n", (2927, 2979), False, 'from data_gen.reader import Data, Vocabulary\n'), ((3003, 3067), 'data_gen.reader.Vocabulary', 'Vocabulary', (['"""./data/machine_vocab.json"""'], {'padding': 'self.tgt_length'}), "('./data/machine_vocab.json', padding=self.tgt_length)\n", (3013, 3067), False, 'from data_gen.reader import Data, Vocabulary\n'), ((3366, 3408), 'data_gen.reader.Data', 'Data', (['data_file', 'input_vocab', 'output_vocab'], {}), '(data_file, input_vocab, output_vocab)\n', (3370, 3408), False, 'from data_gen.reader import Data, Vocabulary\n'), ((4936, 4952), 'tensorflow.train.Saver', 'tf.train.Saver', ([], {}), '()\n', (4950, 4952), True, 'import tensorflow as tf\n'), ((4976, 5003), 'util.get_config', 'util.get_config', ([], {'report_n': '(0)'}), '(report_n=0)\n', (4991, 5003), False, 'import util\n'), ((5012, 5051), 'tensorflow.python.ipu.utils.configure_ipu_system', 'utils.configure_ipu_system', (['ipu_options'], {}), '(ipu_options)\n', (5038, 5051), False, 'from tensorflow.python.ipu import utils\n'), ((5070, 5082), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (5080, 5082), True, 'import tensorflow as tf\n'), ((6377, 6393), 'tensorflow.train.Saver', 'tf.train.Saver', ([], {}), '()\n', (6391, 6393), True, 'import 
tensorflow as tf\n'), ((6654, 6681), 'util.get_config', 'util.get_config', ([], {'report_n': '(0)'}), '(report_n=0)\n', (6669, 6681), False, 'import util\n'), ((6690, 6729), 'tensorflow.python.ipu.utils.configure_ipu_system', 'utils.configure_ipu_system', (['ipu_options'], {}), '(ipu_options)\n', (6716, 6729), False, 'from tensorflow.python.ipu import utils\n'), ((6748, 6760), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (6758, 6760), True, 'import tensorflow as tf\n'), ((8908, 8946), 'tensorflow.contrib.rnn.MultiRNNCell', 'tf.contrib.rnn.MultiRNNCell', (['cell_list'], {}), '(cell_list)\n', (8935, 8946), True, 'import tensorflow as tf\n'), ((14582, 14606), 'tensorflow.trainable_variables', 'tf.trainable_variables', ([], {}), '()\n', (14604, 14606), True, 'import tensorflow as tf\n'), ((14627, 14659), 'tensorflow.gradients', 'tf.gradients', (['train_loss', 'params'], {}), '(train_loss, params)\n', (14639, 14659), True, 'import tensorflow as tf\n'), ((14775, 14823), 'tensorflow.train.GradientDescentOptimizer', 'tf.train.GradientDescentOptimizer', (['learning_rate'], {}), '(learning_rate)\n', (14808, 14823), True, 'import tensorflow as tf\n'), ((1587, 1635), 'random.sample', 'random.sample', (['instance_id', 'self.opts.batch_size'], {}), '(instance_id, self.opts.batch_size)\n', (1600, 1635), False, 'import random\n'), ((1654, 1702), 'numpy.array', 'np.array', (['data.inputs[batch_ids]'], {'dtype': 'np.int32'}), '(data.inputs[batch_ids], dtype=np.int32)\n', (1662, 1702), True, 'import numpy as np\n'), ((1804, 1852), 'random.sample', 'random.sample', (['instance_id', 'self.opts.batch_size'], {}), '(instance_id, self.opts.batch_size)\n', (1817, 1852), False, 'import random\n'), ((1871, 1919), 'numpy.array', 'np.array', (['data.inputs[batch_ids]'], {'dtype': 'np.int32'}), '(data.inputs[batch_ids], dtype=np.int32)\n', (1879, 1919), True, 'import numpy as np\n'), ((3507, 3597), 'tensorflow.placeholder', 'tf.placeholder', (['tf.int32'], {'shape': '[self.src_length, 
self.opts.batch_size]', 'name': '"""source"""'}), "(tf.int32, shape=[self.src_length, self.opts.batch_size],\n name='source')\n", (3521, 3597), True, 'import tensorflow as tf\n'), ((3617, 3707), 'tensorflow.placeholder', 'tf.placeholder', (['tf.int32'], {'shape': '[self.tgt_length, self.opts.batch_size]', 'name': '"""target"""'}), "(tf.int32, shape=[self.tgt_length, self.opts.batch_size],\n name='target')\n", (3631, 3707), True, 'import tensorflow as tf\n'), ((3726, 3815), 'tensorflow.placeholder', 'tf.placeholder', (['tf.int32'], {'shape': '[self.tgt_length, self.opts.batch_size]', 'name': '"""label"""'}), "(tf.int32, shape=[self.tgt_length, self.opts.batch_size],\n name='label')\n", (3740, 3815), True, 'import tensorflow as tf\n'), ((4750, 4776), 'tensorflow.python.ipu.scopes.ipu_scope', 'ipu_scope', (['"""/device:IPU:0"""'], {}), "('/device:IPU:0')\n", (4759, 4776), False, 'from tensorflow.python.ipu.scopes import ipu_scope\n'), ((4845, 4882), 'tensorflow.python.ipu.ipu_compiler.compile', 'ipu_compiler.compile', (['build_infer', '[]'], {}), '(build_infer, [])\n', (4865, 4882), False, 'from tensorflow.python.ipu import ipu_compiler\n'), ((6195, 6221), 'tensorflow.python.ipu.scopes.ipu_scope', 'ipu_scope', (['"""/device:IPU:0"""'], {}), "('/device:IPU:0')\n", (6204, 6221), False, 'from tensorflow.python.ipu.scopes import ipu_scope\n'), ((6286, 6323), 'tensorflow.python.ipu.ipu_compiler.compile', 'ipu_compiler.compile', (['build_train', '[]'], {}), '(build_train, [])\n', (6306, 6323), False, 'from tensorflow.python.ipu import ipu_compiler\n'), ((6907, 6950), 'tensorflow.python.ipu.utils.move_variable_initialization_to_cpu', 'utils.move_variable_initialization_to_cpu', ([], {}), '()\n', (6948, 6950), False, 'from tensorflow.python.ipu import utils\n'), ((7241, 7252), 'time.time', 'time.time', ([], {}), '()\n', (7250, 7252), False, 'import time\n'), ((8146, 8208), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""embedding"""'], {'dtype': 'DTYPE', 
'use_resource': '(True)'}), "('embedding', dtype=DTYPE, use_resource=True)\n", (8163, 8208), True, 'import tensorflow as tf\n'), ((8599, 8689), 'tensorflow.contrib.rnn.BasicLSTMCell', 'tf.contrib.rnn.BasicLSTMCell', (['num_units'], {'forget_bias': 'forget_bias', 'state_is_tuple': '(False)'}), '(num_units, forget_bias=forget_bias,\n state_is_tuple=False)\n', (8627, 8689), True, 'import tensorflow as tf\n'), ((9002, 9060), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""input"""'], {'dtype': 'DTYPE', 'use_resource': '(True)'}), "('input', dtype=DTYPE, use_resource=True)\n", (9019, 9060), True, 'import tensorflow as tf\n'), ((9150, 9191), 'tensorflow.nn.embedding_lookup', 'tf.nn.embedding_lookup', (['embedding', 'source'], {}), '(embedding, source)\n', (9172, 9191), True, 'import tensorflow as tf\n'), ((9223, 9283), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""encoder"""'], {'dtype': 'DTYPE', 'use_resource': '(True)'}), "('encoder', dtype=DTYPE, use_resource=True)\n", (9240, 9283), True, 'import tensorflow as tf\n'), ((10277, 10339), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""attention"""'], {'dtype': 'DTYPE', 'use_resource': '(True)'}), "('attention', dtype=DTYPE, use_resource=True)\n", (10294, 10339), True, 'import tensorflow as tf\n'), ((10410, 10450), 'tensorflow.transpose', 'tf.transpose', (['encoder_outputs', '[1, 0, 2]'], {}), '(encoder_outputs, [1, 0, 2])\n', (10422, 10450), True, 'import tensorflow as tf\n'), ((10946, 11005), 'seq2seq_edits.AttentionWrapperNoAssert', 'AttentionWrapperNoAssert', (['decoder_cell', 'attention_mechanism'], {}), '(decoder_cell, attention_mechanism)\n', (10970, 11005), False, 'from seq2seq_edits import AttentionWrapperNoAssert, dynamic_decode, TrainingHelperNoCond, GreedyEmbeddingHelperNoCond\n'), ((11123, 11183), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""decoder"""'], {'dtype': 'DTYPE', 'use_resource': '(True)'}), "('decoder', dtype=DTYPE, use_resource=True)\n", (11140, 11183), True, 
'import tensorflow as tf\n'), ((12236, 12313), 'tensorflow.layers.Dense', 'tf.layers.Dense', ([], {'units': 'self.tgt_vocab_size', 'use_bias': '(False)', 'name': '"""projection"""'}), "(units=self.tgt_vocab_size, use_bias=False, name='projection')\n", (12251, 12313), True, 'import tensorflow as tf\n'), ((13096, 13228), 'tensorflow.contrib.seq2seq.BasicDecoder', 'tf.contrib.seq2seq.BasicDecoder', (['cell', 'helper'], {'initial_state': 'initial_state', 'output_layer': '(projection_layer if not train else None)'}), '(cell, helper, initial_state=initial_state,\n output_layer=projection_layer if not train else None)\n', (13127, 13228), True, 'import tensorflow as tf\n'), ((13409, 13538), 'seq2seq_edits.dynamic_decode', 'dynamic_decode', (['decoder'], {'maximum_iterations': 'tgt_length', 'output_time_major': 'time_major', 'swap_memory': '(False)', 'scope': 'decoder_scope'}), '(decoder, maximum_iterations=tgt_length, output_time_major=\n time_major, swap_memory=False, scope=decoder_scope)\n', (13423, 13538), False, 'from seq2seq_edits import AttentionWrapperNoAssert, dynamic_decode, TrainingHelperNoCond, GreedyEmbeddingHelperNoCond\n'), ((14119, 14163), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""loss"""'], {'use_resource': '(True)'}), "('loss', use_resource=True)\n", (14136, 14163), True, 'import tensorflow as tf\n'), ((14352, 14428), 'tensorflow.nn.sparse_softmax_cross_entropy_with_logits', 'tf.nn.sparse_softmax_cross_entropy_with_logits', ([], {'labels': 'labels', 'logits': 'logits'}), '(labels=labels, logits=logits)\n', (14398, 14428), True, 'import tensorflow as tf\n'), ((14690, 14730), 'tensorflow.clip_by_norm', 'tf.clip_by_norm', (['grad', 'max_gradient_norm'], {}), '(grad, max_gradient_norm)\n', (14705, 14730), True, 'import tensorflow as tf\n'), ((2355, 2404), 'numpy.array', 'np.array', (['data.targets[batch_ids]'], {'dtype': 'np.int32'}), '(data.targets[batch_ids], dtype=np.int32)\n', (2363, 2404), True, 'import numpy as np\n'), ((2428, 2447), 
'numpy.zeros', 'np.zeros', (['lbl.shape'], {}), '(lbl.shape)\n', (2436, 2447), True, 'import numpy as np\n'), ((3878, 3957), 'tensorflow.constant', 'tf.constant', (['(1)'], {'shape': '[self.tgt_length, self.opts.batch_size]', 'dtype': 'tf.float16'}), '(1, shape=[self.tgt_length, self.opts.batch_size], dtype=tf.float16)\n', (3889, 3957), True, 'import tensorflow as tf\n'), ((6607, 6629), 'tensorflow.get_default_graph', 'tf.get_default_graph', ([], {}), '()\n', (6627, 6629), True, 'import tensorflow as tf\n'), ((6975, 7008), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (7006, 7008), True, 'import tensorflow as tf\n'), ((7340, 7351), 'time.time', 'time.time', ([], {}), '()\n', (7349, 7351), False, 'import time\n'), ((8774, 8864), 'tensorflow.contrib.rnn.BasicLSTMCell', 'tf.contrib.rnn.BasicLSTMCell', (['num_units'], {'forget_bias': 'forget_bias', 'state_is_tuple': '(False)'}), '(num_units, forget_bias=forget_bias,\n state_is_tuple=False)\n', (8802, 8864), True, 'import tensorflow as tf\n'), ((9809, 9826), 'tensorflow.add_n', 'tf.add_n', (['outputs'], {}), '(outputs)\n', (9817, 9826), True, 'import tensorflow as tf\n'), ((9948, 10047), 'tensorflow.nn.dynamic_rnn', 'tf.nn.dynamic_rnn', (['cell', 'encoder_emb_inp'], {'dtype': 'dtype', 'time_major': 'time_major', 'swap_memory': '(False)'}), '(cell, encoder_emb_inp, dtype=dtype, time_major=time_major,\n swap_memory=False)\n', (9965, 10047), True, 'import tensorflow as tf\n'), ((10537, 10623), 'tensorflow.contrib.seq2seq.LuongAttention', 'tf.contrib.seq2seq.LuongAttention', (['self.opts.num_units', 'inputs'], {'dtype': 'scope.dtype'}), '(self.opts.num_units, inputs, dtype=scope.\n dtype)\n', (10570, 10623), True, 'import tensorflow as tf\n'), ((10758, 10847), 'tensorflow.contrib.seq2seq.BahdanauAttention', 'tf.contrib.seq2seq.BahdanauAttention', (['self.opts.num_units', 'inputs'], {'dtype': 'scope.dtype'}), '(self.opts.num_units, inputs, dtype=\n scope.dtype)\n', (10794, 
10847), True, 'import tensorflow as tf\n'), ((12469, 12510), 'tensorflow.nn.embedding_lookup', 'tf.nn.embedding_lookup', (['embedding', 'target'], {}), '(embedding, target)\n', (12491, 12510), True, 'import tensorflow as tf\n'), ((12863, 12922), 'numpy.full', 'np.full', (['[self.opts.batch_size]', 'tgt_sos_id'], {'dtype': 'np.int32'}), '([self.opts.batch_size], tgt_sos_id, dtype=np.int32)\n', (12870, 12922), True, 'import numpy as np\n'), ((12988, 13051), 'seq2seq_edits.GreedyEmbeddingHelperNoCond', 'GreedyEmbeddingHelperNoCond', (['embedding', 'start_tokens', 'end_token'], {}), '(embedding, start_tokens, end_token)\n', (13015, 13051), False, 'from seq2seq_edits import AttentionWrapperNoAssert, dynamic_decode, TrainingHelperNoCond, GreedyEmbeddingHelperNoCond\n'), ((14472, 14502), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['(crossent * mask)'], {}), '(crossent * mask)\n', (14485, 14502), True, 'import tensorflow as tf\n'), ((2237, 2286), 'numpy.array', 'np.array', (['data.targets[batch_ids]'], {'dtype': 'np.int32'}), '(data.targets[batch_ids], dtype=np.int32)\n', (2245, 2286), True, 'import numpy as np\n'), ((6542, 6575), 'time.strftime', 'time.strftime', (['"""%Y%m%d_%H%M%S_%Z"""'], {}), "('%Y%m%d_%H%M%S_%Z')\n", (6555, 6575), False, 'import time\n'), ((8384, 8445), 'tensorflow.initializers.random_uniform', 'tf.initializers.random_uniform', ([], {'maxval': '(1.0)', 'dtype': 'scope.dtype'}), '(maxval=1.0, dtype=scope.dtype)\n', (8414, 8445), True, 'import tensorflow as tf\n'), ((12617, 12676), 'numpy.full', 'np.full', (['[self.opts.batch_size]', 'tgt_length'], {'dtype': 'np.int32'}), '([self.opts.batch_size], tgt_length, dtype=np.int32)\n', (12624, 12676), True, 'import numpy as np\n'), ((11818, 11874), 'tensorflow.zeros', 'tf.zeros', (['[self.opts.batch_size, atten_num_units]', 'dtype'], {}), '([self.opts.batch_size, atten_num_units], dtype)\n', (11826, 11874), True, 'import tensorflow as tf\n'), ((11901, 11925), 'tensorflow.constant', 'tf.constant', (['(0)', 
'tf.int32'], {}), '(0, tf.int32)\n', (11912, 11925), True, 'import tensorflow as tf\n'), ((11958, 12014), 'tensorflow.zeros', 'tf.zeros', (['[self.opts.batch_size, self.src_length]', 'dtype'], {}), '([self.opts.batch_size, self.src_length], dtype)\n', (11966, 12014), True, 'import tensorflow as tf\n'), ((12094, 12150), 'tensorflow.zeros', 'tf.zeros', (['[self.opts.batch_size, self.src_length]', 'dtype'], {}), '([self.opts.batch_size, self.src_length], dtype)\n', (12102, 12150), True, 'import tensorflow as tf\n')]
""" Generates an executable with pytest runner embedded using PyInstaller. """ if __name__ == '__main__': import pytest import subprocess hidden = [] for x in pytest.freeze_includes(): hidden.extend(['--hidden-import', x]) args = ['pyinstaller', '--noconfirm'] + hidden + ['runtests_script.py'] subprocess.check_call(' '.join(args), shell=True)
[ "pytest.freeze_includes" ]
[((176, 200), 'pytest.freeze_includes', 'pytest.freeze_includes', ([], {}), '()\n', (198, 200), False, 'import pytest\n')]
#!/usr/bin/python # Copyright (c) 2020, 2022 Oracle and/or its affiliates. # This software is made available to you under the terms of the GPL 3.0 license or the Apache 2.0 license. # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # Apache License v2.0 # See LICENSE.TXT for details. # GENERATED FILE - DO NOT EDIT - MANUAL CHANGES WILL BE OVERWRITTEN from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = { "metadata_version": "1.1", "status": ["preview"], "supported_by": "community", } DOCUMENTATION = """ --- module: oci_appmgmt_control_monitored_instance_actions short_description: Perform actions on a MonitoredInstance resource in Oracle Cloud Infrastructure description: - Perform actions on a MonitoredInstance resource in Oracle Cloud Infrastructure - For I(action=activate_monitoring_plugin), activates Resource Plugin for compute instance identified by the instance ocid. Stores monitored instances Id and its state. Tries to enable Resource Monitoring plugin by making remote calls to Oracle Cloud Agent and Management Agent Cloud Service. - For I(action=publish_top_processes_metrics), starts cpu and memory top processes collection. version_added: "2.9.0" author: Oracle (@oracle) options: monitored_instance_id: description: - OCID of monitored instance. type: str aliases: ["id"] required: true action: description: - The action to perform on the MonitoredInstance. 
type: str required: true choices: - "activate_monitoring_plugin" - "publish_top_processes_metrics" extends_documentation_fragment: [ oracle.oci.oracle, oracle.oci.oracle_wait_options ] """ EXAMPLES = """ - name: Perform action activate_monitoring_plugin on monitored_instance oci_appmgmt_control_monitored_instance_actions: # required monitored_instance_id: "ocid1.monitoredinstance.oc1..xxxxxxEXAMPLExxxxxx" action: activate_monitoring_plugin - name: Perform action publish_top_processes_metrics on monitored_instance oci_appmgmt_control_monitored_instance_actions: # required monitored_instance_id: "ocid1.monitoredinstance.oc1..xxxxxxEXAMPLExxxxxx" action: publish_top_processes_metrics """ RETURN = """ monitored_instance: description: - Details of the MonitoredInstance resource acted upon by the current operation returned: on success type: complex contains: instance_id: description: - The L(OCID,https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm) of monitored instance. returned: on success type: str sample: "ocid1.instance.oc1..xxxxxxEXAMPLExxxxxx" compartment_id: description: - Compartment Identifier L(OCID,https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm) returned: on success type: str sample: "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx" display_name: description: - A user-friendly name of the monitored instance. It is binded to L(Compute Instance,https://docs.cloud.oracle.com/Content/Compute/Concepts/computeoverview.htm). DisplayName is fetched from L(Core Service API,https://docs.cloud.oracle.com/api/#/en/iaas/20160918/Instance/). returned: on success type: str sample: display_name_example management_agent_id: description: - Management Agent Identifier L(OCID,https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm). Used to invoke manage operations on Management Agent Cloud Service. 
returned: on success type: str sample: "ocid1.managementagent.oc1..xxxxxxEXAMPLExxxxxx" time_created: description: - The time the MonitoredInstance was created. An RFC3339 formatted datetime string returned: on success type: str sample: "2013-10-20T19:20:30+01:00" time_updated: description: - The time the MonitoredInstance was updated. An RFC3339 formatted datetime string returned: on success type: str sample: "2013-10-20T19:20:30+01:00" monitoring_state: description: - Monitoring status. Can be either enabled or disabled. returned: on success type: str sample: ENABLED lifecycle_state: description: - The current state of the monitored instance. returned: on success type: str sample: CREATING lifecycle_details: description: - A message describing the current state in more detail. For example, can be used to provide actionable information for a resource in Failed state. returned: on success type: str sample: lifecycle_details_example sample: { "instance_id": "ocid1.instance.oc1..xxxxxxEXAMPLExxxxxx", "compartment_id": "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx", "display_name": "display_name_example", "management_agent_id": "ocid1.managementagent.oc1..xxxxxxEXAMPLExxxxxx", "time_created": "2013-10-20T19:20:30+01:00", "time_updated": "2013-10-20T19:20:30+01:00", "monitoring_state": "ENABLED", "lifecycle_state": "CREATING", "lifecycle_details": "lifecycle_details_example" } """ from ansible.module_utils.basic import AnsibleModule from ansible_collections.oracle.oci.plugins.module_utils import ( oci_common_utils, oci_wait_utils, ) from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import ( OCIActionsHelperBase, get_custom_class, ) try: from oci.appmgmt_control import AppmgmtControlClient HAS_OCI_PY_SDK = True except ImportError: HAS_OCI_PY_SDK = False class MonitoredInstanceActionsHelperGen(OCIActionsHelperBase): """ Supported actions: activate_monitoring_plugin publish_top_processes_metrics """ @staticmethod def get_module_resource_id_param(): 
return "monitored_instance_id" def get_module_resource_id(self): return self.module.params.get("monitored_instance_id") def get_get_fn(self): return self.client.get_monitored_instance def get_resource(self): return oci_common_utils.call_with_backoff( self.client.get_monitored_instance, monitored_instance_id=self.module.params.get("monitored_instance_id"), ) def activate_monitoring_plugin(self): return oci_wait_utils.call_and_wait( call_fn=self.client.activate_monitoring_plugin, call_fn_args=(), call_fn_kwargs=dict( monitored_instance_id=self.module.params.get("monitored_instance_id"), ), waiter_type=oci_wait_utils.WORK_REQUEST_WAITER_KEY, operation="{0}_{1}".format( self.module.params.get("action").upper(), oci_common_utils.ACTION_OPERATION_KEY, ), waiter_client=self.get_waiter_client(), resource_helper=self, wait_for_states=oci_common_utils.get_work_request_completed_states(), ) def publish_top_processes_metrics(self): return oci_wait_utils.call_and_wait( call_fn=self.client.publish_top_processes_metrics, call_fn_args=(), call_fn_kwargs=dict( monitored_instance_id=self.module.params.get("monitored_instance_id"), ), waiter_type=oci_wait_utils.WORK_REQUEST_WAITER_KEY, operation="{0}_{1}".format( self.module.params.get("action").upper(), oci_common_utils.ACTION_OPERATION_KEY, ), waiter_client=self.get_waiter_client(), resource_helper=self, wait_for_states=oci_common_utils.get_work_request_completed_states(), ) MonitoredInstanceActionsHelperCustom = get_custom_class( "MonitoredInstanceActionsHelperCustom" ) class ResourceHelper( MonitoredInstanceActionsHelperCustom, MonitoredInstanceActionsHelperGen ): pass def main(): module_args = oci_common_utils.get_common_arg_spec( supports_create=False, supports_wait=True ) module_args.update( dict( monitored_instance_id=dict(aliases=["id"], type="str", required=True), action=dict( type="str", required=True, choices=["activate_monitoring_plugin", "publish_top_processes_metrics"], ), ) ) module = 
AnsibleModule(argument_spec=module_args, supports_check_mode=True) if not HAS_OCI_PY_SDK: module.fail_json(msg="oci python sdk required for this module.") resource_helper = ResourceHelper( module=module, resource_type="monitored_instance", service_client_class=AppmgmtControlClient, namespace="appmgmt_control", ) result = resource_helper.perform_action(module.params.get("action")) module.exit_json(**result) if __name__ == "__main__": main()
[ "ansible_collections.oracle.oci.plugins.module_utils.oci_common_utils.get_common_arg_spec", "ansible.module_utils.basic.AnsibleModule", "ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils.get_custom_class", "ansible_collections.oracle.oci.plugins.module_utils.oci_common_utils.get_work_request_completed_states" ]
[((8365, 8421), 'ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils.get_custom_class', 'get_custom_class', (['"""MonitoredInstanceActionsHelperCustom"""'], {}), "('MonitoredInstanceActionsHelperCustom')\n", (8381, 8421), False, 'from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import OCIActionsHelperBase, get_custom_class\n'), ((8572, 8651), 'ansible_collections.oracle.oci.plugins.module_utils.oci_common_utils.get_common_arg_spec', 'oci_common_utils.get_common_arg_spec', ([], {'supports_create': '(False)', 'supports_wait': '(True)'}), '(supports_create=False, supports_wait=True)\n', (8608, 8651), False, 'from ansible_collections.oracle.oci.plugins.module_utils import oci_common_utils, oci_wait_utils\n'), ((9005, 9071), 'ansible.module_utils.basic.AnsibleModule', 'AnsibleModule', ([], {'argument_spec': 'module_args', 'supports_check_mode': '(True)'}), '(argument_spec=module_args, supports_check_mode=True)\n', (9018, 9071), False, 'from ansible.module_utils.basic import AnsibleModule\n'), ((7532, 7584), 'ansible_collections.oracle.oci.plugins.module_utils.oci_common_utils.get_work_request_completed_states', 'oci_common_utils.get_work_request_completed_states', ([], {}), '()\n', (7582, 7584), False, 'from ansible_collections.oracle.oci.plugins.module_utils import oci_common_utils, oci_wait_utils\n'), ((8260, 8312), 'ansible_collections.oracle.oci.plugins.module_utils.oci_common_utils.get_work_request_completed_states', 'oci_common_utils.get_work_request_completed_states', ([], {}), '()\n', (8310, 8312), False, 'from ansible_collections.oracle.oci.plugins.module_utils import oci_common_utils, oci_wait_utils\n')]
from django.conf import settings from django.contrib.contenttypes.fields import GenericForeignKey from django.contrib.contenttypes.models import ContentType from django.core.exceptions import ImproperlyConfigured from django.db import models from django.utils.translation import gettext_lazy as _ from simpleseo.utils import get_generic_lang_code class SeoMetadata(models.Model): content_type = models.ForeignKey( ContentType, on_delete=models.CASCADE, null=True, blank=True) object_id = models.PositiveIntegerField(null=True, blank=True) content_object = GenericForeignKey('content_type', 'object_id') path = models.CharField(verbose_name=_('Path'), max_length=255, db_index=True, help_text=_("This should be an absolute path, excluding " "the domain name. Example: '/foo/bar/'.")) lang_code = models.CharField(verbose_name=_('Language'), max_length=255, choices=settings.LANGUAGES, default=get_generic_lang_code()) title = models.CharField(verbose_name=_('Title'), max_length=255, blank=True, help_text=_("Recommended length: up to 70 symbols")) description = models.CharField(verbose_name=_('Description'), max_length=255, blank=True, help_text=_("Recommended length: up to 160 symbols.")) keywords = models.CharField(verbose_name=_('Keywords'), max_length=255, blank=True, help_text=_("Recommended length: up to 10 keyword phrases.")) text = models.TextField(verbose_name=_('Text'), blank=True) class Meta: verbose_name = _('SEO metadata') verbose_name_plural = _('SEO metadata') db_table = 'seo_metadata' unique_together = (('path', 'lang_code'), ) ordering = ('path', 'lang_code') def __str__(self): return "Language: %s | URL: %s" % (self.lang_code, self.path) def get_absolute_url(self): return self.path def update_seo(sender, instance, **kwargs): newpath = instance.get_absolute_url() SeoMetadata.objects.filter(content_object=instance).update(path=newpath) def register_seo_signals(): for app, model in getattr(settings, 'SEO_MODELS', []): ctype = ContentType.objects.get(app_label=app, 
model=model) if not hasattr(ctype.model_class(), 'get_absolute_url'): raise ImproperlyConfigured( "Needed get_absolute_url method not defined on %s.%s model." % (app, model) ) models.signals.post_save.connect(update_seo, sender=ctype.model_class(), weak=False)
[ "django.core.exceptions.ImproperlyConfigured", "django.contrib.contenttypes.fields.GenericForeignKey", "django.contrib.contenttypes.models.ContentType.objects.get", "django.db.models.ForeignKey", "django.utils.translation.gettext_lazy", "django.db.models.PositiveIntegerField", "simpleseo.utils.get_generic_lang_code" ]
[((401, 480), 'django.db.models.ForeignKey', 'models.ForeignKey', (['ContentType'], {'on_delete': 'models.CASCADE', 'null': '(True)', 'blank': '(True)'}), '(ContentType, on_delete=models.CASCADE, null=True, blank=True)\n', (418, 480), False, 'from django.db import models\n'), ((506, 556), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (533, 556), False, 'from django.db import models\n'), ((578, 624), 'django.contrib.contenttypes.fields.GenericForeignKey', 'GenericForeignKey', (['"""content_type"""', '"""object_id"""'], {}), "('content_type', 'object_id')\n", (595, 624), False, 'from django.contrib.contenttypes.fields import GenericForeignKey\n'), ((1715, 1732), 'django.utils.translation.gettext_lazy', '_', (['"""SEO metadata"""'], {}), "('SEO metadata')\n", (1716, 1732), True, 'from django.utils.translation import gettext_lazy as _\n'), ((1763, 1780), 'django.utils.translation.gettext_lazy', '_', (['"""SEO metadata"""'], {}), "('SEO metadata')\n", (1764, 1780), True, 'from django.utils.translation import gettext_lazy as _\n'), ((2330, 2381), 'django.contrib.contenttypes.models.ContentType.objects.get', 'ContentType.objects.get', ([], {'app_label': 'app', 'model': 'model'}), '(app_label=app, model=model)\n', (2353, 2381), False, 'from django.contrib.contenttypes.models import ContentType\n'), ((666, 675), 'django.utils.translation.gettext_lazy', '_', (['"""Path"""'], {}), "('Path')\n", (667, 675), True, 'from django.utils.translation import gettext_lazy as _\n'), ((746, 837), 'django.utils.translation.gettext_lazy', '_', (['"""This should be an absolute path, excluding the domain name. Example: \'/foo/bar/\'."""'], {}), '("This should be an absolute path, excluding the domain name. 
Example: \'/foo/bar/\'."\n )\n', (747, 837), True, 'from django.utils.translation import gettext_lazy as _\n'), ((923, 936), 'django.utils.translation.gettext_lazy', '_', (['"""Language"""'], {}), "('Language')\n", (924, 936), True, 'from django.utils.translation import gettext_lazy as _\n'), ((1056, 1079), 'simpleseo.utils.get_generic_lang_code', 'get_generic_lang_code', ([], {}), '()\n', (1077, 1079), False, 'from simpleseo.utils import get_generic_lang_code\n'), ((1123, 1133), 'django.utils.translation.gettext_lazy', '_', (['"""Title"""'], {}), "('Title')\n", (1124, 1133), True, 'from django.utils.translation import gettext_lazy as _\n'), ((1202, 1243), 'django.utils.translation.gettext_lazy', '_', (['"""Recommended length: up to 70 symbols"""'], {}), "('Recommended length: up to 70 symbols')\n", (1203, 1243), True, 'from django.utils.translation import gettext_lazy as _\n'), ((1293, 1309), 'django.utils.translation.gettext_lazy', '_', (['"""Description"""'], {}), "('Description')\n", (1294, 1309), True, 'from django.utils.translation import gettext_lazy as _\n'), ((1384, 1427), 'django.utils.translation.gettext_lazy', '_', (['"""Recommended length: up to 160 symbols."""'], {}), "('Recommended length: up to 160 symbols.')\n", (1385, 1427), True, 'from django.utils.translation import gettext_lazy as _\n'), ((1474, 1487), 'django.utils.translation.gettext_lazy', '_', (['"""Keywords"""'], {}), "('Keywords')\n", (1475, 1487), True, 'from django.utils.translation import gettext_lazy as _\n'), ((1559, 1609), 'django.utils.translation.gettext_lazy', '_', (['"""Recommended length: up to 10 keyword phrases."""'], {}), "('Recommended length: up to 10 keyword phrases.')\n", (1560, 1609), True, 'from django.utils.translation import gettext_lazy as _\n'), ((1652, 1661), 'django.utils.translation.gettext_lazy', '_', (['"""Text"""'], {}), "('Text')\n", (1653, 1661), True, 'from django.utils.translation import gettext_lazy as _\n'), ((2465, 2572), 
'django.core.exceptions.ImproperlyConfigured', 'ImproperlyConfigured', (["('Needed get_absolute_url method not defined on %s.%s model.' % (app, model))"], {}), "(\n 'Needed get_absolute_url method not defined on %s.%s model.' % (app, model)\n )\n", (2485, 2572), False, 'from django.core.exceptions import ImproperlyConfigured\n')]
import cv2 import numpy as np from random import randint from functools import reduce from os import walk from scipy.spatial import ConvexHull DIMENSIONS = (512, 512) def fragment_overlay(background_img, masked_fragment): mask = masked_fragment.astype(int).sum(-1) == np.zeros(DIMENSIONS) background_img = np.where(mask[..., None], background_img, masked_fragment) return background_img def transparent_superimposition(background_img, masked_fragment): mask = masked_fragment.astype(int).sum(-1) == np.zeros(DIMENSIONS) background_img = np.where(mask[..., None], background_img, cv2.addWeighted(background_img, 0.5, masked_fragment, 0.5, 0)) return background_img def polygon_area(vertices): x, y = vertices[:, 0], vertices[:, 1] correction = x[-1] * y[0] - y[-1] * x[0] main_area = np.dot(x[:-1], y[1:]) - np.dot(y[:-1], x[1:]) return 0.5 * np.abs(main_area + correction) def lab_adjust(image, delta_light=0, clip_limit=1.0): lab = cv2.cvtColor(image, cv2.COLOR_BGR2LAB) l, a, b = cv2.split(lab) clahe = cv2.createCLAHE(clipLimit=clip_limit) cl = clahe.apply(l) cl = cv2.add(cl, delta_light) limg = cv2.merge((cl, a, b)) final = cv2.cvtColor(limg, cv2.COLOR_LAB2BGR) return final def rotate(image, angle=0, scale=1.0): center = tuple(ti//2 for ti in DIMENSIONS) M = cv2.getRotationMatrix2D(center, angle, scale) rotated = cv2.warpAffine(image, M, DIMENSIONS) return rotated def darken(image, delta_light=-25): return lab_adjust(image, delta_light=delta_light) def lighten(image, delta_light=25): return lab_adjust(image, delta_light=delta_light) def increase_contrast(image, clip_limit=2.5): return lab_adjust(image, clip_limit=clip_limit) def blur(image): blurred = cv2.GaussianBlur(image, (3, 3), 0) return blurred def do_nothing(image): return image def random_image_adjustment(): # change contrast, brightness, rotate?, blur? 
- functions with respective probabilities of being chosen chosen_transformations = [] for list_of_functions in all_transformations: chosen_transformations.append(np.random.choice(list_of_functions, p=[0.6, 0.2, 0.2])) return compose_functions(*chosen_transformations) def compose_functions(*func): def compose(f, g): return lambda x: f(g(x)) return reduce(compose, func, lambda x: x) def load(path='images'): _, _, filenames = next(walk(path)) return np.array(list(map(lambda x: cv2.resize(cv2.imread(path + '/' + x), DIMENSIONS), filenames)), dtype='uint8') def save(image, identifier=123): cv2.imwrite(f"results/{identifier}_{randint(0, 1000)}.jpg", image) def random_point(shift=[255, 255], deviation=256): x, y = randint(shift[0] - deviation, shift[0] + deviation), randint(shift[1] - deviation, shift[1] + deviation) return np.array([x, y]) def random_mask(): mask = np.zeros(DIMENSIONS, dtype='uint8') cv2.fillPoly(mask, pts=[random_polygon(9)], color=255) return mask def random_polygon(n): points = np.random.randint(0, 511, size=(n, 2)) hull = ConvexHull(points) return points[hull.vertices] all_transformations = [[do_nothing, do_nothing, increase_contrast], [do_nothing, darken, lighten], [do_nothing, do_nothing, blur]]
[ "cv2.GaussianBlur", "numpy.abs", "os.walk", "cv2.warpAffine", "numpy.random.randint", "cv2.getRotationMatrix2D", "random.randint", "cv2.cvtColor", "cv2.split", "numpy.random.choice", "cv2.addWeighted", "cv2.createCLAHE", "numpy.dot", "cv2.merge", "scipy.spatial.ConvexHull", "cv2.add", "numpy.zeros", "cv2.imread", "numpy.where", "numpy.array", "functools.reduce" ]
[((329, 387), 'numpy.where', 'np.where', (['mask[..., None]', 'background_img', 'masked_fragment'], {}), '(mask[..., None], background_img, masked_fragment)\n', (337, 387), True, 'import numpy as np\n'), ((1016, 1054), 'cv2.cvtColor', 'cv2.cvtColor', (['image', 'cv2.COLOR_BGR2LAB'], {}), '(image, cv2.COLOR_BGR2LAB)\n', (1028, 1054), False, 'import cv2\n'), ((1070, 1084), 'cv2.split', 'cv2.split', (['lab'], {}), '(lab)\n', (1079, 1084), False, 'import cv2\n'), ((1098, 1135), 'cv2.createCLAHE', 'cv2.createCLAHE', ([], {'clipLimit': 'clip_limit'}), '(clipLimit=clip_limit)\n', (1113, 1135), False, 'import cv2\n'), ((1171, 1195), 'cv2.add', 'cv2.add', (['cl', 'delta_light'], {}), '(cl, delta_light)\n', (1178, 1195), False, 'import cv2\n'), ((1208, 1229), 'cv2.merge', 'cv2.merge', (['(cl, a, b)'], {}), '((cl, a, b))\n', (1217, 1229), False, 'import cv2\n'), ((1243, 1280), 'cv2.cvtColor', 'cv2.cvtColor', (['limg', 'cv2.COLOR_LAB2BGR'], {}), '(limg, cv2.COLOR_LAB2BGR)\n', (1255, 1280), False, 'import cv2\n'), ((1400, 1445), 'cv2.getRotationMatrix2D', 'cv2.getRotationMatrix2D', (['center', 'angle', 'scale'], {}), '(center, angle, scale)\n', (1423, 1445), False, 'import cv2\n'), ((1461, 1497), 'cv2.warpAffine', 'cv2.warpAffine', (['image', 'M', 'DIMENSIONS'], {}), '(image, M, DIMENSIONS)\n', (1475, 1497), False, 'import cv2\n'), ((1851, 1885), 'cv2.GaussianBlur', 'cv2.GaussianBlur', (['image', '(3, 3)', '(0)'], {}), '(image, (3, 3), 0)\n', (1867, 1885), False, 'import cv2\n'), ((2440, 2474), 'functools.reduce', 'reduce', (['compose', 'func', '(lambda x: x)'], {}), '(compose, func, lambda x: x)\n', (2446, 2474), False, 'from functools import reduce\n'), ((2960, 2976), 'numpy.array', 'np.array', (['[x, y]'], {}), '([x, y])\n', (2968, 2976), True, 'import numpy as np\n'), ((3013, 3048), 'numpy.zeros', 'np.zeros', (['DIMENSIONS'], {'dtype': '"""uint8"""'}), "(DIMENSIONS, dtype='uint8')\n", (3021, 3048), True, 'import numpy as np\n'), ((3168, 3206), 'numpy.random.randint', 
'np.random.randint', (['(0)', '(511)'], {'size': '(n, 2)'}), '(0, 511, size=(n, 2))\n', (3185, 3206), True, 'import numpy as np\n'), ((3219, 3237), 'scipy.spatial.ConvexHull', 'ConvexHull', (['points'], {}), '(points)\n', (3229, 3237), False, 'from scipy.spatial import ConvexHull\n'), ((286, 306), 'numpy.zeros', 'np.zeros', (['DIMENSIONS'], {}), '(DIMENSIONS)\n', (294, 306), True, 'import numpy as np\n'), ((537, 557), 'numpy.zeros', 'np.zeros', (['DIMENSIONS'], {}), '(DIMENSIONS)\n', (545, 557), True, 'import numpy as np\n'), ((622, 683), 'cv2.addWeighted', 'cv2.addWeighted', (['background_img', '(0.5)', 'masked_fragment', '(0.5)', '(0)'], {}), '(background_img, 0.5, masked_fragment, 0.5, 0)\n', (637, 683), False, 'import cv2\n'), ((851, 872), 'numpy.dot', 'np.dot', (['x[:-1]', 'y[1:]'], {}), '(x[:-1], y[1:])\n', (857, 872), True, 'import numpy as np\n'), ((875, 896), 'numpy.dot', 'np.dot', (['y[:-1]', 'x[1:]'], {}), '(y[:-1], x[1:])\n', (881, 896), True, 'import numpy as np\n'), ((915, 945), 'numpy.abs', 'np.abs', (['(main_area + correction)'], {}), '(main_area + correction)\n', (921, 945), True, 'import numpy as np\n'), ((2533, 2543), 'os.walk', 'walk', (['path'], {}), '(path)\n', (2537, 2543), False, 'from os import walk\n'), ((2843, 2894), 'random.randint', 'randint', (['(shift[0] - deviation)', '(shift[0] + deviation)'], {}), '(shift[0] - deviation, shift[0] + deviation)\n', (2850, 2894), False, 'from random import randint\n'), ((2896, 2947), 'random.randint', 'randint', (['(shift[1] - deviation)', '(shift[1] + deviation)'], {}), '(shift[1] - deviation, shift[1] + deviation)\n', (2903, 2947), False, 'from random import randint\n'), ((2220, 2274), 'numpy.random.choice', 'np.random.choice', (['list_of_functions'], {'p': '[0.6, 0.2, 0.2]'}), '(list_of_functions, p=[0.6, 0.2, 0.2])\n', (2236, 2274), True, 'import numpy as np\n'), ((2744, 2760), 'random.randint', 'randint', (['(0)', '(1000)'], {}), '(0, 1000)\n', (2751, 2760), False, 'from random import 
randint\n'), ((2596, 2622), 'cv2.imread', 'cv2.imread', (["(path + '/' + x)"], {}), "(path + '/' + x)\n", (2606, 2622), False, 'import cv2\n')]
# -*- coding: utf-8 -*-
"""Console script for ptrello."""
import sys
import click
import logging
from ptrello import api
from ptrello.core.config import logger
# from ptrello.core.config import settings
# import inspect

# Package-namespaced logger (intentionally rebinds the imported name).
logger = logging.getLogger("ptrello." + __name__)

default_note = "quicknote.txt"


class Config(object):
    # Kept for backward compatibility; currently unused.
    l = []
    pass


# pass_config = click.make_pass_decorator(Config, ensure=True)


@click.group(chain=True)
# @pass_config
@click.pass_context
def main(ctx):
    """Root command group; seeds the shared click context object."""
    ctx.obj = {'trello': None}


@click.pass_context
def populate_context(ctx, args, show_all_lists, board_filter, card_filter, target_list=None):
    """Resolve trello boards/lists/cards matching *args* and cache them on ctx.

    Results are cached under ``ctx.obj['trello']`` so chained commands reuse
    them.  When *target_list* is given (used by ``move``), the matching target
    objects are additionally cached under ``ctx.obj['target_ctx']``.
    """
    input_text = args
    _output = []
    _target = []
    ctx.obj['input_args'] = args
    try:
        if not ctx.obj['trello']:
            ctx.obj['input_args'] = args
            e = api.guess_card_list_board(input_text,
                                          board_filter=board_filter,
                                          card_filter=card_filter,
                                          show_all_lists=show_all_lists)
            _output.extend(e)
            ctx.obj['trello'] = _output
        if target_list:
            ctx.obj['input_args_target_list'] = target_list
            _target = []
            _target.extend(
                api.guess_card_list_board(target_list,
                                          board_filter=board_filter,
                                          card_filter=card_filter,
                                          show_all_lists=False))
            ctx.obj['target_ctx'] = _target
    except ValueError as err:
        handle_error(err)
        logger.warning("Could not retrieve trello objects. \n{}".format(err))


@main.command('card')
@click.pass_context
@click.argument('args', nargs=3, required=False)
@click.option('--match_all_lists/--match_intersect_lists', default=False,
              help='Show all lists, or only those that match')
@click.option('--card_filter', default='open', help='Card filter (open, closed)')
@click.option('--board_filter', default='starred', help='Board filter (starred, open, close)')
def card(ctx, args, match_all_lists, board_filter, card_filter):
    """Print the sorted list of cards matching ARGS."""
    try:
        if not ctx.obj['trello']:
            populate_context(args, board_filter=board_filter,
                             card_filter=card_filter,
                             show_all_lists=match_all_lists)
        print_context_sorted_list()
    except ValueError as err:
        handle_error(err)


@main.command()
@click.pass_context
@click.argument('args', nargs=3, required=False)
@click.option('--match_all_lists/--match_intersect_lists', default=False,
              help='Show all lists, or only those that match')
@click.option('--card_filter', default='open', help='Card filter (open, closed)')
@click.option('--board_filter', default='starred', help='Board filter (starred, open, close)')
def show(ctx, args, match_all_lists, board_filter, card_filter):
    """Show full details of the card(s) matching ARGS."""
    try:
        if not ctx.obj['trello']:
            populate_context(args, board_filter=board_filter,
                             card_filter=card_filter,
                             show_all_lists=match_all_lists)
    except Exception:
        # Best-effort: fall through and print whatever context exists.
        # (Narrowed from a bare ``except:`` that also swallowed SystemExit.)
        pass
    try:
        print_context_card_detail()
    except Exception as e:
        logger.error("Handled error occured: {}".format(e.args[0]))
        click.secho(e.args[0], fg='red')


@main.command()
@click.pass_context
@click.argument('args', nargs=3, required=False)
@click.option('--match_all_lists/--match_intersect_lists', default=False,
              help='Show all lists, or only those that match')
@click.option('--card_filter', default='open', help='Card filter (open, closed)')
@click.option('--board_filter', default='starred', help='Board filter (starred, open, close)')
def add(ctx, args, match_all_lists, board_filter, card_filter):
    """Interactively add a new card to the single matching list."""
    try:
        if not ctx.obj['trello']:
            populate_context(args, board_filter=board_filter,
                             card_filter=card_filter,
                             show_all_lists=match_all_lists)
    except Exception:
        pass
    cards = get_context_filtered_cards()
    # Renamed from ``list`` to avoid shadowing the builtin.
    lists = get_context_filtered_lists()
    # Abort when the card name already matches an existing card, or when the
    # target list is ambiguous (more than one candidate).
    if len(cards) or len(lists) > 1:
        error_string = "There were {} lists and {} cards matching.Please make the card name is unique " \
                       "and there is only one list to place the card on.".format(len(lists), len(cards))
        click.secho(error_string, fg='red')
        return
    description = click.prompt('Enter a description', default='')
    labels = click.prompt('Enter labels seperated by spaces', default='personal', show_default=True)
    due_date = click.prompt('Enter due date', show_default=True, default='')
    print(ctx.obj['input_args'][-1])
    print(ctx.obj['trello'][0]['filtered_lists'][0])
    api.add_card(list=ctx.obj['trello'][0]['filtered_lists'][0],
                 name=ctx.obj['input_args'][-1],
                 description=description, labels=labels, due_date=due_date)


@main.command()
@click.pass_context
@click.argument('args', nargs=3, required=False)
@click.option('--text', default=None, required=False)
@click.option('--match_all_lists/--match_intersect_lists', default=False,
              help='Show all lists, or only those that match')
@click.option('--card_filter', default='open', help='Card filter (open, closed)')
@click.option('--board_filter', default='starred', help='Board filter (starred, open, close)')
def comment(ctx, args, text, match_all_lists, board_filter, card_filter):
    """Add a comment to the single card matching ARGS."""
    try:
        if not ctx.obj['trello']:
            populate_context(args, board_filter=board_filter,
                             card_filter=card_filter,
                             show_all_lists=match_all_lists)
    except Exception:
        pass
    cards = get_context_filtered_cards()
    if len(cards) > 1:
        # BUG FIX: the original format string had no placeholder, so the
        # card count passed to .format() was silently dropped.
        error_string = "More than one card found--({}). \nCould not add comments".format(len(cards))
        click.secho(error_string, fg='red')
        return
    if not text:
        text = click.prompt('Enter comment', show_default=True, default='')
    api.add_comment(cards[0], text)


@main.command()
@click.pass_context
@click.argument('args', nargs=3, required=False)
@click.option('--target_list', nargs=2, required=True)
@click.option('--match_all_lists/--match_intersect_lists', default=False,
              help='Show all lists, or only those that match')
@click.option('--card_filter', default='open', help='Card filter (open, closed)')
@click.option('--board_filter', default='starred', help='Board filter (starred, open, close)')
def move(ctx, args, target_list, match_all_lists, board_filter, card_filter):
    """Move the matching card(s) onto TARGET_LIST (after confirmation)."""
    try:
        if not args and ctx.obj['input_args']:
            # Reuse args captured by an earlier chained command.
            args = ctx.obj['input_args']
            print(args)
        else:
            pass
        populate_context(args, board_filter=board_filter, card_filter=card_filter,
                         show_all_lists=match_all_lists, target_list=target_list)
        c = get_context_filtered_cards()
        if len(c) > 0:
            yn = click.prompt("There are {} cards selected, are you sure you want to move them all?".format(len(c)))
            if str.lower(yn) == 'y':
                target_board = ctx.obj['target_ctx'][0]['board']
                api.move_card(card=c, target_board_id=target_board.id,
                              target_list_id=ctx.obj['target_ctx'][0]['filtered_lists'][0].id)
            else:
                click.secho("card(s) not moved")
                return
    except Exception as e:
        handle_error(e, sys._getframe().f_code.co_name)


@click.pass_context
def get_context_sorted_list(ctx):
    """Return the sorted list cached on the first context entry."""
    for obj in ctx.obj['trello']:
        return obj['sorted_list']


@click.pass_context
def get_context_filtered_cards(ctx):
    """Return the filtered cards cached on the first context entry."""
    for obj in ctx.obj['trello']:
        return obj['filtered_cards']


@click.pass_context
def get_context_filtered_lists(ctx):
    """Return the filtered lists cached on the first context entry."""
    for obj in ctx.obj['trello']:
        return obj['filtered_lists']


def print_context_sorted_list():
    """Best-effort print of the cached sorted list (silent on failure)."""
    try:
        for item in get_context_sorted_list():
            click.secho(api.print_trello_object(item)[0], fg='yellow')
    except Exception:
        # Deliberately best-effort: nothing to print when no context exists.
        pass


def print_context_cards():
    """Print each cached filtered card."""
    for item in get_context_filtered_cards():
        click.secho(api.print_trello_object(item)[0], fg='yellow')


def print_context_lists():
    """Print each cached filtered list."""
    for item in get_context_filtered_lists():
        click.secho(api.print_trello_object(item)[0], fg='yellow')


def get_context_card_detail(get_comments=False):
    """Collect a detail dict per cached card; comments included on request."""
    list_of_card_dicts = []
    for item in get_context_filtered_cards():
        comments = []
        card_dict = {}
        if get_comments:
            # Newest comment first.
            comments.extend(reversed(item.get_comments()))
        card_dict['short_id'] = item.short_id
        card_dict['name'] = item.name
        card_dict['board_name'] = item.board.name
        card_dict['list_name'] = api.get_list_name_for_card(item, get_context_filtered_lists())
        card_dict['card_created_date'] = item.card_created_date
        card_dict['due_date'] = item.due_date
        card_dict['description'] = item.description
        card_dict['labels'] = item.labels
        card_dict['comments'] = comments
        list_of_card_dicts.append(card_dict)
    return list_of_card_dicts


def print_context_card_detail():
    """Pretty-print each cached card with its comments."""
    loc = get_context_card_detail(True)
    for item in loc:
        click.secho(100 * "-", fg='blue')
        click.secho("\n", fg='blue')
        click.secho("### Name({}): {}".format(item['short_id'], item['name']), fg='green', bold=True, nl="\n")
        click.secho("Path: {} > {}".format(item['board_name'], item['list_name']), fg='yellow', nl="\n\n")
        click.secho("Create Date: {} ".format(item['card_created_date']), fg='yellow', nl="\n")
        click.secho("Due Date: {} ".format(item['due_date']), fg='yellow', nl="\n")
        click.secho("Desc: {} ".format(item['description']), fg='yellow', nl="\n")
        click.secho("Labels: {} ".format(item['labels']), fg='yellow', nl="\n")
        click.secho("Comments: ", fg='yellow')
        for comm in item['comments']:
            click.secho("{} - {} ".format(comm['date'], comm['data']['text']), fg='yellow', nl="\n")
        click.secho("\n")


def handle_error(err, name=None):
    """Echo every exception arg in red and log the failure context."""
    ep = ""
    for e in err.args:
        # Assumes exception args are strings — TODO confirm against callers.
        ep += e
        click.secho(e, fg='red')
    logger.warning("{} type error encountered from function {}: {} ".format(type(err), name, err))


if __name__ == "__main__":
    main(obj={'trello': None})
[ "ptrello.api.print_trello_object", "ptrello.api.guess_card_list_board", "click.argument", "click.option", "ptrello.api.move_card", "ptrello.api.add_card", "sys._getframe", "ptrello.api.add_comment", "click.group", "click.secho", "logging.getLogger", "click.prompt" ]
[((232, 272), 'logging.getLogger', 'logging.getLogger', (["('ptrello.' + __name__)"], {}), "('ptrello.' + __name__)\n", (249, 272), False, 'import logging\n'), ((414, 437), 'click.group', 'click.group', ([], {'chain': '(True)'}), '(chain=True)\n', (425, 437), False, 'import click\n'), ((1598, 1645), 'click.argument', 'click.argument', (['"""args"""'], {'nargs': '(3)', 'required': '(False)'}), "('args', nargs=3, required=False)\n", (1612, 1645), False, 'import click\n'), ((1647, 1772), 'click.option', 'click.option', (['"""--match_all_lists/--match_intersect_lists"""'], {'default': '(False)', 'help': '"""Show all lists, or only those that match"""'}), "('--match_all_lists/--match_intersect_lists', default=False,\n help='Show all lists, or only those that match')\n", (1659, 1772), False, 'import click\n'), ((1770, 1855), 'click.option', 'click.option', (['"""--card_filter"""'], {'default': '"""open"""', 'help': '"""Card filter (open, closed)"""'}), "('--card_filter', default='open', help='Card filter (open, closed)'\n )\n", (1782, 1855), False, 'import click\n'), ((1852, 1950), 'click.option', 'click.option', (['"""--board_filter"""'], {'default': '"""starred"""', 'help': '"""Board filter (starred, open, close)"""'}), "('--board_filter', default='starred', help=\n 'Board filter (starred, open, close)')\n", (1864, 1950), False, 'import click\n'), ((2429, 2476), 'click.argument', 'click.argument', (['"""args"""'], {'nargs': '(3)', 'required': '(False)'}), "('args', nargs=3, required=False)\n", (2443, 2476), False, 'import click\n'), ((2478, 2603), 'click.option', 'click.option', (['"""--match_all_lists/--match_intersect_lists"""'], {'default': '(False)', 'help': '"""Show all lists, or only those that match"""'}), "('--match_all_lists/--match_intersect_lists', default=False,\n help='Show all lists, or only those that match')\n", (2490, 2603), False, 'import click\n'), ((2601, 2686), 'click.option', 'click.option', (['"""--card_filter"""'], {'default': '"""open"""', 
'help': '"""Card filter (open, closed)"""'}), "('--card_filter', default='open', help='Card filter (open, closed)'\n )\n", (2613, 2686), False, 'import click\n'), ((2683, 2781), 'click.option', 'click.option', (['"""--board_filter"""'], {'default': '"""starred"""', 'help': '"""Board filter (starred, open, close)"""'}), "('--board_filter', default='starred', help=\n 'Board filter (starred, open, close)')\n", (2695, 2781), False, 'import click\n'), ((3250, 3297), 'click.argument', 'click.argument', (['"""args"""'], {'nargs': '(3)', 'required': '(False)'}), "('args', nargs=3, required=False)\n", (3264, 3297), False, 'import click\n'), ((3299, 3424), 'click.option', 'click.option', (['"""--match_all_lists/--match_intersect_lists"""'], {'default': '(False)', 'help': '"""Show all lists, or only those that match"""'}), "('--match_all_lists/--match_intersect_lists', default=False,\n help='Show all lists, or only those that match')\n", (3311, 3424), False, 'import click\n'), ((3422, 3507), 'click.option', 'click.option', (['"""--card_filter"""'], {'default': '"""open"""', 'help': '"""Card filter (open, closed)"""'}), "('--card_filter', default='open', help='Card filter (open, closed)'\n )\n", (3434, 3507), False, 'import click\n'), ((3504, 3602), 'click.option', 'click.option', (['"""--board_filter"""'], {'default': '"""starred"""', 'help': '"""Board filter (starred, open, close)"""'}), "('--board_filter', default='starred', help=\n 'Board filter (starred, open, close)')\n", (3516, 3602), False, 'import click\n'), ((4796, 4843), 'click.argument', 'click.argument', (['"""args"""'], {'nargs': '(3)', 'required': '(False)'}), "('args', nargs=3, required=False)\n", (4810, 4843), False, 'import click\n'), ((4845, 4897), 'click.option', 'click.option', (['"""--text"""'], {'default': 'None', 'required': '(False)'}), "('--text', default=None, required=False)\n", (4857, 4897), False, 'import click\n'), ((4898, 5023), 'click.option', 'click.option', 
(['"""--match_all_lists/--match_intersect_lists"""'], {'default': '(False)', 'help': '"""Show all lists, or only those that match"""'}), "('--match_all_lists/--match_intersect_lists', default=False,\n help='Show all lists, or only those that match')\n", (4910, 5023), False, 'import click\n'), ((5021, 5106), 'click.option', 'click.option', (['"""--card_filter"""'], {'default': '"""open"""', 'help': '"""Card filter (open, closed)"""'}), "('--card_filter', default='open', help='Card filter (open, closed)'\n )\n", (5033, 5106), False, 'import click\n'), ((5103, 5201), 'click.option', 'click.option', (['"""--board_filter"""'], {'default': '"""starred"""', 'help': '"""Board filter (starred, open, close)"""'}), "('--board_filter', default='starred', help=\n 'Board filter (starred, open, close)')\n", (5115, 5201), False, 'import click\n'), ((5854, 5901), 'click.argument', 'click.argument', (['"""args"""'], {'nargs': '(3)', 'required': '(False)'}), "('args', nargs=3, required=False)\n", (5868, 5901), False, 'import click\n'), ((5903, 5956), 'click.option', 'click.option', (['"""--target_list"""'], {'nargs': '(2)', 'required': '(True)'}), "('--target_list', nargs=2, required=True)\n", (5915, 5956), False, 'import click\n'), ((5958, 6083), 'click.option', 'click.option', (['"""--match_all_lists/--match_intersect_lists"""'], {'default': '(False)', 'help': '"""Show all lists, or only those that match"""'}), "('--match_all_lists/--match_intersect_lists', default=False,\n help='Show all lists, or only those that match')\n", (5970, 6083), False, 'import click\n'), ((6081, 6166), 'click.option', 'click.option', (['"""--card_filter"""'], {'default': '"""open"""', 'help': '"""Card filter (open, closed)"""'}), "('--card_filter', default='open', help='Card filter (open, closed)'\n )\n", (6093, 6166), False, 'import click\n'), ((6163, 6261), 'click.option', 'click.option', (['"""--board_filter"""'], {'default': '"""starred"""', 'help': '"""Board filter (starred, open, close)"""'}), 
"('--board_filter', default='starred', help=\n 'Board filter (starred, open, close)')\n", (6175, 6261), False, 'import click\n'), ((4259, 4306), 'click.prompt', 'click.prompt', (['"""Enter a description"""'], {'default': '""""""'}), "('Enter a description', default='')\n", (4271, 4306), False, 'import click\n'), ((4320, 4411), 'click.prompt', 'click.prompt', (['"""Enter labels seperated by spaces"""'], {'default': '"""personal"""', 'show_default': '(True)'}), "('Enter labels seperated by spaces', default='personal',\n show_default=True)\n", (4332, 4411), False, 'import click\n'), ((4423, 4484), 'click.prompt', 'click.prompt', (['"""Enter due date"""'], {'show_default': '(True)', 'default': '""""""'}), "('Enter due date', show_default=True, default='')\n", (4435, 4484), False, 'import click\n'), ((4580, 4741), 'ptrello.api.add_card', 'api.add_card', ([], {'list': "ctx.obj['trello'][0]['filtered_lists'][0]", 'name': "ctx.obj['input_args'][-1]", 'description': 'description', 'labels': 'labels', 'due_date': 'due_date'}), "(list=ctx.obj['trello'][0]['filtered_lists'][0], name=ctx.obj[\n 'input_args'][-1], description=description, labels=labels, due_date=\n due_date)\n", (4592, 4741), False, 'from ptrello import api\n'), ((5782, 5813), 'ptrello.api.add_comment', 'api.add_comment', (['cards[0]', 'text'], {}), '(cards[0], text)\n', (5797, 5813), False, 'from ptrello import api\n'), ((10053, 10077), 'click.secho', 'click.secho', (['e'], {'fg': '"""red"""'}), "(e, fg='red')\n", (10064, 10077), False, 'import click\n'), ((4187, 4222), 'click.secho', 'click.secho', (['error_string'], {'fg': '"""red"""'}), "(error_string, fg='red')\n", (4198, 4222), False, 'import click\n'), ((5631, 5666), 'click.secho', 'click.secho', (['error_string'], {'fg': '"""red"""'}), "(error_string, fg='red')\n", (5642, 5666), False, 'import click\n'), ((5716, 5776), 'click.prompt', 'click.prompt', (['"""Enter comment"""'], {'show_default': '(True)', 'default': '""""""'}), "('Enter comment', 
show_default=True, default='')\n", (5728, 5776), False, 'import click\n'), ((9118, 9151), 'click.secho', 'click.secho', (["(100 * '-')"], {'fg': '"""blue"""'}), "(100 * '-', fg='blue')\n", (9129, 9151), False, 'import click\n'), ((9160, 9188), 'click.secho', 'click.secho', (['"""\n"""'], {'fg': '"""blue"""'}), "('\\n', fg='blue')\n", (9171, 9188), False, 'import click\n'), ((9757, 9795), 'click.secho', 'click.secho', (['"""Comments: """'], {'fg': '"""yellow"""'}), "('Comments: ', fg='yellow')\n", (9768, 9795), False, 'import click\n'), ((9943, 9960), 'click.secho', 'click.secho', (['"""\n"""'], {}), "('\\n')\n", (9954, 9960), False, 'import click\n'), ((834, 958), 'ptrello.api.guess_card_list_board', 'api.guess_card_list_board', (['input_text'], {'board_filter': 'board_filter', 'card_filter': 'card_filter', 'show_all_lists': 'show_all_lists'}), '(input_text, board_filter=board_filter,\n card_filter=card_filter, show_all_lists=show_all_lists)\n', (859, 958), False, 'from ptrello import api\n'), ((3178, 3210), 'click.secho', 'click.secho', (['e.args[0]'], {'fg': '"""red"""'}), "(e.args[0], fg='red')\n", (3189, 3210), False, 'import click\n'), ((1203, 1319), 'ptrello.api.guess_card_list_board', 'api.guess_card_list_board', (['target_list'], {'board_filter': 'board_filter', 'card_filter': 'card_filter', 'show_all_lists': '(False)'}), '(target_list, board_filter=board_filter,\n card_filter=card_filter, show_all_lists=False)\n', (1228, 1319), False, 'from ptrello import api\n'), ((6980, 7104), 'ptrello.api.move_card', 'api.move_card', ([], {'card': 'c', 'target_board_id': 'target_board.id', 'target_list_id': "ctx.obj['target_ctx'][0]['filtered_lists'][0].id"}), "(card=c, target_board_id=target_board.id, target_list_id=ctx.\n obj['target_ctx'][0]['filtered_lists'][0].id)\n", (6993, 7104), False, 'from ptrello import api\n'), ((7136, 7168), 'click.secho', 'click.secho', (['"""card(s) not moved"""'], {}), "('card(s) not moved')\n", (7147, 7168), False, 'import click\n'), 
((8005, 8034), 'ptrello.api.print_trello_object', 'api.print_trello_object', (['item'], {}), '(item)\n', (8028, 8034), False, 'from ptrello import api\n'), ((8155, 8184), 'ptrello.api.print_trello_object', 'api.print_trello_object', (['item'], {}), '(item)\n', (8178, 8184), False, 'from ptrello import api\n'), ((7815, 7844), 'ptrello.api.print_trello_object', 'api.print_trello_object', (['item'], {}), '(item)\n', (7838, 7844), False, 'from ptrello import api\n'), ((7244, 7259), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (7257, 7259), False, 'import sys\n')]
import gzip
import io
import json
import os
import random
import time

import requests
import requests_cache

# Cache HTTP responses so repeated runs do not re-download the source data.
requests_cache.install_cache()

request = requests.get('https://rpg.rigden.us/seeds_of_infinity/resources/json/names.json')
NAMES = request.json()['data']

HOME_PATH = os.path.dirname(os.path.realpath(__file__))
JSON_PATH = os.path.join(HOME_PATH, "json")


def json_formatter(data, title):
    """Wrap *data* in the standard metadata envelope and return a JSON string."""
    data_dict = {}
    data_dict['meta'] = {}
    data_dict['meta']['author'] = "<NAME>"
    data_dict['meta']['generator'] = "https://github.com/jrigden/Seeds_of_Infinity"
    data_dict['meta']['license'] = "http://unlicense.org"
    # data_dict['meta']['time_created'] = int(time.time())
    data_dict['meta']['title'] = title
    data_dict['data'] = data
    data_json = json.dumps(data_dict, ensure_ascii=False, indent=4, separators=(',', ': '))
    return data_json


def save_json(data, filename, title):
    """Write *data* as JSON to JSON_PATH/filename and emit a gzipped copy.

    BUG FIX: the plain/gzip file pair used to be opened without context
    managers, leaking both handles if writelines() raised mid-copy.
    """
    file_path = os.path.join(JSON_PATH, filename)
    file_gz_path = os.path.join(JSON_PATH, filename + ".gz")
    data_json = json_formatter(data, title)
    with io.open(file_path, 'w', encoding='utf-8') as f:
        f.write(data_json)
    # Compress the freshly written file; ``with`` guarantees both handles
    # are closed even on error.
    with open(file_path, 'rb') as f_in, gzip.open(file_gz_path, 'w') as f_out:
        f_out.writelines(f_in)


class Chain(object):
    """A character-level Markov chain mapping each item to observed successors.

    ``None`` is used as both the start sentinel (key) and end sentinel (value).
    """

    def __init__(self):
        self.chain = {}

    def build_link(self, current_item, next_item):
        """Record that *next_item* was observed immediately after *current_item*."""
        if current_item not in self.chain:
            self.chain[current_item] = []
        self.chain[current_item].append(next_item)

    def generate_item(self, current_item):
        """Pick a uniformly random successor of *current_item*."""
        return random.choice(self.chain[current_item])

    def generate_series(self):
        """Walk from the start sentinel until the end sentinel, collecting items."""
        series = []
        current_item = None
        while True:
            current_item = self.generate_item(current_item)
            if current_item is None:
                break
            series.append(current_item)
        return series


def build_one_letter_chain(list_of_words):
    """Build a single-character Markov chain from a list of words."""
    chain = Chain()
    for word in list_of_words:
        # Start sentinel -> first character.
        chain.build_link(None, word[0])
        # Each character links to its successor; the idiomatic zip pairing
        # replaces the original per-index try/except IndexError loop.
        for current, nxt in zip(word, word[1:]):
            chain.build_link(current, nxt)
        # Final character -> end sentinel.
        chain.build_link(word[-1], None)
    return chain


def generate_first_name_json():
    """Build and save a chain from all (deduplicated) first names."""
    first_names = []
    first_names.extend(NAMES['first_names']['feminine'])
    first_names.extend(NAMES['first_names']['masculine'])
    first_names = list(set(first_names))
    chain = build_one_letter_chain(first_names)
    save_json(chain.chain, "first_name_chain.json", "first_name_chain")


def generate_gendered_first_name_json(gender):
    """Build and save a chain for one gender key ('feminine'/'masculine')."""
    first_names = NAMES['first_names'][gender]
    first_names = list(set(first_names))
    chain = build_one_letter_chain(first_names)
    title = gender + "_first_name_chain"
    save_json(chain.chain, title + ".json", title)


def generate_last_name_json():
    """Build and save a chain from all (deduplicated) last names."""
    last_names = NAMES['last_names']
    last_names = list(set(last_names))
    chain = build_one_letter_chain(last_names)
    save_json(chain.chain, "last_name_chain.json", "last_name_chain")


generate_first_name_json()
generate_gendered_first_name_json('feminine')
generate_gendered_first_name_json('masculine')
generate_last_name_json()
[ "gzip.open", "os.path.realpath", "requests_cache.install_cache", "json.dumps", "random.choice", "requests.get", "io.open", "os.path.join" ]
[((111, 141), 'requests_cache.install_cache', 'requests_cache.install_cache', ([], {}), '()\n', (139, 141), False, 'import requests_cache\n'), ((153, 239), 'requests.get', 'requests.get', (['"""https://rpg.rigden.us/seeds_of_infinity/resources/json/names.json"""'], {}), "(\n 'https://rpg.rigden.us/seeds_of_infinity/resources/json/names.json')\n", (165, 239), False, 'import requests\n'), ((335, 366), 'os.path.join', 'os.path.join', (['HOME_PATH', '"""json"""'], {}), "(HOME_PATH, 'json')\n", (347, 366), False, 'import os\n'), ((295, 321), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (311, 321), False, 'import os\n'), ((774, 849), 'json.dumps', 'json.dumps', (['data_dict'], {'ensure_ascii': '(False)', 'indent': '(4)', 'separators': "(',', ': ')"}), "(data_dict, ensure_ascii=False, indent=4, separators=(',', ': '))\n", (784, 849), False, 'import json\n'), ((926, 959), 'os.path.join', 'os.path.join', (['JSON_PATH', 'filename'], {}), '(JSON_PATH, filename)\n', (938, 959), False, 'import os\n'), ((979, 1020), 'os.path.join', 'os.path.join', (['JSON_PATH', "(filename + '.gz')"], {}), "(JSON_PATH, filename + '.gz')\n", (991, 1020), False, 'import os\n'), ((1194, 1222), 'gzip.open', 'gzip.open', (['file_gz_path', '"""w"""'], {}), "(file_gz_path, 'w')\n", (1203, 1222), False, 'import gzip\n'), ((1074, 1115), 'io.open', 'io.open', (['file_path', '"""w"""'], {'encoding': '"""utf-8"""'}), "(file_path, 'w', encoding='utf-8')\n", (1081, 1115), False, 'import io\n'), ((1603, 1642), 'random.choice', 'random.choice', (['self.chain[current_item]'], {}), '(self.chain[current_item])\n', (1616, 1642), False, 'import random\n')]
import argparse
import os
import pathlib
import typing

import pycspr
from pycspr import NodeClient
from pycspr import NodeConnection
from pycspr.crypto import KeyAlgorithm
from pycspr.types import CL_ByteArray
from pycspr.types import CL_U256
from pycspr.types import Deploy
from pycspr.types import DeployParameters
from pycspr.types import ModuleBytes
from pycspr.types import PrivateKey
from pycspr.types import PublicKey
from pycspr.types import StoredContractByHash


# Path to NCTL network assets.
# NOTE(review): requires the NCTL env var to be set; os.getenv returns None
# otherwise and pathlib.Path(None) raises at import time.
_PATH_TO_NCTL_ASSETS = pathlib.Path(os.getenv("NCTL")) / "assets" / "net-1"

# CLI argument parser.
_ARGS = argparse.ArgumentParser("Demo illustrating how to install an ERC-20 smart contract.")

# CLI argument: path to contract operator secret key - defaults to NCTL faucet.
_ARGS.add_argument(
    "--operator-secret-key-path",
    default=_PATH_TO_NCTL_ASSETS / "faucet" / "secret_key.pem",
    dest="path_to_operator_secret_key",
    help="Path to operator's secret_key.pem file.",
    type=str,
    )

# CLI argument: type of contract operator secret key - defaults to ED25519.
_ARGS.add_argument(
    "--operator-secret-key-type",
    default=KeyAlgorithm.ED25519.name,
    dest="type_of_operator_secret_key",
    help="Type of operator's secret key.",
    type=str,
    )

# CLI argument: path to user to whom tokens will be transferred - defaults to NCTL user 1.
_ARGS.add_argument(
    "--user-public-key-path",
    default=_PATH_TO_NCTL_ASSETS / "users" / "user-1" / "public_key_hex",
    dest="path_to_user_public_key",
    help="Path to user's public_key_hex file.",
    type=str,
    )

# CLI argument: name of target chain - defaults to NCTL chain.
_ARGS.add_argument(
    "--chain",
    default="casper-net-1",
    dest="chain_name",
    help="Name of target chain.",
    type=str,
    )

# CLI argument: amount in motes to be offered as payment.
_ARGS.add_argument(
    "--payment",
    default=int(1e9),
    dest="deploy_payment",
    help="Amount in motes to be offered as payment.",
    type=int,
    )

# CLI argument: host address of target node - defaults to NCTL node 1.
_ARGS.add_argument(
    "--node-host",
    default="localhost",
    dest="node_host",
    help="Host address of target node.",
    type=str,
    )

# CLI argument: Node API JSON-RPC port - defaults to 11101 @ NCTL node 1.
_ARGS.add_argument(
    "--node-port-rpc",
    default=11101,
    dest="node_port_rpc",
    help="Node API JSON-RPC port.  Typically 7777 on most nodes.",
    type=int,
    )

# CLI argument: amount of ERC-20 tokens to be transferred to user..
_ARGS.add_argument(
    "--amount",
    default=int(2e9),
    dest="amount",
    help="Amount of ERC-20 tokens to be transferred to user.",
    type=int,
    )


def _main(args: argparse.Namespace):
    """Main entry point: build, sign and dispatch an ERC-20 transfer deploy.

    :param args: Parsed command line arguments.

    """
    # Set node client.
    client: NodeClient = _get_client(args)

    # Set contract operator / user.
    operator, user = _get_operator_and_user_keys(args)

    # Set contract hash (looked up from the operator's on-chain named keys).
    contract_hash: bytes = _get_contract_hash(args, client, operator)

    # Set deploy.
    deploy: Deploy = _get_deploy(args, contract_hash, operator, user)

    # Approve deploy (attaches the operator's signature).
    deploy.approve(operator)

    # Dispatch deploy to a node.
    client.send_deploy(deploy)

    print("-" * 72)
    print(f"Deploy dispatched to node [{args.node_host}]: {deploy.hash.hex()}")
    print("-" * 72)


def _get_client(args: argparse.Namespace) -> NodeClient:
    """Returns a pycspr client instance bound to the configured node host/port.

    """
    return NodeClient(NodeConnection(
        host=args.node_host,
        port_rpc=args.node_port_rpc,
    ))


def _get_operator_and_user_keys(args: argparse.Namespace) -> typing.Tuple[PrivateKey, PublicKey]:
    """Returns the smart contract operator's private key and the recipient
    user's public key, parsed from the configured key files.

    """
    operator = pycspr.parse_private_key(
        args.path_to_operator_secret_key,
        args.type_of_operator_secret_key,
        )
    user = pycspr.parse_public_key(
        args.path_to_user_public_key,
        )

    return operator, user


def _get_contract_hash(
    args: argparse.Namespace,
    client: NodeClient,
    operator: PrivateKey
) -> bytes:
    """Returns on-chain contract identifier.

    Queries the operator account and looks for a named key called ``ERC20``.

    """
    # Query operator account for a named key == ERC20 & return parsed named key value.
    account_info = client.get_account_info(operator.account_key)
    for named_key in account_info["named_keys"]:
        if named_key["name"] == "ERC20":
            # Named key value appears to be of the form "hash-<hex>"; the
            # [5:] slice strips the 5-char prefix — TODO confirm format.
            return bytes.fromhex(named_key["key"][5:])

    raise ValueError("ERC-20 uninstalled ... see how_tos/how_to_install_a_contract.py")


def _get_deploy(
    args: argparse.Namespace,
    contract_hash: bytes,
    operator: PrivateKey,
    user: PublicKey
) -> Deploy:
    """Returns an ERC-20 ``transfer`` deploy to be dispatched to a node.

    """
    # Set standard deploy parameters.
    params: DeployParameters = pycspr.create_deploy_parameters(
        account=operator,
        chain_name=args.chain_name
        )

    # Set payment logic.
    payment: ModuleBytes = pycspr.create_standard_payment(args.deploy_payment)

    # Set session logic: invoke the contract's "transfer" entry point with
    # the token amount and the recipient's account hash.
    session: StoredContractByHash = StoredContractByHash(
        entry_point="transfer",
        hash=contract_hash,
        args={
            "amount": CL_U256(args.amount),
            "recipient": CL_ByteArray(user.account_hash)
        }
    )

    return pycspr.create_deploy(params, payment, session)


# Entry point.
if __name__ == "__main__":
    _main(_ARGS.parse_args())
[ "pycspr.create_deploy_parameters", "pycspr.parse_public_key", "pycspr.parse_private_key", "argparse.ArgumentParser", "pycspr.types.CL_ByteArray", "pycspr.NodeConnection", "pycspr.types.CL_U256", "pycspr.create_standard_payment", "pycspr.create_deploy", "os.getenv" ]
[((613, 703), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (['"""Demo illustrating how to install an ERC-20 smart contract."""'], {}), "(\n 'Demo illustrating how to install an ERC-20 smart contract.')\n", (636, 703), False, 'import argparse\n'), ((3829, 3926), 'pycspr.parse_private_key', 'pycspr.parse_private_key', (['args.path_to_operator_secret_key', 'args.type_of_operator_secret_key'], {}), '(args.path_to_operator_secret_key, args.\n type_of_operator_secret_key)\n', (3853, 3926), False, 'import pycspr\n'), ((3960, 4013), 'pycspr.parse_public_key', 'pycspr.parse_public_key', (['args.path_to_user_public_key'], {}), '(args.path_to_user_public_key)\n', (3983, 4013), False, 'import pycspr\n'), ((4890, 4967), 'pycspr.create_deploy_parameters', 'pycspr.create_deploy_parameters', ([], {'account': 'operator', 'chain_name': 'args.chain_name'}), '(account=operator, chain_name=args.chain_name)\n', (4921, 4967), False, 'import pycspr\n'), ((5047, 5098), 'pycspr.create_standard_payment', 'pycspr.create_standard_payment', (['args.deploy_payment'], {}), '(args.deploy_payment)\n', (5077, 5098), False, 'import pycspr\n'), ((5387, 5433), 'pycspr.create_deploy', 'pycspr.create_deploy', (['params', 'payment', 'session'], {}), '(params, payment, session)\n', (5407, 5433), False, 'import pycspr\n'), ((3558, 3622), 'pycspr.NodeConnection', 'NodeConnection', ([], {'host': 'args.node_host', 'port_rpc': 'args.node_port_rpc'}), '(host=args.node_host, port_rpc=args.node_port_rpc)\n', (3572, 3622), False, 'from pycspr import NodeConnection\n'), ((541, 558), 'os.getenv', 'os.getenv', (['"""NCTL"""'], {}), "('NCTL')\n", (550, 558), False, 'import os\n'), ((5280, 5300), 'pycspr.types.CL_U256', 'CL_U256', (['args.amount'], {}), '(args.amount)\n', (5287, 5300), False, 'from pycspr.types import CL_U256\n'), ((5327, 5358), 'pycspr.types.CL_ByteArray', 'CL_ByteArray', (['user.account_hash'], {}), '(user.account_hash)\n', (5339, 5358), False, 'from pycspr.types import CL_ByteArray\n')]
# View more python tutorials on my Youtube and Youku channel!!!
# Youtube video tutorial: https://www.youtube.com/channel/UCdyjiB5H8Pu7aDTNVXTTpcg
# Youku video tutorial: http://i.youku.com/pythontutorial
# 12 - contours
"""
Please note, this script is for python3+.
If you are using python2+, please modify it accordingly.
Tutorial reference:
http://www.scipy-lectures.org/intro/matplotlib/matplotlib.html
"""
import matplotlib.pyplot as plt
import numpy as np


def f(x, y):
    # the height function evaluated over the plane
    return (1 - x / 2 + x ** 5 + y ** 3) * np.exp(-x ** 2 - y ** 2)


n = 256
x = np.linspace(-3, 3, n)
y = np.linspace(-3, 3, n)
X, Y = np.meshgrid(x, y)

# use plt.contourf to fill contours:
# X, Y and value for each (X, Y) point
plt.contourf(X, Y, f(X, Y), 8, alpha=.75, cmap=plt.cm.hot)

# use plt.contour to add contour lines.
# BUG FIX: the keyword is ``linewidths`` (plural); ``linewidth`` is not a
# valid plt.contour argument and raises/ignores depending on version.
C = plt.contour(X, Y, f(X, Y), 8, colors='black', linewidths=.5)

# adding labels to the contour lines
plt.clabel(C, inline=True, fontsize=10)

# hide tick marks on both axes
plt.xticks(())
plt.yticks(())
plt.show()
[ "matplotlib.pyplot.clabel", "numpy.meshgrid", "matplotlib.pyplot.show", "matplotlib.pyplot.yticks", "numpy.exp", "numpy.linspace", "matplotlib.pyplot.xticks" ]
[((576, 597), 'numpy.linspace', 'np.linspace', (['(-3)', '(3)', 'n'], {}), '(-3, 3, n)\n', (587, 597), True, 'import numpy as np\n'), ((602, 623), 'numpy.linspace', 'np.linspace', (['(-3)', '(3)', 'n'], {}), '(-3, 3, n)\n', (613, 623), True, 'import numpy as np\n'), ((630, 647), 'numpy.meshgrid', 'np.meshgrid', (['x', 'y'], {}), '(x, y)\n', (641, 647), True, 'import numpy as np\n'), ((899, 938), 'matplotlib.pyplot.clabel', 'plt.clabel', (['C'], {'inline': '(True)', 'fontsize': '(10)'}), '(C, inline=True, fontsize=10)\n', (909, 938), True, 'import matplotlib.pyplot as plt\n'), ((940, 954), 'matplotlib.pyplot.xticks', 'plt.xticks', (['()'], {}), '(())\n', (950, 954), True, 'import matplotlib.pyplot as plt\n'), ((955, 969), 'matplotlib.pyplot.yticks', 'plt.yticks', (['()'], {}), '(())\n', (965, 969), True, 'import matplotlib.pyplot as plt\n'), ((970, 980), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (978, 980), True, 'import matplotlib.pyplot as plt\n'), ((543, 567), 'numpy.exp', 'np.exp', (['(-x ** 2 - y ** 2)'], {}), '(-x ** 2 - y ** 2)\n', (549, 567), True, 'import numpy as np\n')]
import kfp import kfp.dsl as dsl from kfp.components import create_component_from_func import kfp.components as comp IMAGE = 'salazar99/python-kubeflow:latest' DATA_URL = 'https://gs-kubeflow-pipelines.nyc3.digitaloceanspaces.com/clean-spam-data.csv' # Download data # def download_data(source_path: str, output_csv: comp.OutputPath('CSV')): # import pandas as pd # data = pd.read_csv(source_path) # print(output_csv) # data.to_csv(output_csv, index=False) # download_op = create_component_from_func(func=download_data, # base_image=IMAGE) web_downloader_op = kfp.components.load_component_from_url( 'https://raw.githubusercontent.com/kubeflow/pipelines/master/components/web/Download/component.yaml') # Preprocess and store data def preprocess_data(source_path: comp.InputPath('CSV'), x_train_output_path: str, x_test_output_path: str, y_train_output_path: str, y_test_output_path: str): from sklearn.feature_extraction.text import TfidfVectorizer from sklearn.feature_selection import SelectKBest from sklearn.feature_selection import f_classif from sklearn.model_selection import train_test_split from typing import List import pandas as pd import numpy as np # Load and split data data = pd.read_csv(source_path + '.csv') x_train, x_test, y_train, y_test = train_test_split(data['text'], data['label'], test_size=0.2) # Convert to required format x_train = list(x_train) y_train = y_train.to_numpy() x_test = list(x_test) y_test = y_test.to_numpy() # Function for preprocessing data def ngram_vectorize(train_text: List[str], train_labels: np.ndarray, test_text: List[str]): # Arguments for vectorizor kwargs = { 'ngram_range': NGRAM_RANGE, # Use 1-grams + 2-grams. 'dtype': 'int32', 'strip_accents': 'unicode', 'decode_error': 'replace', 'analyzer': TOKEN_MODE, # Split text into word tokens. 
'min_df': MIN_DOCUMENT_FREQUENCY, } vectorizer = TfidfVectorizer(**kwargs) # Vectorize training text x_train = vectorizer.fit_transform(train_text) # Vectorize test text x_test = vectorizer.transform(test_text) # Select top k features selector = SelectKBest(f_classif, k=TOP_K) selector.fit(x_train, train_labels) x_train = selector.transform(x_train).astype('float32') x_test = selector.transform(x_test).astype('float32') return x_train, x_test # Preprocess data x_train, x_test = ngram_vectorize(x_train, y_train, x_test) # Save data np.save(x_train, x_train_output_path) np.save(x_test, x_test_output_path) np.save(y_train, y_train_output_path) np.save(y_test, y_test_output_path) preprocess_op = create_component_from_func(func=preprocess_data, base_image=IMAGE) # Train model # Evaluate model # Save model # Build pipeline @dsl.pipeline( name="SMS Spam Detection Model Pipeline", description="Train an MLP to detect spam messages from csv data" ) def pipeline(url=DATA_URL): download = web_downloader_op(url=url) preprocess = preprocess_op(download.outputs['data'], 'x_train.npy', 'x_test.npy', 'y_train.npy', 'y_test.npy').after(download) if __name__ == '__main__': kfp.compiler.Compiler().compile( pipeline_func=pipeline, package_path='pipeline.yaml' )
[ "numpy.save", "pandas.read_csv", "sklearn.model_selection.train_test_split", "sklearn.feature_extraction.text.TfidfVectorizer", "kfp.components.create_component_from_func", "kfp.compiler.Compiler", "kfp.components.InputPath", "kfp.components.load_component_from_url", "sklearn.feature_selection.SelectKBest", "kfp.dsl.pipeline" ]
[((621, 771), 'kfp.components.load_component_from_url', 'kfp.components.load_component_from_url', (['"""https://raw.githubusercontent.com/kubeflow/pipelines/master/components/web/Download/component.yaml"""'], {}), "(\n 'https://raw.githubusercontent.com/kubeflow/pipelines/master/components/web/Download/component.yaml'\n )\n", (659, 771), False, 'import kfp\n'), ((2955, 3021), 'kfp.components.create_component_from_func', 'create_component_from_func', ([], {'func': 'preprocess_data', 'base_image': 'IMAGE'}), '(func=preprocess_data, base_image=IMAGE)\n', (2981, 3021), False, 'from kfp.components import create_component_from_func\n'), ((3129, 3254), 'kfp.dsl.pipeline', 'dsl.pipeline', ([], {'name': '"""SMS Spam Detection Model Pipeline"""', 'description': '"""Train an MLP to detect spam messages from csv data"""'}), "(name='SMS Spam Detection Model Pipeline', description=\n 'Train an MLP to detect spam messages from csv data')\n", (3141, 3254), True, 'import kfp.dsl as dsl\n'), ((1377, 1410), 'pandas.read_csv', 'pd.read_csv', (["(source_path + '.csv')"], {}), "(source_path + '.csv')\n", (1388, 1410), True, 'import pandas as pd\n'), ((1450, 1510), 'sklearn.model_selection.train_test_split', 'train_test_split', (["data['text']", "data['label']"], {'test_size': '(0.2)'}), "(data['text'], data['label'], test_size=0.2)\n", (1466, 1510), False, 'from sklearn.model_selection import train_test_split\n'), ((2778, 2815), 'numpy.save', 'np.save', (['x_train', 'x_train_output_path'], {}), '(x_train, x_train_output_path)\n', (2785, 2815), True, 'import numpy as np\n'), ((2820, 2855), 'numpy.save', 'np.save', (['x_test', 'x_test_output_path'], {}), '(x_test, x_test_output_path)\n', (2827, 2855), True, 'import numpy as np\n'), ((2860, 2897), 'numpy.save', 'np.save', (['y_train', 'y_train_output_path'], {}), '(y_train, y_train_output_path)\n', (2867, 2897), True, 'import numpy as np\n'), ((2902, 2937), 'numpy.save', 'np.save', (['y_test', 'y_test_output_path'], {}), '(y_test, 
y_test_output_path)\n', (2909, 2937), True, 'import numpy as np\n'), ((829, 850), 'kfp.components.InputPath', 'comp.InputPath', (['"""CSV"""'], {}), "('CSV')\n", (843, 850), True, 'import kfp.components as comp\n'), ((2177, 2202), 'sklearn.feature_extraction.text.TfidfVectorizer', 'TfidfVectorizer', ([], {}), '(**kwargs)\n', (2192, 2202), False, 'from sklearn.feature_extraction.text import TfidfVectorizer\n'), ((2432, 2463), 'sklearn.feature_selection.SelectKBest', 'SelectKBest', (['f_classif'], {'k': 'TOP_K'}), '(f_classif, k=TOP_K)\n', (2443, 2463), False, 'from sklearn.feature_selection import SelectKBest\n'), ((3569, 3592), 'kfp.compiler.Compiler', 'kfp.compiler.Compiler', ([], {}), '()\n', (3590, 3592), False, 'import kfp\n')]
import sys import os import platform import threading import socket import pytest from Pyro5 import config, socketutil # determine ipv6 capability has_ipv6 = socket.has_ipv6 if has_ipv6: s = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM) try: s.connect(("::1", 53)) s.close() socket.getaddrinfo("localhost", 53, socket.AF_INET6) except socket.error: has_ipv6 = False class TestSocketutil: @classmethod def setup_class(cls): config.POLLTIMEOUT = 0.1 def testGetIP(self): config.PREFER_IP_VERSION = 4 myip = socketutil.get_ip_address("") assert len(str(myip)) > 4 myip = socketutil.get_ip_address("", workaround127=True) assert len(str(myip)) > 4 assert not str(myip).startswith("127.") addr = socketutil.get_ip_address("127.0.0.1", workaround127=False) assert "127.0.0.1" == str(addr) assert addr.version == 4 addr = socketutil.get_ip_address("127.0.0.1", workaround127=True) assert "127.0.0.1" != str(addr) assert addr.version == 4 def testGetIP6(self): if not has_ipv6: pytest.skip("no ipv6 capability") addr = socketutil.get_ip_address("::1", version=6) assert addr.version == 6 assert ":" in str(addr) addr = socketutil.get_ip_address("localhost", version=6) assert addr.version == 6 assert ":" in str(addr) def testGetInterface(self): addr = socketutil.get_interface("localhost") assert addr.version == 4 assert str(addr).startswith("127.") assert str(addr.ip).startswith("127.0") assert str(addr.network).startswith("127.0") if has_ipv6: addr = socketutil.get_interface("::1") assert addr.version == 6 assert ":" in str(addr) assert ":" in str(addr.ip) assert ":" in str(addr.network) def testUnusedPort(self): port1 = socketutil.find_probably_unused_port() port2 = socketutil.find_probably_unused_port() assert port1 > 0 assert port1 != port2 port1 = socketutil.find_probably_unused_port(socktype=socket.SOCK_DGRAM) port2 = socketutil.find_probably_unused_port(socktype=socket.SOCK_DGRAM) assert port1 > 0 assert port1 != port2 def testUnusedPort6(self): if not has_ipv6: pytest.skip("no 
ipv6 capability") port1 = socketutil.find_probably_unused_port(family=socket.AF_INET6) port2 = socketutil.find_probably_unused_port(family=socket.AF_INET6) assert port1 > 0 assert port1 != port2 port1 = socketutil.find_probably_unused_port(family=socket.AF_INET6, socktype=socket.SOCK_DGRAM) port2 = socketutil.find_probably_unused_port(family=socket.AF_INET6, socktype=socket.SOCK_DGRAM) assert port1 > 0 assert port1 != port2 def testBindUnusedPort(self): sock1 = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sock2 = socket.socket(socket.AF_INET, socket.SOCK_STREAM) port1 = socketutil.bind_unused_port(sock1) port2 = socketutil.bind_unused_port(sock2) assert port1 > 0 assert port1 != port2 assert sock1.getsockname() == ("127.0.0.1", port1) sock1.close() sock2.close() def testBindUnusedPort6(self): if not has_ipv6: pytest.skip("no ipv6 capability") sock1 = socket.socket(socket.AF_INET6, socket.SOCK_STREAM) sock2 = socket.socket(socket.AF_INET6, socket.SOCK_STREAM) port1 = socketutil.bind_unused_port(sock1) port2 = socketutil.bind_unused_port(sock2) assert port1 > 0 assert port1 != port2 host, port, _, _ = sock1.getsockname() assert ":" in host assert port1 == port sock1.close() sock2.close() def testCreateUnboundSockets(self): s = socketutil.create_socket() assert socket.AF_INET == s.family bs = socketutil.create_bc_socket() assert socket.AF_INET == bs.family try: host, port = s.getsockname() # can either fail with socket.error or return (host,0) assert 0 == port except socket.error: pass try: host, port = bs.getsockname() # can either fail with socket.error or return (host,0) assert 0 == port except socket.error: pass s.close() bs.close() def testCreateUnboundSockets6(self): if not has_ipv6: pytest.skip("no ipv6 capability") s = socketutil.create_socket(ipv6=True) assert socket.AF_INET6 == s.family bs = socketutil.create_bc_socket(ipv6=True) assert socket.AF_INET6 == bs.family try: host, port, _, _ = s.getsockname() # can either fail with socket.error or return 
(host,0) assert 0 == port except socket.error: pass try: host, port, _, _ = bs.getsockname() # can either fail with socket.error or return (host,0) assert 0 == port except socket.error: pass s.close() bs.close() def testCreateBoundSockets(self): s = socketutil.create_socket(bind=('127.0.0.1', 0)) assert socket.AF_INET == s.family bs = socketutil.create_bc_socket(bind=('127.0.0.1', 0)) assert '127.0.0.1' == s.getsockname()[0] assert '127.0.0.1' == bs.getsockname()[0] s.close() bs.close() with pytest.raises(ValueError): socketutil.create_socket(bind=('localhost', 12345), connect=('localhost', 1234)) def testCreateBoundSockets6(self): if not has_ipv6: pytest.skip("no ipv6 capability") s = socketutil.create_socket(bind=('::1', 0)) assert socket.AF_INET6 == s.family bs = socketutil.create_bc_socket(bind=('::1', 0)) assert ':' in s.getsockname()[0] assert ':' in bs.getsockname()[0] s.close() bs.close() with pytest.raises(ValueError): socketutil.create_socket(bind=('::1', 12345), connect=('::1', 1234)) def testCreateBoundUnixSockets(self): if not hasattr(socket, "AF_UNIX"): pytest.skip("no unix domain sockets capability") SOCKNAME = "test_unixsocket" if os.path.exists(SOCKNAME): os.remove(SOCKNAME) s = socketutil.create_socket(bind=SOCKNAME) assert socket.AF_UNIX == s.family assert SOCKNAME == s.getsockname() s.close() if os.path.exists(SOCKNAME): os.remove(SOCKNAME) with pytest.raises(ValueError): socketutil.create_socket(bind=SOCKNAME, connect=SOCKNAME) def testAbstractNamespace(self): if not hasattr(socket, "AF_UNIX") and not sys.platform.startswith("linux"): pytest.skip("no unix domain sockets capability, and not Linux") SOCKNAME = "\0test_unixsocket_abstract_ns" # mind the \0 at the start s = socketutil.create_socket(bind=SOCKNAME) assert bytes(SOCKNAME, "ascii") == s.getsockname() s.close() def testSend(self): ss = socketutil.create_socket(bind=("localhost", 0)) port = ss.getsockname()[1] cs = socketutil.create_socket(connect=("localhost", port)) 
socketutil.send_data(cs, b"foobar!" * 10) cs.shutdown(socket.SHUT_WR) a = ss.accept() data = socketutil.receive_data(a[0], 5) assert b"fooba" == data data = socketutil.receive_data(a[0], 5) assert b"r!foo" == data a[0].close() ss.close() cs.close() def testSendUnix(self): if not hasattr(socket, "AF_UNIX"): pytest.skip("no unix domain sockets capability") SOCKNAME = "test_unixsocket" if os.path.exists(SOCKNAME): os.remove(SOCKNAME) ss = socketutil.create_socket(bind=SOCKNAME) cs = socketutil.create_socket(connect=SOCKNAME) socketutil.send_data(cs, b"foobar!" * 10) cs.shutdown(socket.SHUT_WR) a = ss.accept() data = socketutil.receive_data(a[0], 5) assert b"fooba" == data data = socketutil.receive_data(a[0], 5) assert b"r!foo" == data a[0].close() ss.close() cs.close() if os.path.exists(SOCKNAME): os.remove(SOCKNAME) def testBroadcast(self): ss = socketutil.create_bc_socket((None, 0)) port = ss.getsockname()[1] cs = socketutil.create_bc_socket() for bcaddr in config.BROADCAST_ADDRS: try: cs.sendto(b"monkey", 0, (bcaddr, port)) except socket.error as x: err = getattr(x, "errno", x.args[0]) # handle some errno that some platforms like to throw if err not in socketutil.ERRNO_EADDRNOTAVAIL and err not in socketutil.ERRNO_EADDRINUSE: raise data, _ = ss.recvfrom(500) assert b"monkey" == data cs.close() ss.close() def testMsgWaitallProblems(self): ss = socketutil.create_socket(bind=("localhost", 0), timeout=2) port = ss.getsockname()[1] cs = socketutil.create_socket(connect=("localhost", port), timeout=2) a = ss.accept() # test some sizes that might be problematic with MSG_WAITALL and check that they work fine for size in [1000, 10000, 32000, 32768, 32780, 41950, 41952, 42000, 65000, 65535, 65600, 80000]: socketutil.send_data(cs, b"x" * size) data = socketutil.receive_data(a[0], size) socketutil.send_data(a[0], data) data = socketutil.receive_data(cs, size) assert size == len(data) a[0].close() ss.close() cs.close() def testMsgWaitallProblems2(self): class 
ReceiveThread(threading.Thread): def __init__(self, sock, sizes): super(ReceiveThread, self).__init__() self.sock = sock self.sizes = sizes def run(self): cs, _ = self.sock.accept() for size in self.sizes: data = socketutil.receive_data(cs, size) socketutil.send_data(cs, data) cs.close() ss = socketutil.create_socket(bind=("localhost", 0)) SIZES = [1000, 10000, 32000, 32768, 32780, 41950, 41952, 42000, 65000, 65535, 65600, 80000, 999999] serverthread = ReceiveThread(ss, SIZES) serverthread.setDaemon(True) serverthread.start() port = ss.getsockname()[1] cs = socketutil.create_socket(connect=("localhost", port), timeout=2) # test some sizes that might be problematic with MSG_WAITALL and check that they work fine for size in SIZES: socketutil.send_data(cs, b"x" * size) data = socketutil.receive_data(cs, size) assert size == len(data) serverthread.join() ss.close() cs.close() def testMsgWaitAllConfig(self): if platform.system() == "Windows": # default config should be False on these platforms even though socket.MSG_WAITALL might exist assert not socketutil.USE_MSG_WAITALL else: # on all other platforms, default config should be True (as long as socket.MSG_WAITALL exists) if hasattr(socket, "MSG_WAITALL"): assert socketutil.USE_MSG_WAITALL else: assert not socketutil.USE_MSG_WAITALL
[ "Pyro5.socketutil.send_data", "Pyro5.socketutil.create_bc_socket", "os.remove", "sys.platform.startswith", "socket.socket", "Pyro5.socketutil.create_socket", "os.path.exists", "pytest.skip", "platform.system", "Pyro5.socketutil.receive_data", "socket.getaddrinfo", "Pyro5.socketutil.find_probably_unused_port", "pytest.raises", "Pyro5.socketutil.get_interface", "Pyro5.socketutil.get_ip_address", "Pyro5.socketutil.bind_unused_port" ]
[((197, 246), 'socket.socket', 'socket.socket', (['socket.AF_INET6', 'socket.SOCK_DGRAM'], {}), '(socket.AF_INET6, socket.SOCK_DGRAM)\n', (210, 246), False, 'import socket\n'), ((313, 365), 'socket.getaddrinfo', 'socket.getaddrinfo', (['"""localhost"""', '(53)', 'socket.AF_INET6'], {}), "('localhost', 53, socket.AF_INET6)\n", (331, 365), False, 'import socket\n'), ((594, 623), 'Pyro5.socketutil.get_ip_address', 'socketutil.get_ip_address', (['""""""'], {}), "('')\n", (619, 623), False, 'from Pyro5 import config, socketutil\n'), ((673, 722), 'Pyro5.socketutil.get_ip_address', 'socketutil.get_ip_address', (['""""""'], {'workaround127': '(True)'}), "('', workaround127=True)\n", (698, 722), False, 'from Pyro5 import config, socketutil\n'), ((820, 879), 'Pyro5.socketutil.get_ip_address', 'socketutil.get_ip_address', (['"""127.0.0.1"""'], {'workaround127': '(False)'}), "('127.0.0.1', workaround127=False)\n", (845, 879), False, 'from Pyro5 import config, socketutil\n'), ((968, 1026), 'Pyro5.socketutil.get_ip_address', 'socketutil.get_ip_address', (['"""127.0.0.1"""'], {'workaround127': '(True)'}), "('127.0.0.1', workaround127=True)\n", (993, 1026), False, 'from Pyro5 import config, socketutil\n'), ((1213, 1256), 'Pyro5.socketutil.get_ip_address', 'socketutil.get_ip_address', (['"""::1"""'], {'version': '(6)'}), "('::1', version=6)\n", (1238, 1256), False, 'from Pyro5 import config, socketutil\n'), ((1337, 1386), 'Pyro5.socketutil.get_ip_address', 'socketutil.get_ip_address', (['"""localhost"""'], {'version': '(6)'}), "('localhost', version=6)\n", (1362, 1386), False, 'from Pyro5 import config, socketutil\n'), ((1500, 1537), 'Pyro5.socketutil.get_interface', 'socketutil.get_interface', (['"""localhost"""'], {}), "('localhost')\n", (1524, 1537), False, 'from Pyro5 import config, socketutil\n'), ((1991, 2029), 'Pyro5.socketutil.find_probably_unused_port', 'socketutil.find_probably_unused_port', ([], {}), '()\n', (2027, 2029), False, 'from Pyro5 import config, socketutil\n'), 
((2046, 2084), 'Pyro5.socketutil.find_probably_unused_port', 'socketutil.find_probably_unused_port', ([], {}), '()\n', (2082, 2084), False, 'from Pyro5 import config, socketutil\n'), ((2156, 2220), 'Pyro5.socketutil.find_probably_unused_port', 'socketutil.find_probably_unused_port', ([], {'socktype': 'socket.SOCK_DGRAM'}), '(socktype=socket.SOCK_DGRAM)\n', (2192, 2220), False, 'from Pyro5 import config, socketutil\n'), ((2237, 2301), 'Pyro5.socketutil.find_probably_unused_port', 'socketutil.find_probably_unused_port', ([], {'socktype': 'socket.SOCK_DGRAM'}), '(socktype=socket.SOCK_DGRAM)\n', (2273, 2301), False, 'from Pyro5 import config, socketutil\n'), ((2476, 2536), 'Pyro5.socketutil.find_probably_unused_port', 'socketutil.find_probably_unused_port', ([], {'family': 'socket.AF_INET6'}), '(family=socket.AF_INET6)\n', (2512, 2536), False, 'from Pyro5 import config, socketutil\n'), ((2553, 2613), 'Pyro5.socketutil.find_probably_unused_port', 'socketutil.find_probably_unused_port', ([], {'family': 'socket.AF_INET6'}), '(family=socket.AF_INET6)\n', (2589, 2613), False, 'from Pyro5 import config, socketutil\n'), ((2685, 2778), 'Pyro5.socketutil.find_probably_unused_port', 'socketutil.find_probably_unused_port', ([], {'family': 'socket.AF_INET6', 'socktype': 'socket.SOCK_DGRAM'}), '(family=socket.AF_INET6, socktype=\n socket.SOCK_DGRAM)\n', (2721, 2778), False, 'from Pyro5 import config, socketutil\n'), ((2790, 2883), 'Pyro5.socketutil.find_probably_unused_port', 'socketutil.find_probably_unused_port', ([], {'family': 'socket.AF_INET6', 'socktype': 'socket.SOCK_DGRAM'}), '(family=socket.AF_INET6, socktype=\n socket.SOCK_DGRAM)\n', (2826, 2883), False, 'from Pyro5 import config, socketutil\n'), ((2985, 3034), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (2998, 3034), False, 'import socket\n'), ((3051, 3100), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), 
'(socket.AF_INET, socket.SOCK_STREAM)\n', (3064, 3100), False, 'import socket\n'), ((3117, 3151), 'Pyro5.socketutil.bind_unused_port', 'socketutil.bind_unused_port', (['sock1'], {}), '(sock1)\n', (3144, 3151), False, 'from Pyro5 import config, socketutil\n'), ((3168, 3202), 'Pyro5.socketutil.bind_unused_port', 'socketutil.bind_unused_port', (['sock2'], {}), '(sock2)\n', (3195, 3202), False, 'from Pyro5 import config, socketutil\n'), ((3484, 3534), 'socket.socket', 'socket.socket', (['socket.AF_INET6', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET6, socket.SOCK_STREAM)\n', (3497, 3534), False, 'import socket\n'), ((3551, 3601), 'socket.socket', 'socket.socket', (['socket.AF_INET6', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET6, socket.SOCK_STREAM)\n', (3564, 3601), False, 'import socket\n'), ((3618, 3652), 'Pyro5.socketutil.bind_unused_port', 'socketutil.bind_unused_port', (['sock1'], {}), '(sock1)\n', (3645, 3652), False, 'from Pyro5 import config, socketutil\n'), ((3669, 3703), 'Pyro5.socketutil.bind_unused_port', 'socketutil.bind_unused_port', (['sock2'], {}), '(sock2)\n', (3696, 3703), False, 'from Pyro5 import config, socketutil\n'), ((3959, 3985), 'Pyro5.socketutil.create_socket', 'socketutil.create_socket', ([], {}), '()\n', (3983, 3985), False, 'from Pyro5 import config, socketutil\n'), ((4041, 4070), 'Pyro5.socketutil.create_bc_socket', 'socketutil.create_bc_socket', ([], {}), '()\n', (4068, 4070), False, 'from Pyro5 import config, socketutil\n'), ((4669, 4704), 'Pyro5.socketutil.create_socket', 'socketutil.create_socket', ([], {'ipv6': '(True)'}), '(ipv6=True)\n', (4693, 4704), False, 'from Pyro5 import config, socketutil\n'), ((4761, 4799), 'Pyro5.socketutil.create_bc_socket', 'socketutil.create_bc_socket', ([], {'ipv6': '(True)'}), '(ipv6=True)\n', (4788, 4799), False, 'from Pyro5 import config, socketutil\n'), ((5337, 5384), 'Pyro5.socketutil.create_socket', 'socketutil.create_socket', ([], {'bind': "('127.0.0.1', 0)"}), "(bind=('127.0.0.1', 0))\n", 
(5361, 5384), False, 'from Pyro5 import config, socketutil\n'), ((5440, 5490), 'Pyro5.socketutil.create_bc_socket', 'socketutil.create_bc_socket', ([], {'bind': "('127.0.0.1', 0)"}), "(bind=('127.0.0.1', 0))\n", (5467, 5490), False, 'from Pyro5 import config, socketutil\n'), ((5883, 5924), 'Pyro5.socketutil.create_socket', 'socketutil.create_socket', ([], {'bind': "('::1', 0)"}), "(bind=('::1', 0))\n", (5907, 5924), False, 'from Pyro5 import config, socketutil\n'), ((5981, 6025), 'Pyro5.socketutil.create_bc_socket', 'socketutil.create_bc_socket', ([], {'bind': "('::1', 0)"}), "(bind=('::1', 0))\n", (6008, 6025), False, 'from Pyro5 import config, socketutil\n'), ((6462, 6486), 'os.path.exists', 'os.path.exists', (['SOCKNAME'], {}), '(SOCKNAME)\n', (6476, 6486), False, 'import os\n'), ((6532, 6571), 'Pyro5.socketutil.create_socket', 'socketutil.create_socket', ([], {'bind': 'SOCKNAME'}), '(bind=SOCKNAME)\n', (6556, 6571), False, 'from Pyro5 import config, socketutil\n'), ((6686, 6710), 'os.path.exists', 'os.path.exists', (['SOCKNAME'], {}), '(SOCKNAME)\n', (6700, 6710), False, 'import os\n'), ((7143, 7182), 'Pyro5.socketutil.create_socket', 'socketutil.create_socket', ([], {'bind': 'SOCKNAME'}), '(bind=SOCKNAME)\n', (7167, 7182), False, 'from Pyro5 import config, socketutil\n'), ((7298, 7345), 'Pyro5.socketutil.create_socket', 'socketutil.create_socket', ([], {'bind': "('localhost', 0)"}), "(bind=('localhost', 0))\n", (7322, 7345), False, 'from Pyro5 import config, socketutil\n'), ((7394, 7447), 'Pyro5.socketutil.create_socket', 'socketutil.create_socket', ([], {'connect': "('localhost', port)"}), "(connect=('localhost', port))\n", (7418, 7447), False, 'from Pyro5 import config, socketutil\n'), ((7456, 7497), 'Pyro5.socketutil.send_data', 'socketutil.send_data', (['cs', "(b'foobar!' * 10)"], {}), "(cs, b'foobar!' 
* 10)\n", (7476, 7497), False, 'from Pyro5 import config, socketutil\n'), ((7573, 7605), 'Pyro5.socketutil.receive_data', 'socketutil.receive_data', (['a[0]', '(5)'], {}), '(a[0], 5)\n', (7596, 7605), False, 'from Pyro5 import config, socketutil\n'), ((7653, 7685), 'Pyro5.socketutil.receive_data', 'socketutil.receive_data', (['a[0]', '(5)'], {}), '(a[0], 5)\n', (7676, 7685), False, 'from Pyro5 import config, socketutil\n'), ((7958, 7982), 'os.path.exists', 'os.path.exists', (['SOCKNAME'], {}), '(SOCKNAME)\n', (7972, 7982), False, 'import os\n'), ((8029, 8068), 'Pyro5.socketutil.create_socket', 'socketutil.create_socket', ([], {'bind': 'SOCKNAME'}), '(bind=SOCKNAME)\n', (8053, 8068), False, 'from Pyro5 import config, socketutil\n'), ((8082, 8124), 'Pyro5.socketutil.create_socket', 'socketutil.create_socket', ([], {'connect': 'SOCKNAME'}), '(connect=SOCKNAME)\n', (8106, 8124), False, 'from Pyro5 import config, socketutil\n'), ((8133, 8174), 'Pyro5.socketutil.send_data', 'socketutil.send_data', (['cs', "(b'foobar!' * 10)"], {}), "(cs, b'foobar!' 
* 10)\n", (8153, 8174), False, 'from Pyro5 import config, socketutil\n'), ((8250, 8282), 'Pyro5.socketutil.receive_data', 'socketutil.receive_data', (['a[0]', '(5)'], {}), '(a[0], 5)\n', (8273, 8282), False, 'from Pyro5 import config, socketutil\n'), ((8330, 8362), 'Pyro5.socketutil.receive_data', 'socketutil.receive_data', (['a[0]', '(5)'], {}), '(a[0], 5)\n', (8353, 8362), False, 'from Pyro5 import config, socketutil\n'), ((8465, 8489), 'os.path.exists', 'os.path.exists', (['SOCKNAME'], {}), '(SOCKNAME)\n', (8479, 8489), False, 'import os\n'), ((8566, 8604), 'Pyro5.socketutil.create_bc_socket', 'socketutil.create_bc_socket', (['(None, 0)'], {}), '((None, 0))\n', (8593, 8604), False, 'from Pyro5 import config, socketutil\n'), ((8653, 8682), 'Pyro5.socketutil.create_bc_socket', 'socketutil.create_bc_socket', ([], {}), '()\n', (8680, 8682), False, 'from Pyro5 import config, socketutil\n'), ((9252, 9310), 'Pyro5.socketutil.create_socket', 'socketutil.create_socket', ([], {'bind': "('localhost', 0)", 'timeout': '(2)'}), "(bind=('localhost', 0), timeout=2)\n", (9276, 9310), False, 'from Pyro5 import config, socketutil\n'), ((9359, 9423), 'Pyro5.socketutil.create_socket', 'socketutil.create_socket', ([], {'connect': "('localhost', port)", 'timeout': '(2)'}), "(connect=('localhost', port), timeout=2)\n", (9383, 9423), False, 'from Pyro5 import config, socketutil\n'), ((10469, 10516), 'Pyro5.socketutil.create_socket', 'socketutil.create_socket', ([], {'bind': "('localhost', 0)"}), "(bind=('localhost', 0))\n", (10493, 10516), False, 'from Pyro5 import config, socketutil\n'), ((10787, 10851), 'Pyro5.socketutil.create_socket', 'socketutil.create_socket', ([], {'connect': "('localhost', port)", 'timeout': '(2)'}), "(connect=('localhost', port), timeout=2)\n", (10811, 10851), False, 'from Pyro5 import config, socketutil\n'), ((1164, 1197), 'pytest.skip', 'pytest.skip', (['"""no ipv6 capability"""'], {}), "('no ipv6 capability')\n", (1175, 1197), False, 'import pytest\n'), 
((1756, 1787), 'Pyro5.socketutil.get_interface', 'socketutil.get_interface', (['"""::1"""'], {}), "('::1')\n", (1780, 1787), False, 'from Pyro5 import config, socketutil\n'), ((2426, 2459), 'pytest.skip', 'pytest.skip', (['"""no ipv6 capability"""'], {}), "('no ipv6 capability')\n", (2437, 2459), False, 'import pytest\n'), ((3434, 3467), 'pytest.skip', 'pytest.skip', (['"""no ipv6 capability"""'], {}), "('no ipv6 capability')\n", (3445, 3467), False, 'import pytest\n'), ((4623, 4656), 'pytest.skip', 'pytest.skip', (['"""no ipv6 capability"""'], {}), "('no ipv6 capability')\n", (4634, 4656), False, 'import pytest\n'), ((5640, 5665), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (5653, 5665), False, 'import pytest\n'), ((5679, 5764), 'Pyro5.socketutil.create_socket', 'socketutil.create_socket', ([], {'bind': "('localhost', 12345)", 'connect': "('localhost', 1234)"}), "(bind=('localhost', 12345), connect=('localhost', 1234)\n )\n", (5703, 5764), False, 'from Pyro5 import config, socketutil\n'), ((5837, 5870), 'pytest.skip', 'pytest.skip', (['"""no ipv6 capability"""'], {}), "('no ipv6 capability')\n", (5848, 5870), False, 'import pytest\n'), ((6159, 6184), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (6172, 6184), False, 'import pytest\n'), ((6198, 6266), 'Pyro5.socketutil.create_socket', 'socketutil.create_socket', ([], {'bind': "('::1', 12345)", 'connect': "('::1', 1234)"}), "(bind=('::1', 12345), connect=('::1', 1234))\n", (6222, 6266), False, 'from Pyro5 import config, socketutil\n'), ((6365, 6413), 'pytest.skip', 'pytest.skip', (['"""no unix domain sockets capability"""'], {}), "('no unix domain sockets capability')\n", (6376, 6413), False, 'import pytest\n'), ((6500, 6519), 'os.remove', 'os.remove', (['SOCKNAME'], {}), '(SOCKNAME)\n', (6509, 6519), False, 'import os\n'), ((6724, 6743), 'os.remove', 'os.remove', (['SOCKNAME'], {}), '(SOCKNAME)\n', (6733, 6743), False, 'import os\n'), ((6757, 6782), 
'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (6770, 6782), False, 'import pytest\n'), ((6796, 6853), 'Pyro5.socketutil.create_socket', 'socketutil.create_socket', ([], {'bind': 'SOCKNAME', 'connect': 'SOCKNAME'}), '(bind=SOCKNAME, connect=SOCKNAME)\n', (6820, 6853), False, 'from Pyro5 import config, socketutil\n'), ((6988, 7051), 'pytest.skip', 'pytest.skip', (['"""no unix domain sockets capability, and not Linux"""'], {}), "('no unix domain sockets capability, and not Linux')\n", (6999, 7051), False, 'import pytest\n'), ((7861, 7909), 'pytest.skip', 'pytest.skip', (['"""no unix domain sockets capability"""'], {}), "('no unix domain sockets capability')\n", (7872, 7909), False, 'import pytest\n'), ((7996, 8015), 'os.remove', 'os.remove', (['SOCKNAME'], {}), '(SOCKNAME)\n', (8005, 8015), False, 'import os\n'), ((8503, 8522), 'os.remove', 'os.remove', (['SOCKNAME'], {}), '(SOCKNAME)\n', (8512, 8522), False, 'import os\n'), ((9664, 9701), 'Pyro5.socketutil.send_data', 'socketutil.send_data', (['cs', "(b'x' * size)"], {}), "(cs, b'x' * size)\n", (9684, 9701), False, 'from Pyro5 import config, socketutil\n'), ((9721, 9756), 'Pyro5.socketutil.receive_data', 'socketutil.receive_data', (['a[0]', 'size'], {}), '(a[0], size)\n', (9744, 9756), False, 'from Pyro5 import config, socketutil\n'), ((9769, 9801), 'Pyro5.socketutil.send_data', 'socketutil.send_data', (['a[0]', 'data'], {}), '(a[0], data)\n', (9789, 9801), False, 'from Pyro5 import config, socketutil\n'), ((9821, 9854), 'Pyro5.socketutil.receive_data', 'socketutil.receive_data', (['cs', 'size'], {}), '(cs, size)\n', (9844, 9854), False, 'from Pyro5 import config, socketutil\n'), ((10990, 11027), 'Pyro5.socketutil.send_data', 'socketutil.send_data', (['cs', "(b'x' * size)"], {}), "(cs, b'x' * size)\n", (11010, 11027), False, 'from Pyro5 import config, socketutil\n'), ((11047, 11080), 'Pyro5.socketutil.receive_data', 'socketutil.receive_data', (['cs', 'size'], {}), '(cs, size)\n', (11070, 
11080), False, 'from Pyro5 import config, socketutil\n'), ((11232, 11249), 'platform.system', 'platform.system', ([], {}), '()\n', (11247, 11249), False, 'import platform\n'), ((6942, 6974), 'sys.platform.startswith', 'sys.platform.startswith', (['"""linux"""'], {}), "('linux')\n", (6965, 6974), False, 'import sys\n'), ((10343, 10376), 'Pyro5.socketutil.receive_data', 'socketutil.receive_data', (['cs', 'size'], {}), '(cs, size)\n', (10366, 10376), False, 'from Pyro5 import config, socketutil\n'), ((10397, 10427), 'Pyro5.socketutil.send_data', 'socketutil.send_data', (['cs', 'data'], {}), '(cs, data)\n', (10417, 10427), False, 'from Pyro5 import config, socketutil\n')]
# coding=utf-8 """ Attempt to creat an RNG that picks numbers like humans # favors date parts (1-31, 1-12, 19/20, 50-99/00-18) # seeks/avoids patterns (i.e. 1,2,3,4,5 or 2,22,32,42) # favors past winning numbers # favors culturally meaningful numbers, 777, 888, etc. # http://ww2.amstat.org/publications/jse/v13n2/mecklin.html Past number strategies Choosing winning combinations from previous draws Modifying previous winning combinations (e.g. adding 1 to each number in a previous winning combination) Choosing “hot” or “cold” numbers (a statistically nonsensical strategy suggested in many of the lay books about lotteries) "Numerology" factors of 1, 2, 3, etc., eg. 7, 14, 21, etc Choosing arithmetic progressions (e.g. 1-2-3-4-5-6 or 2-5-8-11-14-17) Choosing powers of 2 (e.g. 1-2-4-8-16-32) Choosing perfect squares (e.g. 1-4-9-16-25-36) Choosing all prime numbers (e.g. 2-3-5-7-11-13) Choosing Fibonacci numbers (e.g. 1-2-3-5-8-13) Dates Choosing only numbers that are less than or equal to 31; many people choose numbers based on birthdays, anniversaries, etc. """ from typing import List, Set import random from datetime import date, timedelta class BiasedRng(object): """ Birthday numbers. Simulate what happens if you only pick birthday numbers. Against an unbiased state RNG, you expect a higher risk of capped payouts from too many people winning, otherwise no change-- all numbers are just as good as any other. If you were playing keno with friends (and not the state), one player could exploit the fact that the other is using a BiasedRng. """ def __init__(self) -> None: pass def dates_only(self) -> List[int]: """ Pick number drawn from a biased RNG :return: """ pick = set() # type: Set[int] while len(pick) < 20: birthday = self.random_birthday() pick.update(self.keno_range(birthday)) pick_list = [x for x in pick] pick_list.sort() return pick_list def random_birthday(self) -> date: """ Birthdays for people up to 80 years old. 
:return: """ days = 365 * 80 oldest_birthday = date.today() - timedelta(days=days) days_since_random_birthday = random.randint(0, days) return oldest_birthday + timedelta(days=days_since_random_birthday) def keno_range(self, value: date) -> List[int]: """ Break date into part and return set of parts from 1 to 80 :param value: :return: """ full_range = { int(str(value.year)[0:2]), int(str(value.year)[2:]), value.day, value.month, } - {0} pick = set() for x in full_range: if x <= 80: pick.add(x) return list(pick) if __name__ == "__main__": rng = BiasedRng() bday = rng.random_birthday() print(bday) print(rng.keno_range(bday)) print(rng.dates_only())
[ "datetime.date.today", "random.randint", "datetime.timedelta" ]
[((2279, 2302), 'random.randint', 'random.randint', (['(0)', 'days'], {}), '(0, days)\n', (2293, 2302), False, 'import random\n'), ((2206, 2218), 'datetime.date.today', 'date.today', ([], {}), '()\n', (2216, 2218), False, 'from datetime import date, timedelta\n'), ((2221, 2241), 'datetime.timedelta', 'timedelta', ([], {'days': 'days'}), '(days=days)\n', (2230, 2241), False, 'from datetime import date, timedelta\n'), ((2336, 2378), 'datetime.timedelta', 'timedelta', ([], {'days': 'days_since_random_birthday'}), '(days=days_since_random_birthday)\n', (2345, 2378), False, 'from datetime import date, timedelta\n')]
# coding=utf-8
# Copyright 2020 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Lint as: python3
"""Tests for tensorflow_datasets.core._sharded_files."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from tensorflow_datasets import testing
from tensorflow_datasets.core import _sharded_files


def _full_read(*filenames):
  """Expected instructions when every listed shard is read in full."""
  return [{"filename": name, "skip": 0, "take": -1} for name in filenames]


class GetReadInstructionsTest(testing.TestCase):
  """Checks get_read_instructions over several shard layouts."""

  def test_read_all_even_sharding(self):
    # Three shards of four records each, fully covered by [0, 12).
    instructions = _sharded_files.get_read_instructions(
        0, 12, ["f1", "f2", "f3"], [4, 4, 4])
    self.assertEqual(instructions, _full_read("f1", "f2", "f3"))

  def test_read_all_empty_shard(self):
    # The empty shard (f3) is dropped from the instructions.
    instructions = _sharded_files.get_read_instructions(
        0, 12, ["f1", "f2", "f3", "f4"], [4, 4, 0, 4])
    self.assertEqual(instructions, _full_read("f1", "f2", "f4"))

  def test_from1_to10(self):
    # Skip the first record of f1 and stop after two records of f4.
    instructions = _sharded_files.get_read_instructions(
        1, 10, ["f1", "f2", "f3", "f4"], [4, 4, 0, 4])
    self.assertEqual(instructions, [
        {"filename": "f1", "skip": 1, "take": -1},
        {"filename": "f2", "skip": 0, "take": -1},
        {"filename": "f4", "skip": 0, "take": 2},
    ])

  def test_nothing_to_read(self):
    # Empty ranges yield no instructions, wherever they start.
    for start, end in ((0, 0), (4, 4), (5, 5)):
      self.assertEqual(
          _sharded_files.get_read_instructions(
              start, end, ["f1", "f2", "f3", "f4"], [0, 3, 0, 2]),
          [])


if __name__ == "__main__":
  testing.test_main()
[ "tensorflow_datasets.testing.test_main", "tensorflow_datasets.core._sharded_files.get_read_instructions" ]
[((2399, 2418), 'tensorflow_datasets.testing.test_main', 'testing.test_main', ([], {}), '()\n', (2416, 2418), False, 'from tensorflow_datasets import testing\n'), ((1014, 1088), 'tensorflow_datasets.core._sharded_files.get_read_instructions', '_sharded_files.get_read_instructions', (['(0)', '(12)', "['f1', 'f2', 'f3']", '[4, 4, 4]'], {}), "(0, 12, ['f1', 'f2', 'f3'], [4, 4, 4])\n", (1050, 1088), False, 'from tensorflow_datasets.core import _sharded_files\n'), ((1336, 1423), 'tensorflow_datasets.core._sharded_files.get_read_instructions', '_sharded_files.get_read_instructions', (['(0)', '(12)', "['f1', 'f2', 'f3', 'f4']", '[4, 4, 0, 4]'], {}), "(0, 12, ['f1', 'f2', 'f3', 'f4'], [4, 4,\n 0, 4])\n", (1372, 1423), False, 'from tensorflow_datasets.core import _sharded_files\n'), ((1657, 1744), 'tensorflow_datasets.core._sharded_files.get_read_instructions', '_sharded_files.get_read_instructions', (['(1)', '(10)', "['f1', 'f2', 'f3', 'f4']", '[4, 4, 0, 4]'], {}), "(1, 10, ['f1', 'f2', 'f3', 'f4'], [4, 4,\n 0, 4])\n", (1693, 1744), False, 'from tensorflow_datasets.core import _sharded_files\n'), ((1982, 2068), 'tensorflow_datasets.core._sharded_files.get_read_instructions', '_sharded_files.get_read_instructions', (['(0)', '(0)', "['f1', 'f2', 'f3', 'f4']", '[0, 3, 0, 2]'], {}), "(0, 0, ['f1', 'f2', 'f3', 'f4'], [0, 3,\n 0, 2])\n", (2018, 2068), False, 'from tensorflow_datasets.core import _sharded_files\n'), ((2114, 2200), 'tensorflow_datasets.core._sharded_files.get_read_instructions', '_sharded_files.get_read_instructions', (['(4)', '(4)', "['f1', 'f2', 'f3', 'f4']", '[0, 3, 0, 2]'], {}), "(4, 4, ['f1', 'f2', 'f3', 'f4'], [0, 3,\n 0, 2])\n", (2150, 2200), False, 'from tensorflow_datasets.core import _sharded_files\n'), ((2246, 2332), 'tensorflow_datasets.core._sharded_files.get_read_instructions', '_sharded_files.get_read_instructions', (['(5)', '(5)', "['f1', 'f2', 'f3', 'f4']", '[0, 3, 0, 2]'], {}), "(5, 5, ['f1', 'f2', 'f3', 'f4'], [0, 3,\n 0, 2])\n", (2282, 2332), 
False, 'from tensorflow_datasets.core import _sharded_files\n')]
#!/usr/bin/env python3
# coding=utf-8

# Switch the active WSL rootfs to a previously-installed distribution.
# The currently-active 'rootfs' directory is renamed to 'rootfs_<label>'
# and the requested 'rootfs_<label>' directory is renamed to 'rootfs'.

import glob
import sys
import os.path
import subprocess

from utils import Fore, parse_image_arg, probe_wsl, get_label, path_trans, handle_sigint

# handle arguments
handle_sigint()

if len(sys.argv) < 2:
    # print usage information
    print('usage: ./switch.py image[:tag]')

    # check if there are any installations
    basedir, lxpath, bashpath = probe_wsl(True)

    if basedir:
        # fix basedir to add LocalState\rootfs
        basedir = os.path.join(basedir, 'LocalState')
        names = glob.glob(os.path.join(basedir, 'rootfs*'))
        # not_debian: set to False when the active rootfs is the default debian:9
        # has_debian: set to True when a debian:9 rootfs exists at all
        not_debian = True
        has_debian = False

        if len(names) > 0:
            print('\nThe following distributions are currently installed:\n')

            for name in names:
                # The directory named exactly 'rootfs' is the active one.
                active = os.path.basename(name) == 'rootfs'
                name = get_label(name).split('_', 1)

                if len(name) != 2:
                    # No parseable '<image>_<tag>' label -- skip this entry.
                    continue

                if name[0] == 'debian' and name[1] == '9':
                    has_debian = True

                    if active:
                        not_debian = False

                print(' - %s%s%s:%s%s%s%s' % (Fore.YELLOW, name[0], Fore.RESET, Fore.YELLOW, name[1], Fore.RESET, ('%s*%s' % (Fore.GREEN, Fore.RESET) if active else '')))

            if not_debian:
                print()

                if has_debian:
                    print('To switch back to the default distribution, specify %sdebian%s:%s9%s as the argument.' % (Fore.YELLOW, Fore.RESET, Fore.YELLOW, Fore.RESET))
                else:
                    print('You do not seem to have the default distribution installed anymore.\nTo reinstall it, run %slxrun /uninstall%s and %slxrun /install%s from the command prompt.' % (Fore.GREEN, Fore.RESET, Fore.GREEN, Fore.RESET))

    sys.exit(-1)

image, tag, fname, label = parse_image_arg(sys.argv[1], False)

# sanity checks
print('%s[*]%s Probing the Linux subsystem...' % (Fore.GREEN, Fore.RESET))
basedir, lxpath, bashpath = probe_wsl()

# fix basedir to add LocalState\rootfs
basedir = os.path.join(basedir, 'LocalState')

# read label of current distribution
clabel = get_label(os.path.join(basedir, 'rootfs'))

if not clabel:
    # No /.switch_label in the active rootfs: assume it is the stock install.
    clabel = 'debian_9'

    if label == clabel:
        # Target equals the assumed label -- we cannot tell the two apart, so bail.
        print('%s[!]%s No %s/.switch_label%s found, and the target rootfs is %subuntu%s:%strusty%s. Cannot continue.' % (Fore.RED, Fore.RESET, Fore.BLUE, Fore.RESET, Fore.YELLOW, Fore.RESET, Fore.YELLOW, Fore.RESET))
        print('%s[!]%s To fix this, run %secho some_tag > /.switch_label%s (replacing %ssome_tag%s with something like %sdebian_sid%s) from the current Bash terminal.' % (Fore.RED, Fore.RESET, Fore.GREEN, Fore.RESET, Fore.GREEN, Fore.RESET, Fore.GREEN, Fore.RESET))
        sys.exit(-1)
    else:
        print('%s[!]%s No %s/.switch_label%s found, assuming current rootfs is %subuntu%s:%strusty%s.' % (Fore.RED, Fore.RESET, Fore.BLUE, Fore.RESET, Fore.YELLOW, Fore.RESET, Fore.YELLOW, Fore.RESET))

# sanity checks, take two
if clabel == label:
    # Target is already active -- nothing to do.
    print('%s[!]%s The %s%s%s:%s%s%s rootfs is the current installation.' % (Fore.YELLOW, Fore.RESET, Fore.YELLOW, image, Fore.RESET, Fore.YELLOW, tag, Fore.RESET))
    sys.exit(-1)

if not os.path.isdir(os.path.join(basedir, 'rootfs_' + label)):
    print('%s[!]%s The %s%s%s:%s%s%s rootfs is not installed.' % (Fore.RED, Fore.RESET, Fore.YELLOW, image, Fore.RESET, Fore.YELLOW, tag, Fore.RESET))
    sys.exit(-1)

# do the switch
# Step 1: move the active 'rootfs' out of the way, tagged with its own label.
print('%s[*]%s Moving current %srootfs%s to %srootfs_%s%s...' % (Fore.GREEN, Fore.RESET, Fore.BLUE, Fore.RESET, Fore.BLUE, clabel, Fore.RESET))

try:
    # cmd's 'move' is used (via path_trans) because the directories live on the Windows side.
    subprocess.check_output(['cmd', '/C', 'move', path_trans(os.path.join(basedir, 'rootfs')), path_trans(os.path.join(basedir, 'rootfs_' + clabel))])
except subprocess.CalledProcessError as err:
    print('%s[!]%s Failed to backup current %srootfs%s: %s' % (Fore.RED, Fore.RESET, Fore.BLUE, Fore.RESET, err))
    sys.exit(-1)

# Step 2: move the requested rootfs into place.
print('%s[*]%s Moving desired %srootfs_%s%s to %srootfs%s...' % (Fore.GREEN, Fore.RESET, Fore.BLUE, label, Fore.RESET, Fore.BLUE, Fore.RESET))

try:
    subprocess.check_output(['cmd', '/C', 'move', path_trans(os.path.join(basedir, 'rootfs_' + label)), path_trans(os.path.join(basedir, 'rootfs'))])
except subprocess.CalledProcessError as err:
    print('%s[!]%s Failed to switch to new %srootfs%s: %s' % (Fore.RED, Fore.RESET, Fore.BLUE, Fore.RESET, err))
    # Step 2 failed: try to restore the backup made in step 1.
    print('%s[*]%s Rolling back to old %srootfs%s...' % (Fore.YELLOW, Fore.RESET, Fore.BLUE, Fore.RESET))

    try:
        subprocess.check_output(['cmd', '/C', 'move', path_trans(os.path.join(basedir, 'rootfs_' + clabel)), path_trans(os.path.join(basedir, 'rootfs'))])
    except subprocess.CalledProcessError as err:
        # Rollback also failed: no usable rootfs remains.
        print('%s[!]%s Failed to roll back to old %srootfs%s: %s' % (Fore.RED, Fore.RESET, Fore.BLUE, Fore.RESET, err))
        print('%s[!]%s You are now the proud owner of one broken Linux subsystem! To fix it, run %slxrun /uninstall%s and %slxrun /install%s from the command prompt.' % (Fore.RED, Fore.RESET, Fore.GREEN, Fore.RESET, Fore.GREEN, Fore.RESET))

    sys.exit(-1)
[ "utils.handle_sigint", "utils.get_label", "utils.parse_image_arg", "utils.probe_wsl", "sys.exit" ]
[((204, 219), 'utils.handle_sigint', 'handle_sigint', ([], {}), '()\n', (217, 219), False, 'from utils import Fore, parse_image_arg, probe_wsl, get_label, path_trans, handle_sigint\n'), ((1590, 1625), 'utils.parse_image_arg', 'parse_image_arg', (['sys.argv[1]', '(False)'], {}), '(sys.argv[1], False)\n', (1605, 1625), False, 'from utils import Fore, parse_image_arg, probe_wsl, get_label, path_trans, handle_sigint\n'), ((1748, 1759), 'utils.probe_wsl', 'probe_wsl', ([], {}), '()\n', (1757, 1759), False, 'from utils import Fore, parse_image_arg, probe_wsl, get_label, path_trans, handle_sigint\n'), ((384, 399), 'utils.probe_wsl', 'probe_wsl', (['(True)'], {}), '(True)\n', (393, 399), False, 'from utils import Fore, parse_image_arg, probe_wsl, get_label, path_trans, handle_sigint\n'), ((1549, 1561), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (1557, 1561), False, 'import sys\n'), ((2894, 2906), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (2902, 2906), False, 'import sys\n'), ((3121, 3133), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (3129, 3133), False, 'import sys\n'), ((2467, 2479), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (2475, 2479), False, 'import sys\n'), ((3608, 3620), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (3616, 3620), False, 'import sys\n'), ((4731, 4743), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (4739, 4743), False, 'import sys\n'), ((773, 788), 'utils.get_label', 'get_label', (['name'], {}), '(name)\n', (782, 788), False, 'from utils import Fore, parse_image_arg, probe_wsl, get_label, path_trans, handle_sigint\n')]
# Generated by Django 3.0.7 on 2020-06-07 13:45 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('Torrents', '0004_auto_20200607_1344'), ] operations = [ migrations.AlterField( model_name='uploadtorrents', name='uploader_name', field=models.CharField(blank=True, default='tabish', max_length=50), ), ]
[ "django.db.models.CharField" ]
[((352, 413), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'default': '"""tabish"""', 'max_length': '(50)'}), "(blank=True, default='tabish', max_length=50)\n", (368, 413), False, 'from django.db import migrations, models\n')]
import sys from mitmproxy.platform import pf from . import tutils class TestLookup: def test_simple(self): if sys.platform == "freebsd10": p = tutils.test_data.path("data/pf02") d = open(p, "rb").read() else: p = tutils.test_data.path("data/pf01") d = open(p, "rb").read() assert pf.lookup("192.168.1.111", 40000, d) == ("5.5.5.5", 80) tutils.raises( "Could not resolve original destination", pf.lookup, "192.168.1.112", 40000, d) tutils.raises( "Could not resolve original destination", pf.lookup, "192.168.1.111", 40001, d)
[ "mitmproxy.platform.pf.lookup" ]
[((359, 395), 'mitmproxy.platform.pf.lookup', 'pf.lookup', (['"""192.168.1.111"""', '(40000)', 'd'], {}), "('192.168.1.111', 40000, d)\n", (368, 395), False, 'from mitmproxy.platform import pf\n')]
#!/usr/bin/python # # Copyright 2002-2021 Barcelona Supercomputing Center (www.bsc.es) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # -*- coding: utf-8 -*- import os import sys from pycompss.util.exceptions import PyCOMPSsException def test_get_optional_module_warning(): from pycompss.util.warnings.modules import get_optional_module_warning warning = get_optional_module_warning("UNITTEST_NAME", "UNITTEST_DESCRIPTION") assert isinstance(warning, str), "Optional module warning does NOT return a string" assert warning != "", "Optional module warning can not be empty" assert "UNITTEST_NAME" in warning, "Module name not in optional module warning" assert ( "UNITTEST_DESCRIPTION" in warning ), "Module description not in optional module warning" def test_show_optional_module_warning(): import pycompss.util.warnings.modules as warn # Hack - Add non existing package warn.OPTIONAL_MODULES["non_existing_package"] = "this is the description" stdout_backup = sys.stdout out_file = "warning.out" fd = open(out_file, "w") sys.stdout = fd warn.show_optional_module_warnings() # Cleanup sys.stdout = stdout_backup fd.close() del warn.OPTIONAL_MODULES["non_existing_package"] # Result check if os.path.exists(out_file) and os.path.getsize(out_file) > 0: # Non empty file exists - this is ok. os.remove(out_file) else: raise PyCOMPSsException("The warning has not been shown")
[ "os.remove", "pycompss.util.warnings.modules.show_optional_module_warnings", "os.path.getsize", "os.path.exists", "pycompss.util.exceptions.PyCOMPSsException", "pycompss.util.warnings.modules.get_optional_module_warning" ]
[((880, 948), 'pycompss.util.warnings.modules.get_optional_module_warning', 'get_optional_module_warning', (['"""UNITTEST_NAME"""', '"""UNITTEST_DESCRIPTION"""'], {}), "('UNITTEST_NAME', 'UNITTEST_DESCRIPTION')\n", (907, 948), False, 'from pycompss.util.warnings.modules import get_optional_module_warning\n'), ((1627, 1663), 'pycompss.util.warnings.modules.show_optional_module_warnings', 'warn.show_optional_module_warnings', ([], {}), '()\n', (1661, 1663), True, 'import pycompss.util.warnings.modules as warn\n'), ((1804, 1828), 'os.path.exists', 'os.path.exists', (['out_file'], {}), '(out_file)\n', (1818, 1828), False, 'import os\n'), ((1918, 1937), 'os.remove', 'os.remove', (['out_file'], {}), '(out_file)\n', (1927, 1937), False, 'import os\n'), ((1962, 2013), 'pycompss.util.exceptions.PyCOMPSsException', 'PyCOMPSsException', (['"""The warning has not been shown"""'], {}), "('The warning has not been shown')\n", (1979, 2013), False, 'from pycompss.util.exceptions import PyCOMPSsException\n'), ((1833, 1858), 'os.path.getsize', 'os.path.getsize', (['out_file'], {}), '(out_file)\n', (1848, 1858), False, 'import os\n')]
# ==============================================================================
# Copyright 2019 - <NAME>
#
# NOTICE: Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# ==============================================================================
""" RL (NeurIPS 2019) Dataset Builder
    - Base class responsible for generating the protocol buffers to be used by the model
"""
import logging
import numpy as np
from diplomacy import Map
from diplomacy_research.models.datasets.base_builder import FixedProtoField, VarProtoField
from diplomacy_research.models.policy.base_policy_builder import BasePolicyBuilder
from diplomacy_research.models.state_space import get_order_based_mask, proto_to_board_state, GO_ID, NB_NODES, \
    NB_SUPPLY_CENTERS, POWER_VOCABULARY_KEY_TO_IX, MAX_CANDIDATES, NB_FEATURES, NB_ORDERS_FEATURES, NB_PREV_ORDERS, \
    get_board_alignments, get_orderable_locs_for_powers, get_current_season, proto_to_prev_orders_state

# Constants
LOGGER = logging.getLogger(__name__)


class BaseDatasetBuilder(BasePolicyBuilder):
    """ This object is responsible for maintaining the data and feeding it into the model """

    @staticmethod
    def get_proto_fields():
        """ Returns the proto fields used by this dataset builder

            Maps each feature name to a fixed- or variable-length proto field
            (shape + dtype). Shapes are in terms of the state-space constants
            (NB_NODES, NB_SUPPLY_CENTERS, ...) imported above.
        """
        # Creating proto fields
        proto_fields = {
            'request_id': FixedProtoField([], None),
            'player_seed': FixedProtoField([], np.int32),
            'board_state': FixedProtoField([NB_NODES, NB_FEATURES], np.uint8),
            'board_alignments': VarProtoField([NB_NODES * NB_SUPPLY_CENTERS], np.uint8),
            'prev_orders_state': FixedProtoField([NB_PREV_ORDERS, NB_NODES, NB_ORDERS_FEATURES], np.uint8),
            'decoder_inputs': VarProtoField([1 + NB_SUPPLY_CENTERS], np.int32),
            'decoder_lengths': FixedProtoField([], np.int32),
            'candidates': VarProtoField([None, MAX_CANDIDATES], np.int32),
            'noise': FixedProtoField([], np.float32),
            'temperature': FixedProtoField([], np.float32),
            'dropout_rate': FixedProtoField([], np.float32),
            'current_power': FixedProtoField([], np.int32),
            'current_season': FixedProtoField([], np.int32),
            'value_targets': FixedProtoField([], np.float32),
            # NOTE(review): the fields below (context/messages/senders/...) look
            # message-related; their exact semantics are not visible here.
            'context': VarProtoField([256 * 2 * 8], np.float32),
            'messages': VarProtoField([1 + 1000], np.int32),
            'message_lengths': FixedProtoField([], np.int32),
            'senders': VarProtoField([1000], np.uint8),
            'recipients': VarProtoField([1000], np.uint8),
            'next_conversant': FixedProtoField([2], np.int32)
        }
        return proto_fields

    @staticmethod
    def get_feedable_item(locs, state_proto, power_name, phase_history_proto, possible_orders_proto, **kwargs):
        """ Computes and return a feedable item (to be fed into the feedable queue)
            :param locs: A list of locations for which we want orders
            :param state_proto: A `.proto.game.State` representation of the state of the game.
            :param power_name: The power name for which we want the orders and the state values
            :param phase_history_proto: A list of `.proto.game.PhaseHistory`. This represents prev phases.
            :param possible_orders_proto: A `proto.game.PossibleOrders` object representing possible order for each loc.
            :param kwargs: Additional optional kwargs:
                - player_seed: The seed to apply to the player to compute a deterministic mask.
                - noise: The sigma of the additional noise to apply to the intermediate layers (i.e. sigma * epsilon)
                - temperature: The temperature to apply to the logits. (Default to 0. for deterministic/greedy)
                - dropout_rate: The amount of dropout to apply to the inputs/outputs of the decoder.
            :return: A feedable item, with feature names as key and numpy arrays as values
        """
        # pylint: disable=too-many-branches
        # Converting to state space
        map_object = Map(state_proto.map)
        board_state = proto_to_board_state(state_proto, map_object)

        # Building the decoder length
        # For adjustment phase, we restrict the number of builds/disbands to what is allowed by the game engine
        # Phase names ending with 'A' are adjustment phases (e.g. 'W1901A').
        in_adjustment_phase = state_proto.name[-1] == 'A'
        nb_builds = state_proto.builds[power_name].count
        nb_homes = len(state_proto.builds[power_name].homes)

        # If we are in adjustment phase, making sure the locs are the orderable locs (and not the policy locs)
        if in_adjustment_phase:
            orderable_locs, _ = get_orderable_locs_for_powers(state_proto, [power_name])
            if sorted(locs) != sorted(orderable_locs):
                if locs:
                    LOGGER.warning('Adj. phase requires orderable locs. Got %s. Expected %s.', locs, orderable_locs)
                locs = orderable_locs

        # WxxxA - We can build units          (nb_builds >= 0, capped by available home centers)
        # WxxxA - We can disband units        (nb_builds < 0, one decode step per disband)
        # Other phase                         (one decode step per orderable location)
        if in_adjustment_phase and nb_builds >= 0:
            decoder_length = min(nb_builds, nb_homes)
        elif in_adjustment_phase and nb_builds < 0:
            decoder_length = abs(nb_builds)
        else:
            decoder_length = len(locs)

        # Computing the candidates for the policy
        if possible_orders_proto:

            # Adjustment Phase - Use all possible orders for each location.
            if in_adjustment_phase:

                # Building a list of all orders for all locations
                adj_orders = []
                for loc in locs:
                    adj_orders += possible_orders_proto[loc].value

                # Computing the candidates
                # Every decode step shares the same candidate mask in adjustment phase.
                candidates = [get_order_based_mask(adj_orders)] * decoder_length

            # Regular phase - Compute candidates for each location
            else:
                candidates = []
                for loc in locs:
                    candidates += [get_order_based_mask(possible_orders_proto[loc].value)]

        # We don't have possible orders, so we cannot compute candidates
        # This might be normal if we are only getting the state value or the next message to send
        else:
            candidates = []
            for _ in range(decoder_length):
                candidates.append([])

        # Prev orders state
        # Walk the phase history backwards, collecting up to NB_PREV_ORDERS
        # movement ('M') phases; then left-pad with zero tensors so the stack
        # always has exactly NB_PREV_ORDERS entries (oldest first).
        prev_orders_state = []
        for phase_proto in reversed(phase_history_proto):
            if len(prev_orders_state) == NB_PREV_ORDERS:
                break
            if phase_proto.name[-1] == 'M':
                prev_orders_state = [proto_to_prev_orders_state(phase_proto, map_object)] + prev_orders_state
        for _ in range(NB_PREV_ORDERS - len(prev_orders_state)):
            prev_orders_state = [np.zeros((NB_NODES, NB_ORDERS_FEATURES), dtype=np.uint8)] + prev_orders_state
        prev_orders_state = np.array(prev_orders_state)

        # Building (order) decoder inputs [GO_ID]
        decoder_inputs = [GO_ID]

        # kwargs (defaults: deterministic player, no noise, greedy decoding, no dropout)
        player_seed = kwargs.get('player_seed', 0)
        noise = kwargs.get('noise', 0.)
        temperature = kwargs.get('temperature', 0.)
        dropout_rate = kwargs.get('dropout_rate', 0.)

        # Building feedable data
        item = {
            'player_seed': player_seed,
            'board_state': board_state,
            'board_alignments': get_board_alignments(locs,
                                                     in_adjustment_phase=in_adjustment_phase,
                                                     tokens_per_loc=1,
                                                     decoder_length=decoder_length),
            'prev_orders_state': prev_orders_state,
            'decoder_inputs': decoder_inputs,
            'decoder_lengths': decoder_length,
            'candidates': candidates,
            'noise': noise,
            'temperature': temperature,
            'dropout_rate': dropout_rate,
            'current_power': POWER_VOCABULARY_KEY_TO_IX[power_name],
            'current_season': get_current_season(state_proto)
        }

        # Return
        return item

    @property
    def proto_generation_callable(self):
        """ Returns a callable required for proto files generation.
            e.g. return generate_proto(saved_game_bytes, is_validation_set)

            Note: Callable args are - saved_game_bytes: A `.proto.game.SavedGame` object from the dataset
                                    - phase_ix: The index of the phase we want to process
                                    - is_validation_set: Boolean that indicates if we are generating the validation set

            Note: Used bytes_to_proto from diplomacy_research.utils.proto to convert bytes to proto
                  The callable must return a list of tf.train.Example to put in the protocol buffer file
        """
        raise NotImplementedError()
[ "diplomacy_research.models.state_space.get_current_season", "diplomacy_research.models.datasets.base_builder.VarProtoField", "diplomacy_research.models.state_space.get_orderable_locs_for_powers", "diplomacy_research.models.state_space.get_order_based_mask", "numpy.zeros", "diplomacy_research.models.state_space.get_board_alignments", "numpy.array", "diplomacy.Map", "diplomacy_research.models.datasets.base_builder.FixedProtoField", "diplomacy_research.models.state_space.proto_to_prev_orders_state", "diplomacy_research.models.state_space.proto_to_board_state", "logging.getLogger" ]
[((1513, 1540), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1530, 1540), False, 'import logging\n'), ((4610, 4630), 'diplomacy.Map', 'Map', (['state_proto.map'], {}), '(state_proto.map)\n', (4613, 4630), False, 'from diplomacy import Map\n'), ((4653, 4698), 'diplomacy_research.models.state_space.proto_to_board_state', 'proto_to_board_state', (['state_proto', 'map_object'], {}), '(state_proto, map_object)\n', (4673, 4698), False, 'from diplomacy_research.models.state_space import get_order_based_mask, proto_to_board_state, GO_ID, NB_NODES, NB_SUPPLY_CENTERS, POWER_VOCABULARY_KEY_TO_IX, MAX_CANDIDATES, NB_FEATURES, NB_ORDERS_FEATURES, NB_PREV_ORDERS, get_board_alignments, get_orderable_locs_for_powers, get_current_season, proto_to_prev_orders_state\n'), ((7462, 7489), 'numpy.array', 'np.array', (['prev_orders_state'], {}), '(prev_orders_state)\n', (7470, 7489), True, 'import numpy as np\n'), ((1881, 1906), 'diplomacy_research.models.datasets.base_builder.FixedProtoField', 'FixedProtoField', (['[]', 'None'], {}), '([], None)\n', (1896, 1906), False, 'from diplomacy_research.models.datasets.base_builder import FixedProtoField, VarProtoField\n'), ((1935, 1964), 'diplomacy_research.models.datasets.base_builder.FixedProtoField', 'FixedProtoField', (['[]', 'np.int32'], {}), '([], np.int32)\n', (1950, 1964), False, 'from diplomacy_research.models.datasets.base_builder import FixedProtoField, VarProtoField\n'), ((1993, 2043), 'diplomacy_research.models.datasets.base_builder.FixedProtoField', 'FixedProtoField', (['[NB_NODES, NB_FEATURES]', 'np.uint8'], {}), '([NB_NODES, NB_FEATURES], np.uint8)\n', (2008, 2043), False, 'from diplomacy_research.models.datasets.base_builder import FixedProtoField, VarProtoField\n'), ((2077, 2132), 'diplomacy_research.models.datasets.base_builder.VarProtoField', 'VarProtoField', (['[NB_NODES * NB_SUPPLY_CENTERS]', 'np.uint8'], {}), '([NB_NODES * NB_SUPPLY_CENTERS], np.uint8)\n', (2090, 2132), False, 'from 
diplomacy_research.models.datasets.base_builder import FixedProtoField, VarProtoField\n'), ((2167, 2240), 'diplomacy_research.models.datasets.base_builder.FixedProtoField', 'FixedProtoField', (['[NB_PREV_ORDERS, NB_NODES, NB_ORDERS_FEATURES]', 'np.uint8'], {}), '([NB_PREV_ORDERS, NB_NODES, NB_ORDERS_FEATURES], np.uint8)\n', (2182, 2240), False, 'from diplomacy_research.models.datasets.base_builder import FixedProtoField, VarProtoField\n'), ((2272, 2320), 'diplomacy_research.models.datasets.base_builder.VarProtoField', 'VarProtoField', (['[1 + NB_SUPPLY_CENTERS]', 'np.int32'], {}), '([1 + NB_SUPPLY_CENTERS], np.int32)\n', (2285, 2320), False, 'from diplomacy_research.models.datasets.base_builder import FixedProtoField, VarProtoField\n'), ((2353, 2382), 'diplomacy_research.models.datasets.base_builder.FixedProtoField', 'FixedProtoField', (['[]', 'np.int32'], {}), '([], np.int32)\n', (2368, 2382), False, 'from diplomacy_research.models.datasets.base_builder import FixedProtoField, VarProtoField\n'), ((2410, 2457), 'diplomacy_research.models.datasets.base_builder.VarProtoField', 'VarProtoField', (['[None, MAX_CANDIDATES]', 'np.int32'], {}), '([None, MAX_CANDIDATES], np.int32)\n', (2423, 2457), False, 'from diplomacy_research.models.datasets.base_builder import FixedProtoField, VarProtoField\n'), ((2480, 2511), 'diplomacy_research.models.datasets.base_builder.FixedProtoField', 'FixedProtoField', (['[]', 'np.float32'], {}), '([], np.float32)\n', (2495, 2511), False, 'from diplomacy_research.models.datasets.base_builder import FixedProtoField, VarProtoField\n'), ((2540, 2571), 'diplomacy_research.models.datasets.base_builder.FixedProtoField', 'FixedProtoField', (['[]', 'np.float32'], {}), '([], np.float32)\n', (2555, 2571), False, 'from diplomacy_research.models.datasets.base_builder import FixedProtoField, VarProtoField\n'), ((2601, 2632), 'diplomacy_research.models.datasets.base_builder.FixedProtoField', 'FixedProtoField', (['[]', 'np.float32'], {}), '([], 
np.float32)\n', (2616, 2632), False, 'from diplomacy_research.models.datasets.base_builder import FixedProtoField, VarProtoField\n'), ((2663, 2692), 'diplomacy_research.models.datasets.base_builder.FixedProtoField', 'FixedProtoField', (['[]', 'np.int32'], {}), '([], np.int32)\n', (2678, 2692), False, 'from diplomacy_research.models.datasets.base_builder import FixedProtoField, VarProtoField\n'), ((2724, 2753), 'diplomacy_research.models.datasets.base_builder.FixedProtoField', 'FixedProtoField', (['[]', 'np.int32'], {}), '([], np.int32)\n', (2739, 2753), False, 'from diplomacy_research.models.datasets.base_builder import FixedProtoField, VarProtoField\n'), ((2784, 2815), 'diplomacy_research.models.datasets.base_builder.FixedProtoField', 'FixedProtoField', (['[]', 'np.float32'], {}), '([], np.float32)\n', (2799, 2815), False, 'from diplomacy_research.models.datasets.base_builder import FixedProtoField, VarProtoField\n'), ((2840, 2880), 'diplomacy_research.models.datasets.base_builder.VarProtoField', 'VarProtoField', (['[256 * 2 * 8]', 'np.float32'], {}), '([256 * 2 * 8], np.float32)\n', (2853, 2880), False, 'from diplomacy_research.models.datasets.base_builder import FixedProtoField, VarProtoField\n'), ((2906, 2941), 'diplomacy_research.models.datasets.base_builder.VarProtoField', 'VarProtoField', (['[1 + 1000]', 'np.int32'], {}), '([1 + 1000], np.int32)\n', (2919, 2941), False, 'from diplomacy_research.models.datasets.base_builder import FixedProtoField, VarProtoField\n'), ((2974, 3003), 'diplomacy_research.models.datasets.base_builder.FixedProtoField', 'FixedProtoField', (['[]', 'np.int32'], {}), '([], np.int32)\n', (2989, 3003), False, 'from diplomacy_research.models.datasets.base_builder import FixedProtoField, VarProtoField\n'), ((3028, 3059), 'diplomacy_research.models.datasets.base_builder.VarProtoField', 'VarProtoField', (['[1000]', 'np.uint8'], {}), '([1000], np.uint8)\n', (3041, 3059), False, 'from diplomacy_research.models.datasets.base_builder import 
FixedProtoField, VarProtoField\n'), ((3087, 3118), 'diplomacy_research.models.datasets.base_builder.VarProtoField', 'VarProtoField', (['[1000]', 'np.uint8'], {}), '([1000], np.uint8)\n', (3100, 3118), False, 'from diplomacy_research.models.datasets.base_builder import FixedProtoField, VarProtoField\n'), ((3151, 3181), 'diplomacy_research.models.datasets.base_builder.FixedProtoField', 'FixedProtoField', (['[2]', 'np.int32'], {}), '([2], np.int32)\n', (3166, 3181), False, 'from diplomacy_research.models.datasets.base_builder import FixedProtoField, VarProtoField\n'), ((5202, 5258), 'diplomacy_research.models.state_space.get_orderable_locs_for_powers', 'get_orderable_locs_for_powers', (['state_proto', '[power_name]'], {}), '(state_proto, [power_name])\n', (5231, 5258), False, 'from diplomacy_research.models.state_space import get_order_based_mask, proto_to_board_state, GO_ID, NB_NODES, NB_SUPPLY_CENTERS, POWER_VOCABULARY_KEY_TO_IX, MAX_CANDIDATES, NB_FEATURES, NB_ORDERS_FEATURES, NB_PREV_ORDERS, get_board_alignments, get_orderable_locs_for_powers, get_current_season, proto_to_prev_orders_state\n'), ((7952, 8072), 'diplomacy_research.models.state_space.get_board_alignments', 'get_board_alignments', (['locs'], {'in_adjustment_phase': 'in_adjustment_phase', 'tokens_per_loc': '(1)', 'decoder_length': 'decoder_length'}), '(locs, in_adjustment_phase=in_adjustment_phase,\n tokens_per_loc=1, decoder_length=decoder_length)\n', (7972, 8072), False, 'from diplomacy_research.models.state_space import get_order_based_mask, proto_to_board_state, GO_ID, NB_NODES, NB_SUPPLY_CENTERS, POWER_VOCABULARY_KEY_TO_IX, MAX_CANDIDATES, NB_FEATURES, NB_ORDERS_FEATURES, NB_PREV_ORDERS, get_board_alignments, get_orderable_locs_for_powers, get_current_season, proto_to_prev_orders_state\n'), ((8621, 8652), 'diplomacy_research.models.state_space.get_current_season', 'get_current_season', (['state_proto'], {}), '(state_proto)\n', (8639, 8652), False, 'from diplomacy_research.models.state_space import 
get_order_based_mask, proto_to_board_state, GO_ID, NB_NODES, NB_SUPPLY_CENTERS, POWER_VOCABULARY_KEY_TO_IX, MAX_CANDIDATES, NB_FEATURES, NB_ORDERS_FEATURES, NB_PREV_ORDERS, get_board_alignments, get_orderable_locs_for_powers, get_current_season, proto_to_prev_orders_state\n'), ((7356, 7412), 'numpy.zeros', 'np.zeros', (['(NB_NODES, NB_ORDERS_FEATURES)'], {'dtype': 'np.uint8'}), '((NB_NODES, NB_ORDERS_FEATURES), dtype=np.uint8)\n', (7364, 7412), True, 'import numpy as np\n'), ((6318, 6350), 'diplomacy_research.models.state_space.get_order_based_mask', 'get_order_based_mask', (['adj_orders'], {}), '(adj_orders)\n', (6338, 6350), False, 'from diplomacy_research.models.state_space import get_order_based_mask, proto_to_board_state, GO_ID, NB_NODES, NB_SUPPLY_CENTERS, POWER_VOCABULARY_KEY_TO_IX, MAX_CANDIDATES, NB_FEATURES, NB_ORDERS_FEATURES, NB_PREV_ORDERS, get_board_alignments, get_orderable_locs_for_powers, get_current_season, proto_to_prev_orders_state\n'), ((6555, 6609), 'diplomacy_research.models.state_space.get_order_based_mask', 'get_order_based_mask', (['possible_orders_proto[loc].value'], {}), '(possible_orders_proto[loc].value)\n', (6575, 6609), False, 'from diplomacy_research.models.state_space import get_order_based_mask, proto_to_board_state, GO_ID, NB_NODES, NB_SUPPLY_CENTERS, POWER_VOCABULARY_KEY_TO_IX, MAX_CANDIDATES, NB_FEATURES, NB_ORDERS_FEATURES, NB_PREV_ORDERS, get_board_alignments, get_orderable_locs_for_powers, get_current_season, proto_to_prev_orders_state\n'), ((7185, 7236), 'diplomacy_research.models.state_space.proto_to_prev_orders_state', 'proto_to_prev_orders_state', (['phase_proto', 'map_object'], {}), '(phase_proto, map_object)\n', (7211, 7236), False, 'from diplomacy_research.models.state_space import get_order_based_mask, proto_to_board_state, GO_ID, NB_NODES, NB_SUPPLY_CENTERS, POWER_VOCABULARY_KEY_TO_IX, MAX_CANDIDATES, NB_FEATURES, NB_ORDERS_FEATURES, NB_PREV_ORDERS, get_board_alignments, get_orderable_locs_for_powers, 
get_current_season, proto_to_prev_orders_state\n')]
# Generated by Django 3.0.2 on 2020-01-13 14:16 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('server_app', '0009_auto_20200113_1606'), ] operations = [ migrations.AlterField( model_name='film', name='pic_url', field=models.FileField(upload_to='film_pic/'), ), ]
[ "django.db.models.FileField" ]
[((338, 377), 'django.db.models.FileField', 'models.FileField', ([], {'upload_to': '"""film_pic/"""'}), "(upload_to='film_pic/')\n", (354, 377), False, 'from django.db import migrations, models\n')]
"""A setuptools based setup module. See: https://packaging.python.org/guides/distributing-packages-using-setuptools/ https://github.com/pypa/sampleproject """ from os import path from setuptools import setup, find_packages with open(path.join(path.abspath(path.dirname(__file__)), 'README.md'), encoding='utf-8') as f: long_description = f.read() setup( name='Quandoo', version='1.3.5', description="A SDK for interacting with the Quandoo API, it is a work in progress", long_description=long_description, long_description_content_type='text/markdown', url='https://github.com/fraser-langton/Quandoo', author='<NAME>', author_email='<EMAIL>', classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', ], keywords='quandoo api', packages=find_packages(exclude=['contrib', 'docs', 'tests', 'test']), python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4', install_requires=['requests', 'tzlocal', 'python-dotenv'], )
[ "os.path.dirname", "setuptools.find_packages" ]
[((1177, 1236), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "['contrib', 'docs', 'tests', 'test']"}), "(exclude=['contrib', 'docs', 'tests', 'test'])\n", (1190, 1236), False, 'from setuptools import setup, find_packages\n'), ((259, 281), 'os.path.dirname', 'path.dirname', (['__file__'], {}), '(__file__)\n', (271, 281), False, 'from os import path\n')]
from django.urls import path

from form_workshop.create_form.views import show_form_data

# Routes the app root ('') to the form-data view.
urlpatterns = [
    path('', show_form_data, name='show form')
]
[ "django.urls.path" ]
[((113, 155), 'django.urls.path', 'path', (['""""""', 'show_form_data'], {'name': '"""show form"""'}), "('', show_form_data, name='show form')\n", (117, 155), False, 'from django.urls import path\n')]
################################################################################
#  Licensed to the Apache Software Foundation (ASF) under one
#  or more contributor license agreements.  See the NOTICE file
#  distributed with this work for additional information
#  regarding copyright ownership.  The ASF licenses this file
#  to you under the Apache License, Version 2.0 (the
#  "License"); you may not use this file except in compliance
#  with the License.  You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
#  Unless required by applicable law or agreed to in writing, software
#  distributed under the License is distributed on an "AS IS" BASIS,
#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#  See the License for the specific language governing permissions and
#  limitations under the License.
################################################################################
import os
import sys

from statefun import *

# Make the parent directory importable so the shared `serve`/`models`
# modules can be found when this file runs as a script.
current = os.path.dirname(os.path.realpath(__file__))
parent = os.path.dirname(current)
sys.path.append(parent)

import serve
from models import *


# uses isbn as target id
@serve.functions.bind(typename="com.store.fn/order")
async def order_book(context: Context, message: Message):
    """Accept an incoming order and forward it to the order-updates function.

    The function is addressed by the book's ISBN (``context.address.id``).
    """
    # take the order
    order = message.as_type(Order.TYPE)
    # update the order status
    context.send(
        message_builder(target_typename="com.store.fn/order-updates",
                        target_id=context.address.id,
                        value=order,
                        value_type=Order.TYPE))


# uses isbn as target id
@serve.functions.bind(typename="com.store.fn/order-updates")
async def order_updates(context: Context, message: Message):
    """Translate order/payment state transitions into customer notifications.

    Depending on the incoming message it may additionally emit:
      * a payment request (egress to the "payments" Kafka topic),
      * a warehouse request (message to com.warehouse.fn/order),
      * a status notification (egress to the "status" Kafka topic).
    """
    order_update = ''
    payment_request = dict()
    warehouse_request = ''
    if message.is_type(Order.TYPE):
        order = message.as_type(Order.TYPE)
        # print(f"order update '{OrderStatus(order.status).name.upper()}'", flush=True)
        # Default message; overwritten below for every recognised status.
        order_update = f'{order.buyer}, sorry we couldnt process your order'
        if order.status == OrderStatus.RESERVE:
            order_update = f'{order.buyer}, Thank You. Your order for book {order.isbn} is being processed'
        elif order.status == OrderStatus.RESERVED:
            order_update = f'{order.buyer}, your order for book {order.isbn} is awaiting payment confirmation'
            # create a payment request
            payment_request = {'id': order.isbn, 'user': order.buyer, 'value': order.value, 'status': PaymentStatus.DEBIT}
        elif order.status == OrderStatus.NORESERVATION:
            order_update = f'{order.buyer}, couldnt process your order for book {order.isbn}. Amount will be refunded'
            # create a refund request
            payment_request = {'id': order.isbn, 'user': order.buyer, 'value': order.value, 'status': PaymentStatus.REFUND}
        elif order.status == OrderStatus.CONFIRMED:
            order_update = f'{order.buyer}, your order for book {order.isbn} is confirmed now'
        elif order.status == OrderStatus.NOSTOCK:
            order_update = f'{order.buyer}, book {order.isbn} you requested is out of stock now, please try again later'
        elif order.status == OrderStatus.DISPATCHED:
            order_update = f'{order.buyer}, book {order.isbn} is dispatched now'
    # NOTE(review): the Order branch above checks `Order.TYPE`, but this one
    # passes `Payment` directly — presumably Payment itself acts as the type
    # descriptor; confirm against the models module.
    elif message.is_type(Payment):
        payment = message.as_type(Payment)
        if payment['status'] == PaymentStatus.DEBITED:
            order_update = f"{payment['user']}, payment for book {payment['id']} is confirmed now"
            # make a warehouse request to process the reservation
            order = Order(buyer=payment['user'], isbn=context.address.id, status=OrderStatus.PAID, value=payment['value'])
            warehouse_request = 'com.warehouse.fn/order'
        if payment['status'] == PaymentStatus.REFUNDED:
            order_update = f"{payment['user']}, amound {payment['value']} paid for {payment['id']} is refunded now"

    if payment_request:
        # send out a payment request
        context.send_egress(
            kafka_egress_message(
                typename="com.payments/orders",
                topic="payments",
                key=order.buyer,
                value=payment_request,
                value_type=Payment))

    if warehouse_request:
        # make a warehouse request
        context.send(
            message_builder(
                target_typename=warehouse_request,
                target_id=context.address.id,
                value=order,
                value_type=Order.TYPE))

    if order_update:
        # send out the order status message
        context.send_egress(
            kafka_egress_message(
                typename="com.bookstore/coms",
                topic="status",
                key=context.address.id,
                value=order_update))


if __name__ == '__main__':
    serve.run()
[ "sys.path.append", "serve.functions.bind", "serve.run", "os.path.dirname", "os.path.realpath" ]
[((1067, 1091), 'os.path.dirname', 'os.path.dirname', (['current'], {}), '(current)\n', (1082, 1091), False, 'import os\n'), ((1092, 1115), 'sys.path.append', 'sys.path.append', (['parent'], {}), '(parent)\n', (1107, 1115), False, 'import sys\n'), ((1179, 1230), 'serve.functions.bind', 'serve.functions.bind', ([], {'typename': '"""com.store.fn/order"""'}), "(typename='com.store.fn/order')\n", (1199, 1230), False, 'import serve\n'), ((1636, 1695), 'serve.functions.bind', 'serve.functions.bind', ([], {'typename': '"""com.store.fn/order-updates"""'}), "(typename='com.store.fn/order-updates')\n", (1656, 1695), False, 'import serve\n'), ((1030, 1056), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (1046, 1056), False, 'import os\n'), ((4927, 4938), 'serve.run', 'serve.run', ([], {}), '()\n', (4936, 4938), False, 'import serve\n')]
import py
from rpython.rtyper.lltypesystem import lltype, llmemory
from rpython.memory.gc.incminimark import IncrementalMiniMarkGC
from rpython.memory.gc.test.test_direct import BaseDirectGCTest
from rpython.rlib.rawrefcount import REFCNT_FROM_PYPY
from rpython.rlib.rawrefcount import REFCNT_FROM_PYPY_LIGHT

PYOBJ_HDR = IncrementalMiniMarkGC.PYOBJ_HDR
PYOBJ_HDR_PTR = IncrementalMiniMarkGC.PYOBJ_HDR_PTR

# A simple GC struct used as the "PyPy side" object in every test pair.
S = lltype.GcForwardReference()
S.become(lltype.GcStruct('S',
                         ('x', lltype.Signed),
                         ('prev', lltype.Ptr(S)),
                         ('next', lltype.Ptr(S))))


class TestRawRefCount(BaseDirectGCTest):
    """Tests for the rawrefcount links between GC objects and PyObject headers
    under the incremental minimark GC."""
    GCClass = IncrementalMiniMarkGC

    def _collect(self, major, expected_trigger=0):
        # Run a minor or major collection, then invoke the rawrefcount
        # callback and check it fired exactly `expected_trigger` times.
        if major:
            self.gc.collect()
        else:
            self.gc.minor_collection()
        count1 = len(self.trigger)
        self.gc.rrc_invoke_callback()
        count2 = len(self.trigger)
        assert count2 - count1 == expected_trigger

    def _rawrefcount_pair(self, intval, is_light=False, is_pyobj=False,
                          create_old=False, create_immortal=False,
                          force_external=False):
        """Create a linked (GC object, raw PyObject header) pair.

        Returns (p1, p1ref, r1, r1addr, check_alive) where check_alive
        asserts that the link is still intact and returns the (possibly
        moved) GC object.
        """
        if is_light:
            rc = REFCNT_FROM_PYPY_LIGHT
        else:
            rc = REFCNT_FROM_PYPY
        self.trigger = []
        self.gc.rawrefcount_init(lambda: self.trigger.append(1))
        #
        if create_immortal:
            p1 = lltype.malloc(S, immortal=True)
        else:
            saved = self.gc.nonlarge_max
            try:
                if force_external:
                    # Force allocation via the "external" (large-object) path.
                    self.gc.nonlarge_max = 1
                p1 = self.malloc(S)
            finally:
                self.gc.nonlarge_max = saved
        p1.x = intval
        if create_immortal:
            self.consider_constant(p1)
        elif create_old:
            # Survive one minor collection so p1 lands in the old generation.
            self.stackroots.append(p1)
            self._collect(major=False)
            p1 = self.stackroots.pop()
        p1ref = lltype.cast_opaque_ptr(llmemory.GCREF, p1)
        r1 = lltype.malloc(PYOBJ_HDR, flavor='raw', immortal=create_immortal)
        r1.ob_refcnt = rc
        r1.ob_pypy_link = 0
        r1addr = llmemory.cast_ptr_to_adr(r1)
        if is_pyobj:
            assert not is_light
            self.gc.rawrefcount_create_link_pyobj(p1ref, r1addr)
        else:
            self.gc.rawrefcount_create_link_pypy(p1ref, r1addr)
        assert r1.ob_refcnt == rc
        assert r1.ob_pypy_link != 0

        def check_alive(extra_refcount):
            assert r1.ob_refcnt == rc + extra_refcount
            assert r1.ob_pypy_link != 0
            p1ref = self.gc.rawrefcount_to_obj(r1addr)
            p1 = lltype.cast_opaque_ptr(lltype.Ptr(S), p1ref)
            assert p1.x == intval
            if not is_pyobj:
                assert self.gc.rawrefcount_from_obj(p1ref) == r1addr
            else:
                assert self.gc.rawrefcount_from_obj(p1ref) == llmemory.NULL
            return p1
        return p1, p1ref, r1, r1addr, check_alive

    def test_rawrefcount_objects_basic(self, old=False):
        p1, p1ref, r1, r1addr, check_alive = (
            self._rawrefcount_pair(42, is_light=True, create_old=old))
        p2 = self.malloc(S)
        p2.x = 84
        p2ref = lltype.cast_opaque_ptr(llmemory.GCREF, p2)
        r2 = lltype.malloc(PYOBJ_HDR, flavor='raw')
        r2.ob_refcnt = 1
        r2.ob_pypy_link = 0
        r2addr = llmemory.cast_ptr_to_adr(r2)
        # p2 and r2 are not linked
        assert r1.ob_pypy_link != 0
        assert r2.ob_pypy_link == 0
        assert self.gc.rawrefcount_from_obj(p1ref) == r1addr
        assert self.gc.rawrefcount_from_obj(p2ref) == llmemory.NULL
        assert self.gc.rawrefcount_to_obj(r1addr) == p1ref
        assert self.gc.rawrefcount_to_obj(r2addr) == lltype.nullptr(
            llmemory.GCREF.TO)
        lltype.free(r1, flavor='raw')
        lltype.free(r2, flavor='raw')

    def test_rawrefcount_objects_collection_survives_from_raw(self, old=False):
        p1, p1ref, r1, r1addr, check_alive = (
            self._rawrefcount_pair(42, is_light=True, create_old=old))
        check_alive(0)
        # An extra raw refcount keeps the pair alive through collections.
        r1.ob_refcnt += 1
        self._collect(major=False)
        check_alive(+1)
        self._collect(major=True)
        check_alive(+1)
        r1.ob_refcnt -= 1
        self._collect(major=False)
        p1 = check_alive(0)
        self._collect(major=True)
        py.test.raises(RuntimeError, "r1.ob_refcnt")    # dead
        py.test.raises(RuntimeError, "p1.x")            # dead
        self.gc.check_no_more_rawrefcount_state()
        assert self.trigger == []
        assert self.gc.rawrefcount_next_dead() == llmemory.NULL

    def test_rawrefcount_dies_quickly(self, old=False):
        p1, p1ref, r1, r1addr, check_alive = (
            self._rawrefcount_pair(42, is_light=True, create_old=old))
        check_alive(0)
        self._collect(major=False)
        if old:
            check_alive(0)
            self._collect(major=True)
        py.test.raises(RuntimeError, "r1.ob_refcnt")    # dead
        py.test.raises(RuntimeError, "p1.x")            # dead
        self.gc.check_no_more_rawrefcount_state()

    def test_rawrefcount_objects_collection_survives_from_obj(self, old=False):
        p1, p1ref, r1, r1addr, check_alive = (
            self._rawrefcount_pair(42, is_light=True, create_old=old))
        check_alive(0)
        # Rooting p1 on the stack keeps the pair alive through collections.
        self.stackroots.append(p1)
        self._collect(major=False)
        check_alive(0)
        self._collect(major=True)
        check_alive(0)
        p1 = self.stackroots.pop()
        self._collect(major=False)
        check_alive(0)
        assert p1.x == 42
        self._collect(major=True)
        py.test.raises(RuntimeError, "r1.ob_refcnt")    # dead
        py.test.raises(RuntimeError, "p1.x")            # dead
        self.gc.check_no_more_rawrefcount_state()

    # "old" variants: the GC object starts in the old generation.
    def test_rawrefcount_objects_basic_old(self):
        self.test_rawrefcount_objects_basic(old=True)
    def test_rawrefcount_objects_collection_survives_from_raw_old(self):
        self.test_rawrefcount_objects_collection_survives_from_raw(old=True)
    def test_rawrefcount_dies_quickly_old(self):
        self.test_rawrefcount_dies_quickly(old=True)
    def test_rawrefcount_objects_collection_survives_from_obj_old(self):
        self.test_rawrefcount_objects_collection_survives_from_obj(old=True)

    def test_pypy_nonlight_survives_from_raw(self, old=False):
        p1, p1ref, r1, r1addr, check_alive = (
            self._rawrefcount_pair(42, is_light=False, create_old=old))
        check_alive(0)
        r1.ob_refcnt += 1
        self._collect(major=False)
        check_alive(+1)
        self._collect(major=True)
        check_alive(+1)
        r1.ob_refcnt -= 1
        self._collect(major=False)
        p1 = check_alive(0)
        # Non-light link: death must fire the trigger once.
        self._collect(major=True, expected_trigger=1)
        py.test.raises(RuntimeError, "p1.x")            # dead
        assert r1.ob_refcnt == 1       # in the pending list
        assert r1.ob_pypy_link == 0
        assert self.gc.rawrefcount_next_dead() == r1addr
        assert self.gc.rawrefcount_next_dead() == llmemory.NULL
        assert self.gc.rawrefcount_next_dead() == llmemory.NULL
        self.gc.check_no_more_rawrefcount_state()
        lltype.free(r1, flavor='raw')

    def test_pypy_nonlight_survives_from_obj(self, old=False):
        p1, p1ref, r1, r1addr, check_alive = (
            self._rawrefcount_pair(42, is_light=False, create_old=old))
        check_alive(0)
        self.stackroots.append(p1)
        self._collect(major=False)
        check_alive(0)
        self._collect(major=True)
        check_alive(0)
        p1 = self.stackroots.pop()
        self._collect(major=False)
        check_alive(0)
        assert p1.x == 42
        self._collect(major=True, expected_trigger=1)
        py.test.raises(RuntimeError, "p1.x")            # dead
        assert r1.ob_refcnt == 1
        assert r1.ob_pypy_link == 0
        assert self.gc.rawrefcount_next_dead() == r1addr
        self.gc.check_no_more_rawrefcount_state()
        lltype.free(r1, flavor='raw')

    def test_pypy_nonlight_dies_quickly(self, old=False):
        p1, p1ref, r1, r1addr, check_alive = (
            self._rawrefcount_pair(42, is_light=False, create_old=old))
        check_alive(0)
        if old:
            self._collect(major=False)
            check_alive(0)
            self._collect(major=True, expected_trigger=1)
        else:
            # A young non-light pair already dies at the first minor collection.
            self._collect(major=False, expected_trigger=1)
        py.test.raises(RuntimeError, "p1.x")            # dead
        assert r1.ob_refcnt == 1
        assert r1.ob_pypy_link == 0
        assert self.gc.rawrefcount_next_dead() == r1addr
        self.gc.check_no_more_rawrefcount_state()
        lltype.free(r1, flavor='raw')

    def test_pypy_nonlight_survives_from_raw_old(self):
        self.test_pypy_nonlight_survives_from_raw(old=True)
    def test_pypy_nonlight_survives_from_obj_old(self):
        self.test_pypy_nonlight_survives_from_obj(old=True)
    def test_pypy_nonlight_dies_quickly_old(self):
        self.test_pypy_nonlight_dies_quickly(old=True)

    @py.test.mark.parametrize('external', [False, True])
    def test_pyobject_pypy_link_dies_on_minor_collection(self, external):
        p1, p1ref, r1, r1addr, check_alive = (
            self._rawrefcount_pair(42, is_pyobj=True, force_external=external))
        check_alive(0)
        r1.ob_refcnt += 1       # the pyobject is kept alive
        self._collect(major=False)
        assert r1.ob_refcnt == 1      # refcnt dropped to 1
        assert r1.ob_pypy_link == 0   # detached
        self.gc.check_no_more_rawrefcount_state()
        lltype.free(r1, flavor='raw')

    @py.test.mark.parametrize('old,external', [
        (False, False), (True, False), (False, True)])
    def test_pyobject_dies(self, old, external):
        p1, p1ref, r1, r1addr, check_alive = (
            self._rawrefcount_pair(42, is_pyobj=True, create_old=old,
                                   force_external=external))
        check_alive(0)
        if old:
            self._collect(major=False)
            check_alive(0)
            self._collect(major=True, expected_trigger=1)
        else:
            self._collect(major=False, expected_trigger=1)
        assert r1.ob_refcnt == 1     # refcnt 1, in the pending list
        assert r1.ob_pypy_link == 0  # detached
        assert self.gc.rawrefcount_next_dead() == r1addr
        self.gc.check_no_more_rawrefcount_state()
        lltype.free(r1, flavor='raw')

    @py.test.mark.parametrize('old,external', [
        (False, False), (True, False), (False, True)])
    def test_pyobject_survives_from_obj(self, old, external):
        p1, p1ref, r1, r1addr, check_alive = (
            self._rawrefcount_pair(42, is_pyobj=True, create_old=old,
                                   force_external=external))
        check_alive(0)
        self.stackroots.append(p1)
        self._collect(major=False)
        check_alive(0)
        self._collect(major=True)
        check_alive(0)
        p1 = self.stackroots.pop()
        self._collect(major=False)
        check_alive(0)
        assert p1.x == 42
        assert self.trigger == []
        self._collect(major=True, expected_trigger=1)
        py.test.raises(RuntimeError, "p1.x")            # dead
        assert r1.ob_refcnt == 1
        assert r1.ob_pypy_link == 0
        assert self.gc.rawrefcount_next_dead() == r1addr
        self.gc.check_no_more_rawrefcount_state()
        lltype.free(r1, flavor='raw')

    def test_pyobject_attached_to_prebuilt_obj(self):
        p1, p1ref, r1, r1addr, check_alive = (
            self._rawrefcount_pair(42, create_immortal=True))
        check_alive(0)
        self._collect(major=True)
        check_alive(0)
[ "py.test.mark.parametrize", "rpython.rtyper.lltypesystem.lltype.nullptr", "rpython.rtyper.lltypesystem.lltype.free", "py.test.raises", "rpython.rtyper.lltypesystem.lltype.GcForwardReference", "rpython.rtyper.lltypesystem.lltype.cast_opaque_ptr", "rpython.rtyper.lltypesystem.lltype.Ptr", "rpython.rtyper.lltypesystem.lltype.malloc", "rpython.rtyper.lltypesystem.llmemory.cast_ptr_to_adr" ]
[((411, 438), 'rpython.rtyper.lltypesystem.lltype.GcForwardReference', 'lltype.GcForwardReference', ([], {}), '()\n', (436, 438), False, 'from rpython.rtyper.lltypesystem import lltype, llmemory\n'), ((9154, 9205), 'py.test.mark.parametrize', 'py.test.mark.parametrize', (['"""external"""', '[False, True]'], {}), "('external', [False, True])\n", (9178, 9205), False, 'import py\n'), ((9732, 9825), 'py.test.mark.parametrize', 'py.test.mark.parametrize', (['"""old,external"""', '[(False, False), (True, False), (False, True)]'], {}), "('old,external', [(False, False), (True, False), (\n False, True)])\n", (9756, 9825), False, 'import py\n'), ((10561, 10654), 'py.test.mark.parametrize', 'py.test.mark.parametrize', (['"""old,external"""', '[(False, False), (True, False), (False, True)]'], {}), "('old,external', [(False, False), (True, False), (\n False, True)])\n", (10585, 10654), False, 'import py\n'), ((1985, 2027), 'rpython.rtyper.lltypesystem.lltype.cast_opaque_ptr', 'lltype.cast_opaque_ptr', (['llmemory.GCREF', 'p1'], {}), '(llmemory.GCREF, p1)\n', (2007, 2027), False, 'from rpython.rtyper.lltypesystem import lltype, llmemory\n'), ((2041, 2105), 'rpython.rtyper.lltypesystem.lltype.malloc', 'lltype.malloc', (['PYOBJ_HDR'], {'flavor': '"""raw"""', 'immortal': 'create_immortal'}), "(PYOBJ_HDR, flavor='raw', immortal=create_immortal)\n", (2054, 2105), False, 'from rpython.rtyper.lltypesystem import lltype, llmemory\n'), ((2177, 2205), 'rpython.rtyper.lltypesystem.llmemory.cast_ptr_to_adr', 'llmemory.cast_ptr_to_adr', (['r1'], {}), '(r1)\n', (2201, 2205), False, 'from rpython.rtyper.lltypesystem import lltype, llmemory\n'), ((3262, 3304), 'rpython.rtyper.lltypesystem.lltype.cast_opaque_ptr', 'lltype.cast_opaque_ptr', (['llmemory.GCREF', 'p2'], {}), '(llmemory.GCREF, p2)\n', (3284, 3304), False, 'from rpython.rtyper.lltypesystem import lltype, llmemory\n'), ((3318, 3356), 'rpython.rtyper.lltypesystem.lltype.malloc', 'lltype.malloc', (['PYOBJ_HDR'], {'flavor': 
'"""raw"""'}), "(PYOBJ_HDR, flavor='raw')\n", (3331, 3356), False, 'from rpython.rtyper.lltypesystem import lltype, llmemory\n'), ((3427, 3455), 'rpython.rtyper.lltypesystem.llmemory.cast_ptr_to_adr', 'llmemory.cast_ptr_to_adr', (['r2'], {}), '(r2)\n', (3451, 3455), False, 'from rpython.rtyper.lltypesystem import lltype, llmemory\n'), ((3859, 3888), 'rpython.rtyper.lltypesystem.lltype.free', 'lltype.free', (['r1'], {'flavor': '"""raw"""'}), "(r1, flavor='raw')\n", (3870, 3888), False, 'from rpython.rtyper.lltypesystem import lltype, llmemory\n'), ((3897, 3926), 'rpython.rtyper.lltypesystem.lltype.free', 'lltype.free', (['r2'], {'flavor': '"""raw"""'}), "(r2, flavor='raw')\n", (3908, 3926), False, 'from rpython.rtyper.lltypesystem import lltype, llmemory\n'), ((4423, 4467), 'py.test.raises', 'py.test.raises', (['RuntimeError', '"""r1.ob_refcnt"""'], {}), "(RuntimeError, 'r1.ob_refcnt')\n", (4437, 4467), False, 'import py\n'), ((4486, 4522), 'py.test.raises', 'py.test.raises', (['RuntimeError', '"""p1.x"""'], {}), "(RuntimeError, 'p1.x')\n", (4500, 4522), False, 'import py\n'), ((5011, 5055), 'py.test.raises', 'py.test.raises', (['RuntimeError', '"""r1.ob_refcnt"""'], {}), "(RuntimeError, 'r1.ob_refcnt')\n", (5025, 5055), False, 'import py\n'), ((5074, 5110), 'py.test.raises', 'py.test.raises', (['RuntimeError', '"""p1.x"""'], {}), "(RuntimeError, 'p1.x')\n", (5088, 5110), False, 'import py\n'), ((5712, 5756), 'py.test.raises', 'py.test.raises', (['RuntimeError', '"""r1.ob_refcnt"""'], {}), "(RuntimeError, 'r1.ob_refcnt')\n", (5726, 5756), False, 'import py\n'), ((5775, 5811), 'py.test.raises', 'py.test.raises', (['RuntimeError', '"""p1.x"""'], {}), "(RuntimeError, 'p1.x')\n", (5789, 5811), False, 'import py\n'), ((6887, 6923), 'py.test.raises', 'py.test.raises', (['RuntimeError', '"""p1.x"""'], {}), "(RuntimeError, 'p1.x')\n", (6901, 6923), False, 'import py\n'), ((7282, 7311), 'rpython.rtyper.lltypesystem.lltype.free', 'lltype.free', (['r1'], {'flavor': 
'"""raw"""'}), "(r1, flavor='raw')\n", (7293, 7311), False, 'from rpython.rtyper.lltypesystem import lltype, llmemory\n'), ((7849, 7885), 'py.test.raises', 'py.test.raises', (['RuntimeError', '"""p1.x"""'], {}), "(RuntimeError, 'p1.x')\n", (7863, 7885), False, 'import py\n'), ((8088, 8117), 'rpython.rtyper.lltypesystem.lltype.free', 'lltype.free', (['r1'], {'flavor': '"""raw"""'}), "(r1, flavor='raw')\n", (8099, 8117), False, 'from rpython.rtyper.lltypesystem import lltype, llmemory\n'), ((8540, 8576), 'py.test.raises', 'py.test.raises', (['RuntimeError', '"""p1.x"""'], {}), "(RuntimeError, 'p1.x')\n", (8554, 8576), False, 'import py\n'), ((8779, 8808), 'rpython.rtyper.lltypesystem.lltype.free', 'lltype.free', (['r1'], {'flavor': '"""raw"""'}), "(r1, flavor='raw')\n", (8790, 8808), False, 'from rpython.rtyper.lltypesystem import lltype, llmemory\n'), ((9696, 9725), 'rpython.rtyper.lltypesystem.lltype.free', 'lltype.free', (['r1'], {'flavor': '"""raw"""'}), "(r1, flavor='raw')\n", (9707, 9725), False, 'from rpython.rtyper.lltypesystem import lltype, llmemory\n'), ((10525, 10554), 'rpython.rtyper.lltypesystem.lltype.free', 'lltype.free', (['r1'], {'flavor': '"""raw"""'}), "(r1, flavor='raw')\n", (10536, 10554), False, 'from rpython.rtyper.lltypesystem import lltype, llmemory\n'), ((11287, 11323), 'py.test.raises', 'py.test.raises', (['RuntimeError', '"""p1.x"""'], {}), "(RuntimeError, 'p1.x')\n", (11301, 11323), False, 'import py\n'), ((11526, 11555), 'rpython.rtyper.lltypesystem.lltype.free', 'lltype.free', (['r1'], {'flavor': '"""raw"""'}), "(r1, flavor='raw')\n", (11537, 11555), False, 'from rpython.rtyper.lltypesystem import lltype, llmemory\n'), ((550, 563), 'rpython.rtyper.lltypesystem.lltype.Ptr', 'lltype.Ptr', (['S'], {}), '(S)\n', (560, 563), False, 'from rpython.rtyper.lltypesystem import lltype, llmemory\n'), ((600, 613), 'rpython.rtyper.lltypesystem.lltype.Ptr', 'lltype.Ptr', (['S'], {}), '(S)\n', (610, 613), False, 'from rpython.rtyper.lltypesystem 
import lltype, llmemory\n'), ((1452, 1483), 'rpython.rtyper.lltypesystem.lltype.malloc', 'lltype.malloc', (['S'], {'immortal': '(True)'}), '(S, immortal=True)\n', (1465, 1483), False, 'from rpython.rtyper.lltypesystem import lltype, llmemory\n'), ((3804, 3837), 'rpython.rtyper.lltypesystem.lltype.nullptr', 'lltype.nullptr', (['llmemory.GCREF.TO'], {}), '(llmemory.GCREF.TO)\n', (3818, 3837), False, 'from rpython.rtyper.lltypesystem import lltype, llmemory\n'), ((2704, 2717), 'rpython.rtyper.lltypesystem.lltype.Ptr', 'lltype.Ptr', (['S'], {}), '(S)\n', (2714, 2717), False, 'from rpython.rtyper.lltypesystem import lltype, llmemory\n')]
from matplotlib import pyplot as plt
from PIL import Image
import pandas as pd
import matplotlib
import numpy as np

from typing import Optional, Union, Mapping  # Special
from typing import Sequence, Iterable  # ABCs
from typing import Tuple  # Classes

from anndata import AnnData
import warnings

from stlearn.plotting.classes import GenePlot
from stlearn.plotting.classes_bokeh import BokehGenePlot
from stlearn.plotting._docs import doc_spatial_base_plot, doc_gene_plot
from stlearn.utils import Empty, _empty, _AxesSubplot, _docs_params

from bokeh.io import push_notebook, output_notebook
from bokeh.plotting import show


# The {spatial_base_plot}/{gene_plot} placeholders in the docstring are
# substituted by the _docs_params decorator.
@_docs_params(spatial_base_plot=doc_spatial_base_plot, gene_plot=doc_gene_plot)
def gene_plot(
    adata: AnnData,
    gene_symbols: Union[str, list] = None,
    threshold: Optional[float] = None,
    method: str = "CumSum",
    contour: bool = False,
    step_size: Optional[int] = None,
    title: Optional["str"] = None,
    figsize: Optional[Tuple[float, float]] = None,
    cmap: Optional[str] = "Spectral_r",
    use_label: Optional[str] = None,
    list_clusters: Optional[list] = None,
    ax: Optional[matplotlib.axes._subplots.Axes] = None,
    fig: Optional[matplotlib.figure.Figure] = None,
    show_plot: Optional[bool] = True,
    show_axis: Optional[bool] = False,
    show_image: Optional[bool] = True,
    show_color_bar: Optional[bool] = True,
    color_bar_label: Optional[str] = "",
    crop: Optional[bool] = True,
    margin: Optional[bool] = 100,
    size: Optional[float] = 7,
    image_alpha: Optional[float] = 1.0,
    cell_alpha: Optional[float] = 0.7,
    use_raw: Optional[bool] = False,
    fname: Optional[str] = None,
    dpi: Optional[int] = 120,
) -> Optional[AnnData]:
    """\
    Allows the visualization of a single gene or multiple genes as the values
    of dot points or contour in the Spatial transcriptomics array.

    Parameters
    -------------------------------------
    {spatial_base_plot}
    {gene_plot}

    Examples
    -------------------------------------
    >>> import stlearn as st
    >>> adata = st.datasets.example_bcba()
    >>> genes = ["BRCA1","BRCA2"]
    >>> st.pl.gene_plot(adata, gene_symbols = genes)

    """
    # GenePlot renders the figure as a side effect of construction;
    # the instance itself is intentionally discarded.
    GenePlot(
        adata,
        gene_symbols=gene_symbols,
        threshold=threshold,
        method=method,
        contour=contour,
        step_size=step_size,
        title=title,
        figsize=figsize,
        cmap=cmap,
        use_label=use_label,
        list_clusters=list_clusters,
        ax=ax,
        fig=fig,
        show_plot=show_plot,
        show_axis=show_axis,
        show_image=show_image,
        show_color_bar=show_color_bar,
        color_bar_label=color_bar_label,
        crop=crop,
        margin=margin,
        size=size,
        image_alpha=image_alpha,
        cell_alpha=cell_alpha,
        use_raw=use_raw,
        fname=fname,
        dpi=dpi,
    )


def gene_plot_interactive(adata: AnnData):
    """Render the interactive Bokeh gene plot inline in a Jupyter notebook."""
    bokeh_object = BokehGenePlot(adata)
    output_notebook()
    show(bokeh_object.app, notebook_handle=True)
[ "bokeh.io.output_notebook", "stlearn.utils._docs_params", "stlearn.plotting.classes.GenePlot", "stlearn.plotting.classes_bokeh.BokehGenePlot", "bokeh.plotting.show" ]
[((631, 709), 'stlearn.utils._docs_params', '_docs_params', ([], {'spatial_base_plot': 'doc_spatial_base_plot', 'gene_plot': 'doc_gene_plot'}), '(spatial_base_plot=doc_spatial_base_plot, gene_plot=doc_gene_plot)\n', (643, 709), False, 'from stlearn.utils import Empty, _empty, _AxesSubplot, _docs_params\n'), ((2215, 2720), 'stlearn.plotting.classes.GenePlot', 'GenePlot', (['adata'], {'gene_symbols': 'gene_symbols', 'threshold': 'threshold', 'method': 'method', 'contour': 'contour', 'step_size': 'step_size', 'title': 'title', 'figsize': 'figsize', 'cmap': 'cmap', 'use_label': 'use_label', 'list_clusters': 'list_clusters', 'ax': 'ax', 'fig': 'fig', 'show_plot': 'show_plot', 'show_axis': 'show_axis', 'show_image': 'show_image', 'show_color_bar': 'show_color_bar', 'color_bar_label': 'color_bar_label', 'crop': 'crop', 'margin': 'margin', 'size': 'size', 'image_alpha': 'image_alpha', 'cell_alpha': 'cell_alpha', 'use_raw': 'use_raw', 'fname': 'fname', 'dpi': 'dpi'}), '(adata, gene_symbols=gene_symbols, threshold=threshold, method=\n method, contour=contour, step_size=step_size, title=title, figsize=\n figsize, cmap=cmap, use_label=use_label, list_clusters=list_clusters,\n ax=ax, fig=fig, show_plot=show_plot, show_axis=show_axis, show_image=\n show_image, show_color_bar=show_color_bar, color_bar_label=\n color_bar_label, crop=crop, margin=margin, size=size, image_alpha=\n image_alpha, cell_alpha=cell_alpha, use_raw=use_raw, fname=fname, dpi=dpi)\n', (2223, 2720), False, 'from stlearn.plotting.classes import GenePlot\n'), ((2971, 2991), 'stlearn.plotting.classes_bokeh.BokehGenePlot', 'BokehGenePlot', (['adata'], {}), '(adata)\n', (2984, 2991), False, 'from stlearn.plotting.classes_bokeh import BokehGenePlot\n'), ((2996, 3013), 'bokeh.io.output_notebook', 'output_notebook', ([], {}), '()\n', (3011, 3013), False, 'from bokeh.io import push_notebook, output_notebook\n'), ((3018, 3062), 'bokeh.plotting.show', 'show', (['bokeh_object.app'], {'notebook_handle': '(True)'}), 
'(bokeh_object.app, notebook_handle=True)\n', (3022, 3062), False, 'from bokeh.plotting import show\n')]
import csv import numpy as np import torch import time class Timer(object): """ docstring for Timer """ def __init__(self): super(Timer, self).__init__() self.total_time = 0.0 self.calls = 0 self.start_time = 0.0 self.diff = 0.0 self.average_time = 0.0 def tic(self): self.start_time = time.time() def toc(self, average = False): self.diff = time.time() - self.start_time self.calls += 1 self.total_time += self.diff self.average_time = self.total_time / self.calls if average: return self.average_time else: return self.diff def format(self, time): m,s = divmod(time, 60) h,m = divmod(m, 60) d,h = divmod(h, 24) return ("{}d:{}h:{}m:{}s".format(int(d), int(h), int(m), int(s))) def end_time(self, extra_time): """ calculate the end time for training, show local time """ localtime= time.asctime(time.localtime(time.time() + extra_time)) return localtime class AverageMeter(object): """Computes and stores the average and current value""" def __init__(self): self.reset() def reset(self): self.val = 0 self.avg = 0 self.sum = 0 self.count = 0 def update(self, val, n=1): self.val = val self.sum += val * n self.count += n self.avg = self.sum / self.count class Logger(object): def __init__(self, path, header): self.log_file = open(path, 'w') self.logger = csv.writer(self.log_file, delimiter='\t') self.logger.writerow(header) self.header = header def __del(self): self.log_file.close() def log(self, values): write_values = [] for col in self.header: assert col in values write_values.append(values[col]) self.logger.writerow(write_values) self.log_file.flush() def load_value_file(file_path): with open(file_path, 'r') as input_file: value = float(input_file.read().rstrip('\n\r')) return value def calculate_accuracy(outputs, targets): batch_size = targets.size(0) _, pred = outputs.topk(1, 1, True) pred = pred.t() correct = pred.eq(targets.view(1, -1)) n_correct_elems = correct.float().sum().data[0] return n_correct_elems / batch_size class MixUp(object): def __init__(self, alpha): 
self.alpha = alpha def mixup_data(self, x, y, use_cuda=True): """ return mixed inputs. pairs of targets """ if self.alpha > 0: lam = np.random.beta(self.alpha, self.alpha) else: lam = 1 batch_size = x.size()[0] if use_cuda: index = torch.randperm(batch_size).cuda() else: index = torch.randperm(batch_size) mixed_x = lam * x + (1 - lam) * x[index, :] y_a, y_b = y, y[index] return mixed_x, y_a, y_b, lam def mixup_criterion(self, criterion, pred, y_a, y_b, lam): return lam * criterion(pred, y_a) + (1 - lam) * criterion(pred, y_b) class TrainingHelper(object): def __init__(self, image): self.image = image def congratulation(self): """ if finish training success, print congratulation information """ for i in range(40): print('*')*i print('finish training') def submission_file(ids, outputs, filename): """ write list of ids and outputs to filename""" with open(filename, 'w') as f: for vid, output in zip(ids, outputs): scores = ['{:g}'.format(x) for x in output] f.write('{} {}\n'.format(vid, ' '.join(scores)))
[ "numpy.random.beta", "torch.randperm", "csv.writer", "time.time" ]
[((307, 318), 'time.time', 'time.time', ([], {}), '()\n', (316, 318), False, 'import time\n'), ((1435, 1476), 'csv.writer', 'csv.writer', (['self.log_file'], {'delimiter': '"""\t"""'}), "(self.log_file, delimiter='\\t')\n", (1445, 1476), False, 'import csv\n'), ((367, 378), 'time.time', 'time.time', ([], {}), '()\n', (376, 378), False, 'import time\n'), ((2503, 2541), 'numpy.random.beta', 'np.random.beta', (['self.alpha', 'self.alpha'], {}), '(self.alpha, self.alpha)\n', (2517, 2541), True, 'import numpy as np\n'), ((2718, 2744), 'torch.randperm', 'torch.randperm', (['batch_size'], {}), '(batch_size)\n', (2732, 2744), False, 'import torch\n'), ((872, 883), 'time.time', 'time.time', ([], {}), '()\n', (881, 883), False, 'import time\n'), ((2650, 2676), 'torch.randperm', 'torch.randperm', (['batch_size'], {}), '(batch_size)\n', (2664, 2676), False, 'import torch\n')]
import pygame pygame.mixer.init() pygame.mixer.music.load("myFile.wav") pygame.mixer.music.play() while pygame.mixer.music.get_busy() == True: continue
[ "pygame.mixer.init", "pygame.mixer.music.get_busy", "pygame.mixer.music.play", "pygame.mixer.music.load" ]
[((14, 33), 'pygame.mixer.init', 'pygame.mixer.init', ([], {}), '()\n', (31, 33), False, 'import pygame\n'), ((34, 71), 'pygame.mixer.music.load', 'pygame.mixer.music.load', (['"""myFile.wav"""'], {}), "('myFile.wav')\n", (57, 71), False, 'import pygame\n'), ((72, 97), 'pygame.mixer.music.play', 'pygame.mixer.music.play', ([], {}), '()\n', (95, 97), False, 'import pygame\n'), ((104, 133), 'pygame.mixer.music.get_busy', 'pygame.mixer.music.get_busy', ([], {}), '()\n', (131, 133), False, 'import pygame\n')]
from flask import render_template from . import main from ..requests import get_sources, get_articles #Views @main.route('/') def index(): ''' View root page function that returns the index page and its data ''' #Getting news sources sources = get_sources() title = 'News OTG' return render_template('index.html',title = title, sources = sources) @main.route('/articles/<sources_id>') def articles(sources_id): ''' View articles page function that returns the article details page and its data ''' articles = get_articles(sources_id) return render_template('articles.html',articles = articles)
[ "flask.render_template" ]
[((315, 374), 'flask.render_template', 'render_template', (['"""index.html"""'], {'title': 'title', 'sources': 'sources'}), "('index.html', title=title, sources=sources)\n", (330, 374), False, 'from flask import render_template\n'), ((599, 650), 'flask.render_template', 'render_template', (['"""articles.html"""'], {'articles': 'articles'}), "('articles.html', articles=articles)\n", (614, 650), False, 'from flask import render_template\n')]
# check utils zdecomp def izmat_zdecomp(): import numpy as np from limetr.special_mat import izmat ok = True tol = 1e-10 # setup problem # ------------------------------------------------------------------------- k = 3 n = [5, 2, 4] z_list = [] tr_u_list = [] tr_s_list = [] for i in range(len(n)): z_list.append(np.random.randn(n[i], k)) u, s, vt = np.linalg.svd(z_list[-1], full_matrices=False) tr_u_list.append(u) tr_s_list.append(s) z = np.vstack(z_list) tr_u = np.hstack([u.reshape(u.size, order='F') for u in tr_u_list]) tr_s = np.hstack(tr_s_list) my_u = np.zeros(tr_u.size) my_s = np.zeros(tr_s.size) nz = [z_sub.shape[0] for z_sub in z_list] nu = [u_sub.size for u_sub in tr_u_list] ns = [s_sub.size for s_sub in tr_s_list] izmat.zdecomp(nz, nu, ns, z, my_u, my_s) if not ok: print('err in zdecomp') print('err:', err) return ok
[ "numpy.random.randn", "limetr.special_mat.izmat.zdecomp", "numpy.zeros", "numpy.hstack", "numpy.linalg.svd", "numpy.vstack" ]
[((530, 547), 'numpy.vstack', 'np.vstack', (['z_list'], {}), '(z_list)\n', (539, 547), True, 'import numpy as np\n'), ((631, 651), 'numpy.hstack', 'np.hstack', (['tr_s_list'], {}), '(tr_s_list)\n', (640, 651), True, 'import numpy as np\n'), ((664, 683), 'numpy.zeros', 'np.zeros', (['tr_u.size'], {}), '(tr_u.size)\n', (672, 683), True, 'import numpy as np\n'), ((695, 714), 'numpy.zeros', 'np.zeros', (['tr_s.size'], {}), '(tr_s.size)\n', (703, 714), True, 'import numpy as np\n'), ((857, 897), 'limetr.special_mat.izmat.zdecomp', 'izmat.zdecomp', (['nz', 'nu', 'ns', 'z', 'my_u', 'my_s'], {}), '(nz, nu, ns, z, my_u, my_s)\n', (870, 897), False, 'from limetr.special_mat import izmat\n'), ((418, 464), 'numpy.linalg.svd', 'np.linalg.svd', (['z_list[-1]'], {'full_matrices': '(False)'}), '(z_list[-1], full_matrices=False)\n', (431, 464), True, 'import numpy as np\n'), ((373, 397), 'numpy.random.randn', 'np.random.randn', (['n[i]', 'k'], {}), '(n[i], k)\n', (388, 397), True, 'import numpy as np\n')]
"""OpenAQ Air Quality Dashboard with Flask.""" from datetime import datetime from flask import Flask, render_template from flask_sqlalchemy import SQLAlchemy import openaq APP = Flask(__name__) APP.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///db.sqlite3' DB = SQLAlchemy(APP) class Record(DB.Model): id = DB.Column(DB.Integer, primary_key=True) utc_datetime = DB.Column(DB.DateTime) location = DB.Column(DB.String(50)) value = DB.Column(DB.Float, nullable=False) def __repr__(self): return f'< Time {self.utc_datetime} --- Value {self.value} >' def get_measurements(city='Los Angeles', parameter='pm25'): api = openaq.OpenAQ() status, body = api.measurements(city=city, parameter=parameter) return [{'utc_datetime': datetime.strptime(result['date']['utc'], '%Y-%m-%dT%H:%M:%S.%f%z'), 'location': result['location'], 'value': result['value']} for result in body['results']] @APP.route('/') def root(): """Base view.""" records = Record.query.filter(Record.value >= 10).all() return render_template('base.html', city='Los Angeles', records=records) @APP.route('/refresh') def refresh(): """New data replace existing one.""" DB.drop_all() DB.create_all() data = get_measurements() for record in data: DB.session.add(Record(utc_datetime=record['utc_datetime'], location=record['location'], value=record['value'])) DB.session.commit() return 'Data refreshed!' @APP.route('/locations/<city>') def locations(city='Los Angeles'): """ location Los Angeles.""" api = openaq.OpenAQ() status, body = api.locations(city=city) locations = [{'name': loc['location'], 'latitude': loc['coordinates']['latitude'], 'longitude': loc['coordinates']['longitude']} for loc in body['results']] return render_template('locations.html', city=city, locations=locations)
[ "flask.Flask", "openaq.OpenAQ", "datetime.datetime.strptime", "flask_sqlalchemy.SQLAlchemy", "flask.render_template" ]
[((179, 194), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (184, 194), False, 'from flask import Flask, render_template\n'), ((263, 278), 'flask_sqlalchemy.SQLAlchemy', 'SQLAlchemy', (['APP'], {}), '(APP)\n', (273, 278), False, 'from flask_sqlalchemy import SQLAlchemy\n'), ((651, 666), 'openaq.OpenAQ', 'openaq.OpenAQ', ([], {}), '()\n', (664, 666), False, 'import openaq\n'), ((1116, 1181), 'flask.render_template', 'render_template', (['"""base.html"""'], {'city': '"""Los Angeles"""', 'records': 'records'}), "('base.html', city='Los Angeles', records=records)\n", (1131, 1181), False, 'from flask import Flask, render_template\n'), ((1755, 1770), 'openaq.OpenAQ', 'openaq.OpenAQ', ([], {}), '()\n', (1768, 1770), False, 'import openaq\n'), ((2023, 2088), 'flask.render_template', 'render_template', (['"""locations.html"""'], {'city': 'city', 'locations': 'locations'}), "('locations.html', city=city, locations=locations)\n", (2038, 2088), False, 'from flask import Flask, render_template\n'), ((764, 830), 'datetime.datetime.strptime', 'datetime.strptime', (["result['date']['utc']", '"""%Y-%m-%dT%H:%M:%S.%f%z"""'], {}), "(result['date']['utc'], '%Y-%m-%dT%H:%M:%S.%f%z')\n", (781, 830), False, 'from datetime import datetime\n')]
''' Created on 13 Aug 2020 @author: <NAME> ''' from .ts_util import * import numpy as np from typing import List, Tuple class ts_data(object): def __init__(self, ts: np.array, prop_train: float =0.75, has_time:bool = True, delta_t:float = 1.0): ''' Utility object for time series data. :param ts: PxN time series matrix of P timesteps consisting of N-1 features or PxN snapshot matrix of P timesteps consisting of N features :param prop_train: proportion of training data :param has_time: is ts a snapshot matrix :param delta_t: timestep to be applied if ts is a snapshot matrix ''' if (has_time): self.ts = ts else: self.ts = add_uni_time(ts, delta_t) self._create_train_test(prop_train) self.train_ts_centered = None self.test_ts_centered = None self.ts_centered = None self.train_ts_norm = None self.test_ts_norm = None self.ts_norm = None self.train_mean = None self.train_std = None self.train_inv_std = None self.x = None self.x_val = None self.x_all = None self.x_inp = None self.train_chunks = None self.x_val_inp = None self.x_val2_inp = None @property def num_features(self): return self.ts.shape[1] - 1 @property def num_obs(self): return self.ts.shape[0] @property def num_train(self): return self.train_ts.shape[0] @property def num_train_filtered(self): return self.x.shape[0] @property def num_test(self): return self.test_ts.shape[0] @property def num_test_filtered(self): return self.x_val.shape[0] def _create_train_test(self, prop_train=0.75): self.train_ts, self.test_ts = train_test_split_ts(self.ts, prop_train) def standardize(self): ''' Standardize all training and evaluation set with the mean and the standard deviation matrix of the training set. 
''' self.train_ts_centered, self.train_mean = center_ts(self.train_ts) self.test_ts_centered = translate_ts(self.test_ts, -self.train_mean) self.ts_centered = translate_ts(self.ts, -self.train_mean) self.train_std = std_ts(self.train_ts_centered) self.train_inv_std = np.linalg.inv(self.train_std) self.train_ts_norm = scale_ts(self.train_ts_centered, self.train_inv_std) self.test_ts_norm = scale_ts(self.test_ts_centered, self.train_inv_std) self.ts_norm = scale_ts(self.ts_centered, self.train_inv_std) def generate_train_model_inputs(self, num_train_chunks:int =1, rate:float =0): ''' 'Hankelize' the training data and apply adaptive sampling rate to all data sets. :param num_train_chunks: number of chunks :param rate: threshold value for sampling ''' self.x = adapt_sampling_rate(self.train_ts_norm, rate) self.x_val = adapt_sampling_rate(self.test_ts_norm, rate) self.x_all = adapt_sampling_rate(self.ts_norm, rate) self.x_inp, self.train_chunks = prepare_train_model_data(self.x, num_train_chunks) self.x_val_inp, _ = prepare_train_model_data(self.x_val, 1) self.x_all[:,-1] = self.x_all[:,-1] - self.x_all[self.train_chunks[-1],-1] self.x_val2_inp, _ = prepare_train_model_data(self.x_all[self.train_chunks[-1]:], 1)
[ "numpy.linalg.inv" ]
[((2492, 2521), 'numpy.linalg.inv', 'np.linalg.inv', (['self.train_std'], {}), '(self.train_std)\n', (2505, 2521), True, 'import numpy as np\n')]
# %% [markdown] """ # Target Tracking This example demonstrates the kernel-based stochastic optimal control algorithm and the dynamic programming algorithm. By default, it uses a nonholonomic vehicle system (unicycle dynamics), and seeks to track a v-shaped trajectory. To run the example, use the following command: ```shell python examples/control/tracking.py ``` """ # %% import gym import numpy as np from gym.envs.registration import make from gym_socks.algorithms.control.kernel_control_fwd import KernelControlFwd from gym_socks.algorithms.control.kernel_control_bwd import KernelControlBwd from functools import partial from sklearn.metrics.pairwise import rbf_kernel from gym_socks.sampling import sample from gym_socks.sampling import default_sampler from gym_socks.sampling import random_sampler from gym_socks.sampling import grid_sampler from gym_socks.utils.grid import make_grid_from_ranges # %% [markdown] # Configuration variables. # %% system_id = "NonholonomicVehicleEnv-v0" sigma = 3 # Kernel bandwidth parameter. regularization_param = 1e-7 # Regularization parameter. time_horizon = 20 # For controlling randomness. seed = 12345 # %% [markdown] # ## Generate the Sample # # We generate a random sample from the system, and choose random control actions and # random initial conditions. 
# %% env = make(system_id) env.sampling_time = 0.1 env.seed(seed) env.action_space = gym.spaces.Box( low=np.array([0.1, -10.1], dtype=np.float32), high=np.array([1.1, 10.1], dtype=np.float32), shape=(2,), dtype=np.float32, seed=seed, ) sample_size = 1500 sample_space = gym.spaces.Box( low=np.array([-1.2, -1.2, -2 * np.pi], dtype=np.float32), high=np.array([1.2, 1.2, 2 * np.pi], dtype=np.float32), shape=(3,), dtype=np.float32, seed=seed, ) state_sampler = random_sampler(sample_space=sample_space) action_sampler = random_sampler(sample_space=env.action_space) S = sample( sampler=default_sampler( state_sampler=state_sampler, action_sampler=action_sampler, env=env ), sample_size=sample_size, ) A = make_grid_from_ranges([np.linspace(0.1, 1.1, 10), np.linspace(-10.1, 10.1, 21)]) # %% [markdown] # We define the cost as the norm distance to the target at each time step. # %% a = 0.5 # Path amplitude. p = 2.0 # Path period. target_trajectory = [ [ (x * 0.1) - 1.0, 4 * a / p * np.abs((((((x * 0.1) - 1.0) - p / 2) % p) + p) % p - p / 2) - a, ] for x in range(time_horizon) ] def _tracking_cost(time: int = 0, state: np.ndarray = None) -> float: """Tracking cost function. The goal is to minimize the distance of the x/y position of the vehicle to the 'state' of the target trajectory at each time step. Args: time : Time of the simulation. Used for time-dependent cost functions. state : State of the system. Returns: cost : Real-valued cost. """ dist = state[:, :2] - np.array([target_trajectory[time]]) result = np.linalg.norm(dist, ord=2, axis=1) result = np.power(result, 2) return result # %% [markdown] # ## Algorithm # # Now, we can compute the policy using the algorithm, and then simulate the system # forward in time using the computed policy. # # In order to change this to the dynamic programming algorithm, use `KernelControlBwd`. # %% # Compute the policy. 
policy = KernelControlFwd( time_horizon=time_horizon, cost_fn=_tracking_cost, kernel_fn=partial(rbf_kernel, gamma=1 / (2 * (sigma ** 2))), regularization_param=regularization_param, verbose=False, ) policy.train(S=S, A=A) # Simulate the controlled system. env.reset() initial_condition = [-0.8, 0, 0] env.state = initial_condition trajectory = [initial_condition] for t in range(time_horizon): action = policy(time=t, state=[env.state]) state, *_ = env.step(time=t, action=action) trajectory.append(list(state)) # %% [markdown] # ## Results # # We then plot the simulated trajectories of the actual system alongside the predicted # state trajectory using the approximated dynamics. # %% import matplotlib import matplotlib.pyplot as plt fig = plt.figure() ax = plt.axes() target_trajectory = np.array(target_trajectory, dtype=np.float32) plt.plot( target_trajectory[:, 0], target_trajectory[:, 1], marker="o", color="C0", label="Target Trajectory", ) trajectory = np.array(trajectory, dtype=np.float32) plt.plot( trajectory[:, 0], trajectory[:, 1], color="C1", label="System Trajectory", ) # Plot the markers as arrows, showing vehicle heading. paper_airplane = [(0, -0.25), (0.5, -0.5), (0, 1), (-0.5, -0.5), (0, -0.25)] for x in trajectory: angle = -np.rad2deg(x[2]) t = matplotlib.markers.MarkerStyle(marker=paper_airplane) t._transform = t.get_transform().rotate_deg(angle) plt.plot(x[0], x[1], marker=t, markersize=15, linestyle="None", color="C1") plt.legend() plt.show()
[ "functools.partial", "gym_socks.sampling.random_sampler", "matplotlib.pyplot.show", "numpy.abs", "matplotlib.pyplot.plot", "matplotlib.pyplot.axes", "numpy.power", "matplotlib.pyplot.legend", "numpy.rad2deg", "matplotlib.pyplot.figure", "numpy.array", "gym.envs.registration.make", "numpy.linalg.norm", "gym_socks.sampling.default_sampler", "numpy.linspace", "matplotlib.markers.MarkerStyle" ]
[((1343, 1358), 'gym.envs.registration.make', 'make', (['system_id'], {}), '(system_id)\n', (1347, 1358), False, 'from gym.envs.registration import make\n'), ((1833, 1874), 'gym_socks.sampling.random_sampler', 'random_sampler', ([], {'sample_space': 'sample_space'}), '(sample_space=sample_space)\n', (1847, 1874), False, 'from gym_socks.sampling import random_sampler\n'), ((1892, 1937), 'gym_socks.sampling.random_sampler', 'random_sampler', ([], {'sample_space': 'env.action_space'}), '(sample_space=env.action_space)\n', (1906, 1937), False, 'from gym_socks.sampling import random_sampler\n'), ((4160, 4172), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (4170, 4172), True, 'import matplotlib.pyplot as plt\n'), ((4178, 4188), 'matplotlib.pyplot.axes', 'plt.axes', ([], {}), '()\n', (4186, 4188), True, 'import matplotlib.pyplot as plt\n'), ((4210, 4255), 'numpy.array', 'np.array', (['target_trajectory'], {'dtype': 'np.float32'}), '(target_trajectory, dtype=np.float32)\n', (4218, 4255), True, 'import numpy as np\n'), ((4256, 4369), 'matplotlib.pyplot.plot', 'plt.plot', (['target_trajectory[:, 0]', 'target_trajectory[:, 1]'], {'marker': '"""o"""', 'color': '"""C0"""', 'label': '"""Target Trajectory"""'}), "(target_trajectory[:, 0], target_trajectory[:, 1], marker='o',\n color='C0', label='Target Trajectory')\n", (4264, 4369), True, 'import matplotlib.pyplot as plt\n'), ((4403, 4441), 'numpy.array', 'np.array', (['trajectory'], {'dtype': 'np.float32'}), '(trajectory, dtype=np.float32)\n', (4411, 4441), True, 'import numpy as np\n'), ((4442, 4530), 'matplotlib.pyplot.plot', 'plt.plot', (['trajectory[:, 0]', 'trajectory[:, 1]'], {'color': '"""C1"""', 'label': '"""System Trajectory"""'}), "(trajectory[:, 0], trajectory[:, 1], color='C1', label=\n 'System Trajectory')\n", (4450, 4530), True, 'import matplotlib.pyplot as plt\n'), ((4930, 4942), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (4940, 4942), True, 'import matplotlib.pyplot as plt\n'), 
((4943, 4953), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4951, 4953), True, 'import matplotlib.pyplot as plt\n'), ((3010, 3045), 'numpy.linalg.norm', 'np.linalg.norm', (['dist'], {'ord': '(2)', 'axis': '(1)'}), '(dist, ord=2, axis=1)\n', (3024, 3045), True, 'import numpy as np\n'), ((3059, 3078), 'numpy.power', 'np.power', (['result', '(2)'], {}), '(result, 2)\n', (3067, 3078), True, 'import numpy as np\n'), ((4739, 4792), 'matplotlib.markers.MarkerStyle', 'matplotlib.markers.MarkerStyle', ([], {'marker': 'paper_airplane'}), '(marker=paper_airplane)\n', (4769, 4792), False, 'import matplotlib\n'), ((4853, 4928), 'matplotlib.pyplot.plot', 'plt.plot', (['x[0]', 'x[1]'], {'marker': 't', 'markersize': '(15)', 'linestyle': '"""None"""', 'color': '"""C1"""'}), "(x[0], x[1], marker=t, markersize=15, linestyle='None', color='C1')\n", (4861, 4928), True, 'import matplotlib.pyplot as plt\n'), ((1441, 1481), 'numpy.array', 'np.array', (['[0.1, -10.1]'], {'dtype': 'np.float32'}), '([0.1, -10.1], dtype=np.float32)\n', (1449, 1481), True, 'import numpy as np\n'), ((1492, 1531), 'numpy.array', 'np.array', (['[1.1, 10.1]'], {'dtype': 'np.float32'}), '([1.1, 10.1], dtype=np.float32)\n', (1500, 1531), True, 'import numpy as np\n'), ((1648, 1700), 'numpy.array', 'np.array', (['[-1.2, -1.2, -2 * np.pi]'], {'dtype': 'np.float32'}), '([-1.2, -1.2, -2 * np.pi], dtype=np.float32)\n', (1656, 1700), True, 'import numpy as np\n'), ((1711, 1760), 'numpy.array', 'np.array', (['[1.2, 1.2, 2 * np.pi]'], {'dtype': 'np.float32'}), '([1.2, 1.2, 2 * np.pi], dtype=np.float32)\n', (1719, 1760), True, 'import numpy as np\n'), ((1963, 2051), 'gym_socks.sampling.default_sampler', 'default_sampler', ([], {'state_sampler': 'state_sampler', 'action_sampler': 'action_sampler', 'env': 'env'}), '(state_sampler=state_sampler, action_sampler=action_sampler,\n env=env)\n', (1978, 2051), False, 'from gym_socks.sampling import default_sampler\n'), ((2122, 2147), 'numpy.linspace', 'np.linspace', 
(['(0.1)', '(1.1)', '(10)'], {}), '(0.1, 1.1, 10)\n', (2133, 2147), True, 'import numpy as np\n'), ((2149, 2177), 'numpy.linspace', 'np.linspace', (['(-10.1)', '(10.1)', '(21)'], {}), '(-10.1, 10.1, 21)\n', (2160, 2177), True, 'import numpy as np\n'), ((2961, 2996), 'numpy.array', 'np.array', (['[target_trajectory[time]]'], {}), '([target_trajectory[time]])\n', (2969, 2996), True, 'import numpy as np\n'), ((3478, 3525), 'functools.partial', 'partial', (['rbf_kernel'], {'gamma': '(1 / (2 * sigma ** 2))'}), '(rbf_kernel, gamma=1 / (2 * sigma ** 2))\n', (3485, 3525), False, 'from functools import partial\n'), ((4713, 4729), 'numpy.rad2deg', 'np.rad2deg', (['x[2]'], {}), '(x[2])\n', (4723, 4729), True, 'import numpy as np\n'), ((2402, 2455), 'numpy.abs', 'np.abs', (['(((x * 0.1 - 1.0 - p / 2) % p + p) % p - p / 2)'], {}), '(((x * 0.1 - 1.0 - p / 2) % p + p) % p - p / 2)\n', (2408, 2455), True, 'import numpy as np\n')]
""" ckwg +31 Copyright 2016-2020 by Kitware, Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither name of Kitware, Inc. nor the names of any contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
============================================================================== Tests for vital.types.Rotation class """ from __future__ import print_function import math import unittest import nose.tools import numpy from kwiver.vital.types import rotation, RotationD, RotationF def array_normalize(a, dtype=None): a = numpy.asarray(a, dtype) return (a / numpy.linalg.norm(a)).tolist() class TestVitalRotation(unittest.TestCase): def test_new_default(self): # That these even construct rot_d = RotationD() nose.tools.assert_equal(rot_d.type_name, "d") rot_f = RotationF() nose.tools.assert_equal(rot_f.type_name, "f") def test_eq(self): # Identities should equal r1 = RotationD() r2 = RotationD() nose.tools.assert_equal(r1, r2) r3 = RotationD([1, 2, 3, 4]) r4 = RotationD([1, 2, 3, 4]) nose.tools.assert_equal(r3, r4) nose.tools.assert_false(r1 == r3) r1 = RotationF() r2 = RotationF() nose.tools.assert_equal(r1, r2) r3 = RotationF([1, 2, 3, 4]) r4 = RotationF([1, 2, 3, 4]) nose.tools.assert_equal(r3, r4) nose.tools.assert_false(r1 == r3) r1 = RotationD([1, 2, 3, 4]) r2 = RotationD([-1, -2, -3, -4]) assert r1.angle_from(r2) < 1e-12 def test_not_eq(self): # Identities should equal r1 = RotationD() r2 = RotationD() nose.tools.assert_false(r1 != r2) r3 = RotationD([1, 2, 3, 4]) r4 = RotationD([1, 2, 3, 4]) nose.tools.assert_false(r3 != r4) nose.tools.ok_(r1 != r3) r1 = RotationF() r2 = RotationF() nose.tools.assert_false(r1 != r2) r3 = RotationF([1, 2, 3, 4]) r4 = RotationF([1, 2, 3, 4]) nose.tools.assert_false(r3 != r4) nose.tools.ok_(r1 != r3) def test_to_matrix(self): # Default value should be identity rot_d = RotationD() numpy.testing.assert_array_equal(rot_d.matrix(), numpy.eye(3)) rot_f = RotationF() numpy.testing.assert_array_equal(rot_f.matrix(), numpy.eye(3)) def test_to_quaternion(self): rot_d = RotationD() numpy.testing.assert_array_equal(rot_d.quaternion(), [0, 0, 0, 1]) rot_f = RotationF() numpy.testing.assert_array_equal(rot_f.quaternion(), [0, 0, 0, 1]) def 
test_to_axis_angle(self): # expected identity: [0,0,1] and 0 ident_axis = [0, 0, 1] ident_angle = 0 rot_d = RotationD() rot_f = RotationF() numpy.testing.assert_equal(rot_d.axis(), ident_axis) nose.tools.assert_equal(rot_d.angle(), ident_angle) numpy.testing.assert_equal(rot_f.axis(), ident_axis) nose.tools.assert_equal(rot_f.angle(), ident_angle) def test_to_rodrigues(self): # rodrigues identity: [0,0,0] ident_rod = [0, 0, 0] rot_d = RotationD() rot_f = RotationF() rod = rot_d.rodrigues() numpy.testing.assert_equal(rod, ident_rod) rod = rot_f.rodrigues() numpy.testing.assert_equal(rod, ident_rod) def test_to_ypr(self): # ypr identity: (pi/2, 0, pi) ident_ypr = (math.pi / 2, 0, -math.pi) rot_d = RotationD() rot_f = RotationF() numpy.testing.assert_almost_equal(rot_d.yaw_pitch_roll(), ident_ypr, 15) numpy.testing.assert_almost_equal(rot_f.yaw_pitch_roll(), ident_ypr) def test_from_rotation(self): r = RotationD() r_cpy = RotationD(r) nose.tools.ok_(r == r_cpy) r = RotationD([1, 2, 3, 4]) r_cpy = RotationD(r) nose.tools.ok_(r == r_cpy) r = RotationF() r_cpy = RotationF(r) nose.tools.ok_(r == r_cpy) r = RotationF([1, 2, 3, 4]) r_cpy = RotationF(r) nose.tools.ok_(r == r_cpy) def test_from_rotation_other_type(self): r = RotationD() r_cpy = RotationF(r) numpy.testing.assert_array_almost_equal(r.quaternion(), r_cpy.quaternion(), 6) r = RotationD([1, 2, 3, 4]) r_cpy = RotationF(r) numpy.testing.assert_array_almost_equal(r.quaternion(), r_cpy.quaternion(), 6) r = RotationF() r_cpy = RotationD(r) numpy.testing.assert_array_almost_equal(r.quaternion(), r_cpy.quaternion(), 6) r = RotationF([1, 2, 3, 4]) r_cpy = RotationD(r) numpy.testing.assert_array_almost_equal(r.quaternion(), r_cpy.quaternion(), 6) def test_from_quaternion(self): q = array_normalize([+2, -1, -3, +0], float) r = RotationD(q) numpy.testing.assert_equal(r.quaternion(), q) def test_from_rodrigues(self): rod_list_1 = [0, 0, 0] r1 = RotationD(rod_list_1) numpy.testing.assert_equal(r1.rodrigues(), rod_list_1) # 
This one will get normalized by magnitude in rotation instance # This vector's is less than 2*pi, so we should expect this vector to be # returned as is. rod2 = numpy.array([2, -1, 0.5]) nod2_normed = array_normalize(rod2) print("r2 2-norm:", numpy.linalg.norm(rod2)) print("r2-normed:", nod2_normed) r2 = RotationD(rod2) numpy.testing.assert_array_almost_equal( r2.rodrigues(), rod2, decimal=14, # 1e-14 ) def test_from_aa(self): # Axis should come out of rotation normalized angle = 0.8 axis = [-3, 2, 1] axis_norm = array_normalize(axis) r = RotationD(angle, axis) nose.tools.assert_equal(angle, r.angle()) numpy.testing.assert_equal(axis_norm, r.axis()) def test_from_ypr(self): y = 1.2 p = 0.3 r = -1.0 # XXX rot = RotationD(y, p, r) ry, rp, rr = rot.yaw_pitch_roll() nose.tools.assert_almost_equal(y, ry, 14) nose.tools.assert_almost_equal(p, rp, 14) nose.tools.assert_almost_equal(r, rr, 14) # 0XX rot = RotationD(0, p, r) ry, rp, rr = rot.yaw_pitch_roll() nose.tools.assert_almost_equal(0, ry, 14) nose.tools.assert_almost_equal(p, rp, 14) nose.tools.assert_almost_equal(r, rr, 14) # X0X rot = RotationD(y, 0, r) ry, rp, rr = rot.yaw_pitch_roll() nose.tools.assert_almost_equal(y, ry, 14) nose.tools.assert_almost_equal(0, rp, 14) nose.tools.assert_almost_equal(r, rr, 14) # XX0 rot = RotationD(y, p, 0) ry, rp, rr = rot.yaw_pitch_roll() nose.tools.assert_almost_equal(y, ry, 14) nose.tools.assert_almost_equal(p, rp, 14) nose.tools.assert_almost_equal(0, rr, 14) # 00X rot = RotationD(0, 0, r) ry, rp, rr = rot.yaw_pitch_roll() nose.tools.assert_almost_equal(0, ry, 14) nose.tools.assert_almost_equal(0, rp, 14) nose.tools.assert_almost_equal(r, rr, 14) # 0X0 rot = RotationD(0, p, 0) ry, rp, rr = rot.yaw_pitch_roll() nose.tools.assert_almost_equal(0, ry, 14) nose.tools.assert_almost_equal(p, rp, 14) nose.tools.assert_almost_equal(0, rr, 14) # X00 rot = RotationD(y, 0, 0) ry, rp, rr = rot.yaw_pitch_roll() nose.tools.assert_almost_equal(y, ry, 14) nose.tools.assert_almost_equal(0, rp, 
14) nose.tools.assert_almost_equal(0, rr, 14) # 000 rot = RotationD(0, 0, 0) ry, rp, rr = rot.yaw_pitch_roll() nose.tools.assert_almost_equal(0, ry, 14) nose.tools.assert_almost_equal(0, rp, 14) nose.tools.assert_almost_equal(0, rr, 14) def test_from_matrix(self): # Create a non-identity matrix from a different constructor that we # assume works # Create new rotation with that matrix. # New rotation to_matrix method should produce the same matrix pre_r = RotationD([+2, -1, -3, +0]) mat = pre_r.matrix() r = RotationD(mat) numpy.testing.assert_allclose(mat, r.matrix(), 1e-15) def test_inverse(self): # quaternion calc from: # https://www.wolframalpha.com/input/?i=quaternion:+0%2B2i-j-3k&lk=3 r = RotationD([+2, -1, -3, +0]) r_inv = r.inverse() e_inv = array_normalize([-1 / 7.0, +1 / 14.0, +3 / 14.0, 0]) numpy.testing.assert_allclose(r_inv.quaternion(), e_inv, 1e-15) r = RotationF([+2, -1, -3, +0]) r_inv = r.inverse() numpy.testing.assert_allclose(r_inv.quaternion(), e_inv, 1e-7) def test_mul(self): # Normalize quaternaion vector. 
expected_quat = array_normalize([+2.0, -1.0, -3.0, +0.0]) r_ident_d = RotationD() r_ident_f = RotationF() r_other_d = RotationD(expected_quat) r_other_f = RotationF(expected_quat) r_res_d = r_ident_d * r_other_d nose.tools.assert_is_not(r_other_d, r_res_d) numpy.testing.assert_equal(r_res_d, r_other_d) numpy.testing.assert_equal(r_res_d.quaternion(), expected_quat) r_res_f = r_ident_f * r_other_f nose.tools.assert_is_not(r_other_f, r_res_f) numpy.testing.assert_equal(r_res_f, r_other_f) numpy.testing.assert_allclose(r_res_f.quaternion(), expected_quat, 1e-7) def test_mul_vector(self): vec = [1, 0, 0] vec_expected = [0, 1, 0] r_axis = [0, 0, 1] r_angle = math.pi / 2.0 r = RotationD(r_angle, r_axis) vec_rotated = r * vec numpy.testing.assert_array_almost_equal(vec_expected, vec_rotated) def test_interpolation(self): x_d = RotationD(0, [1, 0, 0]) y_d = RotationD(math.pi / 2, [0, 1, 0]) r_d = RotationD(math.pi / 4, [0, 1, 0]) x_f = RotationF(0, [1, 0, 0]) y_f = RotationF(math.pi / 2, [0, 1, 0]) r_f = RotationF(math.pi / 4, [0, 1, 0]) z_d = rotation.interpolate_rotation(x_d, y_d, 0.5) z_f = rotation.interpolate_rotation(x_f, y_f, 0.5) nose.tools.assert_almost_equal((z_d.inverse() * r_d).angle(), 0, 14) nose.tools.assert_almost_equal((z_f.inverse() * r_f).angle(), 0, 6) def test_interpolated_rotations(self): x = RotationD(0, [1, 0, 0]) a = math.pi / 2 y = RotationD(a, [0, 1, 0]) i_list = rotation.interpolated_rotations(x, y, 3) nose.tools.assert_equal([i.type_name for i in i_list], ["d"] * 3) i0_e_axis, i0_e_angle = [0, 1, 0], a * 0.25 i1_e_axis, i1_e_angle = [0, 1, 0], a * 0.50 i2_e_axis, i2_e_angle = [0, 1, 0], a * 0.75 numpy.testing.assert_almost_equal(i_list[0].axis(), i0_e_axis, 14) numpy.testing.assert_almost_equal(i_list[0].angle(), i0_e_angle, 14) numpy.testing.assert_almost_equal(i_list[1].axis(), i1_e_axis, 14) numpy.testing.assert_almost_equal(i_list[1].angle(), i1_e_angle, 14) numpy.testing.assert_almost_equal(i_list[2].axis(), i2_e_axis, 14) 
numpy.testing.assert_almost_equal(i_list[2].angle(), i2_e_angle, 14)
[ "kwiver.vital.types.rotation.interpolate_rotation", "numpy.asarray", "numpy.array", "kwiver.vital.types.RotationD", "kwiver.vital.types.RotationF", "numpy.testing.assert_equal", "numpy.linalg.norm", "numpy.eye", "numpy.testing.assert_array_almost_equal", "kwiver.vital.types.rotation.interpolated_rotations" ]
[((1827, 1850), 'numpy.asarray', 'numpy.asarray', (['a', 'dtype'], {}), '(a, dtype)\n', (1840, 1850), False, 'import numpy\n'), ((2028, 2039), 'kwiver.vital.types.RotationD', 'RotationD', ([], {}), '()\n', (2037, 2039), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((2111, 2122), 'kwiver.vital.types.RotationF', 'RotationF', ([], {}), '()\n', (2120, 2122), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((2248, 2259), 'kwiver.vital.types.RotationD', 'RotationD', ([], {}), '()\n', (2257, 2259), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((2273, 2284), 'kwiver.vital.types.RotationD', 'RotationD', ([], {}), '()\n', (2282, 2284), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((2339, 2362), 'kwiver.vital.types.RotationD', 'RotationD', (['[1, 2, 3, 4]'], {}), '([1, 2, 3, 4])\n', (2348, 2362), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((2376, 2399), 'kwiver.vital.types.RotationD', 'RotationD', (['[1, 2, 3, 4]'], {}), '([1, 2, 3, 4])\n', (2385, 2399), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((2496, 2507), 'kwiver.vital.types.RotationF', 'RotationF', ([], {}), '()\n', (2505, 2507), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((2521, 2532), 'kwiver.vital.types.RotationF', 'RotationF', ([], {}), '()\n', (2530, 2532), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((2587, 2610), 'kwiver.vital.types.RotationF', 'RotationF', (['[1, 2, 3, 4]'], {}), '([1, 2, 3, 4])\n', (2596, 2610), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((2624, 2647), 'kwiver.vital.types.RotationF', 'RotationF', (['[1, 2, 3, 4]'], {}), '([1, 2, 3, 4])\n', (2633, 2647), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((2744, 2767), 'kwiver.vital.types.RotationD', 'RotationD', (['[1, 2, 3, 4]'], {}), '([1, 2, 3, 
4])\n', (2753, 2767), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((2781, 2808), 'kwiver.vital.types.RotationD', 'RotationD', (['[-1, -2, -3, -4]'], {}), '([-1, -2, -3, -4])\n', (2790, 2808), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((2925, 2936), 'kwiver.vital.types.RotationD', 'RotationD', ([], {}), '()\n', (2934, 2936), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((2950, 2961), 'kwiver.vital.types.RotationD', 'RotationD', ([], {}), '()\n', (2959, 2961), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((3018, 3041), 'kwiver.vital.types.RotationD', 'RotationD', (['[1, 2, 3, 4]'], {}), '([1, 2, 3, 4])\n', (3027, 3041), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((3055, 3078), 'kwiver.vital.types.RotationD', 'RotationD', (['[1, 2, 3, 4]'], {}), '([1, 2, 3, 4])\n', (3064, 3078), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((3168, 3179), 'kwiver.vital.types.RotationF', 'RotationF', ([], {}), '()\n', (3177, 3179), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((3193, 3204), 'kwiver.vital.types.RotationF', 'RotationF', ([], {}), '()\n', (3202, 3204), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((3261, 3284), 'kwiver.vital.types.RotationF', 'RotationF', (['[1, 2, 3, 4]'], {}), '([1, 2, 3, 4])\n', (3270, 3284), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((3298, 3321), 'kwiver.vital.types.RotationF', 'RotationF', (['[1, 2, 3, 4]'], {}), '([1, 2, 3, 4])\n', (3307, 3321), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((3487, 3498), 'kwiver.vital.types.RotationD', 'RotationD', ([], {}), '()\n', (3496, 3498), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((3587, 3598), 'kwiver.vital.types.RotationF', 'RotationF', ([], {}), '()\n', (3596, 3598), 
False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((3721, 3732), 'kwiver.vital.types.RotationD', 'RotationD', ([], {}), '()\n', (3730, 3732), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((3825, 3836), 'kwiver.vital.types.RotationF', 'RotationF', ([], {}), '()\n', (3834, 3836), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((4062, 4073), 'kwiver.vital.types.RotationD', 'RotationD', ([], {}), '()\n', (4071, 4073), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((4090, 4101), 'kwiver.vital.types.RotationF', 'RotationF', ([], {}), '()\n', (4099, 4101), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((4465, 4476), 'kwiver.vital.types.RotationD', 'RotationD', ([], {}), '()\n', (4474, 4476), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((4493, 4504), 'kwiver.vital.types.RotationF', 'RotationF', ([], {}), '()\n', (4502, 4504), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((4546, 4588), 'numpy.testing.assert_equal', 'numpy.testing.assert_equal', (['rod', 'ident_rod'], {}), '(rod, ident_rod)\n', (4572, 4588), False, 'import numpy\n'), ((4630, 4672), 'numpy.testing.assert_equal', 'numpy.testing.assert_equal', (['rod', 'ident_rod'], {}), '(rod, ident_rod)\n', (4656, 4672), False, 'import numpy\n'), ((4803, 4814), 'kwiver.vital.types.RotationD', 'RotationD', ([], {}), '()\n', (4812, 4814), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((4831, 4842), 'kwiver.vital.types.RotationF', 'RotationF', ([], {}), '()\n', (4840, 4842), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((5050, 5061), 'kwiver.vital.types.RotationD', 'RotationD', ([], {}), '()\n', (5059, 5061), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((5078, 5090), 'kwiver.vital.types.RotationD', 'RotationD', (['r'], {}), '(r)\n', (5087, 
5090), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((5139, 5162), 'kwiver.vital.types.RotationD', 'RotationD', (['[1, 2, 3, 4]'], {}), '([1, 2, 3, 4])\n', (5148, 5162), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((5179, 5191), 'kwiver.vital.types.RotationD', 'RotationD', (['r'], {}), '(r)\n', (5188, 5191), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((5240, 5251), 'kwiver.vital.types.RotationF', 'RotationF', ([], {}), '()\n', (5249, 5251), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((5268, 5280), 'kwiver.vital.types.RotationF', 'RotationF', (['r'], {}), '(r)\n', (5277, 5280), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((5329, 5352), 'kwiver.vital.types.RotationF', 'RotationF', (['[1, 2, 3, 4]'], {}), '([1, 2, 3, 4])\n', (5338, 5352), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((5369, 5381), 'kwiver.vital.types.RotationF', 'RotationF', (['r'], {}), '(r)\n', (5378, 5381), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((5475, 5486), 'kwiver.vital.types.RotationD', 'RotationD', ([], {}), '()\n', (5484, 5486), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((5503, 5515), 'kwiver.vital.types.RotationF', 'RotationF', (['r'], {}), '(r)\n', (5512, 5515), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((5616, 5639), 'kwiver.vital.types.RotationD', 'RotationD', (['[1, 2, 3, 4]'], {}), '([1, 2, 3, 4])\n', (5625, 5639), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((5656, 5668), 'kwiver.vital.types.RotationF', 'RotationF', (['r'], {}), '(r)\n', (5665, 5668), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((5769, 5780), 'kwiver.vital.types.RotationF', 'RotationF', ([], {}), '()\n', (5778, 5780), False, 'from kwiver.vital.types import rotation, 
RotationD, RotationF\n'), ((5797, 5809), 'kwiver.vital.types.RotationD', 'RotationD', (['r'], {}), '(r)\n', (5806, 5809), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((5910, 5933), 'kwiver.vital.types.RotationF', 'RotationF', (['[1, 2, 3, 4]'], {}), '([1, 2, 3, 4])\n', (5919, 5933), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((5950, 5962), 'kwiver.vital.types.RotationD', 'RotationD', (['r'], {}), '(r)\n', (5959, 5962), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((6153, 6165), 'kwiver.vital.types.RotationD', 'RotationD', (['q'], {}), '(q)\n', (6162, 6165), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((6301, 6322), 'kwiver.vital.types.RotationD', 'RotationD', (['rod_list_1'], {}), '(rod_list_1)\n', (6310, 6322), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((6584, 6609), 'numpy.array', 'numpy.array', (['[2, -1, 0.5]'], {}), '([2, -1, 0.5])\n', (6595, 6609), False, 'import numpy\n'), ((6762, 6777), 'kwiver.vital.types.RotationD', 'RotationD', (['rod2'], {}), '(rod2)\n', (6771, 6777), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((7076, 7098), 'kwiver.vital.types.RotationD', 'RotationD', (['angle', 'axis'], {}), '(angle, axis)\n', (7085, 7098), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((7313, 7331), 'kwiver.vital.types.RotationD', 'RotationD', (['y', 'p', 'r'], {}), '(y, p, r)\n', (7322, 7331), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((7553, 7571), 'kwiver.vital.types.RotationD', 'RotationD', (['(0)', 'p', 'r'], {}), '(0, p, r)\n', (7562, 7571), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((7793, 7811), 'kwiver.vital.types.RotationD', 'RotationD', (['y', '(0)', 'r'], {}), '(y, 0, r)\n', (7802, 7811), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((8033, 8051), 
'kwiver.vital.types.RotationD', 'RotationD', (['y', 'p', '(0)'], {}), '(y, p, 0)\n', (8042, 8051), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((8273, 8291), 'kwiver.vital.types.RotationD', 'RotationD', (['(0)', '(0)', 'r'], {}), '(0, 0, r)\n', (8282, 8291), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((8513, 8531), 'kwiver.vital.types.RotationD', 'RotationD', (['(0)', 'p', '(0)'], {}), '(0, p, 0)\n', (8522, 8531), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((8753, 8771), 'kwiver.vital.types.RotationD', 'RotationD', (['y', '(0)', '(0)'], {}), '(y, 0, 0)\n', (8762, 8771), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((8993, 9011), 'kwiver.vital.types.RotationD', 'RotationD', (['(0)', '(0)', '(0)'], {}), '(0, 0, 0)\n', (9002, 9011), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((9473, 9500), 'kwiver.vital.types.RotationD', 'RotationD', (['[+2, -1, -3, +0]'], {}), '([+2, -1, -3, +0])\n', (9482, 9500), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((9542, 9556), 'kwiver.vital.types.RotationD', 'RotationD', (['mat'], {}), '(mat)\n', (9551, 9556), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((9771, 9798), 'kwiver.vital.types.RotationD', 'RotationD', (['[+2, -1, -3, +0]'], {}), '([+2, -1, -3, +0])\n', (9780, 9798), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((9981, 10008), 'kwiver.vital.types.RotationF', 'RotationF', (['[+2, -1, -3, +0]'], {}), '([+2, -1, -3, +0])\n', (9990, 10008), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((10260, 10271), 'kwiver.vital.types.RotationD', 'RotationD', ([], {}), '()\n', (10269, 10271), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((10292, 10303), 'kwiver.vital.types.RotationF', 'RotationF', ([], {}), '()\n', (10301, 10303), False, 'from 
kwiver.vital.types import rotation, RotationD, RotationF\n'), ((10324, 10348), 'kwiver.vital.types.RotationD', 'RotationD', (['expected_quat'], {}), '(expected_quat)\n', (10333, 10348), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((10369, 10393), 'kwiver.vital.types.RotationF', 'RotationF', (['expected_quat'], {}), '(expected_quat)\n', (10378, 10393), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((10496, 10542), 'numpy.testing.assert_equal', 'numpy.testing.assert_equal', (['r_res_d', 'r_other_d'], {}), '(r_res_d, r_other_d)\n', (10522, 10542), False, 'import numpy\n'), ((10717, 10763), 'numpy.testing.assert_equal', 'numpy.testing.assert_equal', (['r_res_f', 'r_other_f'], {}), '(r_res_f, r_other_f)\n', (10743, 10763), False, 'import numpy\n'), ((11006, 11032), 'kwiver.vital.types.RotationD', 'RotationD', (['r_angle', 'r_axis'], {}), '(r_angle, r_axis)\n', (11015, 11032), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((11072, 11138), 'numpy.testing.assert_array_almost_equal', 'numpy.testing.assert_array_almost_equal', (['vec_expected', 'vec_rotated'], {}), '(vec_expected, vec_rotated)\n', (11111, 11138), False, 'import numpy\n'), ((11188, 11211), 'kwiver.vital.types.RotationD', 'RotationD', (['(0)', '[1, 0, 0]'], {}), '(0, [1, 0, 0])\n', (11197, 11211), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((11226, 11259), 'kwiver.vital.types.RotationD', 'RotationD', (['(math.pi / 2)', '[0, 1, 0]'], {}), '(math.pi / 2, [0, 1, 0])\n', (11235, 11259), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((11274, 11307), 'kwiver.vital.types.RotationD', 'RotationD', (['(math.pi / 4)', '[0, 1, 0]'], {}), '(math.pi / 4, [0, 1, 0])\n', (11283, 11307), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((11323, 11346), 'kwiver.vital.types.RotationF', 'RotationF', (['(0)', '[1, 0, 0]'], {}), '(0, [1, 0, 0])\n', (11332, 
11346), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((11361, 11394), 'kwiver.vital.types.RotationF', 'RotationF', (['(math.pi / 2)', '[0, 1, 0]'], {}), '(math.pi / 2, [0, 1, 0])\n', (11370, 11394), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((11409, 11442), 'kwiver.vital.types.RotationF', 'RotationF', (['(math.pi / 4)', '[0, 1, 0]'], {}), '(math.pi / 4, [0, 1, 0])\n', (11418, 11442), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((11458, 11502), 'kwiver.vital.types.rotation.interpolate_rotation', 'rotation.interpolate_rotation', (['x_d', 'y_d', '(0.5)'], {}), '(x_d, y_d, 0.5)\n', (11487, 11502), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((11517, 11561), 'kwiver.vital.types.rotation.interpolate_rotation', 'rotation.interpolate_rotation', (['x_f', 'y_f', '(0.5)'], {}), '(x_f, y_f, 0.5)\n', (11546, 11561), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((11771, 11794), 'kwiver.vital.types.RotationD', 'RotationD', (['(0)', '[1, 0, 0]'], {}), '(0, [1, 0, 0])\n', (11780, 11794), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((11831, 11854), 'kwiver.vital.types.RotationD', 'RotationD', (['a', '[0, 1, 0]'], {}), '(a, [0, 1, 0])\n', (11840, 11854), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((11872, 11912), 'kwiver.vital.types.rotation.interpolated_rotations', 'rotation.interpolated_rotations', (['x', 'y', '(3)'], {}), '(x, y, 3)\n', (11903, 11912), False, 'from kwiver.vital.types import rotation, RotationD, RotationF\n'), ((3556, 3568), 'numpy.eye', 'numpy.eye', (['(3)'], {}), '(3)\n', (3565, 3568), False, 'import numpy\n'), ((3656, 3668), 'numpy.eye', 'numpy.eye', (['(3)'], {}), '(3)\n', (3665, 3668), False, 'import numpy\n'), ((6682, 6705), 'numpy.linalg.norm', 'numpy.linalg.norm', (['rod2'], {}), '(rod2)\n', (6699, 6705), False, 'import numpy\n'), ((1867, 
1887), 'numpy.linalg.norm', 'numpy.linalg.norm', (['a'], {}), '(a)\n', (1884, 1887), False, 'import numpy\n')]
#!/usr/bin/python # -*- coding: utf-8 -*- from __future__ import print_function __author__ = 'bibow' import json, uuid, os from datetime import datetime, date from decimal import Decimal import logging logger = logging.getLogger() logger.setLevel(eval(os.environ["LOGGINGLEVEL"])) import boto3 from boto3.dynamodb.conditions import Key, Attr dynamodb = boto3.resource('dynamodb') configData = dynamodb.Table('config_data') response = configData.get_item( Key={ 'key': "BACKOFFICEAPI" } ) BACKOFFICEAPI = response["Item"]["value"] # Helper class to convert a DynamoDB item to JSON. class JSONEncoder(json.JSONEncoder): def default(self, o): if isinstance(o, Decimal): if o % 1 > 0: return float(o) else: return int(o) elif isinstance(o, (datetime, date)): return o.strftime("%Y-%m-%d %H:%M:%S") elif isinstance(o, (bytes, bytearray)): return str(o) else: return super(JSONEncoder, self).default(o) class OrdersModel(object): def __init__(self): self._orders = dynamodb.Table('orders') @property def orders(self): return self._orders def _getOrder(self, frontend, feOrderId): response = self.orders.query( IndexName="frontend_index", KeyConditionExpression=Key('frontend').eq(frontend) & Key('fe_order_id').eq(feOrderId), Limit=1 ) return response def getOrders(self): pass def getOrder(self, frontend, feOrderId): order = {} response = self._getOrder(frontend, feOrderId) if response['Count'] != 0: order = response["Items"][0] return { "statusCode": 200, "headers": {}, "body": (json.dumps(order, indent=4, cls=JSONEncoder)) } def insertOrder(self, frontend, feOrderId, order): insertStatus = BACKOFFICEAPI['DWFEORDERSTATUS_METRICS']['insert']['status'] order['tx_status'] = order.get("tx_status", "N") if order['fe_order_status'].lower() in insertStatus else "I" order['create_dt'] = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S") order['tx_dt'] = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S") order['tx_note'] = '{0} -> DataWald'.format(frontend) order['frontend'] = frontend response = 
self._getOrder(frontend, feOrderId) _id = str(uuid.uuid1()) if response['Count'] != 0: item = response["Items"][0] _id = item["id"] if order['fe_order_status'] != item['fe_order_status']: order["id"] = _id else: if item["tx_status"] == "N": order = item order["tx_status"] = "P" elif item["tx_status"] == "F" and order["tx_status"] == "N": order["id"] = _id else: order = item self.orders.put_item(Item=order) log = "Successfully update document: {0}/{1}".format(order["fe_order_id"], order["id"]) logger.info(log) else: order["id"] = _id self.orders.put_item(Item=order) log = "Successfully insert document: {0}/{1}".format(order["fe_order_id"], order["id"]) logger.info(log) return { "statusCode": 200, "headers": {}, "body": json.dumps({ "id": _id, "frontend": frontend, "fe_order_id": feOrderId }) } def updateOrderStatus(self, id, orderStatus): response = self.orders.update_item( Key={ 'id': id }, UpdateExpression="set bo_order_id=:val0, tx_dt=:val1, tx_status=:val2, tx_note=:val3", ExpressionAttributeValues={ ':val0': orderStatus['bo_order_id'], ':val1': datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"), ':val2': orderStatus['tx_status'], ':val3': orderStatus['tx_note'] }, ReturnValues="UPDATED_NEW" ) return { "statusCode": 200, "headers": {}, "body": (json.dumps(response, indent=4, cls=JSONEncoder)) } class ItemReceiptsModel(object): def __init__(self): self._itemReceipts = dynamodb.Table('itemreceipts') @property def itemReceipts(self): return self._itemReceipts def _getItemReceipt(self, frontend, boPONum): response = self.itemReceipts.query( IndexName="frontend_index", KeyConditionExpression=Key('frontend').eq(frontend) & Key('bo_po_num').eq(boPONum), Limit=1 ) return response def getItemReceipts(self): pass def getItemReceipt(self, frontend, boPONum): itemReceipt = {} response = self._getItemReceipt(frontend, boPONum) if response['Count'] != 0: itemReceipt = response["Items"][0] return { "statusCode": 200, "headers": {}, "body": (json.dumps(itemReceipt, indent=4, 
cls=JSONEncoder)) } def insertItemReceipt(self, frontend, boPONum, itemReceipt): itemReceipt["frontend"] = frontend itemReceipt["tx_status"] = itemReceipt.get("tx_status", "N") itemReceipt["tx_dt"] = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S") itemReceipt["tx_note"] = '{0} -> DataWald'.format(frontend) response = self._getItemReceipt(frontend, boPONum) _id = str(uuid.uuid1()) if response['Count'] != 0: item = response["Items"][0] _id = item["id"] if itemReceipt['data'] != item['data']: history = {} if 'history' in item.keys(): history = item['history'] createDt = item["create_dt"] history[createDt] = item['data'] itemReceipt['history'] = history itemReceipt["id"] = _id itemReceipt["bo_itemreceipt_id"] = item["bo_itemreceipt_id"] self.itemReceipts.put_item(Item=itemReceipt) log = "Successfully update item recepit: {0}/{1}".format(frontend, boPONum) logger.info(log) else: log = "No update item recepit: {0}/{1}".format(frontend, boPONum) logger.info(log) response = self.itemReceipts.update_item( Key={ 'id': _id }, UpdateExpression="set tx_dt=:val0, tx_status=:val1, tx_note=:val2", ExpressionAttributeValues={ ':val0': datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"), ':val1': "I", ':val2': log }, ReturnValues="UPDATED_NEW" ) else: itemReceipt["id"] = _id self.itemReceipts.put_item(Item=itemReceipt) log = "Successfully insert item recepit: {0}/{1}".format(frontend, boPONum) logger.info(log) return { "statusCode": 200, "headers": {}, "body": json.dumps({ "id": _id, "frontend": frontend, "bo_po_num": boPONum }) } def updateItemReceiptStatus(self, id, itemReceiptStatus): response = self.itemReceipts.update_item( Key={ 'id': id }, UpdateExpression="set tx_dt=:val0, tx_status=:val1, tx_note=:val2, bo_itemreceipt_id=:val3", ExpressionAttributeValues={ ':val0': datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"), ':val1': itemReceiptStatus['tx_status'], ':val2': itemReceiptStatus['tx_note'], ':val3': itemReceiptStatus['bo_itemreceipt_id'] }, ReturnValues="UPDATED_NEW" ) return 
{ "statusCode": 200, "headers": {}, "body": (json.dumps(response, indent=4, cls=JSONEncoder)) } class CustomersModel(object): def __init__(self): self._customers = dynamodb.Table('customers-bo') @property def customers(self): return self._customers def _getCustomer(self, frontend, feCustomerId): response = self.customers.query( IndexName="frontend_index", KeyConditionExpression=Key('frontend').eq(frontend) & Key('fe_customer_id').eq(feCustomerId), Limit=1 ) return response def getCustomers(self): pass def getCustomer(self, frontend, feCustomerId): customer = {} response = self._getCustomer(frontend, feCustomerId) if response['Count'] != 0: customer = response["Items"][0] return { "statusCode": 200, "headers": {}, "body": (json.dumps(customer, indent=4, cls=JSONEncoder)) } def insertCustomer(self, frontend, feCustomerId, customer): customer['tx_status'] = customer.get("tx_status", "N") customer['tx_dt'] = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S") customer['tx_note'] = '{0} -> DataWald'.format(frontend) customer['frontend'] = frontend response = self._getCustomer(frontend, feCustomerId) _id = str(uuid.uuid1()) if response['Count'] != 0: item = response["Items"][0] _id = item["id"] if customer['data'] != item['data']: createDt = item["create_dt"] customer["id"] = _id customer["create_dt"] = createDt self.customers.put_item(Item=customer) log = "Successfully update customer: {0}/{1}".format(frontend, feCustomerId) logger.info(log) else: log = "No update customer: {0}/{1}".format(frontend, feCustomerId) logger.info(log) response = self.customers.update_item( Key={ 'id': _id }, UpdateExpression="set tx_dt=:val0, tx_status=:val1, tx_note=:val2", ExpressionAttributeValues={ ':val0': datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"), ':val1': "N" if item['tx_status'] in ('N', 'F') else 'I', ':val2': log }, ReturnValues="UPDATED_NEW" ) else: customer["id"] = _id self.customers.put_item(Item=customer) log = "Successfully insert customer: {0}/{1}".format(frontend, 
feCustomerId) logger.info(log) return { "statusCode": 200, "headers": {}, "body": json.dumps({ "id": _id, "frontend": frontend, "fe_customer_id": feCustomerId }) } def updateCustomerStatus(self, id, customerStatus): response = self.customers.update_item( Key={ 'id': id }, UpdateExpression="set fe_customer_id=:val0, tx_dt=:val1, tx_status=:val2, tx_note=:val3", ExpressionAttributeValues={ ':val0': customerStatus['fe_customer_id'], ':val1': datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"), ':val2': customerStatus['tx_status'], ':val3': customerStatus['tx_note'] }, ReturnValues="UPDATED_NEW" ) return { "statusCode": 200, "headers": {}, "body": (json.dumps(response, indent=4, cls=JSONEncoder)) }
[ "boto3.dynamodb.conditions.Key", "json.dumps", "datetime.datetime.utcnow", "uuid.uuid1", "boto3.resource", "logging.getLogger" ]
[((213, 232), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (230, 232), False, 'import logging\n'), ((356, 382), 'boto3.resource', 'boto3.resource', (['"""dynamodb"""'], {}), "('dynamodb')\n", (370, 382), False, 'import boto3\n'), ((1823, 1867), 'json.dumps', 'json.dumps', (['order'], {'indent': '(4)', 'cls': 'JSONEncoder'}), '(order, indent=4, cls=JSONEncoder)\n', (1833, 1867), False, 'import json, uuid, os\n'), ((2460, 2472), 'uuid.uuid1', 'uuid.uuid1', ([], {}), '()\n', (2470, 2472), False, 'import json, uuid, os\n'), ((3479, 3550), 'json.dumps', 'json.dumps', (["{'id': _id, 'frontend': frontend, 'fe_order_id': feOrderId}"], {}), "({'id': _id, 'frontend': frontend, 'fe_order_id': feOrderId})\n", (3489, 3550), False, 'import json, uuid, os\n'), ((4301, 4348), 'json.dumps', 'json.dumps', (['response'], {'indent': '(4)', 'cls': 'JSONEncoder'}), '(response, indent=4, cls=JSONEncoder)\n', (4311, 4348), False, 'import json, uuid, os\n'), ((5199, 5249), 'json.dumps', 'json.dumps', (['itemReceipt'], {'indent': '(4)', 'cls': 'JSONEncoder'}), '(itemReceipt, indent=4, cls=JSONEncoder)\n', (5209, 5249), False, 'import json, uuid, os\n'), ((5664, 5676), 'uuid.uuid1', 'uuid.uuid1', ([], {}), '()\n', (5674, 5676), False, 'import json, uuid, os\n'), ((7375, 7442), 'json.dumps', 'json.dumps', (["{'id': _id, 'frontend': frontend, 'bo_po_num': boPONum}"], {}), "({'id': _id, 'frontend': frontend, 'bo_po_num': boPONum})\n", (7385, 7442), False, 'import json, uuid, os\n'), ((8241, 8288), 'json.dumps', 'json.dumps', (['response'], {'indent': '(4)', 'cls': 'JSONEncoder'}), '(response, indent=4, cls=JSONEncoder)\n', (8251, 8288), False, 'import json, uuid, os\n'), ((9131, 9178), 'json.dumps', 'json.dumps', (['customer'], {'indent': '(4)', 'cls': 'JSONEncoder'}), '(customer, indent=4, cls=JSONEncoder)\n', (9141, 9178), False, 'import json, uuid, os\n'), ((9579, 9591), 'uuid.uuid1', 'uuid.uuid1', ([], {}), '()\n', (9589, 9591), False, 'import json, uuid, os\n'), ((11067, 
11144), 'json.dumps', 'json.dumps', (["{'id': _id, 'frontend': frontend, 'fe_customer_id': feCustomerId}"], {}), "({'id': _id, 'frontend': frontend, 'fe_customer_id': feCustomerId})\n", (11077, 11144), False, 'import json, uuid, os\n'), ((11919, 11966), 'json.dumps', 'json.dumps', (['response'], {'indent': '(4)', 'cls': 'JSONEncoder'}), '(response, indent=4, cls=JSONEncoder)\n', (11929, 11966), False, 'import json, uuid, os\n'), ((2166, 2183), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (2181, 2183), False, 'from datetime import datetime, date\n'), ((2239, 2256), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (2254, 2256), False, 'from datetime import datetime, date\n'), ((5470, 5487), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (5485, 5487), False, 'from datetime import datetime, date\n'), ((9346, 9363), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (9361, 9363), False, 'from datetime import datetime, date\n'), ((1373, 1388), 'boto3.dynamodb.conditions.Key', 'Key', (['"""frontend"""'], {}), "('frontend')\n", (1376, 1388), False, 'from boto3.dynamodb.conditions import Key, Attr\n'), ((1404, 1422), 'boto3.dynamodb.conditions.Key', 'Key', (['"""fe_order_id"""'], {}), "('fe_order_id')\n", (1407, 1422), False, 'from boto3.dynamodb.conditions import Key, Attr\n'), ((3993, 4010), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (4008, 4010), False, 'from datetime import datetime, date\n'), ((4727, 4742), 'boto3.dynamodb.conditions.Key', 'Key', (['"""frontend"""'], {}), "('frontend')\n", (4730, 4742), False, 'from boto3.dynamodb.conditions import Key, Attr\n'), ((4758, 4774), 'boto3.dynamodb.conditions.Key', 'Key', (['"""bo_po_num"""'], {}), "('bo_po_num')\n", (4761, 4774), False, 'from boto3.dynamodb.conditions import Key, Attr\n'), ((7856, 7873), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (7871, 7873), False, 'from datetime import datetime, date\n'), 
((8654, 8669), 'boto3.dynamodb.conditions.Key', 'Key', (['"""frontend"""'], {}), "('frontend')\n", (8657, 8669), False, 'from boto3.dynamodb.conditions import Key, Attr\n'), ((8685, 8706), 'boto3.dynamodb.conditions.Key', 'Key', (['"""fe_customer_id"""'], {}), "('fe_customer_id')\n", (8688, 8706), False, 'from boto3.dynamodb.conditions import Key, Attr\n'), ((11605, 11622), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (11620, 11622), False, 'from datetime import datetime, date\n'), ((6843, 6860), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (6858, 6860), False, 'from datetime import datetime, date\n'), ((10499, 10516), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (10514, 10516), False, 'from datetime import datetime, date\n')]
from flask_script import Manager from schedule import frontend, api from werkzeug.wsgi import DispatcherMiddleware from werkzeug.serving import run_simple from schedule.core import db manager = Manager(frontend.create_app()) @manager.command def runserver(): app = DispatcherMiddleware(frontend.create_app(), {'/api': api.create_app()}) run_simple('0.0.0.0', 5000, app, use_reloader=True, use_debugger=True) @manager.command def initdb(): db.create_all() @manager.command def dropdb(): db.drop_all(bind=None) if __name__ == '__main__': manager.run(default_command='runserver')
[ "schedule.core.db.drop_all", "schedule.api.create_app", "schedule.frontend.create_app", "werkzeug.serving.run_simple", "schedule.core.db.create_all" ]
[((203, 224), 'schedule.frontend.create_app', 'frontend.create_app', ([], {}), '()\n', (222, 224), False, 'from schedule import frontend, api\n'), ((348, 418), 'werkzeug.serving.run_simple', 'run_simple', (['"""0.0.0.0"""', '(5000)', 'app'], {'use_reloader': '(True)', 'use_debugger': '(True)'}), "('0.0.0.0', 5000, app, use_reloader=True, use_debugger=True)\n", (358, 418), False, 'from werkzeug.serving import run_simple\n'), ((456, 471), 'schedule.core.db.create_all', 'db.create_all', ([], {}), '()\n', (469, 471), False, 'from schedule.core import db\n'), ((509, 531), 'schedule.core.db.drop_all', 'db.drop_all', ([], {'bind': 'None'}), '(bind=None)\n', (520, 531), False, 'from schedule.core import db\n'), ((293, 314), 'schedule.frontend.create_app', 'frontend.create_app', ([], {}), '()\n', (312, 314), False, 'from schedule import frontend, api\n'), ((325, 341), 'schedule.api.create_app', 'api.create_app', ([], {}), '()\n', (339, 341), False, 'from schedule import frontend, api\n')]
''' Created on 28.12.2016 @author: sapejura ''' import threading import socket import select import queue from xcamserver.framebuffer import FrameQueue # from xcamserver import worker_ctx, dummy_worker class SocketServer(): def __init__(self): self.stop_event = threading.Event() self.thread = threading.Thread(name='socket thread', target=self._thread, args=(self.stop_event,)) self._data_size = 4096 # Size of data chunks read from camera self._frame_size = None # Size of frame read from camera self.camera_addr = None self.camera_socket = None # Connection to camera which we are reading self.server_socket = None def init(self, frame_size): self.close() self._frame_size = frame_size print('Creating new socket...') self.server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) # self.server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) self.server_socket.settimeout(60) self.server_socket.bind(('localhost', 0)) # self.server_socket.setblocking(1) # server_socket.bind(('localhost', 8000)) server_addr = self.server_socket.getsockname() print('...binded...') print('...picked server address: %s...' % str(server_addr)) self.server_socket.listen(100) # print('...server socket is listening max 10 connection.') def run(self): if self.is_alive(): raise Exception('Socket server is already online') self.inputs = [self.server_socket] self.outputs = [] self.stop_event.clear() self.thread.start() def stop(self): if self.is_alive(): self.stop_event.set() self.thread.join(10) if self.is_alive(): raise Exception('Socket server didn\'t stop.') for sock in self.outputs: try: sock.close() except: print('Could not close socket', sock) for sock in self.inputs: try: sock.close() except: print('Could not close socket', sock) else: raise Exception('Can\'t stop socket server. 
Server is already stopped.') def close(self): if self.is_alive(): self.stop() if self.server_socket: self.server_socket.close() if self.camera_socket: self.camera_socket.close() def is_alive(self): return self.thread.isAlive() def _thread(self, stop_event): # TODO: Exception handling # TODO: This will definitely stackoverflow if there are no clients ever. Queue needs buffered replacement # print('server_socket:', self.server_socket.getsockname(), 'client_socket:', self.camera_addr) queues = {} # Every outgoing socket gets its own send and receive queues while True: # Wait for sockets (sread, swrite, sexc) = select.select(self.inputs, self.outputs, [], 1) # Exit thread if stop_event.is_set(): self.inputs.clear() self.outputs.clear() break # Check incoming connections self._handle_read_reqs(queues, sread, swrite, sexc) # Write received camera data to outgoing sockets which are ready self._handle_write_reqs(queues, sread, swrite, sexc) print('Socket server closed') def _remove_socket(self, sock, queues, sread, swrite, sexc): sock_addr, peer_addr = None, None try: sock_addr = sock.getsockname() except: pass try: peer_addr = sock.getpeername() except: pass queues.pop(sock) if sock in self.inputs: self.inputs.remove(sock) if sock in self.outputs: self.outputs.remove(sock) if sock in sread: sread.remove(sock) if sock in swrite: swrite.remove(sock) if sock in sexc: sexc.remove(sock) sock.close() print('Closed connection to', peer_addr, 'from', sock_addr) def _handle_read_reqs(self, queues, sread, swrite, sexc): for sock in sread: if sock == self.server_socket: # A "readable" server socket is ready to accept a connection connection, client_address = sock.accept() print('new client registration from', client_address) connection.setblocking(0) if client_address == self.camera_addr: # Camera connection self._add_camera_sock(connection) else: self._add_client_sock(connection, queues) elif sock == self.camera_socket: # Camera is sending data self._recv_from_camera(sock, queues) 
else: error = self._recv_from_client(sock, queues) if error: self._remove_socket(sock, queues, sread, swrite, sexc) continue def _add_camera_sock(self, connection): print('It is the camera.') self.camera_socket = connection self.inputs.append(connection) def _add_client_sock(self, connection, queues): print('It is a new client application:', connection.getpeername()) # Add the socket to outgoing sockets # if sock not in self.outputs: # self.outputs.append(connection) self.inputs.append(connection) # Give the socket its own data queue because sockets # can be available for sending at different times if connection not in queues.keys(): # Outgoing data and control frames tx_q = FrameQueue(self._frame_size + 4) # timestamp is 4 bytes # Incoming control frames rx_q = FrameQueue(4) rx_q.set_mode(b'\x02') # Cares only about the newest control frames queues[connection] = (tx_q, rx_q) def _recv_from_camera(self, sock, queues): data = sock.recv(self._data_size) if data: for tx_q, _ in queues.values(): tx_q.put(data) else: pass def _recv_from_client(self, sock, queues): print('Connection from', sock.getpeername()) msg_size = 4 try: data = sock.recv(msg_size) except (ConnectionAbortedError, ConnectionResetError) as e: return True else: if data == b'': # Client disconnects print('Removing socket', sock.getpeername(), 'from listened inputs and outputs, closing connection.') return True else: print('Received ctrl data:', data, 'from client', sock.getpeername()) # print('Received something unexpected from,', sock.getpeername(), 'Data:', data) tx_q, rx_q = queues[sock] rx_q.put(data) if rx_q.buffer_size() >= msg_size: msg = rx_q.get(4) print('Received full ctrl package:', msg) # print(msg[0:1]) tx_q.set_mode(msg[0:1]) if sock not in self.outputs: self.outputs.append(sock) return False def _handle_write_reqs(self, queues, sread, swrite, sexc): for sock in swrite: # data = message_queue.get_nowait() tx_q, _ = queues[sock] data = tx_q.get() if len(data) == 0: pass else: try: sent_data 
= 0 # print('Sending data to socket', s.getsockname()) while(sent_data < len(data)): sent_data += sock.send(data[sent_data:]) # print('Sent', sent_data, 'bytes') # print('Sent data to socket', sock.getpeername()) except (ConnectionResetError, ConnectionAbortedError, ConnectionRefusedError) as e: sock_addr = sock.getpeername() print('Connection to', sock_addr, 'lost') print('%s(%s): %s' % (type(e).__name__, str(e.errno), e.strerror)) self._remove_socket(sock, queues, sread, swrite, sexc) continue
[ "threading.Thread", "xcamserver.framebuffer.FrameQueue", "socket.socket", "select.select", "threading.Event" ]
[((278, 295), 'threading.Event', 'threading.Event', ([], {}), '()\n', (293, 295), False, 'import threading\n'), ((318, 407), 'threading.Thread', 'threading.Thread', ([], {'name': '"""socket thread"""', 'target': 'self._thread', 'args': '(self.stop_event,)'}), "(name='socket thread', target=self._thread, args=(self.\n stop_event,))\n", (334, 407), False, 'import threading\n'), ((925, 974), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (938, 974), False, 'import socket\n'), ((3135, 3182), 'select.select', 'select.select', (['self.inputs', 'self.outputs', '[]', '(1)'], {}), '(self.inputs, self.outputs, [], 1)\n', (3148, 3182), False, 'import select\n'), ((5950, 5982), 'xcamserver.framebuffer.FrameQueue', 'FrameQueue', (['(self._frame_size + 4)'], {}), '(self._frame_size + 4)\n', (5960, 5982), False, 'from xcamserver.framebuffer import FrameQueue\n'), ((6064, 6077), 'xcamserver.framebuffer.FrameQueue', 'FrameQueue', (['(4)'], {}), '(4)\n', (6074, 6077), False, 'from xcamserver.framebuffer import FrameQueue\n')]
#!/usr/bin/env python3 # imports go here from xmlrpc.server import SimpleXMLRPCServer import xmlrpc.client from threading import Thread # # Free Coding session for 2015-02-07 # Written by <NAME> # def run_server(): server = SimpleXMLRPCServer(('localhost', 9000)) server.register_function(pow) server.register_function(lambda x, y: x+y, 'add') server.register_multicall_functions() server.serve_forever() def call_server(): s = xmlrpc.client.ServerProxy('http://localhost:9000') x = 2 while True: x = x + 1 print(s.pow(2, x)) t = Thread(target=run_server) t.start() t2 = Thread(target=call_server) t2.start()
[ "threading.Thread", "xmlrpc.server.SimpleXMLRPCServer" ]
[((584, 609), 'threading.Thread', 'Thread', ([], {'target': 'run_server'}), '(target=run_server)\n', (590, 609), False, 'from threading import Thread\n'), ((625, 651), 'threading.Thread', 'Thread', ([], {'target': 'call_server'}), '(target=call_server)\n', (631, 651), False, 'from threading import Thread\n'), ((231, 270), 'xmlrpc.server.SimpleXMLRPCServer', 'SimpleXMLRPCServer', (["('localhost', 9000)"], {}), "(('localhost', 9000))\n", (249, 270), False, 'from xmlrpc.server import SimpleXMLRPCServer\n')]
# This file is part of the Astrometry.net suite. # Licensed under a 3-clause BSD style license - see LICENSE from __future__ import print_function from __future__ import absolute_import import os from astrometry.util.fits import fits_table import numpy as np import logging import tempfile import sys py3 = (sys.version_info[0] >= 3) if py3: from urllib.parse import urljoin else: from urlparse import urljoin fitsio = None try: import fitsio except: try: import pyfits except ImportError: try: from astropy.io import fits as pyfits except ImportError: raise ImportError("Cannot import either pyfits or astropy.io.fits") from .common import * from .dr7 import * from .yanny import * from astrometry.util.run_command import run_command class Frame(SdssFile): def __init__(self, *args, **kwargs): super(Frame, self).__init__(*args, **kwargs) self.filetype = 'frame' self.image = None self.image_proxy = None def getImageShape(self): if self.image_proxy is not None: # fitsio fits.FITSHDU object H,W = self.image_proxy.get_info()['dims'] H = int(H) W = int(W) else: H,W = self.image.shape return H,W def getImageSlice(self, slice): if self.image_proxy is not None: #print 'reading slice from image proxy:', slice return self.image_proxy[slice] return self.image[slice] #def __str__(self): def getImage(self): if self.image is None and self.image_proxy is not None: self.image = self.image_proxy.read() self.image_proxy = None return self.image def getHeader(self): return self.header def getAsTrans(self): return self.astrans def getCalibVec(self): return self.calib def getSkyAt(self, x, y): skyim = self.sky (sh,sw) = skyim.shape if sw != 256: skyim = skyim.T (sh,sw) = skyim.shape xi = np.round(self.skyxi[x]).astype(int) yi = np.round(self.skyyi[y]).astype(int) yi = np.minimum(yi,sh-1) return skyim[yi,xi] def getSky(self): skyim = self.sky (sh,sw) = skyim.shape if sw != 256: skyim = skyim.T (sh,sw) = skyim.shape xi = np.round(self.skyxi).astype(int) yi = np.round(self.skyyi).astype(int) yi = 
np.minimum(yi,sh-1) assert(all(xi >= 0) and all(xi < sw)) assert(all(yi >= 0) and all(yi < sh)) XI,YI = np.meshgrid(xi, yi) # Nearest-neighbour interpolation -- we just need this # for approximate invvar. bigsky = skyim[YI,XI] return bigsky def getInvvar(self, psfield, bandnum, ignoreSourceFlux=False, sourceFlux=None, constantSkyAt=None): ''' If constantSkyAt = (x,y) (INTEGERS!), returns a scalar (rather than a np.array) of the invvar at that point. NOTE that this does NOT blank out masked pixels; use, eg, fpM = sdss.readFpM(run, camcol, field, bandname) for plane in [ 'INTERP', 'SATUR', 'CR', 'GHOST' ]: fpM.setMaskedPixels(plane, invvar, 0, roi=roi) ''' calibvec = self.getCalibVec() if constantSkyAt: x,y = constantSkyAt calibvec = calibvec[x] sky = self.getSkyAt(x,y) if ignoreSourceFlux: dn = sky elif sourceFlux is None: image = self.getImage() dn = (image[y,x] / calibvec) + sky else: dn = (sourceFlux / calibvec) + sky else: bigsky = self.getSky() if ignoreSourceFlux: dn = bigsky elif sourceFlux is None: image = self.getImage() dn = (image / calibvec) + bigsky else: dn = (sourceFlux / calibvec) + bigsky gain = psfield.getGain(bandnum) # Note, "darkvar" includes dark current *and* read noise. darkvar = psfield.getDarkVariance(bandnum) dnvar = (dn / gain) + darkvar invvar = 1./(dnvar * calibvec**2) return invvar class PhotoObj(SdssFile): def __init__(self, *args, **kwargs): super(PhotoObj, self).__init__(*args, **kwargs) self.filetype = 'photoObj' self.table = None def getTable(self): return self.table class runlist(object): pass class DR8(DR7): _lup_to_mag_b = np.array([1.4e-10, 0.9e-10, 1.2e-10, 1.8e-10, 7.4e-10]) _two_lup_to_mag_b = 2.*_lup_to_mag_b _ln_lup_to_mag_b = np.log(_lup_to_mag_b) ''' From http://data.sdss3.org/datamodel/glossary.html#asinh m = -(2.5/ln(10))*[asinh(f/2b)+ln(b)]. 
The parameter b is a softening parameter measured in maggies, and for the [u, g, r, i, z] bands has the values [1.4, 0.9, 1.2, 1.8, 7.4] x 1e-10 ''' @staticmethod def luptitude_to_mag(Lmag, bandnum, badmag=25): if bandnum is None: # assume Lmag is broadcastable to a 5-vector twobi = DR8._two_lup_to_mag_b lnbi = DR8._ln_lup_to_mag_b else: twobi = DR8._two_lup_to_mag_b[bandnum] lnbi = DR8._ln_lup_to_mag_b[bandnum] # MAGIC -1.08.... = -2.5/np.log(10.) f = np.sinh(Lmag/-1.0857362047581294 - lnbi) * twobi # prevent log10(-flux) mag = np.zeros_like(f) + badmag I = (f > 0) mag[I] = -2.5 * np.log10(f[I]) return mag @staticmethod def nmgy_to_mag(nmgy): return 22.5 - 2.5 * np.log10(nmgy) def getDRNumber(self): return 8 def useLocalTree(self, photoObjs=None, resolve=None): if photoObjs is None: photoObjs = os.environ['BOSS_PHOTOOBJ'] redux = os.environ['PHOTO_REDUX'] if resolve is None: resolve = os.environ['PHOTO_RESOLVE'] self.filenames.update( photoObj = os.path.join(photoObjs, '%(rerun)s', '%(run)i', '%(camcol)i', 'photoObj-%(run)06i-%(camcol)i-%(field)04i.fits'), frame = os.path.join(photoObjs, 'frames', '%(rerun)s', '%(run)i', '%(camcol)i', 'frame-%(band)s-%(run)06i-%(camcol)i-%(field)04i.fits.bz2'), photoField = os.path.join(photoObjs, '%(rerun)s', '%(run)i', 'photoField-%(run)06i-%(camcol)i.fits'), psField = os.path.join(redux, '%(rerun)s', '%(run)i', 'objcs', '%(camcol)i', 'psField-%(run)06i-%(camcol)i-%(field)04i.fit'), fpM = os.path.join(redux, '%(rerun)s', '%(run)i', 'objcs', '%(camcol)i', 'fpM-%(run)06i-%(band)s%(camcol)i-%(field)04i.fit.gz'), window_flist = os.path.join(resolve, 'window_flist.fits'), ) # use fpM files compressed try: del self.dassuffix['fpM'] except: pass try: del self.processcmds['fpM'] except: pass def saveUnzippedFiles(self, basedir): self.unzip_dir = basedir def setFitsioReadBZ2(self, to=True): ''' Call this if fitsio supports reading .bz2 files directly. 
''' self.readBz2 = to def __init__(self, **kwargs): ''' Useful kwargs: basedir : (string) - local directory where data will be stored. ''' DR7.__init__(self, **kwargs) self.unzip_dir = None self.readBz2 = False # Local filenames self.filenames.update({ 'frame': 'frame-%(band)s-%(run)06i-%(camcol)i-%(field)04i.fits.bz2', 'idR': 'idR-%(run)06i-%(band)s-%(camcol)i-%(field)04i.fits', 'photoObj': 'photoObj-%(run)06i-%(camcol)i-%(field)04i.fits', 'photoField': 'photoField-%(run)06i-%(camcol)i.fits', 'window_flist': 'window_flist.fits', }) # URLs on DAS server self.dasurl = 'http://data.sdss3.org/sas/dr8/groups/boss/' self.daspaths = { 'idR': 'photo/data/%(run)i/fields/%(camcol)i/idR-%(run)06i-%(band)s%(camcol)i-%(field)04i.fit.Z', 'fpObjc': 'photo/redux/%(rerun)s/%(run)i/objcs/%(camcol)i/fpObjc-%(run)06i-%(camcol)i-%(field)04i.fit', # DR8 frames are no longer available on DAS. 'frame': '/sas/dr9/boss/photoObj/frames/%(rerun)s/%(run)i/%(camcol)i/frame-%(band)s-%(run)06i-%(camcol)i-%(field)04i.fits.bz2', #'frame': 'photoObj/frames/%(rerun)s/%(run)i/%(camcol)i/frame-%(band)s-%(run)06i-%(camcol)i-%(field)04i.fits.bz2', 'photoObj': 'photoObj/%(rerun)s/%(run)i/%(camcol)i/photoObj-%(run)06i-%(camcol)i-%(field)04i.fits', 'psField': 'photo/redux/%(rerun)s/%(run)i/objcs/%(camcol)i/psField-%(run)06i-%(camcol)i-%(field)04i.fit', 'photoField': 'photoObj/%(rerun)s/%(run)i/photoField-%(run)06i-%(camcol)i.fits', 'fpM': 'photo/redux/%(rerun)s/%(run)i/objcs/%(camcol)i/fpM-%(run)06i-%(band)s%(camcol)i-%(field)04i.fit.gz', 'fpAtlas': 'photo/redux/%(rerun)s/%(run)i/objcs/%(camcol)i/fpAtlas-%(run)06i-%(camcol)i-%(field)04i.fit', 'window_flist': 'resolve/2010-05-23/window_flist.fits', } self.dassuffix = { #'frame': '.bz2', 'fpM': '.gz', 'idR': '.Z', } # called in retrieve() self.processcmds = { 'fpM': 'gunzip -cd %(input)s > %(output)s', 'idR': 'gunzip -cd %(input)s > %(output)s', } self.postprocesscmds = { 'frame': 'TMPFILE=$(mktemp %(output)s.tmp.XXXXXX) && bunzip2 -cd %(input)s > $TMPFILE 
&& mv $TMPFILE %(output)s', } y = read_yanny(self._get_runlist_filename()) y = y['RUNDATA'] rl = runlist() rl.run = np.array(y['run']) rl.startfield = np.array(y['startfield']) rl.endfield = np.array(y['endfield']) rl.rerun = np.array(y['rerun']) #print 'Rerun type:', type(rl.rerun), rl.rerun.dtype self.runlist = rl self.logger = logging.getLogger('astrometry.sdss.DR%i' % self.getDRNumber()) #self.logger.debug('debug test') #self.logger.info('info test') #self.logger.warning('warning test') def _unzip_frame(self, fn, run, camcol): if self.readBz2: return None,True # No, PJM reported that pyfits failed on SDSS frame*.bz2 files # if not fitsio: # # pyfits can read .bz2 # return None,True tempfn = None keep = False filetype = 'frame' if not(filetype in self.postprocesscmds and fn.endswith('.bz2')): return None,True cmd = self.postprocesscmds[filetype] if self.unzip_dir is not None: udir = os.path.join(self.unzip_dir, '%i' % run, '%i' % camcol) if not os.path.exists(udir): try: os.makedirs(udir) except: pass tempfn = os.path.join(udir, os.path.basename(fn).replace('.bz2', '')) #print 'Checking', tempfn if os.path.exists(tempfn): print('File exists:', tempfn) return tempfn,True else: print('Saving to', tempfn) keep = True else: fid,tempfn = tempfile.mkstemp() os.close(fid) cmd = cmd % dict(input = fn, output = tempfn) self.logger.debug('cmd: %s' % cmd) print('command:', cmd) (rtn,out,err) = run_command(cmd) if rtn: print('Command failed: command', cmd) print('Output:', out) print('Error:', err) print('Return val:', rtn) raise RuntimeError('Command failed (return val %i): %s' % (rtn, cmd)) print(out) print(err) return tempfn,keep def _get_runlist_filename(self): return self._get_data_file('runList-dr8.par') # read a data file describing the DR8 data def _get_data_file(self, fn): return os.path.join(os.path.dirname(__file__), fn) def get_rerun(self, run, field=None): I = (self.runlist.run == run) if field is not None: I *= (self.runlist.startfield <= field) * 
(self.runlist.endfield >= field) I = np.flatnonzero(I) reruns = np.unique(self.runlist.rerun[I]) #print 'Run', run, '-> reruns:', reruns if len(reruns) == 0: return None return reruns[-1] def get_url(self, filetype, run, camcol, field, band=None, rerun=None): if rerun is None: rerun = self.get_rerun(run, field) path = self.daspaths[filetype] url = urljoin(self.dasurl, path % dict( run=run, camcol=camcol, field=field, rerun=rerun, band=band)) return url def retrieve(self, filetype, run, camcol, field=None, band=None, skipExisting=True, tempsuffix='.tmp', rerun=None): outfn = self.getPath(filetype, run, camcol, field, band, rerun=rerun) print('Checking for file', outfn) if outfn is None: return None if skipExisting and os.path.exists(outfn): #print('Exists') return outfn outdir = os.path.dirname(outfn) if not os.path.exists(outdir): try: os.makedirs(outdir) except: pass url = self.get_url(filetype, run, camcol, field, band=band, rerun=rerun) #print 'Did not find file:', outfn print('Retrieving from URL:', url) if self.curl: cmd = "curl -o '%(outfn)s' '%(url)s'" else: cmd = "wget --continue -nv -O %(outfn)s '%(url)s'" # suffix to add to the downloaded filename suff = self.dassuffix.get(filetype, '') oo = outfn + suff if tempsuffix is not None: oo += tempsuffix cmd = cmd % dict(outfn=oo, url=url) self.logger.debug('cmd: %s' % cmd) (rtn,out,err) = run_command(cmd) if rtn: print('Command failed: command', cmd) print('Output:', out) print('Error:', err) print('Return val:', rtn) return None if tempsuffix is not None: # self.logger.debug('Renaming %s to %s' % (oo, outfn+suff)) os.rename(oo, outfn + suff) if filetype in self.processcmds: cmd = self.processcmds[filetype] cmd = cmd % dict(input = outfn + suff, output = outfn) self.logger.debug('cmd: %s' % cmd) (rtn,out,err) = run_command(cmd) if rtn: print('Command failed: command', cmd) print('Output:', out) print('Error:', err) print('Return val:', rtn) return None return outfn def readPhotoObj(self, run, camcol, field, 
filename=None): obj = PhotoObj(run, camcol, field) if filename is None: fn = self.getPath('photoObj', run, camcol, field) else: fn = filename obj.table = fits_table(fn) return obj def readFrame(self, run, camcol, field, band, filename=None): ''' http://data.sdss3.org/datamodel/files/BOSS_PHOTOOBJ/frames/RERUN/RUN/CAMCOL/frame.html ''' f = Frame(run, camcol, field, band) # ... if filename is None: fn = self.getPath('frame', run, camcol, field, band) else: fn = filename # optionally bunzip2 the frame file. tempfn,keep = self._unzip_frame(fn, run, camcol) if tempfn is not None: fn = tempfn if fitsio: print('Frame filename', fn) # eg /clusterfs/riemann/raid006/dr10/boss/photoObj/frames/301/2825/1/frame-u-002825-1-0126.fits.bz2 F = fitsio.FITS(fn, lower=True) f.header = F[0].read_header() # Allow later reading of just the pixels of interest. f.image_proxy = F[0] f.calib = F[1].read() sky = F[2].read_columns(['allsky', 'xinterp', 'yinterp']) #print 'sky', type(sky) # ... supposed to be a recarray, but it's not... f.sky, f.skyxi, f.skyyi = sky.tolist()[0] tab = fits_table(F[3].read()) if not keep and tempfn is not None: os.remove(tempfn) else: p = pyfits.open(fn) # in nanomaggies f.image = p[0].data f.header = p[0].header # converts counts -> nanomaggies f.calib = p[1].data # table with val,x,y -- binned; use bilinear interpolation to expand sky = p[2].data # table -- asTrans structure tab = fits_table(p[3].data) f.sky = sky.field('allsky')[0] f.skyxi = sky.field('xinterp')[0] f.skyyi = sky.field('yinterp')[0] #print 'sky shape', f.sky.shape if len(f.sky.shape) != 2: f.sky = f.sky.reshape((-1, 256)) assert(len(tab) == 1) tab = tab[0] # DR7 has NODE, INCL in radians... f.astrans = AsTrans(run, camcol, field, band, node=np.deg2rad(tab.node), incl=np.deg2rad(tab.incl), astrans=tab, cut_to_band=False) return f
[ "os.remove", "os.close", "os.path.join", "numpy.round", "numpy.unique", "numpy.meshgrid", "numpy.zeros_like", "os.path.dirname", "os.path.exists", "numpy.log10", "numpy.minimum", "os.path.basename", "os.rename", "astropy.io.fits.open", "astrometry.util.fits.fits_table", "astrometry.util.run_command.run_command", "numpy.log", "tempfile.mkstemp", "fitsio.FITS", "os.makedirs", "numpy.deg2rad", "numpy.flatnonzero", "numpy.array", "numpy.sinh" ]
[((4601, 4654), 'numpy.array', 'np.array', (['[1.4e-10, 9e-11, 1.2e-10, 1.8e-10, 7.4e-10]'], {}), '([1.4e-10, 9e-11, 1.2e-10, 1.8e-10, 7.4e-10])\n', (4609, 4654), True, 'import numpy as np\n'), ((4721, 4742), 'numpy.log', 'np.log', (['_lup_to_mag_b'], {}), '(_lup_to_mag_b)\n', (4727, 4742), True, 'import numpy as np\n'), ((2169, 2191), 'numpy.minimum', 'np.minimum', (['yi', '(sh - 1)'], {}), '(yi, sh - 1)\n', (2179, 2191), True, 'import numpy as np\n'), ((2484, 2506), 'numpy.minimum', 'np.minimum', (['yi', '(sh - 1)'], {}), '(yi, sh - 1)\n', (2494, 2506), True, 'import numpy as np\n'), ((2612, 2631), 'numpy.meshgrid', 'np.meshgrid', (['xi', 'yi'], {}), '(xi, yi)\n', (2623, 2631), True, 'import numpy as np\n'), ((10059, 10077), 'numpy.array', 'np.array', (["y['run']"], {}), "(y['run'])\n", (10067, 10077), True, 'import numpy as np\n'), ((10102, 10127), 'numpy.array', 'np.array', (["y['startfield']"], {}), "(y['startfield'])\n", (10110, 10127), True, 'import numpy as np\n'), ((10150, 10173), 'numpy.array', 'np.array', (["y['endfield']"], {}), "(y['endfield'])\n", (10158, 10173), True, 'import numpy as np\n'), ((10193, 10213), 'numpy.array', 'np.array', (["y['rerun']"], {}), "(y['rerun'])\n", (10201, 10213), True, 'import numpy as np\n'), ((11875, 11891), 'astrometry.util.run_command.run_command', 'run_command', (['cmd'], {}), '(cmd)\n', (11886, 11891), False, 'from astrometry.util.run_command import run_command\n'), ((12673, 12690), 'numpy.flatnonzero', 'np.flatnonzero', (['I'], {}), '(I)\n', (12687, 12690), True, 'import numpy as np\n'), ((12708, 12740), 'numpy.unique', 'np.unique', (['self.runlist.rerun[I]'], {}), '(self.runlist.rerun[I])\n', (12717, 12740), True, 'import numpy as np\n'), ((13662, 13684), 'os.path.dirname', 'os.path.dirname', (['outfn'], {}), '(outfn)\n', (13677, 13684), False, 'import os\n'), ((14445, 14461), 'astrometry.util.run_command.run_command', 'run_command', (['cmd'], {}), '(cmd)\n', (14456, 14461), False, 'from astrometry.util.run_command 
import run_command\n'), ((15562, 15576), 'astrometry.util.fits.fits_table', 'fits_table', (['fn'], {}), '(fn)\n', (15572, 15576), False, 'from astrometry.util.fits import fits_table\n'), ((5435, 5477), 'numpy.sinh', 'np.sinh', (['(Lmag / -1.0857362047581294 - lnbi)'], {}), '(Lmag / -1.0857362047581294 - lnbi)\n', (5442, 5477), True, 'import numpy as np\n'), ((5529, 5545), 'numpy.zeros_like', 'np.zeros_like', (['f'], {}), '(f)\n', (5542, 5545), True, 'import numpy as np\n'), ((5599, 5613), 'numpy.log10', 'np.log10', (['f[I]'], {}), '(f[I])\n', (5607, 5613), True, 'import numpy as np\n'), ((11103, 11158), 'os.path.join', 'os.path.join', (['self.unzip_dir', "('%i' % run)", "('%i' % camcol)"], {}), "(self.unzip_dir, '%i' % run, '%i' % camcol)\n", (11115, 11158), False, 'import os\n'), ((11443, 11465), 'os.path.exists', 'os.path.exists', (['tempfn'], {}), '(tempfn)\n', (11457, 11465), False, 'import os\n'), ((11677, 11695), 'tempfile.mkstemp', 'tempfile.mkstemp', ([], {}), '()\n', (11693, 11695), False, 'import tempfile\n'), ((11708, 11721), 'os.close', 'os.close', (['fid'], {}), '(fid)\n', (11716, 11721), False, 'import os\n'), ((12432, 12457), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (12447, 12457), False, 'import os\n'), ((13568, 13589), 'os.path.exists', 'os.path.exists', (['outfn'], {}), '(outfn)\n', (13582, 13589), False, 'import os\n'), ((13700, 13722), 'os.path.exists', 'os.path.exists', (['outdir'], {}), '(outdir)\n', (13714, 13722), False, 'import os\n'), ((14789, 14816), 'os.rename', 'os.rename', (['oo', '(outfn + suff)'], {}), '(oo, outfn + suff)\n', (14798, 14816), False, 'import os\n'), ((15046, 15062), 'astrometry.util.run_command.run_command', 'run_command', (['cmd'], {}), '(cmd)\n', (15057, 15062), False, 'from astrometry.util.run_command import run_command\n'), ((16322, 16349), 'fitsio.FITS', 'fitsio.FITS', (['fn'], {'lower': '(True)'}), '(fn, lower=True)\n', (16333, 16349), False, 'import fitsio\n'), ((16901, 16916), 
'astropy.io.fits.open', 'pyfits.open', (['fn'], {}), '(fn)\n', (16912, 16916), True, 'from astropy.io import fits as pyfits\n'), ((17258, 17279), 'astrometry.util.fits.fits_table', 'fits_table', (['p[3].data'], {}), '(p[3].data)\n', (17268, 17279), False, 'from astrometry.util.fits import fits_table\n'), ((2071, 2094), 'numpy.round', 'np.round', (['self.skyxi[x]'], {}), '(self.skyxi[x])\n', (2079, 2094), True, 'import numpy as np\n'), ((2120, 2143), 'numpy.round', 'np.round', (['self.skyyi[y]'], {}), '(self.skyyi[y])\n', (2128, 2143), True, 'import numpy as np\n'), ((2392, 2412), 'numpy.round', 'np.round', (['self.skyxi'], {}), '(self.skyxi)\n', (2400, 2412), True, 'import numpy as np\n'), ((2438, 2458), 'numpy.round', 'np.round', (['self.skyyi'], {}), '(self.skyyi)\n', (2446, 2458), True, 'import numpy as np\n'), ((5707, 5721), 'numpy.log10', 'np.log10', (['nmgy'], {}), '(nmgy)\n', (5715, 5721), True, 'import numpy as np\n'), ((6091, 6206), 'os.path.join', 'os.path.join', (['photoObjs', '"""%(rerun)s"""', '"""%(run)i"""', '"""%(camcol)i"""', '"""photoObj-%(run)06i-%(camcol)i-%(field)04i.fits"""'], {}), "(photoObjs, '%(rerun)s', '%(run)i', '%(camcol)i',\n 'photoObj-%(run)06i-%(camcol)i-%(field)04i.fits')\n", (6103, 6206), False, 'import os\n'), ((6260, 6395), 'os.path.join', 'os.path.join', (['photoObjs', '"""frames"""', '"""%(rerun)s"""', '"""%(run)i"""', '"""%(camcol)i"""', '"""frame-%(band)s-%(run)06i-%(camcol)i-%(field)04i.fits.bz2"""'], {}), "(photoObjs, 'frames', '%(rerun)s', '%(run)i', '%(camcol)i',\n 'frame-%(band)s-%(run)06i-%(camcol)i-%(field)04i.fits.bz2')\n", (6272, 6395), False, 'import os\n'), ((6454, 6545), 'os.path.join', 'os.path.join', (['photoObjs', '"""%(rerun)s"""', '"""%(run)i"""', '"""photoField-%(run)06i-%(camcol)i.fits"""'], {}), "(photoObjs, '%(rerun)s', '%(run)i',\n 'photoField-%(run)06i-%(camcol)i.fits')\n", (6466, 6545), False, 'import os\n'), ((6603, 6721), 'os.path.join', 'os.path.join', (['redux', '"""%(rerun)s"""', '"""%(run)i"""', 
'"""objcs"""', '"""%(camcol)i"""', '"""psField-%(run)06i-%(camcol)i-%(field)04i.fit"""'], {}), "(redux, '%(rerun)s', '%(run)i', 'objcs', '%(camcol)i',\n 'psField-%(run)06i-%(camcol)i-%(field)04i.fit')\n", (6615, 6721), False, 'import os\n'), ((6772, 6897), 'os.path.join', 'os.path.join', (['redux', '"""%(rerun)s"""', '"""%(run)i"""', '"""objcs"""', '"""%(camcol)i"""', '"""fpM-%(run)06i-%(band)s%(camcol)i-%(field)04i.fit.gz"""'], {}), "(redux, '%(rerun)s', '%(run)i', 'objcs', '%(camcol)i',\n 'fpM-%(run)06i-%(band)s%(camcol)i-%(field)04i.fit.gz')\n", (6784, 6897), False, 'import os\n'), ((6953, 6995), 'os.path.join', 'os.path.join', (['resolve', '"""window_flist.fits"""'], {}), "(resolve, 'window_flist.fits')\n", (6965, 6995), False, 'import os\n'), ((11178, 11198), 'os.path.exists', 'os.path.exists', (['udir'], {}), '(udir)\n', (11192, 11198), False, 'import os\n'), ((13757, 13776), 'os.makedirs', 'os.makedirs', (['outdir'], {}), '(outdir)\n', (13768, 13776), False, 'import os\n'), ((16852, 16869), 'os.remove', 'os.remove', (['tempfn'], {}), '(tempfn)\n', (16861, 16869), False, 'import os\n'), ((17717, 17737), 'numpy.deg2rad', 'np.deg2rad', (['tab.node'], {}), '(tab.node)\n', (17727, 17737), True, 'import numpy as np\n'), ((17744, 17764), 'numpy.deg2rad', 'np.deg2rad', (['tab.incl'], {}), '(tab.incl)\n', (17754, 17764), True, 'import numpy as np\n'), ((11241, 11258), 'os.makedirs', 'os.makedirs', (['udir'], {}), '(udir)\n', (11252, 11258), False, 'import os\n'), ((11348, 11368), 'os.path.basename', 'os.path.basename', (['fn'], {}), '(fn)\n', (11364, 11368), False, 'import os\n')]
import base64 import json import sys import wave from flask import Flask, jsonify, request from flask_cors import CORS import parselmouth import pandas as pd from scipy.signal import find_peaks import numpy as np import matplotlib.pyplot as plt app = Flask(__name__) app_config = {"host": "0.0.0.0", "port": sys.argv[1]} """ ---------------------- DEVELOPER MODE CONFIG ----------------------- """ # Developer mode uses app.py if "app.py" in sys.argv[0]: # Update app config app_config["debug"] = True # CORS settings cors = CORS(app, resource={ r"/*":{ "origins":"*" } }) # CORS headers app.config["CORS_HEADERS"] = "Content-Type" """ --------------------------- REST CALLS ----------------------------- """ def draw_pitch(pitch): # Extract selected pitch contour, and # replace unvoiced samples by NaN to not plot pitch_values = pitch.selected_array['frequency'] pitch_values[pitch_values==0] = np.nan plt.plot(pitch.xs(), pitch_values, 'o', markersize=5, color='w') plt.plot(pitch.xs(), pitch_values, 'o', markersize=2) plt.grid(False) plt.ylim(0, pitch.ceiling) plt.ylabel("fundamental frequency [Hz]") # Remove and replace with your own @app.route("/example",methods=['GET','POST']) def example(): print(request.json) data=request.get_json() snd = parselmouth.Sound(data['filepath']) pitch = snd.to_pitch() plt.figure() plt.twinx() x=pitch.xs() y=pitch.selected_array['frequency'] dataPoints=[] for i in range(len(y)): if(y[i]!=0): dataPoints.append({"x":x[i],"y":y[i]}) print(dataPoints) draw_pitch(pitch) plt.xlim([snd.xmin, snd.xmax]) name="image1.png" plt.savefig(name) data="" with open("image1.png", "rb") as image_file: data = format(base64.b64encode(image_file.read())) # See /src/components/App.js for frontend call return {"dataPoints":dataPoints} @app.route("/wavepattern",methods=['GET','POST']) def wavepattern(): data=request.get_json() snd = parselmouth.Sound(data['filepath']) pitch = snd.to_pitch() plt.figure() plt.twinx() path = data['filepath'] raw = wave.open(path) signal 
= raw.readframes(-1) signal = np.frombuffer(signal, dtype ="int16") f_rate = raw.getframerate() time = np.linspace( 0, len(signal) / f_rate, num = len(signal) ) dataPoints=[] cnt=0 cur_x=0 cur_y=0 for i in range(len(signal)): if i%100==0: dataPoints.append({"x":cur_x/100,"y":cur_y/100}) cur_x=0 cur_y=0 else: cur_x+=float(time[i]) cur_y+=float(signal[i]) plt.ylabel("fundamental frequency [Hz]") plt.plot(time, signal,color="red") # plt.xlim([snd.xmin, snd.xmax]) name="image2.png" plt.savefig(name) data="" with open("image2.png", "rb") as image_file: data = format(base64.b64encode(image_file.read())) # See /src/components/App.js for frontend call # print(dataPoints) return jsonify({"dataPoints":dataPoints}) def differentitate_pitch(pitch,pitch2,pitch_values1,pitch_values2,s1,s2): # Extract selected pitch contour, and # replace unvoiced samples by NaN to not plot if s1>s2: pitch_values1=pitch_values1[:s2] if s1<s2: pitch_values2=pitch_values2[:s1] cnt = 0 p = np.empty((pitch_values1.size)) for i in range(0,pitch_values1.size): p[i]=np.nan for i in range(0,pitch_values1.size): if abs(pitch_values1[i]-pitch_values2[i])>50: #print(pitch_values2[i]) p[i]=pitch_values2[i] cnt += 1 # print(cnt) # print(p) #plt.plot(pitch2.xs(), pitch_values2, 'o', markersize=5, color='w',label='differences') #plt.plot(pitch2.xs(), pitch_values2, 'o', markersize=2) if s1>s2: plt.plot(pitch2.xs(), pitch_values2, 'o', markersize=5, color='w',label='differences') plt.plot(pitch2.xs(), pitch_values2, 'o', markersize=2) plt.plot(pitch2.xs(), p, 'o', markersize=5, color='w',label='normal') plt.plot(pitch2.xs(), p, 'o', markersize=2) #draw_pitch(pitch) if s1<s2: plt.plot(pitch.xs(), pitch_values1, 'o', markersize=5, color='w',label='differences') plt.plot(pitch.xs(), pitch_values1, 'o', markersize=2) plt.plot(pitch.xs(), p, 'o', markersize=5, color='w',label='normal') plt.plot(pitch.xs(), p, 'o', markersize=2) #draw_pitch(pitch2) plt.grid(False) plt.ylim(0, pitch.ceiling) plt.ylabel("fundamental 
frequency [Hz]") @app.route("/speechpattern",methods=['GET','POST']) def speechpattern(): data=request.get_json() snd = parselmouth.Sound(data['filepath1']) pitch = snd.to_pitch() snd2 = parselmouth.Sound(data['filepath2']) pitch2 = snd2.to_pitch() pitch_values1 = pitch.selected_array['frequency'] pitch_values1[pitch_values1==0] = np.nan pitch_values2 = pitch2.selected_array['frequency'] pitch_values2[pitch_values2==0] = np.nan s1=pitch_values1.size s2=pitch_values2.size if s1>s2: draw_pitch(pitch) differentitate_pitch(pitch,pitch2,pitch_values1,pitch_values2,s1,s2) if s1<s2: draw_pitch(pitch2) plt.xlim([snd2.xmin-0.2, snd2.xmax+0.2]) name="image3.png" plt.savefig(name) data="" with open("image3.png", "rb") as image_file: data = format(base64.b64encode(image_file.read())) # See /src/components/App.js for frontend call return jsonify({"imagename":data[2:-1]}) @app.route("/highlight",methods=['GET','POST']) def highlight(): data=request.get_json() snd = parselmouth.Sound(data['filepath']) pitch = snd.to_pitch() plt.figure() plt.twinx() pitch_values = pitch.selected_array['frequency'] x=pitch.xs() y=pitch.selected_array['frequency'] dataPoints=[] for i in range(len(y)): if(y[i]!=0): dataPoints.append({"x":x[i],"y":y[i]}) s = pitch_values.size p = np.empty(s) for i in range(s-15): flag = 0 for j in range(0,15): if abs(pitch_values[i]-pitch_values[i+j])>5: flag=1 if flag == 0: for j in range(0,15): p[i+j]=pitch_values[i+j] pitch_values[pitch_values==0] = np.nan dataPoints2=[] x=pitch.xs() y=p for i in range(len(y)): if(y[i]!=0): dataPoints2.append({"x":x[i],"y":y[i]}) p[p==0] = np.nan plt.plot(pitch.xs(), pitch_values, 'o', markersize=5, color='w') plt.plot(pitch.xs(), pitch_values, 'o', markersize=2) plt.plot(pitch.xs(), p, 'o', markersize=5, color='w') plt.plot(pitch.xs(), p, 'o', markersize=2) plt.grid(False) plt.ylim(0, pitch.ceiling) plt.ylabel("fundamental frequency [Hz]") plt.xlim([snd.xmin-0.2, snd.xmax+0.2]) name="image4.png" plt.savefig(name) data="" with 
open("image4.png", "rb") as image_file: data = format(base64.b64encode(image_file.read())) # See /src/components/App.js for frontend call return jsonify({"normal":dataPoints,"highlight":dataPoints2}) """ -------------------------- APP SERVICES ---------------------------- """ # Quits Flask on Electron exit @app.route("/quit") def quit(): shutdown = request.environ.get("werkzeug.server.shutdown") shutdown() return if __name__ == "__main__": app.run(**app_config)
[ "matplotlib.pyplot.xlim", "wave.open", "parselmouth.Sound", "matplotlib.pyplot.plot", "matplotlib.pyplot.ylim", "flask_cors.CORS", "matplotlib.pyplot.twinx", "numpy.frombuffer", "flask.Flask", "numpy.empty", "flask.request.environ.get", "matplotlib.pyplot.figure", "flask.jsonify", "matplotlib.pyplot.ylabel", "matplotlib.pyplot.grid", "flask.request.get_json", "matplotlib.pyplot.savefig" ]
[((253, 268), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (258, 268), False, 'from flask import Flask, jsonify, request\n'), ((537, 581), 'flask_cors.CORS', 'CORS', (['app'], {'resource': "{'/*': {'origins': '*'}}"}), "(app, resource={'/*': {'origins': '*'}})\n", (541, 581), False, 'from flask_cors import CORS\n'), ((1086, 1101), 'matplotlib.pyplot.grid', 'plt.grid', (['(False)'], {}), '(False)\n', (1094, 1101), True, 'import matplotlib.pyplot as plt\n'), ((1106, 1132), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(0)', 'pitch.ceiling'], {}), '(0, pitch.ceiling)\n', (1114, 1132), True, 'import matplotlib.pyplot as plt\n'), ((1137, 1177), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""fundamental frequency [Hz]"""'], {}), "('fundamental frequency [Hz]')\n", (1147, 1177), True, 'import matplotlib.pyplot as plt\n'), ((1308, 1326), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (1324, 1326), False, 'from flask import Flask, jsonify, request\n'), ((1335, 1370), 'parselmouth.Sound', 'parselmouth.Sound', (["data['filepath']"], {}), "(data['filepath'])\n", (1352, 1370), False, 'import parselmouth\n'), ((1398, 1410), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (1408, 1410), True, 'import matplotlib.pyplot as plt\n'), ((1413, 1424), 'matplotlib.pyplot.twinx', 'plt.twinx', ([], {}), '()\n', (1422, 1424), True, 'import matplotlib.pyplot as plt\n'), ((1624, 1654), 'matplotlib.pyplot.xlim', 'plt.xlim', (['[snd.xmin, snd.xmax]'], {}), '([snd.xmin, snd.xmax])\n', (1632, 1654), True, 'import matplotlib.pyplot as plt\n'), ((1678, 1695), 'matplotlib.pyplot.savefig', 'plt.savefig', (['name'], {}), '(name)\n', (1689, 1695), True, 'import matplotlib.pyplot as plt\n'), ((1977, 1995), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (1993, 1995), False, 'from flask import Flask, jsonify, request\n'), ((2004, 2039), 'parselmouth.Sound', 'parselmouth.Sound', (["data['filepath']"], {}), "(data['filepath'])\n", (2021, 2039), False, 
'import parselmouth\n'), ((2067, 2079), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (2077, 2079), True, 'import matplotlib.pyplot as plt\n'), ((2082, 2093), 'matplotlib.pyplot.twinx', 'plt.twinx', ([], {}), '()\n', (2091, 2093), True, 'import matplotlib.pyplot as plt\n'), ((2129, 2144), 'wave.open', 'wave.open', (['path'], {}), '(path)\n', (2138, 2144), False, 'import wave\n'), ((2189, 2225), 'numpy.frombuffer', 'np.frombuffer', (['signal'], {'dtype': '"""int16"""'}), "(signal, dtype='int16')\n", (2202, 2225), True, 'import numpy as np\n'), ((2597, 2637), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""fundamental frequency [Hz]"""'], {}), "('fundamental frequency [Hz]')\n", (2607, 2637), True, 'import matplotlib.pyplot as plt\n'), ((2640, 2675), 'matplotlib.pyplot.plot', 'plt.plot', (['time', 'signal'], {'color': '"""red"""'}), "(time, signal, color='red')\n", (2648, 2675), True, 'import matplotlib.pyplot as plt\n'), ((2737, 2754), 'matplotlib.pyplot.savefig', 'plt.savefig', (['name'], {}), '(name)\n', (2748, 2754), True, 'import matplotlib.pyplot as plt\n'), ((2947, 2982), 'flask.jsonify', 'jsonify', (["{'dataPoints': dataPoints}"], {}), "({'dataPoints': dataPoints})\n", (2954, 2982), False, 'from flask import Flask, jsonify, request\n'), ((3262, 3290), 'numpy.empty', 'np.empty', (['pitch_values1.size'], {}), '(pitch_values1.size)\n', (3270, 3290), True, 'import numpy as np\n'), ((4308, 4323), 'matplotlib.pyplot.grid', 'plt.grid', (['(False)'], {}), '(False)\n', (4316, 4323), True, 'import matplotlib.pyplot as plt\n'), ((4326, 4352), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(0)', 'pitch.ceiling'], {}), '(0, pitch.ceiling)\n', (4334, 4352), True, 'import matplotlib.pyplot as plt\n'), ((4355, 4395), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""fundamental frequency [Hz]"""'], {}), "('fundamental frequency [Hz]')\n", (4365, 4395), True, 'import matplotlib.pyplot as plt\n'), ((4478, 4496), 'flask.request.get_json', 'request.get_json', ([], {}), 
'()\n', (4494, 4496), False, 'from flask import Flask, jsonify, request\n'), ((4505, 4541), 'parselmouth.Sound', 'parselmouth.Sound', (["data['filepath1']"], {}), "(data['filepath1'])\n", (4522, 4541), False, 'import parselmouth\n'), ((4576, 4612), 'parselmouth.Sound', 'parselmouth.Sound', (["data['filepath2']"], {}), "(data['filepath2'])\n", (4593, 4612), False, 'import parselmouth\n'), ((5022, 5066), 'matplotlib.pyplot.xlim', 'plt.xlim', (['[snd2.xmin - 0.2, snd2.xmax + 0.2]'], {}), '([snd2.xmin - 0.2, snd2.xmax + 0.2])\n', (5030, 5066), True, 'import matplotlib.pyplot as plt\n'), ((5093, 5110), 'matplotlib.pyplot.savefig', 'plt.savefig', (['name'], {}), '(name)\n', (5104, 5110), True, 'import matplotlib.pyplot as plt\n'), ((5284, 5318), 'flask.jsonify', 'jsonify', (["{'imagename': data[2:-1]}"], {}), "({'imagename': data[2:-1]})\n", (5291, 5318), False, 'from flask import Flask, jsonify, request\n'), ((5393, 5411), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (5409, 5411), False, 'from flask import Flask, jsonify, request\n'), ((5420, 5455), 'parselmouth.Sound', 'parselmouth.Sound', (["data['filepath']"], {}), "(data['filepath'])\n", (5437, 5455), False, 'import parselmouth\n'), ((5483, 5495), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (5493, 5495), True, 'import matplotlib.pyplot as plt\n'), ((5498, 5509), 'matplotlib.pyplot.twinx', 'plt.twinx', ([], {}), '()\n', (5507, 5509), True, 'import matplotlib.pyplot as plt\n'), ((5754, 5765), 'numpy.empty', 'np.empty', (['s'], {}), '(s)\n', (5762, 5765), True, 'import numpy as np\n'), ((6394, 6409), 'matplotlib.pyplot.grid', 'plt.grid', (['(False)'], {}), '(False)\n', (6402, 6409), True, 'import matplotlib.pyplot as plt\n'), ((6412, 6438), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(0)', 'pitch.ceiling'], {}), '(0, pitch.ceiling)\n', (6420, 6438), True, 'import matplotlib.pyplot as plt\n'), ((6441, 6481), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""fundamental frequency [Hz]"""'], 
{}), "('fundamental frequency [Hz]')\n", (6451, 6481), True, 'import matplotlib.pyplot as plt\n'), ((6487, 6529), 'matplotlib.pyplot.xlim', 'plt.xlim', (['[snd.xmin - 0.2, snd.xmax + 0.2]'], {}), '([snd.xmin - 0.2, snd.xmax + 0.2])\n', (6495, 6529), True, 'import matplotlib.pyplot as plt\n'), ((6549, 6566), 'matplotlib.pyplot.savefig', 'plt.savefig', (['name'], {}), '(name)\n', (6560, 6566), True, 'import matplotlib.pyplot as plt\n'), ((6740, 6797), 'flask.jsonify', 'jsonify', (["{'normal': dataPoints, 'highlight': dataPoints2}"], {}), "({'normal': dataPoints, 'highlight': dataPoints2})\n", (6747, 6797), False, 'from flask import Flask, jsonify, request\n'), ((6952, 6999), 'flask.request.environ.get', 'request.environ.get', (['"""werkzeug.server.shutdown"""'], {}), "('werkzeug.server.shutdown')\n", (6971, 6999), False, 'from flask import Flask, jsonify, request\n')]
from overrides import overrides from claf.config.registry import Registry from claf.config.utils import convert_config2dict from claf.tokens import tokenizer from .base import Factory def make_tokenizer(tokenizer_cls, tokenizer_config, parent_tokenizers={}): if tokenizer_config is None or "name" not in tokenizer_config: return None package_name = tokenizer_config["name"] package_config = tokenizer_config.get(package_name, {}) tokenizer_config["config"] = package_config if package_name in tokenizer_config: del tokenizer_config[package_name] tokenizer_config.update(parent_tokenizers) return tokenizer_cls(**tokenizer_config) def make_all_tokenizers(all_tokenizer_config): """ Tokenizer is resource used all token together """ sent_tokenizer = make_tokenizer( tokenizer.SentTokenizer, all_tokenizer_config.get("sent", {"name": "punkt"}) ) word_tokenizer = make_tokenizer( tokenizer.WordTokenizer, all_tokenizer_config.get("word", None), parent_tokenizers={"sent_tokenizer": sent_tokenizer}, ) subword_tokenizer = make_tokenizer( tokenizer.SubwordTokenizer, all_tokenizer_config.get("subword", None), parent_tokenizers={"word_tokenizer": word_tokenizer}, ) char_tokenizer = make_tokenizer( tokenizer.CharTokenizer, all_tokenizer_config.get("char", None), parent_tokenizers={"word_tokenizer": word_tokenizer}, ) return { "char": char_tokenizer, "subword": subword_tokenizer, "word": word_tokenizer, "sent": sent_tokenizer, } class TokenMakersFactory(Factory): """ TokenMakers Factory Class * Args: config: token config from argument (config.token) """ LANGS = ["eng", "kor"] def __init__(self, config): self.config = config self.registry = Registry() @overrides def create(self): tokenizers = make_all_tokenizers(convert_config2dict(self.config.tokenizer)) token_names, token_types = self.config.names, self.config.types if len(token_names) != len(token_types): raise ValueError("token_names and token_types must be same length.") token_makers = {"tokenizers": tokenizers} for token_name, 
token_type in sorted(zip(token_names, token_types)): token_config = getattr(self.config, token_name, {}) if token_config != {}: token_config = convert_config2dict(token_config) # Token (tokenizer, indexer, embedding, vocab) token_config = { "tokenizers": tokenizers, "indexer_config": token_config.get("indexer", {}), "embedding_config": token_config.get("embedding", {}), "vocab_config": token_config.get("vocab", {}), } token_makers[token_name] = self.registry.get(f"token:{token_type}")(**token_config) return token_makers
[ "claf.config.registry.Registry", "claf.config.utils.convert_config2dict" ]
[((1907, 1917), 'claf.config.registry.Registry', 'Registry', ([], {}), '()\n', (1915, 1917), False, 'from claf.config.registry import Registry\n'), ((1997, 2039), 'claf.config.utils.convert_config2dict', 'convert_config2dict', (['self.config.tokenizer'], {}), '(self.config.tokenizer)\n', (2016, 2039), False, 'from claf.config.utils import convert_config2dict\n'), ((2502, 2535), 'claf.config.utils.convert_config2dict', 'convert_config2dict', (['token_config'], {}), '(token_config)\n', (2521, 2535), False, 'from claf.config.utils import convert_config2dict\n')]
import argparse from sensai_dataset.generator.commands import generate_dataset from sensai_dataset.generator.constants import DATASET_DIR, DATASET_SOURCE_DIR if __name__ == '__main__': parser = argparse.ArgumentParser(description='dataset generator') parser.add_argument('-m', '--matcher', type=str, default='chats_*.csv') args = parser.parse_args() print('target: ' + DATASET_DIR) print('source: ' + DATASET_SOURCE_DIR) generate_dataset(source_dir=DATASET_SOURCE_DIR, target_dir=DATASET_DIR, matcher=args.matcher)
[ "argparse.ArgumentParser", "sensai_dataset.generator.commands.generate_dataset" ]
[((200, 256), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""dataset generator"""'}), "(description='dataset generator')\n", (223, 256), False, 'import argparse\n'), ((449, 546), 'sensai_dataset.generator.commands.generate_dataset', 'generate_dataset', ([], {'source_dir': 'DATASET_SOURCE_DIR', 'target_dir': 'DATASET_DIR', 'matcher': 'args.matcher'}), '(source_dir=DATASET_SOURCE_DIR, target_dir=DATASET_DIR,\n matcher=args.matcher)\n', (465, 546), False, 'from sensai_dataset.generator.commands import generate_dataset\n')]
# Lint as: python2, python3 # Copyright 2019 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== r"""Read saved Decoder's outputs and convert to KITTI text format. First, obtain a KITTI camera calibration file. To export all detections from a single model: python export_kitti_detection.py \ --decoder_path=/path/to/decoder_out_000103000 \ --calib_file=/tmp/kitti_test_calibs.npz \ --output_dir=/tmp/my-kitti-export-directory \ --logtostderr --- OR --- Export combined detections selected from multiple models: python export_kitti_detection.py \ --car_decoder_path=/path/to/car_decoder_out \ --ped_decoder_path=/path/to/ped_decoder_out \ --cyc_decoder_path=/path/to/cyc_decoder_out \ --calib_file=/tmp/kitti_test_calibs.npz \ --output_dir=/tmp/my-kitti-export-directory \ --logtostderr """ from __future__ import absolute_import from __future__ import division from __future__ import print_function from absl import flags from lingvo import compat as tf from lingvo.core.ops import record_pb2 from lingvo.tasks.car import kitti_metadata from lingvo.tasks.car.tools import kitti_data import numpy as np FLAGS = flags.FLAGS flags.DEFINE_string( "decoder_path", None, "Paths to decoder file containing output " "of decoder for everything. 
Either supply this argument or individual " "decoder paths for cars, pedestrians and cyclists.") flags.DEFINE_string( "car_decoder_path", None, "Paths to decoder file containing output of decoder for cars." "Either supply plus cyclists and pedestrians or supply one " "decoder for all labels.") flags.DEFINE_string( "ped_decoder_path", None, "Paths to decoder file containing output of decoder for " "pedestrians. Either supply plus cyclists and cars or " "supply one decoder for all labels.") flags.DEFINE_string( "cyc_decoder_path", None, "Paths to decoder file containing output of decoder for cyclist. " "Either supply plus cars and pedestrians or supply one " "decoder for all labels.") flags.DEFINE_string( "calib_file", None, "Path to a npz file that contains all calibration matrices.") flags.DEFINE_string("output_dir", None, "Place to write detections.") flags.DEFINE_float("score_threshold", 0, "Ignore detections with lower score.") def LoadCalibData(fname): """Load and parse calibration data from NPZ file.""" # If this throws an error, make sure the npz file was generated from # the same version of python as this binary. npz = np.load(fname) scene_to_calib = {} for idx, scene_id in enumerate(npz["scene_id"]): tf.logging.info("Processing %s", scene_id) raw_calib = {} raw_calib["P0"] = npz["P0"][idx] raw_calib["P1"] = npz["P1"][idx] raw_calib["P2"] = npz["P2"][idx] raw_calib["P3"] = npz["P3"][idx] raw_calib["R0_rect"] = npz["R0_rect"][idx] raw_calib["Tr_velo_to_cam"] = npz["Tr_velo_to_cam"][idx] raw_calib["Tr_imu_to_velo"] = npz["Tr_imu_to_velo"][idx] calib = kitti_data.ParseCalibrationDict(raw_calib) scene_to_calib[scene_id] = calib return scene_to_calib def ExtractNpContent(np_dict, calib): """Parse saved np arrays and convert 3D bboxes to camera0 coordinates. Args: np_dict: a dict of numpy arrays. calib: a parsed calibration dictionary. Returns: A tuple of 6 ndarrays: - location_camera: [N, 3]. [x, y, z] in camera0 coordinate. - dimension_camera: [N, 3]. 
The [height, width, length] of objects. - phi_camera: [N]. Rotation around y-axis in camera0 coodinate. - bboxes_2d: [N, 4]. The corresponding 2D bboxes in the image coordinate. - scores: [N]. Confidence scores for each box for the assigned class. - class_ids: [N]. The class id assigned to each box. """ bboxes = np_dict["bboxes"] scores = np_dict["scores"] class_ids = np_dict["class_ids"] bboxes_2d = np_dict["bboxes_2d"] # Transform from velodyne coordinates to camera coordinates. velo_to_cam_transform = kitti_data.VeloToCameraTransformation(calib) location_cam = np.zeros((len(bboxes), 3)) dimension_cam = np.zeros((len(bboxes), 3)) rotation_cam = np.zeros((len(bboxes), 1)) for idx, bbox in enumerate(bboxes): location_cam[idx, :], dimension_cam[idx, :], rotation_cam[idx, :] = ( kitti_data.BBox3DToKITTIObject(bbox, velo_to_cam_transform)) return location_cam, dimension_cam, rotation_cam, bboxes_2d, scores, class_ids _INCLUDED_KITTI_CLASS_NAMES = ["Car", "Pedestrian", "Cyclist"] def ExportKITTIDetection(out_dir, source_id, location_cam, dimension_cam, rotation_cam, bboxes_2d, scores, class_name, is_first): """Write detections to a text file in KITTI format.""" tf.logging.info("Exporting %s for %s" % (class_name, source_id)) fname = out_dir + "/" + source_id + ".txt" with tf.gfile.Open(fname, "a") as fid: # Ensure we always create a file even when there's no detection. # TODO(shlens): Test whether this is actually necessary on the KITTI # eval server. 
if is_first: fid.write("") for location, dimension, ry, bbox_2d, score in zip( location_cam, dimension_cam, rotation_cam, bboxes_2d, scores): if score < FLAGS.score_threshold: continue # class_name, truncated(ignore), alpha(ignore), bbox2D x 4 part1 = [class_name, -1, -1, -10] + list(bbox_2d) # dimesion x 3, location x 3, rotation_y x 1, score x 1 fill = tuple(part1 + list(dimension) + list(location) + [ry] + [score]) kitti_format_string = ("%s %lf %lf %lf %lf %lf %lf %lf %lf %lf %lf %lf " "%lf %lf %lf %lf") kitti_line = kitti_format_string % fill fid.write(kitti_line + "\n") def main(argv): if len(argv) > 1: raise tf.app.UsageError("Too many command-line arguments.") if FLAGS.decoder_path: assert not FLAGS.car_decoder_path and not FLAGS.ped_decoder_path \ and not FLAGS.cyc_decoder_path, ("Either provide decoder_path or " "individual decoders but not both.") else: assert FLAGS.car_decoder_path and FLAGS.ped_decoder_path and \ FLAGS.cyc_decoder_path, ("No decoder_path specified. Please supply all " "individual decoder_paths for labels.") is_single_decoder_file = FLAGS.decoder_path is not None if is_single_decoder_file: list_of_decoder_paths = [FLAGS.decoder_path] else: # Note the correspondence between _INCLUDED_KITTI_CLASS_NAMES ordering and # this list. list_of_decoder_paths = [ FLAGS.car_decoder_path, FLAGS.ped_decoder_path, FLAGS.cyc_decoder_path ] # A list of dictionaries mapping img ids to a dictionary of numpy tensors. 
table_data = [] img_ids = [] for table_path in list_of_decoder_paths: img_id_dict = {} for serialized in tf.io.tf_record_iterator(table_path): record = record_pb2.Record() record.ParseFromString(serialized) img_id = str(tf.make_ndarray(record.fields["img_id"])) img_ids.append(img_id) np_dict = {k: tf.make_ndarray(v) for k, v in record.fields.items()} img_id_dict[img_id] = np_dict table_data.append(img_id_dict) img_ids = list(set(img_ids)) if not tf.gfile.Exists(FLAGS.output_dir): tf.gfile.MkDir(FLAGS.output_dir) all_kitti_class_names = kitti_metadata.KITTIMetadata().ClassNames() calib_data = LoadCalibData(tf.gfile.Open(FLAGS.calib_file, "rb")) count = 0 for img_id in img_ids: # Ignore padded samples where the img_ids are empty. if not img_id: continue for table_index, img_id_dict in enumerate(table_data): if img_id in img_id_dict: np_dict = img_id_dict[img_id] (location_cam, dimension_cam, rotation_cam, bboxes_2d, scores, class_ids) = ExtractNpContent(np_dict, calib_data[img_id + ".txt"]) if is_single_decoder_file: valid_labels = _INCLUDED_KITTI_CLASS_NAMES else: valid_labels = [_INCLUDED_KITTI_CLASS_NAMES[table_index]] is_first = table_index == 0 for class_name in valid_labels: class_mask = (class_ids == all_kitti_class_names.index(class_name)) ExportKITTIDetection(FLAGS.output_dir, img_id, location_cam[class_mask], dimension_cam[class_mask], rotation_cam[class_mask], bboxes_2d[class_mask], scores[class_mask], class_name, is_first) count += 1 tf.logging.info("Total example exported: %d", count) if __name__ == "__main__": tf.app.run(main)
[ "lingvo.tasks.car.kitti_metadata.KITTIMetadata", "lingvo.compat.gfile.Open", "numpy.load", "lingvo.compat.gfile.MkDir", "lingvo.tasks.car.tools.kitti_data.VeloToCameraTransformation", "lingvo.compat.app.run", "lingvo.compat.gfile.Exists", "absl.flags.DEFINE_string", "lingvo.compat.logging.info", "absl.flags.DEFINE_float", "lingvo.compat.io.tf_record_iterator", "lingvo.compat.app.UsageError", "lingvo.core.ops.record_pb2.Record", "lingvo.compat.make_ndarray", "lingvo.tasks.car.tools.kitti_data.ParseCalibrationDict", "lingvo.tasks.car.tools.kitti_data.BBox3DToKITTIObject" ]
[((1752, 1964), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""decoder_path"""', 'None', '"""Paths to decoder file containing output of decoder for everything. Either supply this argument or individual decoder paths for cars, pedestrians and cyclists."""'], {}), "('decoder_path', None,\n 'Paths to decoder file containing output of decoder for everything. Either supply this argument or individual decoder paths for cars, pedestrians and cyclists.'\n )\n", (1771, 1964), False, 'from absl import flags\n'), ((1975, 2174), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""car_decoder_path"""', 'None', '"""Paths to decoder file containing output of decoder for cars.Either supply plus cyclists and pedestrians or supply one decoder for all labels."""'], {}), "('car_decoder_path', None,\n 'Paths to decoder file containing output of decoder for cars.Either supply plus cyclists and pedestrians or supply one decoder for all labels.'\n )\n", (1994, 2174), False, 'from absl import flags\n'), ((2189, 2389), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""ped_decoder_path"""', 'None', '"""Paths to decoder file containing output of decoder for pedestrians. Either supply plus cyclists and cars or supply one decoder for all labels."""'], {}), "('ped_decoder_path', None,\n 'Paths to decoder file containing output of decoder for pedestrians. Either supply plus cyclists and cars or supply one decoder for all labels.'\n )\n", (2208, 2389), False, 'from absl import flags\n'), ((2404, 2603), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""cyc_decoder_path"""', 'None', '"""Paths to decoder file containing output of decoder for cyclist. Either supply plus cars and pedestrians or supply one decoder for all labels."""'], {}), "('cyc_decoder_path', None,\n 'Paths to decoder file containing output of decoder for cyclist. 
Either supply plus cars and pedestrians or supply one decoder for all labels.'\n )\n", (2423, 2603), False, 'from absl import flags\n'), ((2618, 2723), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""calib_file"""', 'None', '"""Path to a npz file that contains all calibration matrices."""'], {}), "('calib_file', None,\n 'Path to a npz file that contains all calibration matrices.')\n", (2637, 2723), False, 'from absl import flags\n'), ((2729, 2798), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""output_dir"""', 'None', '"""Place to write detections."""'], {}), "('output_dir', None, 'Place to write detections.')\n", (2748, 2798), False, 'from absl import flags\n'), ((2799, 2878), 'absl.flags.DEFINE_float', 'flags.DEFINE_float', (['"""score_threshold"""', '(0)', '"""Ignore detections with lower score."""'], {}), "('score_threshold', 0, 'Ignore detections with lower score.')\n", (2817, 2878), False, 'from absl import flags\n'), ((3088, 3102), 'numpy.load', 'np.load', (['fname'], {}), '(fname)\n', (3095, 3102), True, 'import numpy as np\n'), ((4556, 4600), 'lingvo.tasks.car.tools.kitti_data.VeloToCameraTransformation', 'kitti_data.VeloToCameraTransformation', (['calib'], {}), '(calib)\n', (4593, 4600), False, 'from lingvo.tasks.car.tools import kitti_data\n'), ((5278, 5342), 'lingvo.compat.logging.info', 'tf.logging.info', (["('Exporting %s for %s' % (class_name, source_id))"], {}), "('Exporting %s for %s' % (class_name, source_id))\n", (5293, 5342), True, 'from lingvo import compat as tf\n'), ((9088, 9140), 'lingvo.compat.logging.info', 'tf.logging.info', (['"""Total example exported: %d"""', 'count'], {}), "('Total example exported: %d', count)\n", (9103, 9140), True, 'from lingvo import compat as tf\n'), ((9172, 9188), 'lingvo.compat.app.run', 'tf.app.run', (['main'], {}), '(main)\n', (9182, 9188), True, 'from lingvo import compat as tf\n'), ((3180, 3222), 'lingvo.compat.logging.info', 'tf.logging.info', (['"""Processing %s"""', 'scene_id'], {}), 
"('Processing %s', scene_id)\n", (3195, 3222), True, 'from lingvo import compat as tf\n'), ((3572, 3614), 'lingvo.tasks.car.tools.kitti_data.ParseCalibrationDict', 'kitti_data.ParseCalibrationDict', (['raw_calib'], {}), '(raw_calib)\n', (3603, 3614), False, 'from lingvo.tasks.car.tools import kitti_data\n'), ((4854, 4913), 'lingvo.tasks.car.tools.kitti_data.BBox3DToKITTIObject', 'kitti_data.BBox3DToKITTIObject', (['bbox', 'velo_to_cam_transform'], {}), '(bbox, velo_to_cam_transform)\n', (4884, 4913), False, 'from lingvo.tasks.car.tools import kitti_data\n'), ((5395, 5420), 'lingvo.compat.gfile.Open', 'tf.gfile.Open', (['fname', '"""a"""'], {}), "(fname, 'a')\n", (5408, 5420), True, 'from lingvo import compat as tf\n'), ((6328, 6381), 'lingvo.compat.app.UsageError', 'tf.app.UsageError', (['"""Too many command-line arguments."""'], {}), "('Too many command-line arguments.')\n", (6345, 6381), True, 'from lingvo import compat as tf\n'), ((7415, 7451), 'lingvo.compat.io.tf_record_iterator', 'tf.io.tf_record_iterator', (['table_path'], {}), '(table_path)\n', (7439, 7451), True, 'from lingvo import compat as tf\n'), ((7805, 7838), 'lingvo.compat.gfile.Exists', 'tf.gfile.Exists', (['FLAGS.output_dir'], {}), '(FLAGS.output_dir)\n', (7820, 7838), True, 'from lingvo import compat as tf\n'), ((7844, 7876), 'lingvo.compat.gfile.MkDir', 'tf.gfile.MkDir', (['FLAGS.output_dir'], {}), '(FLAGS.output_dir)\n', (7858, 7876), True, 'from lingvo import compat as tf\n'), ((7977, 8014), 'lingvo.compat.gfile.Open', 'tf.gfile.Open', (['FLAGS.calib_file', '"""rb"""'], {}), "(FLAGS.calib_file, 'rb')\n", (7990, 8014), True, 'from lingvo import compat as tf\n'), ((7468, 7487), 'lingvo.core.ops.record_pb2.Record', 'record_pb2.Record', ([], {}), '()\n', (7485, 7487), False, 'from lingvo.core.ops import record_pb2\n'), ((7904, 7934), 'lingvo.tasks.car.kitti_metadata.KITTIMetadata', 'kitti_metadata.KITTIMetadata', ([], {}), '()\n', (7932, 7934), False, 'from lingvo.tasks.car import 
kitti_metadata\n'), ((7548, 7588), 'lingvo.compat.make_ndarray', 'tf.make_ndarray', (["record.fields['img_id']"], {}), "(record.fields['img_id'])\n", (7563, 7588), True, 'from lingvo import compat as tf\n'), ((7639, 7657), 'lingvo.compat.make_ndarray', 'tf.make_ndarray', (['v'], {}), '(v)\n', (7654, 7657), True, 'from lingvo import compat as tf\n')]
# !/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import print_function import argparse import datetime import json import math import os import random import time import numpy as np import torch import torch.optim as optim import torch.utils.data import compression from compression.utils import load_imagenet_data from optimization.training import train, evaluate random.seed(7610) parser = argparse.ArgumentParser(description='PyTorch Discrete Normalizing flows') parser.add_argument('--imagenet64_data_path', type=str, default='~/data/imagenet-small/train_64x64.npy') parser.add_argument('--imagenet64_valid_data_path', type=str, default='~/data/imagenet-small/valid_64x64.npy') parser.add_argument('--imagenet64_model', type=str, default=None) parser.add_argument('--state_parameters', type=str, default=None) parser.add_argument('--from_torch', action="store_true") parser.add_argument('--manual_seed', type=int, help='manual seed, if not given resorts to random seed.') parser.add_argument('--evaluate_interval_epochs', type=int, default=5, help='Evaluate per how many epochs') parser.add_argument('--snap_images', type=int, default=100000, help='Number of images to process on training before save snapshots') parser.add_argument('-od', '--out_dir', type=str, default='./', help='output directory for model snapshots etc.') # optimization settings parser.add_argument('-e', '--epochs', type=int, default=100, metavar='EPOCHS', help='number of epochs to train (default: 2000)') parser.add_argument('-bs', '--batch_size', type=int, default=2, metavar='BATCH_SIZE', help='input batch size for training (default: 100)') parser.add_argument('-lr', '--learning_rate', type=float, default=0.00001, metavar='LEARNING_RATE', help='learning rate') parser.add_argument('--step_size', default=10000, type=float, help='Number of batch iteration to update the learning rate') parser.add_argument('--gamma', default=0.1, type=float, help='Multiplicative factor of learning rate decay') args = 
parser.parse_args() if args.manual_seed is None: args.manual_seed = random.randint(1, 100000) random.seed(args.manual_seed) torch.manual_seed(args.manual_seed) np.random.seed(args.manual_seed) def run(args): print('\nMODEL SETTINGS: \n', args, '\n') print("Random Seed: ", args.manual_seed) # ================================================================================================================== # SNAPSHOTS # ================================================================================================================== args.model_signature = str(datetime.datetime.now())[0:19].replace(' ', '_') args.model_signature = args.model_signature.replace(':', '_') os.makedirs(args.out_dir, exist_ok=True) snap_dir = args.out_dir with open(os.path.join(snap_dir, 'log.txt'), 'a') as ff: print('\nMODEL SETTINGS: \n', args, '\n', file=ff) # SAVING torch.save(args, snap_dir + '.config') # Load snapshot parameters parameters_dict = None if args.state_parameters is not None: assert os.path.isfile(args.state_parameters) parameters_dict = json.load(open(args.state_parameters)) args.learning_rate = parameters_dict['scheduler']['_last_lr'][0] args.device = torch.device("cuda" if torch.cuda.is_available() else "cpu") print('Device:', args.device) # ================================================================================================================== # LOAD DATA # ================================================================================================================== dataset = load_imagenet_data(os.path.expanduser(args.imagenet64_data_path)) validation_dataset = load_imagenet_data(os.path.expanduser(args.imagenet64_valid_data_path)) train_loader = torch.utils.data.DataLoader(dataset, batch_size=args.batch_size, shuffle=True, drop_last=False) val_loader = torch.utils.data.DataLoader(validation_dataset, batch_size=args.batch_size, shuffle=True, drop_last=False) # test_loader = torch.utils.data.DataLoader( # dataset, # batch_size=args.batch_size, # 
shuffle=False, # **kwargs) args.input_size = [3, 64, 64] # ================================================================================================================== # SELECT MODEL # ================================================================================================================== # flow parameters and architecture choice are passed on to model through args print(args.input_size) from compression.models.load_flowpp_imagenet64 import Imagenet64Model # Load model if args.imagenet64_model is None: model = Imagenet64Model(force_float32_cond=True).eval() else: model_ctor = compression.models.load_imagenet64_model model_filename = os.path.expanduser(args.imagenet64_model) model = model_ctor(model_filename, force_float32_cond=True, from_torch=args.from_torch) model.to(device=args.device) model_sample = model optimizer = optim.Adam(model.parameters(), lr=args.learning_rate) scheduler = optim.lr_scheduler.StepLR(optimizer, step_size=args.step_size, gamma=args.gamma) # ================================================================================================================== # TRAINING # ================================================================================================================== train_bpd = [] val_bpd = [] # for early stopping best_val_bpd = np.inf best_val_loss = np.inf if args.state_parameters is None: last_epoch = 1 run_number = 1 else: last_epoch = parameters_dict['epoch'] run_number = parameters_dict['run_number'] + 1 scheduler.load_state_dict(parameters_dict['scheduler']) train_times = [] model.double() for epoch in range(last_epoch, args.epochs + 1): t_start = time.time() if parameters_dict is not None: tr_loss, tr_bpd = train(epoch, train_loader, model, optimizer, args, scheduler, True, parameters_dict['batch_idx'], run_number) else: tr_loss, tr_bpd = train(epoch, train_loader, model, optimizer, args, scheduler, False) train_bpd.append(tr_bpd) train_times.append(time.time() - t_start) print('One training epoch took 
%.2f seconds' % (time.time() - t_start)) if epoch < 5 or epoch % args.evaluate_interval_epochs == 0: v_loss, v_bpd = evaluate( val_loader, model, model_sample, args, epoch=epoch, file=snap_dir + 'log.txt') val_bpd.append(v_bpd) best_val_bpd = min(v_bpd, best_val_bpd) best_val_loss = min(v_loss, best_val_loss) print('(BEST: val bpd {:.4f}, val loss {:.4f})\n'.format(best_val_bpd, best_val_loss)) print(f'VALIDATION: loss: {v_loss}, bpd: {v_bpd}') if math.isnan(v_loss): raise ValueError('NaN encountered!') train_bpd = np.hstack(train_bpd) val_bpd = np.array(val_bpd) # training time per epoch train_times = np.array(train_times) mean_train_time = np.mean(train_times) std_train_time = np.std(train_times, ddof=1) print('Average train time per epoch: %.2f +/- %.2f' % (mean_train_time, std_train_time)) # ================================================================================================================== # EVALUATION # ================================================================================================================== final_model = torch.load(snap_dir + 'a.model') test_loss, test_bpd = evaluate( train_loader, test_loader, final_model, final_model, args, epoch=epoch, file=snap_dir + 'test_log.txt') print('Test loss / bpd: %.2f / %.2f' % (test_loss, test_bpd)) if __name__ == "__main__": run(args)
[ "numpy.random.seed", "argparse.ArgumentParser", "torch.optim.lr_scheduler.StepLR", "os.path.isfile", "numpy.mean", "optimization.training.train", "os.path.join", "optimization.training.evaluate", "random.randint", "torch.utils.data.DataLoader", "numpy.std", "torch.load", "random.seed", "compression.models.load_flowpp_imagenet64.Imagenet64Model", "datetime.datetime.now", "math.isnan", "torch.manual_seed", "numpy.hstack", "torch.cuda.is_available", "os.makedirs", "time.time", "torch.save", "numpy.array", "os.path.expanduser" ]
[((384, 401), 'random.seed', 'random.seed', (['(7610)'], {}), '(7610)\n', (395, 401), False, 'import random\n'), ((412, 485), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""PyTorch Discrete Normalizing flows"""'}), "(description='PyTorch Discrete Normalizing flows')\n", (435, 485), False, 'import argparse\n'), ((2250, 2279), 'random.seed', 'random.seed', (['args.manual_seed'], {}), '(args.manual_seed)\n', (2261, 2279), False, 'import random\n'), ((2280, 2315), 'torch.manual_seed', 'torch.manual_seed', (['args.manual_seed'], {}), '(args.manual_seed)\n', (2297, 2315), False, 'import torch\n'), ((2316, 2348), 'numpy.random.seed', 'np.random.seed', (['args.manual_seed'], {}), '(args.manual_seed)\n', (2330, 2348), True, 'import numpy as np\n'), ((2224, 2249), 'random.randint', 'random.randint', (['(1)', '(100000)'], {}), '(1, 100000)\n', (2238, 2249), False, 'import random\n'), ((2867, 2907), 'os.makedirs', 'os.makedirs', (['args.out_dir'], {'exist_ok': '(True)'}), '(args.out_dir, exist_ok=True)\n', (2878, 2907), False, 'import os\n'), ((3075, 3113), 'torch.save', 'torch.save', (['args', "(snap_dir + '.config')"], {}), "(args, snap_dir + '.config')\n", (3085, 3113), False, 'import torch\n'), ((3976, 4076), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (['dataset'], {'batch_size': 'args.batch_size', 'shuffle': '(True)', 'drop_last': '(False)'}), '(dataset, batch_size=args.batch_size, shuffle=\n True, drop_last=False)\n', (4003, 4076), False, 'import torch\n'), ((4089, 4199), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (['validation_dataset'], {'batch_size': 'args.batch_size', 'shuffle': '(True)', 'drop_last': '(False)'}), '(validation_dataset, batch_size=args.batch_size,\n shuffle=True, drop_last=False)\n', (4116, 4199), False, 'import torch\n'), ((5375, 5460), 'torch.optim.lr_scheduler.StepLR', 'optim.lr_scheduler.StepLR', (['optimizer'], {'step_size': 'args.step_size', 'gamma': 'args.gamma'}), 
'(optimizer, step_size=args.step_size, gamma=args.gamma\n )\n', (5400, 5460), True, 'import torch.optim as optim\n'), ((7336, 7356), 'numpy.hstack', 'np.hstack', (['train_bpd'], {}), '(train_bpd)\n', (7345, 7356), True, 'import numpy as np\n'), ((7371, 7388), 'numpy.array', 'np.array', (['val_bpd'], {}), '(val_bpd)\n', (7379, 7388), True, 'import numpy as np\n'), ((7438, 7459), 'numpy.array', 'np.array', (['train_times'], {}), '(train_times)\n', (7446, 7459), True, 'import numpy as np\n'), ((7482, 7502), 'numpy.mean', 'np.mean', (['train_times'], {}), '(train_times)\n', (7489, 7502), True, 'import numpy as np\n'), ((7524, 7551), 'numpy.std', 'np.std', (['train_times'], {'ddof': '(1)'}), '(train_times, ddof=1)\n', (7530, 7551), True, 'import numpy as np\n'), ((7923, 7955), 'torch.load', 'torch.load', (["(snap_dir + 'a.model')"], {}), "(snap_dir + 'a.model')\n", (7933, 7955), False, 'import torch\n'), ((7982, 8099), 'optimization.training.evaluate', 'evaluate', (['train_loader', 'test_loader', 'final_model', 'final_model', 'args'], {'epoch': 'epoch', 'file': "(snap_dir + 'test_log.txt')"}), "(train_loader, test_loader, final_model, final_model, args, epoch=\n epoch, file=snap_dir + 'test_log.txt')\n", (7990, 8099), False, 'from optimization.training import train, evaluate\n'), ((3230, 3267), 'os.path.isfile', 'os.path.isfile', (['args.state_parameters'], {}), '(args.state_parameters)\n', (3244, 3267), False, 'import os\n'), ((3812, 3857), 'os.path.expanduser', 'os.path.expanduser', (['args.imagenet64_data_path'], {}), '(args.imagenet64_data_path)\n', (3830, 3857), False, 'import os\n'), ((3903, 3954), 'os.path.expanduser', 'os.path.expanduser', (['args.imagenet64_valid_data_path'], {}), '(args.imagenet64_valid_data_path)\n', (3921, 3954), False, 'import os\n'), ((5090, 5131), 'os.path.expanduser', 'os.path.expanduser', (['args.imagenet64_model'], {}), '(args.imagenet64_model)\n', (5108, 5131), False, 'import os\n'), ((6202, 6213), 'time.time', 'time.time', ([], {}), 
'()\n', (6211, 6213), False, 'import time\n'), ((2951, 2984), 'os.path.join', 'os.path.join', (['snap_dir', '"""log.txt"""'], {}), "(snap_dir, 'log.txt')\n", (2963, 2984), False, 'import os\n'), ((3448, 3473), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (3471, 3473), False, 'import torch\n'), ((6284, 6397), 'optimization.training.train', 'train', (['epoch', 'train_loader', 'model', 'optimizer', 'args', 'scheduler', '(True)', "parameters_dict['batch_idx']", 'run_number'], {}), "(epoch, train_loader, model, optimizer, args, scheduler, True,\n parameters_dict['batch_idx'], run_number)\n", (6289, 6397), False, 'from optimization.training import train, evaluate\n'), ((6474, 6542), 'optimization.training.train', 'train', (['epoch', 'train_loader', 'model', 'optimizer', 'args', 'scheduler', '(False)'], {}), '(epoch, train_loader, model, optimizer, args, scheduler, False)\n', (6479, 6542), False, 'from optimization.training import train, evaluate\n'), ((6803, 6894), 'optimization.training.evaluate', 'evaluate', (['val_loader', 'model', 'model_sample', 'args'], {'epoch': 'epoch', 'file': "(snap_dir + 'log.txt')"}), "(val_loader, model, model_sample, args, epoch=epoch, file=snap_dir +\n 'log.txt')\n", (6811, 6894), False, 'from optimization.training import train, evaluate\n'), ((7246, 7264), 'math.isnan', 'math.isnan', (['v_loss'], {}), '(v_loss)\n', (7256, 7264), False, 'import math\n'), ((4945, 4985), 'compression.models.load_flowpp_imagenet64.Imagenet64Model', 'Imagenet64Model', ([], {'force_float32_cond': '(True)'}), '(force_float32_cond=True)\n', (4960, 4985), False, 'from compression.models.load_flowpp_imagenet64 import Imagenet64Model\n'), ((6603, 6614), 'time.time', 'time.time', ([], {}), '()\n', (6612, 6614), False, 'import time\n'), ((2747, 2770), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (2768, 2770), False, 'import datetime\n'), ((6682, 6693), 'time.time', 'time.time', ([], {}), '()\n', (6691, 6693), False, 
'import time\n')]
#!/usr/bin/env python3 """tests for histy.py""" import os import random import re import string from subprocess import getstatusoutput prg = './histy.py' # -------------------------------------------------- def test_usage(): """usage""" for flag in ['', '-h', '--help']: rv, out = getstatusoutput('{} {}'.format(prg, flag)) assert (rv > 0) if flag == '' else (rv == 0) assert re.match("usage", out, re.IGNORECASE) # -------------------------------------------------- def run(file, args, expected_out): """test""" in_file = os.path.join('../inputs', '{}'.format(file)) expected = open(os.path.join('test-outs', expected_out)).read().rstrip() rv, out = getstatusoutput('{} {} {}'.format(prg, args, in_file)) assert rv == 0 assert expected == out.rstrip() # -------------------------------------------------- def test_01(): """test""" run('fox.txt', '', 'fox.txt.1') # -------------------------------------------------- def test_02(): """test""" run('fox.txt', '-i', 'fox.txt.2') # -------------------------------------------------- def test_03(): """test""" run('fox.txt', "-c '!'", 'fox.txt.3') # -------------------------------------------------- def test_04(): """test""" run('sonnet-29.txt', "-m 2", 'sonnet-29.txt.1') # -------------------------------------------------- def test_05(): """test""" run('sonnet-29.txt', "-w 50 -m 2 -f -c '$'", 'sonnet-29.txt.2')
[ "os.path.join", "re.match" ]
[((413, 450), 're.match', 're.match', (['"""usage"""', 'out', 're.IGNORECASE'], {}), "('usage', out, re.IGNORECASE)\n", (421, 450), False, 'import re\n'), ((636, 675), 'os.path.join', 'os.path.join', (['"""test-outs"""', 'expected_out'], {}), "('test-outs', expected_out)\n", (648, 675), False, 'import os\n')]
#!/usr/bin/env python3 from pymoos import pymoos import time import matplotlib.pyplot as plt import numpy as np import threading fig, ax = plt.subplots(subplot_kw=dict(polar=True)) ax.set_theta_direction(-1) ax.set_theta_zero_location('N') nav_line, des_line, = ax.plot([], [], 'r', [], [], 'b') nav_line.set_label('NAV') des_line.set_label('DESIRED') ax.legend() class plotter(pymoos.comms): """plotter is a simple app that connects to MOOSDB and plots data.""" def __init__(self, moos_community, moos_port): """Initiates MOOSComms, sets the callbacks and runs the loop""" super(plotter, self).__init__() self.server = moos_community self.port = moos_port self.name = 'plotter' self.d_heading = 0 self.d_speed = 0 self.n_heading = 0 self.n_speed = 0 # getting a lock to threadsafely draw self.lock = threading.Lock() self.set_on_connect_callback(self.__on_connect) self.set_on_mail_callback(self.__on_new_mail) self.add_active_queue('nav_queue', self.on_nav) self.add_message_route_to_active_queue('nav_queue', 'NAV_HEADING') self.add_message_route_to_active_queue('nav_queue', 'NAV_SPEED') self.add_active_queue('desired_queue', self.on_desired) self.add_message_route_to_active_queue('desired_queue', 'DESIRED_HEADING') self.add_message_route_to_active_queue('desired_queue', 'DESIRED_SPEED') self.run(self.server, self.port, self.name) def __on_connect(self): """OnConnect callback""" print("Connected to", self.server, self.port, "under the name ", self.name) return (self.register("NAV_SPEED", 0) and self.register("NAV_HEADING", 0) and self.register("DESIRED_SPEED", 0) and self.register("DESIRED_HEADING", 0)) def __on_new_mail(self): """OnNewMail callback""" for msg in self.fetch(): print("Unhandled mail received:", msg.key(), "!") return True def on_nav(self, msg): """Special callback for NAV_*""" print("on_nav activated by", msg.key(), "with value", msg.double()) if msg.key() == 'NAV_HEADING': self.n_heading = msg.double() elif msg.key() == 'NAV_SPEED': self.n_speed = msg.double() 
r = np.arange(0, self.n_speed, 0.1) theta = np.deg2rad(self.n_heading) self.lock.acquire() try: nav_line.set_xdata(theta) nav_line.set_ydata(r) ax.set_rmax(5) plt.draw() finally: self.lock.release() return True def on_desired(self, msg): """Special callback for DESIRED_*""" print("on_desired activated by", msg.key(), "with value", msg.double()) if msg.key() == 'DESIRED_HEADING': self.d_heading = msg.double() elif msg.key() == 'DESIRED_SPEED': self.d_speed = msg.double() r = np.arange(0, self.d_speed, 0.1) theta = np.deg2rad(self.d_heading) self.lock.acquire() try: des_line.set_xdata(theta) des_line.set_ydata(r) ax.set_rmax(5) plt.draw() finally: self.lock.release() return True def main(): plottr = plotter('localhost', 9000) plt.show() if __name__=="__main__": main()
[ "matplotlib.pyplot.show", "numpy.deg2rad", "threading.Lock", "matplotlib.pyplot.draw", "numpy.arange" ]
[((3430, 3440), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3438, 3440), True, 'import matplotlib.pyplot as plt\n'), ((906, 922), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (920, 922), False, 'import threading\n'), ((2401, 2432), 'numpy.arange', 'np.arange', (['(0)', 'self.n_speed', '(0.1)'], {}), '(0, self.n_speed, 0.1)\n', (2410, 2432), True, 'import numpy as np\n'), ((2449, 2475), 'numpy.deg2rad', 'np.deg2rad', (['self.n_heading'], {}), '(self.n_heading)\n', (2459, 2475), True, 'import numpy as np\n'), ((3062, 3093), 'numpy.arange', 'np.arange', (['(0)', 'self.d_speed', '(0.1)'], {}), '(0, self.d_speed, 0.1)\n', (3071, 3093), True, 'import numpy as np\n'), ((3110, 3136), 'numpy.deg2rad', 'np.deg2rad', (['self.d_heading'], {}), '(self.d_heading)\n', (3120, 3136), True, 'import numpy as np\n'), ((2629, 2639), 'matplotlib.pyplot.draw', 'plt.draw', ([], {}), '()\n', (2637, 2639), True, 'import matplotlib.pyplot as plt\n'), ((3290, 3300), 'matplotlib.pyplot.draw', 'plt.draw', ([], {}), '()\n', (3298, 3300), True, 'import matplotlib.pyplot as plt\n')]
import os import configparser import logging import cx_Oracle import sqlparse import sys log = logging.getLogger() log.setLevel(logging.INFO) SCRIPTS_FOLDER_BASE = "../db/" INIT_FOLDER_PATH = SCRIPTS_FOLDER_BASE + "init/" ADDITIONS_FOLDER_PATH = SCRIPTS_FOLDER_BASE + "additions/" CONFIG_FILE_PATH = "../connection.ini" class Operator: def __init__(self): self.config_file = None self.connection = self._create_connection(self._read_db_configuration()) self.initfile, self.additionsfile = self._init_migration_table() @staticmethod def _create_connection(connection_string): if connection_string is None: log.critical("Got an error while trying to build the connection string." " Please check your values and file formatting for file \'connection.ini\"") return else: try: connection = cx_Oracle.connect(connection_string) return connection except Exception as e: log.critical("Unexpected Error: ", e) sys.exit("Connection Could not established with the database") def _read_db_configuration(self): """ :return: Database Connection string """ self.config_file = configparser.ConfigParser() self.config_file.read(CONFIG_FILE_PATH) if 'Database' in self.config_file: database_config = self.config_file['Database'] username = database_config.get('username') password = database_config.get('password') url = database_config.get('url') port = database_config.getint('port') service = database_config.get('service') if username is None or password is None or url is None or port is None or service is None: log.error( "One of the entries were not valid Please check the values and enter them in the following format: " "\n eg. \n[Database] \nurl = localhost\nport = 1521\nusername = testuser\npassword = " "sales\nservice = " "xe\n") return None else: return "{0}/{1}@{2}:{3}/{4}".format(username, password, url, port, service) else: log.error( "Database configuration section not found. Please add database configuration to \"connection.ini\' " "file. eg. 
\n[Database] \nurl = localhost\nport = 1521\nusername = testuser\npassword = " "sales\nservice = " "xe\n") return None def _init_migration_table(self): cursor = self.connection.cursor() query = "SELECT * from DATA_MIGRATION" init = 'init' additions = 'additions' try: result = cursor.execute(query).fetchall() cursor.close() self.connection.commit() except Exception as e: log.error("Exception occurred while fetching data from DATA_MIGRATION TABLE. \n {0}".format(e)) return init, additions if len(result) > 0: # Values are written in the database. Get the last file value for init and additions folder for value in result: init = value[1] additions = value[2] print("ID: {0}, LASTFILEINIT: {1}, LASTFILEADDITIONS: {2}".format(value[0], value[1], value[2])) return init, additions def start_operations(self): print("********************************************* \n*********************************************") print("Starting Init Folder") self._execute_init_scripts() print("Completed Init Folder") print("********************************************* \n*********************************************") print("Starting other scripts") print("********************************************* \n*********************************************") self._execute_added_scripts() print("Completed all scripts\nDatabase migration has been completed. 
\n" "Check the console if there were any malformed queries that were skipped.") print("********************************************* \n*********************************************") print("********************************************* \n*********************************************") def _execute_added_scripts(self): list_of_files = self._get_sorted_file_list_from_folder(ADDITIONS_FOLDER_PATH, self.additionsfile) if list_of_files is not None: print("found files") self._perform_sql_operations(ADDITIONS_FOLDER_PATH, list_of_files, 'additions') def _execute_init_scripts(self): print("Checking files in init folder") list_of_files = self._get_sorted_file_list_from_folder(INIT_FOLDER_PATH, self.initfile) if list_of_files is not None: print("found files") self._perform_sql_operations(INIT_FOLDER_PATH, list_of_files, 'init') @staticmethod def _get_sorted_file_list_from_folder(folder_path, lastfile=None): sql_file_list = os.listdir(folder_path) sql_file_list.sort(reverse=True) if lastfile is not None or lastfile != 'init' or lastfile != 'additions': trimmed_names = [] print("Last file value is not None. 
Trimming files after this file") for filename in sql_file_list: if filename == lastfile: trimmed_names.sort() return trimmed_names else: trimmed_names.append(filename) trimmed_names.sort() return trimmed_names else: sql_file_list.sort() return sql_file_list def _perform_sql_operations(self, folder_path, files, section): for file in files: if file.endswith('.sql'): read_file = open(folder_path + file, 'r') sql_file = read_file.read() read_file.close() if len(sql_file) > 0: sql_commands = sqlparse.split(sql_file) for query in sql_commands: if len(query) > 0: last_char = query[-1:] if last_char == ";": query = query[:-1] cursor = self.connection.cursor() try: cursor.execute(query) cursor.close() self.connection.commit() # print("Successfully executed query: {0}".format(query)) except Exception as e: log.warning(" Skipping the query : {0}\n Due to error: {1} \n".format(query, e)) self._write_to_config_file(section, file) print("Finished file {0}".format(file)) def _write_to_config_file(self, section_name, lastfile_name): if section_name == "init": query = f"UPDATE DATA_MIGRATION SET LASTFILEINIT = '{lastfile_name}' WHERE ID = 1" else: query = f"UPDATE DATA_MIGRATION SET LASTFILEADDITIONS = '{lastfile_name}' WHERE ID = 1" cursor = self.connection.cursor() try: cursor.execute(query) cursor.close() self.connection.commit() except Exception as e: log.critical("Error saving the last file executed to the database.\n Please check the logs and update the " "database entry manually \n Exception: {0}".format(e)) def destruct(self): if self.connection is not None: self.connection.close() if __name__ == '__main__': operator = Operator() operator.start_operations() operator.destruct()
[ "sqlparse.split", "logging.getLogger", "cx_Oracle.connect", "configparser.ConfigParser", "os.listdir", "sys.exit" ]
[((96, 115), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (113, 115), False, 'import logging\n'), ((1298, 1325), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (1323, 1325), False, 'import configparser\n'), ((5228, 5251), 'os.listdir', 'os.listdir', (['folder_path'], {}), '(folder_path)\n', (5238, 5251), False, 'import os\n'), ((923, 959), 'cx_Oracle.connect', 'cx_Oracle.connect', (['connection_string'], {}), '(connection_string)\n', (940, 959), False, 'import cx_Oracle\n'), ((1100, 1162), 'sys.exit', 'sys.exit', (['"""Connection Could not established with the database"""'], {}), "('Connection Could not established with the database')\n", (1108, 1162), False, 'import sys\n'), ((6220, 6244), 'sqlparse.split', 'sqlparse.split', (['sql_file'], {}), '(sql_file)\n', (6234, 6244), False, 'import sqlparse\n')]
import numpy as np from random import random import math class Path: def __init__(self, r): self.radius = r self.path = [] def circleDiscretization(self, qtd_poits = 40): self.path = [] angle_diff = 2 * math.pi / qtd_poits for i in range(qtd_poits): point = (self.radius * math.cos(i * angle_diff) + 85, self.radius * math.sin(i * angle_diff) + 65) self.path.append(point) return self.path def getPath(self): return self.path[1:] + [self.path[0]] def pointDistance(self, x1, y1, x2, y2): return math.sqrt( (x1-x2)**2 + (y1-y2)**2) def area(self): return math.pi * self.radius * self.radius def getSimpleError(self, path, robotPath, N=1): totalError = 0.0 for i in range(len(path) - 1): totalError += self.getPointError(path[i], path[i+1], robotPath[i])[2] totalError += self.getPointError(path[i], path[1], robotPath[i])[2] return totalError/N def getPointError(self, startPoint, endPoint, robotPos): start, end, robot= np.array(startPoint), np.array(endPoint), np.array(robotPos) middle = (start + end)/ 2.0 errorS = np.sqrt(((start - robot) ** 2).sum(0)) errorE = np.sqrt(((end - robot) ** 2).sum(0)) errorM = np.sqrt(((middle - robot) ** 2).sum(0)) return errorS, errorE, errorM def tD2I(self, tup): return (int(tup[0]), int(tup[1])) def getPerimeterError(self, startPoint, endPoint, robotPath, src=''): path = [startPoint] + robotPath + [endPoint] pathNp = np.array(path) totalError = 0.0 for i in range(len (pathNp) - 1): totalError += np.sqrt(((pathNp[i] - pathNp[i+1]) ** 2).sum(0)) return totalError # if __name__ == '__main__': # height, width = 480, 640 # radius = 150 # angleShift = math.radians(20) # src = np.zeros(shape=(height, width, 3),dtype=np.uint8) # center = (width//2, height//2) # cv2.circle(src, center, radius, (0,0,255),1) # path = [] # robotPath = [] # for i in np.arange(0, angleShift * 2, angleShift): # x = (width/2 + math.cos(i) * radius) # y = (height/2 + math.sin(i) * radius) # cv2.circle(src, (int(x), int(y)), 1, (255, 0, 0), 1) # xn = (width/2 + math.cos(i + 
angleShift/4.0) * radius) # yn = (height/2 + math.sin(i + angleShift/4.0) * radius) # xr = xn + (random() - 0.5) * radius/2.5 # yr = yn + (random() - 0.5) * radius/2.5 # robotPath.append((xr, yr)) # cv2.circle(src, (int(xr), int(yr)), 1, (0, 255, 0), 1) # xn = (width/2 + math.cos(i + angleShift/2.0) * radius) # yn = (height/2 + math.sin(i + angleShift/2.0) * radius) # xr = xn + (random() - 0.5) * radius/2.5 # yr = yn + (random() - 0.5) * radius/2.5 # robotPath.append((xr, yr)) # cv2.circle(src, (int(xr), int(yr)), 1, (0, 255, 0), 1) # path.append((x,y)) # ptError = PathError() # print (ptError.getPerimeterError(path[0], path[1], robotPath, src)) # cv2.namedWindow("ErrorWin", cv2.WINDOW_NORMAL) # cv2.imshow("ErrorWin",src) # cv2.waitKey(0) # cv2.destroyAllWindows()
[ "math.sin", "numpy.array", "math.cos", "math.sqrt" ]
[((614, 656), 'math.sqrt', 'math.sqrt', (['((x1 - x2) ** 2 + (y1 - y2) ** 2)'], {}), '((x1 - x2) ** 2 + (y1 - y2) ** 2)\n', (623, 656), False, 'import math\n'), ((1655, 1669), 'numpy.array', 'np.array', (['path'], {}), '(path)\n', (1663, 1669), True, 'import numpy as np\n'), ((1132, 1152), 'numpy.array', 'np.array', (['startPoint'], {}), '(startPoint)\n', (1140, 1152), True, 'import numpy as np\n'), ((1154, 1172), 'numpy.array', 'np.array', (['endPoint'], {}), '(endPoint)\n', (1162, 1172), True, 'import numpy as np\n'), ((1174, 1192), 'numpy.array', 'np.array', (['robotPos'], {}), '(robotPos)\n', (1182, 1192), True, 'import numpy as np\n'), ((338, 362), 'math.cos', 'math.cos', (['(i * angle_diff)'], {}), '(i * angle_diff)\n', (346, 362), False, 'import math\n'), ((383, 407), 'math.sin', 'math.sin', (['(i * angle_diff)'], {}), '(i * angle_diff)\n', (391, 407), False, 'import math\n')]
#! /usr/bin/env python import random import numpy as np class Environment: def __init__(self, size=[3,4], start=(0,0), end=(2,3), block=[(1,1)], false_end=(1,3)): self.size = size self.state = np.zeros(self.size) self.action_space = self.generate_action_space() self.state_space = self.generate_state_space() self.agent_position = start def generate_action_space(self): a_keys = ['<KEY>'] a_values = [(1, 0),(-1, 0),(0, -1),(0, 1)] return {a_keys[i]:a_values[i] for i in range(len(a_keys))} def generate_state_space(self): state = {} for i in range(self.size[0]): for j in range(self.size[1]): state[(i,j)] = list(self.action_space) return state def generate_default_probability(self): p_keys = self.state_space.keys() p_values = [0.25, 0.25, 0.25, 0.25] return {p_keys[i]:p_values[i] for i in range(len(p_keys))} def get_state_action_hash(self, curr_pos): hash_vals = [] for action in self.state_space[curr_pos]: str_val = str(str(curr_pos)+'-'+action) hash_vals.append(str_val) return hash_vals if __name__ == "__main__": env = Environment()
[ "numpy.zeros" ]
[((215, 234), 'numpy.zeros', 'np.zeros', (['self.size'], {}), '(self.size)\n', (223, 234), True, 'import numpy as np\n')]
import torch from torch.optim.optimizer import Optimizer, required from torch import optim, nn import torch.optim._functional as F from agc_optims.utils import agc class RMSprop_AGC(Optimizer): r"""Implements RMSprop algorithm with adaptive gradient clipping (AGC). .. math:: \begin{aligned} &\rule{110mm}{0.4pt} \\ &\textbf{input} : \alpha \text{ (alpha)},\: \gamma \text{ (lr)}, \: \theta_0 \text{ (params)}, \: f(\theta) \text{ (objective)} \\ &\hspace{13mm} \lambda \text{ (weight decay)},\: \mu \text{ (momentum)},\: centered\\ &\textbf{initialize} : v_0 \leftarrow 0 \text{ (square average)}, \: \textbf{b}_0 \leftarrow 0 \text{ (buffer)}, \: g^{ave}_0 \leftarrow 0 \\[-1.ex] &\rule{110mm}{0.4pt} \\ &\textbf{for} \: t=1 \: \textbf{to} \: \ldots \: \textbf{do} \\ &\hspace{5mm}g_t \leftarrow \nabla_{\theta} f_t (\theta_{t-1}) \\ &\hspace{5mm}if \: \lambda \neq 0 \\ &\hspace{10mm} g_t \leftarrow g_t + \lambda \theta_{t-1} \\ &\hspace{5mm}v_t \leftarrow \alpha v_{t-1} + (1 - \alpha) g^2_t \hspace{8mm} \\ &\hspace{5mm} \tilde{v_t} \leftarrow v_t \\ &\hspace{5mm}if \: centered \\ &\hspace{10mm} g^{ave}_t \leftarrow g^{ave}_{t-1} \alpha + (1-\alpha) g_t \\ &\hspace{10mm} \tilde{v_t} \leftarrow \tilde{v_t} - \big(g^{ave}_{t} \big)^2 \\ &\hspace{5mm}if \: \mu > 0 \\ &\hspace{10mm} \textbf{b}_t\leftarrow \mu \textbf{b}_{t-1} + g_t/ \big(\sqrt{\tilde{v_t}} + \epsilon \big) \\ &\hspace{10mm} \theta_t \leftarrow \theta_{t-1} - \gamma \textbf{b}_t \\ &\hspace{5mm} else \\ &\hspace{10mm}\theta_t \leftarrow \theta_{t-1} - \gamma g_t/ \big(\sqrt{\tilde{v_t}} + \epsilon \big) \hspace{3mm} \\ &\rule{110mm}{0.4pt} \\[-1.ex] &\bf{return} \: \theta_t \\[-1.ex] &\rule{110mm}{0.4pt} \\[-1.ex] \end{aligned} For further details regarding the algorithm we refer to `lecture notes <https://www.cs.toronto.edu/~tijmen/csc321/slides/lecture_slides_lec6.pdf>`_ by <NAME>. and centered version `Generating Sequences With Recurrent Neural Networks <https://arxiv.org/pdf/1308.0850v5.pdf>`_. 
The implementation here takes the square root of the gradient average before adding epsilon (note that TensorFlow interchanges these two operations). The effective learning rate is thus :math:`\gamma/(\sqrt{v} + \epsilon)` where :math:`\gamma` is the scheduled learning rate and :math:`v` is the weighted moving average of the squared gradient. This implementation of RMSprop was taken from the official PyTorch Sources and the code for the AGC was adapted from https://github.com/vballoli/nfnets-pytorch. Args: params (iterable): iterable of parameters to optimize or dicts defining parameter groups lr (float, optional): learning rate (default: 1e-2) momentum (float, optional): momentum factor (default: 0) alpha (float, optional): smoothing constant (default: 0.99) eps (float, optional): term added to the denominator to improve numerical stability (default: 1e-8) centered (bool, optional) : if ``True``, compute the centered RMSProp, the gradient is normalized by an estimation of its variance weight_decay (float, optional): weight decay (L2 penalty) (default: 0) clipping (float, optional): clipping value for the AGC (default: 1e-2) agc_eps (float, optional): term used in agc to prevent grads clipped to zero (default: 1e-3) """ def __init__(self, params, lr=1e-2, alpha=0.99, eps=1e-8, weight_decay=0, momentum=0, centered=False, clipping=1e-2, agc_eps=1e-3): if not 0.0 <= lr: raise ValueError("Invalid learning rate: {}".format(lr)) if not 0.0 <= eps: raise ValueError("Invalid epsilon value: {}".format(eps)) if not 0.0 <= momentum: raise ValueError("Invalid momentum value: {}".format(momentum)) if not 0.0 <= weight_decay: raise ValueError("Invalid weight_decay value: {}".format(weight_decay)) if not 0.0 <= alpha: raise ValueError("Invalid alpha value: {}".format(alpha)) if not 0.0 <= clipping < 1.0: raise ValueError("Invalid clipping parameter: {}".format(clipping)) if not 0.0 <= agc_eps: raise ValueError("Invalid agc_eps value: {}".format(agc_eps)) defaults = dict(lr=lr, 
momentum=momentum, alpha=alpha, eps=eps, centered=centered, weight_decay=weight_decay, clipping=clipping, agc_eps=agc_eps) super(RMSprop_AGC, self).__init__(params, defaults) def __setstate__(self, state): super(RMSprop_AGC, self).__setstate__(state) for group in self.param_groups: group.setdefault('momentum', 0) group.setdefault('centered', False) @torch.no_grad() def step(self, closure=None): """Performs a single optimization step. Args: closure (callable, optional): A closure that reevaluates the model and returns the loss. """ loss = None if closure is not None: with torch.enable_grad(): loss = closure() for group in self.param_groups: params_with_grad = [] grads = [] square_avgs = [] grad_avgs = [] momentum_buffer_list = [] clipping = group['clipping'] agc_eps = group['agc_eps'] for p in group['params']: if p.grad is None: continue ## AGC agc(param=p, clipping=clipping, eps=agc_eps) params_with_grad.append(p) if p.grad.is_sparse: raise RuntimeError('RMSprop does not support sparse gradients') grads.append(p.grad) state = self.state[p] # State initialization if len(state) == 0: state['step'] = 0 state['square_avg'] = torch.zeros_like(p, memory_format=torch.preserve_format) if group['momentum'] > 0: state['momentum_buffer'] = torch.zeros_like(p, memory_format=torch.preserve_format) if group['centered']: state['grad_avg'] = torch.zeros_like(p, memory_format=torch.preserve_format) square_avgs.append(state['square_avg']) if group['momentum'] > 0: momentum_buffer_list.append(state['momentum_buffer']) if group['centered']: grad_avgs.append(state['grad_avg']) state['step'] += 1 F.rmsprop(params_with_grad, grads, square_avgs, grad_avgs, momentum_buffer_list, lr=group['lr'], alpha=group['alpha'], eps=group['eps'], weight_decay=group['weight_decay'], momentum=group['momentum'], centered=group['centered']) return loss
[ "torch.zeros_like", "torch.optim._functional.rmsprop", "torch.enable_grad", "torch.no_grad", "agc_optims.utils.agc" ]
[((5926, 5941), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (5939, 5941), False, 'import torch\n'), ((7820, 8061), 'torch.optim._functional.rmsprop', 'F.rmsprop', (['params_with_grad', 'grads', 'square_avgs', 'grad_avgs', 'momentum_buffer_list'], {'lr': "group['lr']", 'alpha': "group['alpha']", 'eps': "group['eps']", 'weight_decay': "group['weight_decay']", 'momentum': "group['momentum']", 'centered': "group['centered']"}), "(params_with_grad, grads, square_avgs, grad_avgs,\n momentum_buffer_list, lr=group['lr'], alpha=group['alpha'], eps=group[\n 'eps'], weight_decay=group['weight_decay'], momentum=group['momentum'],\n centered=group['centered'])\n", (7829, 8061), True, 'import torch.optim._functional as F\n'), ((6236, 6255), 'torch.enable_grad', 'torch.enable_grad', ([], {}), '()\n', (6253, 6255), False, 'import torch\n'), ((6705, 6749), 'agc_optims.utils.agc', 'agc', ([], {'param': 'p', 'clipping': 'clipping', 'eps': 'agc_eps'}), '(param=p, clipping=clipping, eps=agc_eps)\n', (6708, 6749), False, 'from agc_optims.utils import agc\n'), ((7148, 7204), 'torch.zeros_like', 'torch.zeros_like', (['p'], {'memory_format': 'torch.preserve_format'}), '(p, memory_format=torch.preserve_format)\n', (7164, 7204), False, 'import torch\n'), ((7302, 7358), 'torch.zeros_like', 'torch.zeros_like', (['p'], {'memory_format': 'torch.preserve_format'}), '(p, memory_format=torch.preserve_format)\n', (7318, 7358), False, 'import torch\n'), ((7445, 7501), 'torch.zeros_like', 'torch.zeros_like', (['p'], {'memory_format': 'torch.preserve_format'}), '(p, memory_format=torch.preserve_format)\n', (7461, 7501), False, 'import torch\n')]
import sys sys.path.insert(0,'../input/shopee-competition-utils') from config import CFG from run_test import run_bert_test # choose which cuda to load model on CFG.DEVICE = 'cuda:0' CFG.BATCH_SIZE = 16 # choose which model with what hyperparameters to use CFG.BERT_MODEL_NAME = CFG.BERT_MODEL_NAMES[3] CFG.MARGIN = CFG.BERT_MARGINS[3] CFG.MODEL_PATH_BERT = f"{CFG.BERT_MODEL_NAME.rsplit('/', 1)[-1]}_epoch8-bs16x1_margin_{CFG.MARGIN}.pt" # start inference run_bert_test()
[ "sys.path.insert", "config.CFG.BERT_MODEL_NAME.rsplit", "run_test.run_bert_test" ]
[((11, 66), 'sys.path.insert', 'sys.path.insert', (['(0)', '"""../input/shopee-competition-utils"""'], {}), "(0, '../input/shopee-competition-utils')\n", (26, 66), False, 'import sys\n'), ((461, 476), 'run_test.run_bert_test', 'run_bert_test', ([], {}), '()\n', (474, 476), False, 'from run_test import run_bert_test\n'), ((364, 398), 'config.CFG.BERT_MODEL_NAME.rsplit', 'CFG.BERT_MODEL_NAME.rsplit', (['"""/"""', '(1)'], {}), "('/', 1)\n", (390, 398), False, 'from config import CFG\n')]
#!/usr/bin/env python3 from __future__ import print_function from __future__ import absolute_import import os, click import subprocess from taw.util import * from taw.taw import * # commands/subcommands # ============== # SSH COMMAND # ============== @taw.command("ssh") @click.argument('hostname', metavar='<host name>') @click.argument('sshargs', nargs=-1) @pass_global_parameters def ssh_cmd(params, hostname, sshargs): """ do SSH to a specified host """ ssh_like_call(params, 'ssh', hostname, sshargs) # ============== # MOSH COMMAND # ============== @taw.command("mosh") @click.argument('hostname', metavar='<host name>') @click.argument('moshargs', nargs=-1) @pass_global_parameters def mosh_cmd(params, hostname, moshargs): """ do MOSH to a specified host """ ssh_like_call(params, 'mosh', hostname, moshargs) # ============== # RSSH COMMAND # ============== @taw.command("rssh") @click.argument('hostname', metavar='<host name>') @click.argument('rsshargs', nargs=-1) @pass_global_parameters def rssh_cmd(params, hostname, rsshargs): """ do rSSH to a specified host """ ssh_like_call(params, 'rssh', hostname, rsshargs) # ============== # RSYNC COMMAND # ============== @taw.command("rsync") @click.argument('hostname', metavar='<host name>') @click.argument('rsshargs', nargs=-1) @pass_global_parameters def rsync_cmd(params, hostname, rsshargs): """ do rsync to a specified host """ ssh_like_call(params, 'rsync', hostname, rsshargs) # ============== # SCP COMMAND # ============== @taw.command("scp") @click.argument('src', nargs=-1) @click.argument('dst', nargs=1) @click.option('-i', 'key_file_path', help='SSH key file') @click.option('-p', 'preserve_flag', is_flag=True, help='preserve attrs') @click.option('-B', 'batch_flag', is_flag=True, help='batch mode') @click.option('-C', 'compression_flag', is_flag=True, help='enable compression') @click.option('-c', 'cypher', help='cypher type') @click.option('-l', 'limit_bandwidth', help='bandwidth limit in Kb/s') @click.option('-P', 
'port', default=None, type=int, help='port number') @click.option('-r', 'recursive_flag', is_flag=True, help='recursive copy') @click.option('-q', 'quiet_flag', is_flag=True, help='quiet mode') # TODO: support -v/-vv/-vvv, -o, -F (, -1, -2, -3, -4, -6 at lower priority) @pass_global_parameters def scp_cmd(params, src, dst, key_file_path, preserve_flag, batch_flag, compression_flag, cypher, limit_bandwidth, port, recursive_flag, quiet_flag): """ do scp to/from a specified host """ args = ['scp'] if preserve_flag: args.append('-p') if batch_flag: args.append('-B') if compression_flag: args.append('-C') if cypher: args += ['-c', cypher] if limit_bandwidth: args += ['-l', limit_bandwidth] if port: args += ['-P', port] if recursive_flag: args.append('-r') if quiet_flag: args.append('-q') (dest_user, dest_host, dest_path) = decompose_rpath(dst) copying_local_to_remote = dest_host is not None if copying_local_to_remote: instance = convert_host_name_to_instance(dest_host) if instance.public_ip_address is None: error_exit("The instance has no public IP address") dest_host = instance.public_ip_address if dest_user == '_': dest_user = os.environ['USER'] if dest_user is None: dest_user = get_root_like_user_from_instance(instance) if key_file_path is None: key_file_path = os.path.join(os.path.expanduser("~/.ssh"), instance.key_name + ".pem") if os.path.exists(key_file_path): args += ['-i', key_file_path] else: print_info("Key file '%s' does not exist.\nThe default keys might be used" % key_file_path) args += list(src) + ["%s@%s:%s" % (dest_user, dest_host, dest_path)] else: # copying remote to local sources_arr = [decompose_rpath(i) for i in src] for host in sources_arr[1:]: if host[1] != sources_arr[0][1]: error_exit("Multiple source hosts are not supported.") if host[0] != sources_arr[0][0]: error_exit("Multiple source users are not supported.") instance = convert_host_name_to_instance(sources_arr[0][1]) if instance.public_ip_address is None: error_exit("The instance has no 
public IP address") src_host = instance.public_ip_address src_user = sources_arr[0][0] if src_user == '_': src_user = os.environ['USER'] if src_user is None: src_user = get_root_like_user_from_instance(instance) if key_file_path is None: key_file_path = os.path.join(os.path.expanduser("~/.ssh"), instance.key_name + ".pem") if os.path.exists(key_file_path): args += ['-i', key_file_path] else: print_info("Key file '%s' does not exist.\nThe default keys might be used" % key_file_path) args += ["%s@%s:%s" % (src_user, src_host, x[2]) for x in sources_arr] args.append(dst) if params.aws_dryrun: print(" ".join(args)) return try: subprocess.check_call(args) except: pass
[ "click.argument", "click.option", "os.path.exists", "os.path.expanduser", "subprocess.check_call" ]
[((277, 326), 'click.argument', 'click.argument', (['"""hostname"""'], {'metavar': '"""<host name>"""'}), "('hostname', metavar='<host name>')\n", (291, 326), False, 'import os, click\n'), ((328, 363), 'click.argument', 'click.argument', (['"""sshargs"""'], {'nargs': '(-1)'}), "('sshargs', nargs=-1)\n", (342, 363), False, 'import os, click\n'), ((593, 642), 'click.argument', 'click.argument', (['"""hostname"""'], {'metavar': '"""<host name>"""'}), "('hostname', metavar='<host name>')\n", (607, 642), False, 'import os, click\n'), ((644, 680), 'click.argument', 'click.argument', (['"""moshargs"""'], {'nargs': '(-1)'}), "('moshargs', nargs=-1)\n", (658, 680), False, 'import os, click\n'), ((915, 964), 'click.argument', 'click.argument', (['"""hostname"""'], {'metavar': '"""<host name>"""'}), "('hostname', metavar='<host name>')\n", (929, 964), False, 'import os, click\n'), ((966, 1002), 'click.argument', 'click.argument', (['"""rsshargs"""'], {'nargs': '(-1)'}), "('rsshargs', nargs=-1)\n", (980, 1002), False, 'import os, click\n'), ((1239, 1288), 'click.argument', 'click.argument', (['"""hostname"""'], {'metavar': '"""<host name>"""'}), "('hostname', metavar='<host name>')\n", (1253, 1288), False, 'import os, click\n'), ((1290, 1326), 'click.argument', 'click.argument', (['"""rsshargs"""'], {'nargs': '(-1)'}), "('rsshargs', nargs=-1)\n", (1304, 1326), False, 'import os, click\n'), ((1562, 1593), 'click.argument', 'click.argument', (['"""src"""'], {'nargs': '(-1)'}), "('src', nargs=-1)\n", (1576, 1593), False, 'import os, click\n'), ((1595, 1625), 'click.argument', 'click.argument', (['"""dst"""'], {'nargs': '(1)'}), "('dst', nargs=1)\n", (1609, 1625), False, 'import os, click\n'), ((1627, 1683), 'click.option', 'click.option', (['"""-i"""', '"""key_file_path"""'], {'help': '"""SSH key file"""'}), "('-i', 'key_file_path', help='SSH key file')\n", (1639, 1683), False, 'import os, click\n'), ((1685, 1757), 'click.option', 'click.option', (['"""-p"""', 
'"""preserve_flag"""'], {'is_flag': '(True)', 'help': '"""preserve attrs"""'}), "('-p', 'preserve_flag', is_flag=True, help='preserve attrs')\n", (1697, 1757), False, 'import os, click\n'), ((1759, 1824), 'click.option', 'click.option', (['"""-B"""', '"""batch_flag"""'], {'is_flag': '(True)', 'help': '"""batch mode"""'}), "('-B', 'batch_flag', is_flag=True, help='batch mode')\n", (1771, 1824), False, 'import os, click\n'), ((1826, 1905), 'click.option', 'click.option', (['"""-C"""', '"""compression_flag"""'], {'is_flag': '(True)', 'help': '"""enable compression"""'}), "('-C', 'compression_flag', is_flag=True, help='enable compression')\n", (1838, 1905), False, 'import os, click\n'), ((1907, 1955), 'click.option', 'click.option', (['"""-c"""', '"""cypher"""'], {'help': '"""cypher type"""'}), "('-c', 'cypher', help='cypher type')\n", (1919, 1955), False, 'import os, click\n'), ((1957, 2026), 'click.option', 'click.option', (['"""-l"""', '"""limit_bandwidth"""'], {'help': '"""bandwidth limit in Kb/s"""'}), "('-l', 'limit_bandwidth', help='bandwidth limit in Kb/s')\n", (1969, 2026), False, 'import os, click\n'), ((2028, 2098), 'click.option', 'click.option', (['"""-P"""', '"""port"""'], {'default': 'None', 'type': 'int', 'help': '"""port number"""'}), "('-P', 'port', default=None, type=int, help='port number')\n", (2040, 2098), False, 'import os, click\n'), ((2100, 2173), 'click.option', 'click.option', (['"""-r"""', '"""recursive_flag"""'], {'is_flag': '(True)', 'help': '"""recursive copy"""'}), "('-r', 'recursive_flag', is_flag=True, help='recursive copy')\n", (2112, 2173), False, 'import os, click\n'), ((2175, 2240), 'click.option', 'click.option', (['"""-q"""', '"""quiet_flag"""'], {'is_flag': '(True)', 'help': '"""quiet mode"""'}), "('-q', 'quiet_flag', is_flag=True, help='quiet mode')\n", (2187, 2240), False, 'import os, click\n'), ((3509, 3538), 'os.path.exists', 'os.path.exists', (['key_file_path'], {}), '(key_file_path)\n', (3523, 3538), False, 'import os, 
click\n'), ((4637, 4666), 'os.path.exists', 'os.path.exists', (['key_file_path'], {}), '(key_file_path)\n', (4651, 4666), False, 'import os, click\n'), ((5020, 5047), 'subprocess.check_call', 'subprocess.check_call', (['args'], {}), '(args)\n', (5041, 5047), False, 'import subprocess\n'), ((3440, 3468), 'os.path.expanduser', 'os.path.expanduser', (['"""~/.ssh"""'], {}), "('~/.ssh')\n", (3458, 3468), False, 'import os, click\n'), ((4568, 4596), 'os.path.expanduser', 'os.path.expanduser', (['"""~/.ssh"""'], {}), "('~/.ssh')\n", (4586, 4596), False, 'import os, click\n')]
# -*- coding: utf-8 -*- from model.group import Group import random import re def test_delete_some_group(app, db, check_ui): if len(db.get_group_list()) == 0: app.group.create(Group(name = "test")) old_groups = db.get_group_list() group = random.choice(old_groups) app.group.delete_group_by_id(group.id) new_groups = db.get_group_list() old_groups.remove(group) assert sorted(old_groups, key = Group.id_or_max) == sorted(new_groups, key = Group.id_or_max) # для отключаемой проверки, где сраниваем БД инфу с UI, надо преобразовать список, взятый из БД, - взять только id и name if check_ui: new_groups_ui = [] for i in new_groups: new_groups_ui.append(Group(id=i.id, name=re.sub(" ", " ", i.name.strip()))) assert sorted(new_groups_ui, key = Group.id_or_max) == sorted(app.group.get_group_list(), key = Group.id_or_max)
[ "random.choice", "model.group.Group" ]
[((260, 285), 'random.choice', 'random.choice', (['old_groups'], {}), '(old_groups)\n', (273, 285), False, 'import random\n'), ((189, 207), 'model.group.Group', 'Group', ([], {'name': '"""test"""'}), "(name='test')\n", (194, 207), False, 'from model.group import Group\n')]