Dataset schema (column: type, observed range):

  hexsha: stringlengths 40 .. 40
  size: int64, 2 .. 1.02M
  ext: stringclasses, 10 values
  lang: stringclasses, 1 value
  max_stars_repo_path: stringlengths 4 .. 245
  max_stars_repo_name: stringlengths 6 .. 130
  max_stars_repo_head_hexsha: stringlengths 40 .. 40
  max_stars_repo_licenses: listlengths 1 .. 10
  max_stars_count: int64, 1 .. 191k
  max_stars_repo_stars_event_min_datetime: stringlengths 24 .. 24
  max_stars_repo_stars_event_max_datetime: stringlengths 24 .. 24
  max_issues_repo_path: stringlengths 4 .. 245
  max_issues_repo_name: stringlengths 6 .. 130
  max_issues_repo_head_hexsha: stringlengths 40 .. 40
  max_issues_repo_licenses: listlengths 1 .. 10
  max_issues_count: int64, 1 .. 67k
  max_issues_repo_issues_event_min_datetime: stringlengths 24 .. 24
  max_issues_repo_issues_event_max_datetime: stringlengths 24 .. 24
  max_forks_repo_path: stringlengths 4 .. 245
  max_forks_repo_name: stringlengths 6 .. 130
  max_forks_repo_head_hexsha: stringlengths 40 .. 40
  max_forks_repo_licenses: listlengths 1 .. 10
  max_forks_count: int64, 1 .. 105k
  max_forks_repo_forks_event_min_datetime: stringlengths 24 .. 24
  max_forks_repo_forks_event_max_datetime: stringlengths 24 .. 24
  content: stringlengths 2 .. 1.02M
  avg_line_length: float64, 1 .. 417k
  max_line_length: int64, 1 .. 987k
  alphanum_fraction: float64, 0 .. 1
  content_no_comment: stringlengths 0 .. 1.01M
  is_comment_constant_removed: bool, 1 class
  is_sharp_comment_removed: bool, 1 class
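The three records that follow conform to this schema. As a rough illustration only, here is a minimal sketch of how rows with these columns might be inspected with the Hugging Face datasets library; the dataset identifier "org/source-code-corpus" is a hypothetical placeholder, and the recomputation of avg_line_length and alphanum_fraction shows just one plausible reading of those column names, not the definitions actually used to build the dataset.

# Minimal sketch; the dataset id is a hypothetical placeholder and the
# derived-statistic formulas are assumptions, not the dataset's own.
from datasets import load_dataset

ds = load_dataset("org/source-code-corpus", split="train", streaming=True)

for row in ds.take(3):
    content = row["content"]
    lines = content.splitlines() or [""]
    # Assumed definitions of the derived columns; the real ones may differ.
    avg_line_length = len(content) / len(lines)
    alphanum_fraction = sum(ch.isalnum() for ch in content) / max(len(content), 1)
    print(row["max_stars_repo_name"], row["max_stars_repo_path"])
    print("  size:", row["size"], "ext:", row["ext"], "lang:", row["lang"])
    print("  avg_line_length ~", round(avg_line_length, 2),
          "(dataset value:", row["avg_line_length"], ")")
    print("  alphanum_fraction ~", round(alphanum_fraction, 3),
          "(dataset value:", row["alphanum_fraction"], ")")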
Row 1:
  hexsha: f711a541d5f2a2472a7bd1c4c91ceb34f392f93b
  size: 8,449
  ext: py
  lang: Python
  max_stars_repo_path: extra-credit/Testing Room Locking System in Hotels/incorrect_impl_testlock_can_unlock_with_partially_matching_keycard.py
  max_stars_repo_name: rvprasad/software-testing-course
  max_stars_repo_head_hexsha: 3803851dcf9f7bbd0f0b89fca6c9c5e3a48f22e0
  max_stars_repo_licenses: [ "CC-BY-4.0" ]
  max_stars_count: 11
  max_stars_repo_stars_event_min_datetime: 2018-02-08T05:23:28.000Z
  max_stars_repo_stars_event_max_datetime: 2021-05-24T13:23:56.000Z
  max_issues_repo_path: extra-credit/Testing Room Locking System in Hotels/incorrect_impl_testlock_can_unlock_with_partially_matching_keycard.py
  max_issues_repo_name: rvprasad/software-testing-course
  max_issues_repo_head_hexsha: 3803851dcf9f7bbd0f0b89fca6c9c5e3a48f22e0
  max_issues_repo_licenses: [ "CC-BY-4.0" ]
  max_issues_count: null
  max_issues_repo_issues_event_min_datetime: null
  max_issues_repo_issues_event_max_datetime: null
  max_forks_repo_path: extra-credit/Testing Room Locking System in Hotels/incorrect_impl_testlock_can_unlock_with_partially_matching_keycard.py
  max_forks_repo_name: rvprasad/software-testing-course
  max_forks_repo_head_hexsha: 3803851dcf9f7bbd0f0b89fca6c9c5e3a48f22e0
  max_forks_repo_licenses: [ "CC-BY-4.0" ]
  max_forks_count: 2
  max_forks_repo_forks_event_min_datetime: 2020-09-15T08:51:22.000Z
  max_forks_repo_forks_event_max_datetime: 2021-01-26T12:07:18.000Z
  content:

import re


class Key(object):
    "Key used in keycards and locks"
    pass


class KeyCard(object):
    "Keycard used to open a lock"

    def __init__(self, first_key, second_key):
        """
        Constructs a KeyCard with the given keys

        Args:
            first_key: in the keycard to be created
            second_key: in the keycard to be created

        Raises:
            ValueError if any of the keys are not of type Key
        """
        if not isinstance(first_key, Key):
            raise ValueError("First key is not of Key type")
        if not isinstance(second_key, Key):
            raise ValueError("Second key is not of Key type")
        self._keys = (first_key, second_key)

    @property
    def first_key(self):
        "Provides the first key of this keycard"
        return self._keys[0]

    @property
    def second_key(self):
        "Provides the second key of this keycard"
        return self._keys[1]


class Lock(object):
    "Lock on a room door"

    def __init__(self, first_key, second_key):
        """
        Constructs a Lock with the given keys

        Args:
            first_key: in the lock to be created
            second_key: in the lock to be created

        Raises:
            ValueError if any of the keys are not of type Key
        """
        if not isinstance(first_key, Key):
            raise ValueError("First key is not of Key type")
        if not isinstance(second_key, Key):
            raise ValueError("Second key is not of Key type")
        self._keys = (first_key, second_key)

    def can_be_unlocked(self, keycard):
        """
        Checks if this lock can be unlocked with the given keycard

        Return:
            True if the lock can be unlocked; False otherwise

        Raises:
            ValueError if keycard is not of KeyCard Type
        """
        if not isinstance(keycard, KeyCard):
            raise ValueError("keycard is not of KeyCard type")
        return self._keys[0] == keycard.first_key and \
            self._keys[1] == keycard.second_key


class Room(object):
    "Room in a hotel"

    def __init__(self, room_number, lock):
        """
        Constructs a Room with given number and lock

        Args:
            room_number: of this room. This has be to greater than 0.
            lock: of this room.

        Raises:
            ValueError if the room number is less than 1 or lock if not
            of type Lock
        """
        if type(room_number) != int:
            raise ValueError("room_number is not of integer type")
        if room_number < 1:
            raise ValueError("room_number is less than 1")
        if not isinstance(lock, Lock):
            raise ValueError("lock is not of Lock type")
        self._number = room_number
        self._lock = lock

    @property
    def last_key(self):
        return self._last_key

    @last_key.setter
    def last_key(self, key):
        self._last_key = key

    @property
    def keys(self):
        k = self.last_key
        self.last_key = Key()
        return (k, self.last_key)

    @property
    def room_number(self):
        "Provides the number of this room"
        return self._number

    @property
    def lock(self):
        "Provides the lock for this room"
        return self._lock


class Guest(object):
    "Guest at a hotel"

    def __init__(self, name, room_number, keycard):
        """
        Constructs a Guest in given room number and with given keycard

        Args:
            name: of the guest. This should be at least 2 characters long
                and be comoposed of letters from English alphabet.
            room_number: of room allocated to the guest
            keycard: provided to this guest to unlock the allocated room

        Raises:
            ValueError if name is ill-formed or room number is less than 1
        """
        if type(room_number) != int:
            raise ValueError("room_number is not of integer type")
        if room_number < 1:
            raise ValueError("room_number is less than 1")
        if not isinstance(name, str):
            raise ValueError("name is not of string type")
        if len(name) < 2:
            raise ValueError("name is less than 2 characters long")
        if re.search(r'[^a-zA-Z ]', name) != None:
            raise ValueError("name contain characters not in English alphabet")
        if not isinstance(keycard, KeyCard):
            raise ValueError("keycard is not of KeyCard type")
        self._guest_name = name
        self._room_number = room_number
        self._keycard = keycard

    @property
    def guest_name(self):
        "Provides the name of this guest"
        return self._guest_name

    @property
    def keycard(self):
        "Provides the keycard of this guest"
        return self._keycard

    @property
    def room_number(self):
        "Provides the number of the room occupied by this guest"
        return self._room_number

    def is_checkedin(self, hotel):
        """
        Checks if this guest is checked into this hotel

        Returns:
            True if this guest is checked in at the given hotel;
            False otherwise

        Raises:
            ValueError if hotel is not of Hotel type
        """
        if not isinstance(hotel, Hotel):
            raise ValueError("hotel is not of Hotel type")
        return hotel.is_checkedin(self._guest_name)


class FullCapacityError(RuntimeError):
    pass


class Hotel(object):
    "Hotel"

    def __init__(self, N):
        "Constructs a Hotel with N rooms"
        if type(N) != int:
            raise ValueError("N is not of int type")
        if N < 10 or N > 1000:
            raise ValueError("N is not between 10 and 1000, both inclusive")
        self._name2guest = {}
        self._name2room = {}
        self._capacity = N
        self._empty_rooms = []
        for i in range(1, N + 1):
            k = Key()
            r = Room(i, Lock(k, k))
            r.last_key = k
            self._empty_rooms.append(r)

    def checkin(self, guest_name):
        """
        Checks the guest into the hotel by allocating a room

        Return:
            the corresponding Guest

        Raises:
            ValueError if guest name is not of str type or is already
            checked in at this hotel
        """
        if not isinstance(guest_name, str):
            raise ValueError("guest name is not of string type")
        if guest_name in self._name2guest:
            raise ValueError(
                "guest named {0} is already checked in".format(guest_name))
        if len(self._name2guest) >= self._capacity:
            raise FullCapacityError()
        room = self._empty_rooms.pop()
        last_key, new_key = room.keys
        guest = Guest(guest_name, room.room_number,
                      KeyCard(last_key, new_key))
        self._name2guest[guest_name] = guest
        self._name2room[guest_name] = room
        return guest

    def is_checkedin(self, guest_name):
        """
        Checks if the guest is a guest at this Hotel

        Return:
            True if the guest is checked in at this Hotel; False otherwise

        Raises:
            ValueError if guest name is not of str type
        """
        if not isinstance(guest_name, str):
            raise ValueError("guest name is not of string type")
        return guest_name in self._name2guest

    def checkout(self, guest_name):
        """
        Checks out the guest from the hotel

        Raises:
            ValueError if guest name is not of str type
        """
        if not isinstance(guest_name, str):
            raise ValueError("guest name is not of string type")
        if guest_name in self._name2guest:
            del self._name2guest[guest_name]
            room = self._name2room.pop(guest_name)
            self._empty_rooms.append(room)

    def room_of(self, guest_name):
        """
        Provides the room for the guest

        Return:
            the corresponding Room

        Raises:
            ValueError if named guest is not a string or is not checked in
            at this hotel
        """
        if not isinstance(guest_name, str):
            raise ValueError("guest name is not of string type")
        if guest_name not in self._name2room:
            raise ValueError(
                "guest {0} is not checked in at this hotel".format(guest_name))
        return self._name2room[guest_name]
  avg_line_length: 30.067616
  max_line_length: 80
  alphanum_fraction: 0.590602
  content_no_comment:
import re class Key(object): pass class KeyCard(object): def __init__(self, first_key, second_key): if not isinstance(first_key, Key): raise ValueError("First key is not of Key type") if not isinstance(second_key, Key): raise ValueError("Second key is not of Key type") self._keys = (first_key, second_key) @property def first_key(self): return self._keys[0] @property def second_key(self): return self._keys[1] class Lock(object): def __init__(self, first_key, second_key): if not isinstance(first_key, Key): raise ValueError("First key is not of Key type") if not isinstance(second_key, Key): raise ValueError("Second key is not of Key type") self._keys = (first_key, second_key) def can_be_unlocked(self, keycard): if not isinstance(keycard, KeyCard): raise ValueError("keycard is not of KeyCard type") return self._keys[0] == keycard.first_key and \ self._keys[1] == keycard.second_key class Room(object): def __init__(self, room_number, lock): if type(room_number) != int: raise ValueError("room_number is not of integer type") if room_number < 1: raise ValueError("room_number is less than 1") if not isinstance(lock, Lock): raise ValueError("lock is not of Lock type") self._number = room_number self._lock = lock @property def last_key(self): return self._last_key @last_key.setter def last_key(self, key): self._last_key = key @property def keys(self): k = self.last_key self.last_key = Key() return (k, self.last_key) @property def room_number(self): return self._number @property def lock(self): return self._lock class Guest(object): def __init__(self, name, room_number, keycard): if type(room_number) != int: raise ValueError("room_number is not of integer type") if room_number < 1: raise ValueError("room_number is less than 1") if not isinstance(name, str): raise ValueError("name is not of string type") if len(name) < 2: raise ValueError("name is less than 2 characters long") if re.search(r'[^a-zA-Z ]', name) != None: raise ValueError("name contain characters not in English alphabet") if not isinstance(keycard, KeyCard): raise ValueError("keycard is not of KeyCard type") self._guest_name = name self._room_number = room_number self._keycard = keycard @property def guest_name(self): return self._guest_name @property def keycard(self): return self._keycard @property def room_number(self): return self._room_number def is_checkedin(self, hotel): if not isinstance(hotel, Hotel): raise ValueError("hotel is not of Hotel type") return hotel.is_checkedin(self._guest_name) class FullCapacityError(RuntimeError): pass class Hotel(object): def __init__(self, N): if type(N) != int: raise ValueError("N is not of int type") if N < 10 or N > 1000: raise ValueError("N is not between 10 and 1000, both inclusive") self._name2guest = {} self._name2room = {} self._capacity = N self._empty_rooms = [] for i in range(1, N + 1): k = Key() r = Room(i, Lock(k, k)) r.last_key = k self._empty_rooms.append(r) def checkin(self, guest_name): if not isinstance(guest_name, str): raise ValueError("guest name is not of string type") if guest_name in self._name2guest: raise ValueError( "guest named {0} is already checked in".format(guest_name)) if len(self._name2guest) >= self._capacity: raise FullCapacityError() room = self._empty_rooms.pop() last_key, new_key = room.keys guest = Guest(guest_name, room.room_number, KeyCard(last_key, new_key)) self._name2guest[guest_name] = guest self._name2room[guest_name] = room return guest def is_checkedin(self, guest_name): if not isinstance(guest_name, str): raise ValueError("guest name is not of 
string type") return guest_name in self._name2guest def checkout(self, guest_name): if not isinstance(guest_name, str): raise ValueError("guest name is not of string type") if guest_name in self._name2guest: del self._name2guest[guest_name] room = self._name2room.pop(guest_name) self._empty_rooms.append(room) def room_of(self, guest_name): if not isinstance(guest_name, str): raise ValueError("guest name is not of string type") if guest_name not in self._name2room: raise ValueError( "guest {0} is not checked in at this hotel".format(guest_name)) return self._name2room[guest_name]
  is_comment_constant_removed: true
  is_sharp_comment_removed: true
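The content of Row 1 defines a small hotel room-locking API (Key, KeyCard, Lock, Room, Guest, Hotel); its path marks it as an intentionally incorrect implementation used in a software-testing exercise. Below is a minimal sketch of how that API could be exercised, assuming the classes are importable from a hypothetical module named hotel; it only probes the lock/keycard behavior and deliberately asserts nothing about what the correct result should be.

# Minimal sketch exercising the API defined in Row 1's content.
# Assumption: the classes are importable from a hypothetical module "hotel".
# The file is labeled an incorrect implementation, so this only prints the
# observed behavior instead of asserting an expected outcome.
from hotel import Hotel

hotel = Hotel(10)                       # capacity must be between 10 and 1000
guest = hotel.checkin("Ada Lovelace")   # allocates a room and issues a keycard
room = hotel.room_of("Ada Lovelace")

print("room number:", room.room_number)
print("keycard unlocks the room's lock:",
      room.lock.can_be_unlocked(guest.keycard))

hotel.checkout("Ada Lovelace")
print("still checked in:", guest.is_checkedin(hotel))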
Row 2:
  hexsha: f711a55112cb377c654e631988bd9f39095ca6c3
  size: 2,580
  ext: py
  lang: Python
  max_stars_repo_path: src/verstr/__init__.py
  max_stars_repo_name: BRGM/verstr
  max_stars_repo_head_hexsha: 2a0fdd2bc359db94d0730824a07463bf23535e18
  max_stars_repo_licenses: [ "MIT" ]
  max_stars_count: 1
  max_stars_repo_stars_event_min_datetime: 2021-12-02T08:02:21.000Z
  max_stars_repo_stars_event_max_datetime: 2021-12-02T08:02:21.000Z
  max_issues_repo_path: src/verstr/__init__.py
  max_issues_repo_name: BRGM/verstr
  max_issues_repo_head_hexsha: 2a0fdd2bc359db94d0730824a07463bf23535e18
  max_issues_repo_licenses: [ "MIT" ]
  max_issues_count: null
  max_issues_repo_issues_event_min_datetime: null
  max_issues_repo_issues_event_max_datetime: null
  max_forks_repo_path: src/verstr/__init__.py
  max_forks_repo_name: BRGM/verstr
  max_forks_repo_head_hexsha: 2a0fdd2bc359db94d0730824a07463bf23535e18
  max_forks_repo_licenses: [ "MIT" ]
  max_forks_count: null
  max_forks_repo_forks_event_min_datetime: null
  max_forks_repo_forks_event_max_datetime: null
  content:

"""
Make comparing version strings super simple.

If you want codes using your package to be able to verify its version
as easily as::

    # user_code.py
    import my_package
    assert my_package.__version__ >= "1.1"

Just customize your package as follows::

    # my_package/__init__.py
    import verstr
    __version__ = verstr.verstr("1.2.4")

"""

import collections

import packaging.version

__all__ = ['verstr']


def verstr(str_version, mode="str"):
    """
    returns a comparable version object.

    verstr(str_version)
    verstr(str_version, mode)

    Parameters
    ----------
    str_version: str
        A string that follows PEP 440, the standard version scheme
        for Python packages
    mode: str
        A string to select the type of the returned value.

    Returns
    -------
    VersionCompareMixin
        The comparable version object. Its type depends on the `mode`
        argument:
            'str' -> VersionString
            'userstr' -> VersionUserString
            'interface' -> VersionInterface
    """
    modes = dict(
        str=VersionString,
        userstr=VersionUserString,
        interface=VersionInterface
    )
    try:
        cls = modes[mode]
    except KeyError:
        raise ValueError(
            f"'mode' argument must be in {list(modes)}, "
            f"get {mode!r} instead."
        )
    return cls(str_version)


def to_version(str_version):
    return packaging.version.Version(str(str_version))


class VersionCompareMixin:
    def _comp_op(str_op):
        def op(self, other):
            return getattr(to_version(self), str_op)(to_version(other))
        op.__name__ = str_op
        return op

    __eq__ = _comp_op("__eq__")
    __lt__ = _comp_op("__lt__")
    __le__ = _comp_op("__le__")
    __gt__ = _comp_op("__gt__")
    __ge__ = _comp_op("__ge__")

    del _comp_op


class VersionString(VersionCompareMixin, str):
    def __new__(cls, object):
        return super().__new__(cls, str(to_version(object)))


class VersionUserString(VersionCompareMixin, collections.UserString):
    def __init__(self, data):
        self.data = data

    @property
    def data(self):
        return self.__dict__['data']

    @data.setter
    def data(self, data):
        self.__dict__['data'] = str(to_version(data))


class VersionInterface(VersionCompareMixin):
    def __init__(self, version):
        self._version = to_version(version)

    def __repr__(self):
        return str(self._version)


try:
    from . import _version
    __version__ = verstr(_version.version)
except ImportError:
    __version__ = None
  avg_line_length: 21.864407
  max_line_length: 86
  alphanum_fraction: 0.649225
  content_no_comment:
import collections import packaging.version __all__ = ['verstr'] def verstr(str_version, mode="str"): modes = dict( str=VersionString, userstr=VersionUserString, interface=VersionInterface ) try: cls = modes[mode] except KeyError: raise ValueError( f"'mode' argument must be in {list(modes)}, " f"get {mode!r} instead." ) return cls(str_version) def to_version(str_version): return packaging.version.Version(str(str_version)) class VersionCompareMixin: def _comp_op(str_op): def op(self, other): return getattr(to_version(self), str_op)(to_version(other)) op.__name__ = str_op return op __eq__ = _comp_op("__eq__") __lt__ = _comp_op("__lt__") __le__ = _comp_op("__le__") __gt__ = _comp_op("__gt__") __ge__ = _comp_op("__ge__") del _comp_op class VersionString(VersionCompareMixin, str): def __new__(cls, object): return super().__new__(cls, str(to_version(object))) class VersionUserString(VersionCompareMixin, collections.UserString): def __init__(self, data): self.data = data @property def data(self): return self.__dict__['data'] @data.setter def data(self, data): self.__dict__['data'] = str(to_version(data)) class VersionInterface(VersionCompareMixin): def __init__(self, version): self._version = to_version(version) def __repr__(self): return str(self._version) try: from . import _version __version__ = verstr(_version.version) except ImportError: __version__ = None
  is_comment_constant_removed: true
  is_sharp_comment_removed: true
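Row 2's content is the verstr package, whose module docstring already spells out the intended use. The short sketch below restates that usage as runnable code; it assumes the package is installed under its source name verstr and relies only on behavior visible in the code above (PEP 440 comparison via packaging, and the three mode values).

# Minimal usage sketch based on Row 2's module docstring and code.
import verstr

v = verstr.verstr("1.2.4")          # default mode="str" -> VersionString
print(v >= "1.1")                   # True: compared as PEP 440 versions
print(v < "1.10")                   # True: 1.10 sorts after 1.2.4
print(isinstance(v, str))           # True: VersionString subclasses str

vi = verstr.verstr("2.0", mode="interface")   # VersionInterface wrapper
print(vi > "1.9.9")                 # True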
Row 3:
  hexsha: f711a59015e986f0a404d98866e778f8d9c0833f
  size: 237,821
  ext: py
  lang: Python
  max_stars_repo_path: seleniumbase/fixtures/base_case.py
  max_stars_repo_name: Forsaj1/SeleniumBase
  max_stars_repo_head_hexsha: a6db2e4866fa80f23738b1d9602915f4aefa50b1
  max_stars_repo_licenses: [ "MIT" ]
  max_stars_count: null
  max_stars_repo_stars_event_min_datetime: null
  max_stars_repo_stars_event_max_datetime: null
  max_issues_repo_path: seleniumbase/fixtures/base_case.py
  max_issues_repo_name: Forsaj1/SeleniumBase
  max_issues_repo_head_hexsha: a6db2e4866fa80f23738b1d9602915f4aefa50b1
  max_issues_repo_licenses: [ "MIT" ]
  max_issues_count: null
  max_issues_repo_issues_event_min_datetime: null
  max_issues_repo_issues_event_max_datetime: null
  max_forks_repo_path: seleniumbase/fixtures/base_case.py
  max_forks_repo_name: Forsaj1/SeleniumBase
  max_forks_repo_head_hexsha: a6db2e4866fa80f23738b1d9602915f4aefa50b1
  max_forks_repo_licenses: [ "MIT" ]
  max_forks_count: null
  max_forks_repo_forks_event_min_datetime: null
  max_forks_repo_forks_event_max_datetime: null
  content:
# -*- coding: utf-8 -*- """ The BaseCase class is the main gateway for using The SeleniumBase Framework. It inherits Python's unittest.TestCase class, and runs with Pytest or Nose. All tests using BaseCase automatically launch WebDriver browsers for tests. Usage: from seleniumbase import BaseCase class MyTestClass(BaseCase): def test_anything(self): # Write your code here. Example: self.open("https://github.com/") self.update_text("input.header-search-input", "SeleniumBase\n") self.click('a[href="/seleniumbase/SeleniumBase"]') self.assert_element("div.repository-content") .... SeleniumBase methods expand and improve on existing WebDriver commands. Improvements include making WebDriver more robust, reliable, and flexible. Page elements are given enough time to load before WebDriver acts on them. Code becomes greatly simplified and easier to maintain. """ import codecs import json import logging import math import os import re import sys import time import urllib3 import unittest import uuid from selenium.common.exceptions import (StaleElementReferenceException, MoveTargetOutOfBoundsException, WebDriverException) from selenium.common import exceptions as selenium_exceptions from selenium.webdriver.common.by import By from selenium.webdriver.common.keys import Keys from selenium.webdriver.remote.remote_connection import LOGGER from selenium.webdriver.support.ui import Select from seleniumbase import config as sb_config from seleniumbase.common import decorators from seleniumbase.config import settings from seleniumbase.core.testcase_manager import TestcaseDataPayload from seleniumbase.core.testcase_manager import TestcaseManager from seleniumbase.core import download_helper from seleniumbase.core import log_helper from seleniumbase.core import settings_parser from seleniumbase.core import tour_helper from seleniumbase.core import visual_helper from seleniumbase.fixtures import constants from seleniumbase.fixtures import js_utils from seleniumbase.fixtures import page_actions from seleniumbase.fixtures import page_utils from seleniumbase.fixtures import shared_utils from seleniumbase.fixtures import xpath_to_css logging.getLogger("requests").setLevel(logging.ERROR) logging.getLogger("urllib3").setLevel(logging.ERROR) urllib3.disable_warnings() LOGGER.setLevel(logging.WARNING) ECI_Exception = selenium_exceptions.ElementClickInterceptedException ENI_Exception = selenium_exceptions.ElementNotInteractableException class BaseCase(unittest.TestCase): ''' A base test case that wraps methods for enhanced usage. You can also add your own methods here. ''' def __init__(self, *args, **kwargs): super(BaseCase, self).__init__(*args, **kwargs) self.driver = None self.environment = None self.env = None # Add a shortened version of self.environment self.__last_url_of_delayed_assert = "data:," self.__last_page_load_url = "data:," self.__last_page_screenshot = None self.__last_page_screenshot_png = None self.__last_page_url = None self.__last_page_source = None self.__added_pytest_html_extra = None self.__delayed_assert_count = 0 self.__delayed_assert_failures = [] self.__device_width = None self.__device_height = None self.__device_pixel_ratio = None # Requires self._* instead of self.__* for external class use self._html_report_extra = [] # (Used by pytest_plugin.py) self._default_driver = None self._drivers_list = [] self._tour_steps = {} def open(self, url): """ Navigates the current browser window to the specified page. 
""" self.__last_page_load_url = None if url.startswith("://"): # Convert URLs such as "://google.com" into "https://google.com" url = "https" + url self.driver.get(url) if settings.WAIT_FOR_RSC_ON_PAGE_LOADS: self.wait_for_ready_state_complete() self.__demo_mode_pause_if_active() def open_url(self, url): """ Same as open() - Original saved for backwards compatibility. """ self.open(url) def get(self, url): """ Same as open() - WebDriver uses this method name. """ self.open(url) def visit(self, url): """ Same as open() - Some JS frameworks use this method name. """ self.open(url) def click(self, selector, by=By.CSS_SELECTOR, timeout=None, delay=0): if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) selector, by = self.__recalculate_selector(selector, by) if page_utils.is_link_text_selector(selector) or by == By.LINK_TEXT: if not self.is_link_text_visible(selector): # Handle a special case of links hidden in dropdowns self.click_link_text(selector, timeout=timeout) return if page_utils.is_partial_link_text_selector(selector) or ( by == By.PARTIAL_LINK_TEXT): if not self.is_partial_link_text_visible(selector): # Handle a special case of partial links hidden in dropdowns self.click_partial_link_text(selector, timeout=timeout) return element = page_actions.wait_for_element_visible( self.driver, selector, by, timeout=timeout) self.__demo_mode_highlight_if_active(selector, by) if not self.demo_mode: self.__scroll_to_element(element, selector, by) pre_action_url = self.driver.current_url if delay and delay > 0: time.sleep(delay) try: if self.browser == 'ie' and by == By.LINK_TEXT: # An issue with clicking Link Text on IE means using jquery self.__jquery_click(selector, by=by) elif self.browser == "safari": if by == By.LINK_TEXT: self.__jquery_click(selector, by=by) else: self.__js_click(selector, by=by) else: # Normal click element.click() except (StaleElementReferenceException, ENI_Exception): self.wait_for_ready_state_complete() time.sleep(0.05) element = page_actions.wait_for_element_visible( self.driver, selector, by, timeout=timeout) if self.browser == "safari": if by == By.LINK_TEXT: self.__jquery_click(selector, by=by) else: self.__js_click(selector, by=by) else: element.click() except (WebDriverException, MoveTargetOutOfBoundsException): self.wait_for_ready_state_complete() try: self.__js_click(selector, by=by) except Exception: try: self.__jquery_click(selector, by=by) except Exception: # One more attempt to click on the element element = page_actions.wait_for_element_visible( self.driver, selector, by, timeout=timeout) element.click() if settings.WAIT_FOR_RSC_ON_CLICKS: self.wait_for_ready_state_complete() if self.demo_mode: if self.driver.current_url != pre_action_url: self.__demo_mode_pause_if_active() else: self.__demo_mode_pause_if_active(tiny=True) elif self.slow_mode: self.__slow_mode_pause_if_active() def slow_click(self, selector, by=By.CSS_SELECTOR, timeout=None): """ Similar to click(), but pauses for a brief moment before clicking. When used in combination with setting the user-agent, you can often bypass bot-detection by tricking websites into thinking that you're not a bot. (Useful on websites that block web automation tools.) To set the user-agent, use: ``--agent=AGENT``. 
Here's an example message from GitHub's bot-blocker: ``You have triggered an abuse detection mechanism...`` """ if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) if not self.demo_mode: self.click(selector, by=by, timeout=timeout, delay=1.05) else: # Demo Mode already includes a small delay self.click(selector, by=by, timeout=timeout, delay=0.25) def double_click(self, selector, by=By.CSS_SELECTOR, timeout=None): from selenium.webdriver.common.action_chains import ActionChains if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) if page_utils.is_xpath_selector(selector): by = By.XPATH element = page_actions.wait_for_element_visible( self.driver, selector, by, timeout=timeout) self.__demo_mode_highlight_if_active(selector, by) if not self.demo_mode: self.__scroll_to_element(element, selector, by) pre_action_url = self.driver.current_url try: actions = ActionChains(self.driver) actions.move_to_element(element) actions.double_click(element) actions.perform() except (StaleElementReferenceException, ENI_Exception): self.wait_for_ready_state_complete() time.sleep(0.05) element = page_actions.wait_for_element_visible( self.driver, selector, by, timeout=timeout) actions = ActionChains(self.driver) actions.move_to_element(element) actions.double_click(element) actions.perform() if settings.WAIT_FOR_RSC_ON_CLICKS: self.wait_for_ready_state_complete() if self.demo_mode: if self.driver.current_url != pre_action_url: self.__demo_mode_pause_if_active() else: self.__demo_mode_pause_if_active(tiny=True) elif self.slow_mode: self.__slow_mode_pause_if_active() def click_chain(self, selectors_list, by=By.CSS_SELECTOR, timeout=None, spacing=0): """ This method clicks on a list of elements in succession. 'spacing' is the amount of time to wait between clicks. (sec) """ if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) for selector in selectors_list: self.click(selector, by=by, timeout=timeout) if spacing > 0: time.sleep(spacing) def type(self, selector, text, by=By.CSS_SELECTOR, timeout=None, retry=False): """ The short version of update_text(), which clears existing text and adds new text into the text field. We want to keep the other version for backward compatibility. """ if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) if page_utils.is_xpath_selector(selector): by = By.XPATH self.update_text(selector, text, by=by, timeout=timeout, retry=retry) def input(self, selector, text, by=By.CSS_SELECTOR, timeout=None, retry=False): """ Same as update_text(). """ if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) if page_utils.is_xpath_selector(selector): by = By.XPATH self.update_text(selector, text, by=by, timeout=timeout, retry=retry) def update_text(self, selector, new_value, by=By.CSS_SELECTOR, timeout=None, retry=False): """ This method updates an element's text field with new text. Has multiple parts: * Waits for the element to be visible. * Waits for the element to be interactive. * Clears the text field. * Types in the new text. * Hits Enter/Submit (if the text ends in "\n"). 
@Params selector - the selector of the text field new_value - the new value to type into the text field by - the type of selector to search by (Default: CSS Selector) timeout - how long to wait for the selector to be visible retry - if True, use JS if the Selenium text update fails """ if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) if page_utils.is_xpath_selector(selector): by = By.XPATH element = self.wait_for_element_visible( selector, by=by, timeout=timeout) self.__demo_mode_highlight_if_active(selector, by) if not self.demo_mode: self.__scroll_to_element(element, selector, by) try: element.clear() except (StaleElementReferenceException, ENI_Exception): self.wait_for_ready_state_complete() time.sleep(0.06) element = self.wait_for_element_visible( selector, by=by, timeout=timeout) try: element.clear() except Exception: pass # Clearing the text field first isn't critical except Exception: pass # Clearing the text field first isn't critical self.__demo_mode_pause_if_active(tiny=True) pre_action_url = self.driver.current_url if type(new_value) is int or type(new_value) is float: new_value = str(new_value) try: if not new_value.endswith('\n'): element.send_keys(new_value) if settings.WAIT_FOR_RSC_ON_PAGE_LOADS: self.wait_for_ready_state_complete() else: new_value = new_value[:-1] element.send_keys(new_value) element.send_keys(Keys.RETURN) if settings.WAIT_FOR_RSC_ON_PAGE_LOADS: self.wait_for_ready_state_complete() except (StaleElementReferenceException, ENI_Exception): self.wait_for_ready_state_complete() time.sleep(0.06) element = self.wait_for_element_visible( selector, by=by, timeout=timeout) element.clear() if not new_value.endswith('\n'): element.send_keys(new_value) else: new_value = new_value[:-1] element.send_keys(new_value) element.send_keys(Keys.RETURN) if settings.WAIT_FOR_RSC_ON_PAGE_LOADS: self.wait_for_ready_state_complete() except Exception: exc_message = self.__get_improved_exception_message() raise Exception(exc_message) if (retry and element.get_attribute('value') != new_value and ( not new_value.endswith('\n'))): logging.debug('update_text() is falling back to JavaScript!') self.set_value(selector, new_value, by=by) if self.demo_mode: if self.driver.current_url != pre_action_url: self.__demo_mode_pause_if_active() else: self.__demo_mode_pause_if_active(tiny=True) elif self.slow_mode: self.__slow_mode_pause_if_active() def add_text(self, selector, text, by=By.CSS_SELECTOR, timeout=None): """ The more-reliable version of driver.send_keys() Similar to update_text(), but won't clear the text field first. 
""" if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) if page_utils.is_xpath_selector(selector): by = By.XPATH element = self.wait_for_element_visible( selector, by=by, timeout=timeout) self.__demo_mode_highlight_if_active(selector, by) if not self.demo_mode: self.__scroll_to_element(element, selector, by) pre_action_url = self.driver.current_url try: if not text.endswith('\n'): element.send_keys(text) else: text = text[:-1] element.send_keys(text) element.send_keys(Keys.RETURN) if settings.WAIT_FOR_RSC_ON_PAGE_LOADS: self.wait_for_ready_state_complete() except (StaleElementReferenceException, ENI_Exception): self.wait_for_ready_state_complete() time.sleep(0.06) element = self.wait_for_element_visible( selector, by=by, timeout=timeout) if not text.endswith('\n'): element.send_keys(text) else: text = text[:-1] element.send_keys(text) element.send_keys(Keys.RETURN) if settings.WAIT_FOR_RSC_ON_PAGE_LOADS: self.wait_for_ready_state_complete() except Exception: exc_message = self.__get_improved_exception_message() raise Exception(exc_message) if self.demo_mode: if self.driver.current_url != pre_action_url: self.__demo_mode_pause_if_active() else: self.__demo_mode_pause_if_active(tiny=True) elif self.slow_mode: self.__slow_mode_pause_if_active() def send_keys(self, selector, text, by=By.CSS_SELECTOR, timeout=None): """ Same as add_text() """ if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) if page_utils.is_xpath_selector(selector): by = By.XPATH self.add_text(selector, text, by=by, timeout=timeout) def submit(self, selector, by=By.CSS_SELECTOR): """ Alternative to self.driver.find_element_by_*(SELECTOR).submit() """ if page_utils.is_xpath_selector(selector): by = By.XPATH element = self.wait_for_element_visible( selector, by=by, timeout=settings.SMALL_TIMEOUT) element.submit() self.__demo_mode_pause_if_active() def refresh_page(self): self.__last_page_load_url = None self.driver.refresh() self.wait_for_ready_state_complete() def refresh(self): """ The shorter version of self.refresh_page() """ self.refresh_page() def get_current_url(self): current_url = self.driver.current_url if "%" in current_url and sys.version_info[0] >= 3: try: from urllib.parse import unquote current_url = unquote(current_url, errors='strict') except Exception: pass return current_url def get_page_source(self): self.wait_for_ready_state_complete() return self.driver.page_source def get_page_title(self): self.wait_for_ready_state_complete() self.wait_for_element_present("title", timeout=settings.SMALL_TIMEOUT) time.sleep(0.03) return self.driver.title def get_title(self): """ The shorter version of self.get_page_title() """ return self.get_page_title() def go_back(self): self.__last_page_load_url = None self.driver.back() if self.browser == "safari": self.driver.refresh() self.wait_for_ready_state_complete() self.__demo_mode_pause_if_active() def go_forward(self): self.__last_page_load_url = None self.driver.forward() if self.browser == "safari": self.driver.refresh() self.wait_for_ready_state_complete() self.__demo_mode_pause_if_active() def is_element_present(self, selector, by=By.CSS_SELECTOR): selector, by = self.__recalculate_selector(selector, by) return page_actions.is_element_present(self.driver, selector, by) def is_element_visible(self, selector, by=By.CSS_SELECTOR): selector, by = 
self.__recalculate_selector(selector, by) return page_actions.is_element_visible(self.driver, selector, by) def is_text_visible(self, text, selector="html", by=By.CSS_SELECTOR): self.wait_for_ready_state_complete() time.sleep(0.01) selector, by = self.__recalculate_selector(selector, by) return page_actions.is_text_visible(self.driver, text, selector, by) def is_link_text_visible(self, link_text): self.wait_for_ready_state_complete() time.sleep(0.01) return page_actions.is_element_visible(self.driver, link_text, by=By.LINK_TEXT) def is_partial_link_text_visible(self, partial_link_text): self.wait_for_ready_state_complete() time.sleep(0.01) return page_actions.is_element_visible(self.driver, partial_link_text, by=By.PARTIAL_LINK_TEXT) def is_link_text_present(self, link_text): """ Returns True if the link text appears in the HTML of the page. The element doesn't need to be visible, such as elements hidden inside a dropdown selection. """ soup = self.get_beautiful_soup() html_links = soup.find_all('a') for html_link in html_links: if html_link.text.strip() == link_text.strip(): return True return False def is_partial_link_text_present(self, link_text): """ Returns True if the partial link appears in the HTML of the page. The element doesn't need to be visible, such as elements hidden inside a dropdown selection. """ soup = self.get_beautiful_soup() html_links = soup.find_all('a') for html_link in html_links: if link_text.strip() in html_link.text.strip(): return True return False def get_link_attribute(self, link_text, attribute, hard_fail=True): """ Finds a link by link text and then returns the attribute's value. If the link text or attribute cannot be found, an exception will get raised if hard_fail is True (otherwise None is returned). """ soup = self.get_beautiful_soup() html_links = soup.find_all('a') for html_link in html_links: if html_link.text.strip() == link_text.strip(): if html_link.has_attr(attribute): attribute_value = html_link.get(attribute) return attribute_value if hard_fail: raise Exception( 'Unable to find attribute {%s} from link text {%s}!' % (attribute, link_text)) else: return None if hard_fail: raise Exception("Link text {%s} was not found!" % link_text) else: return None def get_link_text_attribute(self, link_text, attribute, hard_fail=True): """ Same as self.get_link_attribute() Finds a link by link text and then returns the attribute's value. If the link text or attribute cannot be found, an exception will get raised if hard_fail is True (otherwise None is returned). """ return self.get_link_attribute(link_text, attribute, hard_fail) def get_partial_link_text_attribute(self, link_text, attribute, hard_fail=True): """ Finds a link by partial link text and then returns the attribute's value. If the partial link text or attribute cannot be found, an exception will get raised if hard_fail is True (otherwise None is returned). """ soup = self.get_beautiful_soup() html_links = soup.find_all('a') for html_link in html_links: if link_text.strip() in html_link.text.strip(): if html_link.has_attr(attribute): attribute_value = html_link.get(attribute) return attribute_value if hard_fail: raise Exception( 'Unable to find attribute {%s} from ' 'partial link text {%s}!' % (attribute, link_text)) else: return None if hard_fail: raise Exception( "Partial Link text {%s} was not found!" 
% link_text) else: return None def click_link_text(self, link_text, timeout=None): """ This method clicks link text on a page """ # If using phantomjs, might need to extract and open the link directly if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) if self.browser == 'phantomjs': if self.is_link_text_visible(link_text): element = self.wait_for_link_text_visible( link_text, timeout=timeout) element.click() return self.open(self.__get_href_from_link_text(link_text)) return if self.browser == "safari": self.__jquery_click(link_text, by=By.LINK_TEXT) return if not self.is_link_text_present(link_text): self.wait_for_link_text_present(link_text, timeout=timeout) pre_action_url = self.get_current_url() try: element = self.wait_for_link_text_visible( link_text, timeout=0.2) self.__demo_mode_highlight_if_active(link_text, by=By.LINK_TEXT) try: element.click() except (StaleElementReferenceException, ENI_Exception): self.wait_for_ready_state_complete() time.sleep(0.05) element = self.wait_for_link_text_visible( link_text, timeout=timeout) element.click() except Exception: found_css = False text_id = self.get_link_attribute(link_text, "id", False) if text_id: link_css = '[id="%s"]' % link_text found_css = True if not found_css: href = self.__get_href_from_link_text(link_text, False) if href: if href.startswith('/') or page_utils.is_valid_url(href): link_css = '[href="%s"]' % href found_css = True if not found_css: ngclick = self.get_link_attribute(link_text, "ng-click", False) if ngclick: link_css = '[ng-click="%s"]' % ngclick found_css = True if not found_css: onclick = self.get_link_attribute(link_text, "onclick", False) if onclick: link_css = '[onclick="%s"]' % onclick found_css = True success = False if found_css: if self.is_element_visible(link_css): self.click(link_css) success = True else: # The link text might be hidden under a dropdown menu success = self.__click_dropdown_link_text( link_text, link_css) if not success: element = self.wait_for_link_text_visible( link_text, timeout=settings.MINI_TIMEOUT) element.click() if settings.WAIT_FOR_RSC_ON_CLICKS: self.wait_for_ready_state_complete() if self.demo_mode: if self.driver.current_url != pre_action_url: self.__demo_mode_pause_if_active() else: self.__demo_mode_pause_if_active(tiny=True) elif self.slow_mode: self.__slow_mode_pause_if_active() def click_link(self, link_text, timeout=None): """ Same as self.click_link_text() """ if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) self.click_link_text(link_text, timeout=timeout) def click_partial_link_text(self, partial_link_text, timeout=None): """ This method clicks the partial link text on a page. 
""" # If using phantomjs, might need to extract and open the link directly if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) if self.browser == 'phantomjs': if self.is_partial_link_text_visible(partial_link_text): element = self.wait_for_partial_link_text(partial_link_text) element.click() return soup = self.get_beautiful_soup() html_links = soup.fetch('a') for html_link in html_links: if partial_link_text in html_link.text: for html_attribute in html_link.attrs: if html_attribute[0] == 'href': href = html_attribute[1] if href.startswith('//'): link = "http:" + href elif href.startswith('/'): url = self.driver.current_url domain_url = self.get_domain_url(url) link = domain_url + href else: link = href self.open(link) return raise Exception( 'Could not parse link from partial link_text ' '{%s}' % partial_link_text) raise Exception( "Partial link text {%s} was not found!" % partial_link_text) if not self.is_partial_link_text_present(partial_link_text): self.wait_for_partial_link_text_present( partial_link_text, timeout=timeout) pre_action_url = self.get_current_url() try: element = self.wait_for_partial_link_text( partial_link_text, timeout=0.2) self.__demo_mode_highlight_if_active( partial_link_text, by=By.LINK_TEXT) try: element.click() except (StaleElementReferenceException, ENI_Exception): self.wait_for_ready_state_complete() time.sleep(0.05) element = self.wait_for_partial_link_text( partial_link_text, timeout=timeout) element.click() except Exception: found_css = False text_id = self.get_partial_link_text_attribute( partial_link_text, "id", False) if text_id: link_css = '[id="%s"]' % partial_link_text found_css = True if not found_css: href = self.__get_href_from_partial_link_text( partial_link_text, False) if href: if href.startswith('/') or page_utils.is_valid_url(href): link_css = '[href="%s"]' % href found_css = True if not found_css: ngclick = self.get_partial_link_text_attribute( partial_link_text, "ng-click", False) if ngclick: link_css = '[ng-click="%s"]' % ngclick found_css = True if not found_css: onclick = self.get_partial_link_text_attribute( partial_link_text, "onclick", False) if onclick: link_css = '[onclick="%s"]' % onclick found_css = True success = False if found_css: if self.is_element_visible(link_css): self.click(link_css) success = True else: # The link text might be hidden under a dropdown menu success = self.__click_dropdown_partial_link_text( partial_link_text, link_css) if not success: element = self.wait_for_link_text_visible( partial_link_text, timeout=settings.MINI_TIMEOUT) element.click() if settings.WAIT_FOR_RSC_ON_CLICKS: self.wait_for_ready_state_complete() if self.demo_mode: if self.driver.current_url != pre_action_url: self.__demo_mode_pause_if_active() else: self.__demo_mode_pause_if_active(tiny=True) elif self.slow_mode: self.__slow_mode_pause_if_active() def get_text(self, selector, by=By.CSS_SELECTOR, timeout=None): if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) if page_utils.is_xpath_selector(selector): by = By.XPATH self.wait_for_ready_state_complete() time.sleep(0.01) element = page_actions.wait_for_element_visible( self.driver, selector, by, timeout) try: element_text = element.text except (StaleElementReferenceException, ENI_Exception): self.wait_for_ready_state_complete() time.sleep(0.06) element = 
page_actions.wait_for_element_visible( self.driver, selector, by, timeout) element_text = element.text return element_text def get_attribute(self, selector, attribute, by=By.CSS_SELECTOR, timeout=None, hard_fail=True): """ This method uses JavaScript to get the value of an attribute. """ if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) selector, by = self.__recalculate_selector(selector, by) self.wait_for_ready_state_complete() time.sleep(0.01) element = page_actions.wait_for_element_present( self.driver, selector, by, timeout) try: attribute_value = element.get_attribute(attribute) except (StaleElementReferenceException, ENI_Exception): self.wait_for_ready_state_complete() time.sleep(0.06) element = page_actions.wait_for_element_present( self.driver, selector, by, timeout) attribute_value = element.get_attribute(attribute) if attribute_value is not None: return attribute_value else: if hard_fail: raise Exception("Element {%s} has no attribute {%s}!" % ( selector, attribute)) else: return None def set_attribute(self, selector, attribute, value, by=By.CSS_SELECTOR, timeout=None): """ This method uses JavaScript to set/update an attribute. Only the first matching selector from querySelector() is used. """ if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) selector, by = self.__recalculate_selector(selector, by) if self.is_element_visible(selector, by=by): try: self.scroll_to(selector, by=by, timeout=timeout) except Exception: pass attribute = re.escape(attribute) attribute = self.__escape_quotes_if_needed(attribute) value = re.escape(value) value = self.__escape_quotes_if_needed(value) css_selector = self.convert_to_css_selector(selector, by=by) css_selector = re.escape(css_selector) css_selector = self.__escape_quotes_if_needed(css_selector) script = ("""document.querySelector('%s').setAttribute('%s','%s');""" % (css_selector, attribute, value)) self.execute_script(script) def set_attributes(self, selector, attribute, value, by=By.CSS_SELECTOR): """ This method uses JavaScript to set/update a common attribute. All matching selectors from querySelectorAll() are used. Example => (Make all links on a website redirect to Google): self.set_attributes("a", "href", "https://google.com") """ selector, by = self.__recalculate_selector(selector, by) attribute = re.escape(attribute) attribute = self.__escape_quotes_if_needed(attribute) value = re.escape(value) value = self.__escape_quotes_if_needed(value) css_selector = self.convert_to_css_selector(selector, by=by) css_selector = re.escape(css_selector) css_selector = self.__escape_quotes_if_needed(css_selector) script = ("""var $elements = document.querySelectorAll('%s'); var index = 0, length = $elements.length; for(; index < length; index++){ $elements[index].setAttribute('%s','%s');}""" % (css_selector, attribute, value)) try: self.execute_script(script) except Exception: pass def set_attribute_all(self, selector, attribute, value, by=By.CSS_SELECTOR): """ Same as set_attributes(), but using querySelectorAll naming scheme. This method uses JavaScript to set/update a common attribute. All matching selectors from querySelectorAll() are used. 
Example => (Make all links on a website redirect to Google): self.set_attribute_all("a", "href", "https://google.com") """ self.set_attributes(selector, attribute, value, by=by) def remove_attribute(self, selector, attribute, by=By.CSS_SELECTOR, timeout=None): """ This method uses JavaScript to remove an attribute. Only the first matching selector from querySelector() is used. """ if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) selector, by = self.__recalculate_selector(selector, by) if self.is_element_visible(selector, by=by): try: self.scroll_to(selector, by=by, timeout=timeout) except Exception: pass attribute = re.escape(attribute) attribute = self.__escape_quotes_if_needed(attribute) css_selector = self.convert_to_css_selector(selector, by=by) css_selector = re.escape(css_selector) css_selector = self.__escape_quotes_if_needed(css_selector) script = ("""document.querySelector('%s').removeAttribute('%s');""" % (css_selector, attribute)) self.execute_script(script) def remove_attributes(self, selector, attribute, by=By.CSS_SELECTOR): """ This method uses JavaScript to remove a common attribute. All matching selectors from querySelectorAll() are used. """ selector, by = self.__recalculate_selector(selector, by) attribute = re.escape(attribute) attribute = self.__escape_quotes_if_needed(attribute) css_selector = self.convert_to_css_selector(selector, by=by) css_selector = re.escape(css_selector) css_selector = self.__escape_quotes_if_needed(css_selector) script = ("""var $elements = document.querySelectorAll('%s'); var index = 0, length = $elements.length; for(; index < length; index++){ $elements[index].removeAttribute('%s');}""" % (css_selector, attribute)) try: self.execute_script(script) except Exception: pass def get_property_value(self, selector, property, by=By.CSS_SELECTOR, timeout=None): """ Returns the property value of a page element's computed style. Example: opacity = self.get_property_value("html body a", "opacity") self.assertTrue(float(opacity) > 0, "Element not visible!") """ if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) selector, by = self.__recalculate_selector(selector, by) self.wait_for_ready_state_complete() page_actions.wait_for_element_present( self.driver, selector, by, timeout) try: selector = self.convert_to_css_selector(selector, by=by) except Exception: # Don't run action if can't convert to CSS_Selector for JavaScript raise Exception( "Exception: Could not convert {%s}(by=%s) to CSS_SELECTOR!" % ( selector, by)) selector = re.escape(selector) selector = self.__escape_quotes_if_needed(selector) script = ("""var $elm = document.querySelector('%s'); $val = window.getComputedStyle($elm).getPropertyValue('%s'); return $val;""" % (selector, property)) value = self.execute_script(script) if value is not None: return value else: return "" # Return an empty string if the property doesn't exist def get_image_url(self, selector, by=By.CSS_SELECTOR, timeout=None): """ Extracts the URL from an image element on the page. 
""" if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) return self.get_attribute(selector, attribute='src', by=by, timeout=timeout) def find_elements(self, selector, by=By.CSS_SELECTOR, limit=0): """ Returns a list of matching WebElements. Elements could be either hidden or visible on the page. If "limit" is set and > 0, will only return that many elements. """ selector, by = self.__recalculate_selector(selector, by) self.wait_for_ready_state_complete() time.sleep(0.05) elements = self.driver.find_elements(by=by, value=selector) if limit and limit > 0 and len(elements) > limit: elements = elements[:limit] return elements def find_visible_elements(self, selector, by=By.CSS_SELECTOR, limit=0): """ Returns a list of matching WebElements that are visible. If "limit" is set and > 0, will only return that many elements. """ selector, by = self.__recalculate_selector(selector, by) self.wait_for_ready_state_complete() time.sleep(0.05) v_elems = page_actions.find_visible_elements(self.driver, selector, by) if limit and limit > 0 and len(v_elems) > limit: v_elems = v_elems[:limit] return v_elems def click_visible_elements(self, selector, by=By.CSS_SELECTOR, limit=0): """ Finds all matching page elements and clicks visible ones in order. If a click reloads or opens a new page, the clicking will stop. If no matching elements appear, an Exception will be raised. If "limit" is set and > 0, will only click that many elements. Also clicks elements that become visible from previous clicks. Works best for actions such as clicking all checkboxes on a page. Example: self.click_visible_elements('input[type="checkbox"]') """ selector, by = self.__recalculate_selector(selector, by) self.wait_for_element_present( selector, by=by, timeout=settings.SMALL_TIMEOUT) elements = self.find_elements(selector, by=by) if self.browser == "safari": if not limit: limit = 0 num_elements = len(elements) if num_elements == 0: raise Exception( "No matching elements found for selector {%s}!" % selector) elif num_elements < limit or limit == 0: limit = num_elements selector, by = self.__recalculate_selector(selector, by) css_selector = self.convert_to_css_selector(selector, by=by) last_css_chunk = css_selector.split(' ')[-1] if ":" in last_css_chunk: self.__js_click_all(css_selector) self.wait_for_ready_state_complete() return else: for i in range(1, limit+1): new_selector = css_selector + ":nth-of-type(%s)" % str(i) if self.is_element_visible(new_selector): self.__js_click(new_selector) self.wait_for_ready_state_complete() return click_count = 0 for element in elements: if limit and limit > 0 and click_count >= limit: return try: if element.is_displayed(): self.__scroll_to_element(element) element.click() click_count += 1 self.wait_for_ready_state_complete() except ECI_Exception: continue # ElementClickInterceptedException (Overlay likely) except (StaleElementReferenceException, ENI_Exception): self.wait_for_ready_state_complete() time.sleep(0.03) try: if element.is_displayed(): self.__scroll_to_element(element) element.click() click_count += 1 self.wait_for_ready_state_complete() except (StaleElementReferenceException, ENI_Exception): return # Probably on new page / Elements are all stale def click_nth_visible_element(self, selector, number, by=By.CSS_SELECTOR): """ Finds all matching page elements and clicks the nth visible one. 
Example: self.click_nth_visible_element('[type="checkbox"]', 5) (Clicks the 5th visible checkbox on the page.) """ elements = self.find_visible_elements(selector, by=by) if len(elements) < number: raise Exception("Not enough matching {%s} elements of type {%s} to" " click number %s!" % (selector, by, number)) number = number - 1 if number < 0: number = 0 element = elements[number] self.wait_for_ready_state_complete() try: self.__scroll_to_element(element) element.click() except (StaleElementReferenceException, ENI_Exception): self.wait_for_ready_state_complete() time.sleep(0.05) self.__scroll_to_element(element) element.click() def click_if_visible(self, selector, by=By.CSS_SELECTOR): """ If the page selector exists and is visible, clicks on the element. This method only clicks on the first matching element found. (Use click_visible_elements() to click all matching elements.) """ self.wait_for_ready_state_complete() if self.is_element_visible(selector, by=by): self.click(selector, by=by) def is_checked(self, selector, by=By.CSS_SELECTOR, timeout=None): """ Determines if a checkbox or a radio button element is checked. Returns True if the element is checked. Returns False if the element is not checked. If the element is not present on the page, raises an exception. If the element is not a checkbox or radio, raises an exception. """ if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) selector, by = self.__recalculate_selector(selector, by) kind = self.get_attribute(selector, "type", by=by, timeout=timeout) if kind != "checkbox" and kind != "radio": raise Exception("Expecting a checkbox or a radio button element!") is_checked = self.get_attribute( selector, "checked", by=by, timeout=timeout, hard_fail=False) if is_checked: return True else: # (NoneType) return False def is_selected(self, selector, by=By.CSS_SELECTOR, timeout=None): """ Same as is_checked() """ return self.is_checked(selector, by=by, timeout=timeout) def check_if_unchecked(self, selector, by=By.CSS_SELECTOR): """ If a checkbox or radio button is not checked, will check it. """ selector, by = self.__recalculate_selector(selector, by) if not self.is_checked(selector, by=by): if self.is_element_visible(selector, by=by): self.click(selector, by=by) else: selector = self.convert_to_css_selector(selector, by=by) self.js_click(selector, by=By.CSS_SELECTOR) def select_if_unselected(self, selector, by=By.CSS_SELECTOR): """ Same as check_if_unchecked() """ self.check_if_unchecked(selector, by=by) def uncheck_if_checked(self, selector, by=By.CSS_SELECTOR): """ If a checkbox is checked, will uncheck it. """ selector, by = self.__recalculate_selector(selector, by) if self.is_checked(selector, by=by): if self.is_element_visible(selector, by=by): self.click(selector, by=by) else: selector = self.convert_to_css_selector(selector, by=by) self.js_click(selector, by=By.CSS_SELECTOR) def unselect_if_selected(self, selector, by=By.CSS_SELECTOR): """ Same as uncheck_if_checked() """ self.uncheck_if_checked(selector, by=by) def is_element_in_an_iframe(self, selector, by=By.CSS_SELECTOR): """ Returns True if the selector's element is located in an iframe. Otherwise returns False. 
""" selector, by = self.__recalculate_selector(selector, by) if self.is_element_present(selector, by=by): return False soup = self.get_beautiful_soup() iframe_list = soup.select('iframe') for iframe in iframe_list: iframe_identifier = None if iframe.has_attr('name') and len(iframe['name']) > 0: iframe_identifier = iframe['name'] elif iframe.has_attr('id') and len(iframe['id']) > 0: iframe_identifier = iframe['id'] elif iframe.has_attr('class') and len(iframe['class']) > 0: iframe_class = " ".join(iframe["class"]) iframe_identifier = '[class="%s"]' % iframe_class else: continue self.switch_to_frame(iframe_identifier) if self.is_element_present(selector, by=by): self.switch_to_default_content() return True self.switch_to_default_content() return False def switch_to_frame_of_element(self, selector, by=By.CSS_SELECTOR): """ Set driver control to the iframe containing element (assuming the element is in a single-nested iframe) and returns the iframe name. If element is not in an iframe, returns None, and nothing happens. May not work if multiple iframes are nested within each other. """ selector, by = self.__recalculate_selector(selector, by) if self.is_element_present(selector, by=by): return None soup = self.get_beautiful_soup() iframe_list = soup.select('iframe') for iframe in iframe_list: iframe_identifier = None if iframe.has_attr('name') and len(iframe['name']) > 0: iframe_identifier = iframe['name'] elif iframe.has_attr('id') and len(iframe['id']) > 0: iframe_identifier = iframe['id'] elif iframe.has_attr('class') and len(iframe['class']) > 0: iframe_class = " ".join(iframe["class"]) iframe_identifier = '[class="%s"]' % iframe_class else: continue try: self.switch_to_frame(iframe_identifier, timeout=1) if self.is_element_present(selector, by=by): return iframe_identifier except Exception: pass self.switch_to_default_content() try: self.switch_to_frame(selector, timeout=1) return selector except Exception: if self.is_element_present(selector, by=by): return "" raise Exception("Could not switch to iframe containing " "element {%s}!" % selector) def hover_on_element(self, selector, by=By.CSS_SELECTOR): selector, by = self.__recalculate_selector(selector, by) if page_utils.is_xpath_selector(selector): selector = self.convert_to_css_selector(selector, By.XPATH) by = By.CSS_SELECTOR self.wait_for_element_visible( selector, by=by, timeout=settings.SMALL_TIMEOUT) self.__demo_mode_highlight_if_active(selector, by) self.scroll_to(selector, by=by) time.sleep(0.05) # Settle down from scrolling before hovering return page_actions.hover_on_element(self.driver, selector) def hover_and_click(self, hover_selector, click_selector, hover_by=By.CSS_SELECTOR, click_by=By.CSS_SELECTOR, timeout=None): """ When you want to hover over an element or dropdown menu, and then click an element that appears after that. 
""" if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) hover_selector, hover_by = self.__recalculate_selector( hover_selector, hover_by) hover_selector = self.convert_to_css_selector( hover_selector, hover_by) hover_by = By.CSS_SELECTOR click_selector, click_by = self.__recalculate_selector( click_selector, click_by) dropdown_element = self.wait_for_element_visible( hover_selector, by=hover_by, timeout=timeout) self.__demo_mode_highlight_if_active(hover_selector, hover_by) self.scroll_to(hover_selector, by=hover_by) pre_action_url = self.driver.current_url outdated_driver = False element = None try: if self.browser == "safari": # Use the workaround for hover-clicking on Safari raise Exception("This Exception will be caught.") page_actions.hover_element(self.driver, dropdown_element) except Exception: outdated_driver = True element = self.wait_for_element_present( click_selector, click_by, timeout) if click_by == By.LINK_TEXT: self.open(self.__get_href_from_link_text(click_selector)) elif click_by == By.PARTIAL_LINK_TEXT: self.open(self.__get_href_from_partial_link_text( click_selector)) else: self.js_click(click_selector, click_by) if not outdated_driver: element = page_actions.hover_and_click( self.driver, hover_selector, click_selector, hover_by, click_by, timeout) if self.demo_mode: if self.driver.current_url != pre_action_url: self.__demo_mode_pause_if_active() else: self.__demo_mode_pause_if_active(tiny=True) elif self.slow_mode: self.__slow_mode_pause_if_active() return element def hover_and_double_click(self, hover_selector, click_selector, hover_by=By.CSS_SELECTOR, click_by=By.CSS_SELECTOR, timeout=None): """ When you want to hover over an element or dropdown menu, and then double-click an element that appears after that. """ if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) hover_selector, hover_by = self.__recalculate_selector( hover_selector, hover_by) hover_selector = self.convert_to_css_selector( hover_selector, hover_by) click_selector, click_by = self.__recalculate_selector( click_selector, click_by) dropdown_element = self.wait_for_element_visible( hover_selector, by=hover_by, timeout=timeout) self.__demo_mode_highlight_if_active(hover_selector, hover_by) self.scroll_to(hover_selector, by=hover_by) pre_action_url = self.driver.current_url outdated_driver = False element = None try: page_actions.hover_element(self.driver, dropdown_element) except Exception: outdated_driver = True element = self.wait_for_element_present( click_selector, click_by, timeout) if click_by == By.LINK_TEXT: self.open(self.__get_href_from_link_text(click_selector)) elif click_by == By.PARTIAL_LINK_TEXT: self.open(self.__get_href_from_partial_link_text( click_selector)) else: self.js_click(click_selector, click_by) if not outdated_driver: element = page_actions.hover_element_and_double_click( self.driver, dropdown_element, click_selector, click_by=By.CSS_SELECTOR, timeout=timeout) if self.demo_mode: if self.driver.current_url != pre_action_url: self.__demo_mode_pause_if_active() else: self.__demo_mode_pause_if_active(tiny=True) elif self.slow_mode: self.__slow_mode_pause_if_active() return element def __select_option(self, dropdown_selector, option, dropdown_by=By.CSS_SELECTOR, option_by="text", timeout=None): """ Selects an HTML <select> option by specification. 
Option specifications are by "text", "index", or "value". Defaults to "text" if option_by is unspecified or unknown. """ if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) if page_utils.is_xpath_selector(dropdown_selector): dropdown_by = By.XPATH self.wait_for_ready_state_complete() element = self.wait_for_element_present( dropdown_selector, by=dropdown_by, timeout=timeout) if self.is_element_visible(dropdown_selector, by=dropdown_by): self.__demo_mode_highlight_if_active( dropdown_selector, dropdown_by) pre_action_url = self.driver.current_url try: if option_by == "index": Select(element).select_by_index(option) elif option_by == "value": Select(element).select_by_value(option) else: Select(element).select_by_visible_text(option) except (StaleElementReferenceException, ENI_Exception): self.wait_for_ready_state_complete() time.sleep(0.05) element = self.wait_for_element_present( dropdown_selector, by=dropdown_by, timeout=timeout) if option_by == "index": Select(element).select_by_index(option) elif option_by == "value": Select(element).select_by_value(option) else: Select(element).select_by_visible_text(option) if settings.WAIT_FOR_RSC_ON_CLICKS: self.wait_for_ready_state_complete() if self.demo_mode: if self.driver.current_url != pre_action_url: self.__demo_mode_pause_if_active() else: self.__demo_mode_pause_if_active(tiny=True) elif self.slow_mode: self.__slow_mode_pause_if_active() def select_option_by_text(self, dropdown_selector, option, dropdown_by=By.CSS_SELECTOR, timeout=None): """ Selects an HTML <select> option by option text. @Params dropdown_selector - the <select> selector option - the text of the option """ if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) self.__select_option(dropdown_selector, option, dropdown_by=dropdown_by, option_by="text", timeout=timeout) def select_option_by_index(self, dropdown_selector, option, dropdown_by=By.CSS_SELECTOR, timeout=None): """ Selects an HTML <select> option by option index. @Params dropdown_selector - the <select> selector option - the index number of the option """ if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) self.__select_option(dropdown_selector, option, dropdown_by=dropdown_by, option_by="index", timeout=timeout) def select_option_by_value(self, dropdown_selector, option, dropdown_by=By.CSS_SELECTOR, timeout=None): """ Selects an HTML <select> option by option value. @Params dropdown_selector - the <select> selector option - the value property of the option """ if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) self.__select_option(dropdown_selector, option, dropdown_by=dropdown_by, option_by="value", timeout=timeout) def load_html_string(self, html_string, new_page=True): """ Loads an HTML string into the web browser. If new_page==True, the page will switch to: "data:text/html," If new_page==False, will load HTML into the current page. 
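        Example (the HTML string below is only for illustration):
            self.load_html_string("<h1>Hello</h1><p>A quick test page.</p>")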
""" soup = self.get_beautiful_soup(html_string) scripts = soup.findAll("script") for script in scripts: html_string = html_string.replace(str(script), "") soup = self.get_beautiful_soup(html_string) found_head = False found_body = False html_head = None html_body = None if soup.head and len(str(soup.head)) > 12: found_head = True html_head = str(soup.head) html_head = re.escape(html_head) html_head = self.__escape_quotes_if_needed(html_head) html_head = html_head.replace('\\ ', ' ') if soup.body and len(str(soup.body)) > 12: found_body = True html_body = str(soup.body) html_body = re.escape(html_body) html_body = self.__escape_quotes_if_needed(html_body) html_body = html_body.replace('\\ ', ' ') html_string = re.escape(html_string) html_string = self.__escape_quotes_if_needed(html_string) html_string = html_string.replace('\\ ', ' ') if new_page: self.open("data:text/html,") inner_head = '''document.getElementsByTagName("head")[0].innerHTML''' inner_body = '''document.getElementsByTagName("body")[0].innerHTML''' if not found_body: self.execute_script( '''%s = \"%s\"''' % (inner_body, html_string)) elif found_body and not found_head: self.execute_script( '''%s = \"%s\"''' % (inner_body, html_body)) elif found_body and found_head: self.execute_script( '''%s = \"%s\"''' % (inner_head, html_head)) self.execute_script( '''%s = \"%s\"''' % (inner_body, html_body)) else: raise Exception("Logic Error!") for script in scripts: js_code = script.string js_code_lines = js_code.split('\n') new_lines = [] for line in js_code_lines: line = line.strip() new_lines.append(line) js_code = '\n'.join(new_lines) js_utils.add_js_code(self.driver, js_code) def load_html_file(self, html_file, new_page=True): """ Loads a local html file into the browser from a relative file path. If new_page==True, the page will switch to: "data:text/html," If new_page==False, will load HTML into the current page. Local images and other local src content WILL BE IGNORED. """ if len(html_file) < 6 or not html_file.endswith(".html"): raise Exception('Expecting a ".html" file!') abs_path = os.path.abspath('.') file_path = abs_path + "/%s" % html_file f = open(file_path, 'r') html_string = f.read().strip() f.close() self.load_html_string(html_string, new_page) def open_html_file(self, html_file): """ Opens a local html file into the browser from a relative file path. The URL displayed in the web browser will start with "file://". """ if len(html_file) < 6 or not html_file.endswith(".html"): raise Exception('Expecting a ".html" file!') abs_path = os.path.abspath('.') file_path = abs_path + "/%s" % html_file self.open("file://" + file_path) def execute_script(self, script): return self.driver.execute_script(script) def execute_async_script(self, script, timeout=None): if not timeout: timeout = settings.EXTREME_TIMEOUT return js_utils.execute_async_script(self.driver, script, timeout) def safe_execute_script(self, script): """ When executing a script that contains a jQuery command, it's important that the jQuery library has been loaded first. This method will load jQuery if it wasn't already loaded. 
""" try: self.execute_script(script) except Exception: # The likely reason this fails is because: "jQuery is not defined" self.activate_jquery() # It's a good thing we can define it here self.execute_script(script) def set_window_rect(self, x, y, width, height): self.driver.set_window_rect(x, y, width, height) self.__demo_mode_pause_if_active() def set_window_size(self, width, height): self.driver.set_window_size(width, height) self.__demo_mode_pause_if_active() def maximize_window(self): self.driver.maximize_window() self.__demo_mode_pause_if_active() def switch_to_frame(self, frame, timeout=None): """ Wait for an iframe to appear, and switch to it. This should be usable as a drop-in replacement for driver.switch_to.frame(). @Params frame - the frame element, name, id, index, or selector timeout - the time to wait for the alert in seconds """ if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) page_actions.switch_to_frame(self.driver, frame, timeout) def switch_to_default_content(self): """ Brings driver control outside the current iframe. (If driver control is inside an iframe, the driver control will be set to one level above the current frame. If the driver control is not currenly in an iframe, nothing will happen.) """ self.driver.switch_to.default_content() def open_new_window(self, switch_to=True): """ Opens a new browser tab/window and switches to it by default. """ self.driver.execute_script("window.open('');") time.sleep(0.01) if switch_to: self.switch_to_window(len(self.driver.window_handles) - 1) def switch_to_window(self, window, timeout=None): if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) page_actions.switch_to_window(self.driver, window, timeout) def switch_to_default_window(self): self.switch_to_window(0) def get_new_driver(self, browser=None, headless=None, servername=None, port=None, proxy=None, agent=None, switch_to=True, cap_file=None, cap_string=None, disable_csp=None, enable_sync=None, use_auto_ext=None, no_sandbox=None, disable_gpu=None, incognito=None, guest_mode=None, devtools=None, user_data_dir=None, extension_zip=None, extension_dir=None, is_mobile=False, d_width=None, d_height=None, d_p_r=None): """ This method spins up an extra browser for tests that require more than one. The first browser is already provided by tests that import base_case.BaseCase from seleniumbase. If parameters aren't specified, the method uses the same as the default driver. @Params browser - the browser to use. 
(Ex: "chrome", "firefox") headless - the option to run webdriver in headless mode servername - if using a Selenium Grid, set the host address here port - if using a Selenium Grid, set the host port here proxy - if using a proxy server, specify the "host:port" combo here switch_to - the option to switch to the new driver (default = True) cap_file - the file containing desired capabilities for the browser cap_string - the string with desired capabilities for the browser disable_csp - an option to disable Chrome's Content Security Policy enable_sync - the option to enable the Chrome Sync feature (Chrome) use_auto_ext - the option to enable Chrome's Automation Extension no_sandbox - the option to enable the "No-Sandbox" feature (Chrome) disable_gpu - the option to enable Chrome's "Disable GPU" feature incognito - the option to enable Chrome's Incognito mode (Chrome) guest - the option to enable Chrome's Guest mode (Chrome) devtools - the option to open Chrome's DevTools on start (Chrome) user_data_dir - Chrome's User Data Directory to use (Chrome-only) extension_zip - A Chrome Extension ZIP file to use (Chrome-only) extension_dir - A Chrome Extension folder to use (Chrome-only) is_mobile - the option to use the mobile emulator (Chrome-only) d_width - the device width of the mobile emulator (Chrome-only) d_height - the device height of the mobile emulator (Chrome-only) d_p_r - the device pixel ratio of the mobile emulator (Chrome-only) """ if self.browser == "remote" and self.servername == "localhost": raise Exception('Cannot use "remote" browser driver on localhost!' ' Did you mean to connect to a remote Grid server' ' such as BrowserStack or Sauce Labs? In that' ' case, you must specify the "server" and "port"' ' parameters on the command line! ' 'Example: ' '--server=user:key@hub.browserstack.com --port=80') browserstack_ref = ( 'https://browserstack.com/automate/capabilities') sauce_labs_ref = ( 'https://wiki.saucelabs.com/display/DOCS/Platform+Configurator#/') if self.browser == "remote" and not (self.cap_file or self.cap_string): raise Exception('Need to specify a desired capabilities file when ' 'using "--browser=remote". Add "--cap_file=FILE". 
' 'File should be in the Python format used by: ' '%s OR ' '%s ' 'See SeleniumBase/examples/sample_cap_file_BS.py ' 'and SeleniumBase/examples/sample_cap_file_SL.py' % (browserstack_ref, sauce_labs_ref)) if browser is None: browser = self.browser browser_name = browser if headless is None: headless = self.headless if servername is None: servername = self.servername if port is None: port = self.port use_grid = False if servername != "localhost": # Use Selenium Grid (Use "127.0.0.1" for localhost Grid) use_grid = True proxy_string = proxy if proxy_string is None: proxy_string = self.proxy_string user_agent = agent if user_agent is None: user_agent = self.user_agent if disable_csp is None: disable_csp = self.disable_csp if enable_sync is None: enable_sync = self.enable_sync if use_auto_ext is None: use_auto_ext = self.use_auto_ext if no_sandbox is None: no_sandbox = self.no_sandbox if disable_gpu is None: disable_gpu = self.disable_gpu if incognito is None: incognito = self.incognito if guest_mode is None: guest_mode = self.guest_mode if devtools is None: devtools = self.devtools if user_data_dir is None: user_data_dir = self.user_data_dir if extension_zip is None: extension_zip = self.extension_zip if extension_dir is None: extension_dir = self.extension_dir # Due to https://stackoverflow.com/questions/23055651/ , skip extension # if self.demo_mode or self.masterqa_mode: # disable_csp = True test_id = self.__get_test_id() if cap_file is None: cap_file = self.cap_file if cap_string is None: cap_string = self.cap_string if is_mobile is None: is_mobile = False if d_width is None: d_width = self.__device_width if d_height is None: d_height = self.__device_height if d_p_r is None: d_p_r = self.__device_pixel_ratio valid_browsers = constants.ValidBrowsers.valid_browsers if browser_name not in valid_browsers: raise Exception("Browser: {%s} is not a valid browser option. " "Valid options = {%s}" % (browser, valid_browsers)) # Launch a web browser from seleniumbase.core import browser_launcher new_driver = browser_launcher.get_driver(browser_name=browser_name, headless=headless, use_grid=use_grid, servername=servername, port=port, proxy_string=proxy_string, user_agent=user_agent, cap_file=cap_file, cap_string=cap_string, disable_csp=disable_csp, enable_sync=enable_sync, use_auto_ext=use_auto_ext, no_sandbox=no_sandbox, disable_gpu=disable_gpu, incognito=incognito, guest_mode=guest_mode, devtools=devtools, user_data_dir=user_data_dir, extension_zip=extension_zip, extension_dir=extension_dir, test_id=test_id, mobile_emulator=is_mobile, device_width=d_width, device_height=d_height, device_pixel_ratio=d_p_r) self._drivers_list.append(new_driver) if switch_to: self.driver = new_driver if self.headless: # Make sure the invisible browser window is big enough width = settings.HEADLESS_START_WIDTH height = settings.HEADLESS_START_HEIGHT try: self.driver.set_window_size(width, height) self.wait_for_ready_state_complete() except Exception: # This shouldn't fail, but in case it does, # get safely through setUp() so that # WebDrivers can get closed during tearDown(). 
pass else: if self.browser == 'chrome' or self.browser == 'edge': width = settings.CHROME_START_WIDTH height = settings.CHROME_START_HEIGHT try: if self.maximize_option: self.driver.maximize_window() else: self.driver.set_window_size(width, height) self.wait_for_ready_state_complete() except Exception: pass # Keep existing browser resolution elif self.browser == 'firefox': pass # No changes elif self.browser == 'safari': if self.maximize_option: try: self.driver.maximize_window() self.wait_for_ready_state_complete() except Exception: pass # Keep existing browser resolution else: try: self.driver.set_window_rect(10, 30, 945, 630) except Exception: pass if self.start_page and len(self.start_page) >= 4: if page_utils.is_valid_url(self.start_page): self.open(self.start_page) else: new_start_page = "http://" + self.start_page if page_utils.is_valid_url(new_start_page): self.open(new_start_page) return new_driver def switch_to_driver(self, driver): """ Sets self.driver to the specified driver. You may need this if using self.get_new_driver() in your code. """ self.driver = driver def switch_to_default_driver(self): """ Sets self.driver to the default/original driver. """ self.driver = self._default_driver def save_screenshot(self, name, folder=None): """ The screenshot will be in PNG format. """ return page_actions.save_screenshot(self.driver, name, folder) def save_page_source(self, name, folder=None): """ Saves the page HTML to the current directory (or given subfolder). If the folder specified doesn't exist, it will get created. @Params name - The file name to save the current page's HTML to. folder - The folder to save the file to. (Default = current folder) """ return page_actions.save_page_source(self.driver, name, folder) def save_cookies(self, name="cookies.txt"): """ Saves the page cookies to the "saved_cookies" folder. """ cookies = self.driver.get_cookies() json_cookies = json.dumps(cookies) if name.endswith('/'): raise Exception("Invalid filename for Cookies!") if '/' in name: name = name.split('/')[-1] if len(name) < 1: raise Exception("Filename for Cookies is too short!") if not name.endswith(".txt"): name = name + ".txt" folder = constants.SavedCookies.STORAGE_FOLDER abs_path = os.path.abspath('.') file_path = abs_path + "/%s" % folder if not os.path.exists(file_path): os.makedirs(file_path) cookies_file_path = "%s/%s" % (file_path, name) cookies_file = codecs.open(cookies_file_path, "w+") cookies_file.writelines(json_cookies) cookies_file.close() def load_cookies(self, name="cookies.txt"): """ Loads the page cookies from the "saved_cookies" folder. """ if name.endswith('/'): raise Exception("Invalid filename for Cookies!") if '/' in name: name = name.split('/')[-1] if len(name) < 1: raise Exception("Filename for Cookies is too short!") if not name.endswith(".txt"): name = name + ".txt" folder = constants.SavedCookies.STORAGE_FOLDER abs_path = os.path.abspath('.') file_path = abs_path + "/%s" % folder cookies_file_path = "%s/%s" % (file_path, name) f = open(cookies_file_path, 'r') json_cookies = f.read().strip() f.close() cookies = json.loads(json_cookies) for cookie in cookies: if 'expiry' in cookie: del cookie['expiry'] self.driver.add_cookie(cookie) def delete_all_cookies(self): """ Deletes all cookies in the web browser. Does NOT delete the saved cookies file. """ self.driver.delete_all_cookies() def delete_saved_cookies(self, name="cookies.txt"): """ Deletes the cookies file from the "saved_cookies" folder. Does NOT delete the cookies from the web browser. 
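        Example (uses the same default file name as save_cookies()):
            self.delete_saved_cookies("cookies.txt")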
""" if name.endswith('/'): raise Exception("Invalid filename for Cookies!") if '/' in name: name = name.split('/')[-1] if len(name) < 1: raise Exception("Filename for Cookies is too short!") if not name.endswith(".txt"): name = name + ".txt" folder = constants.SavedCookies.STORAGE_FOLDER abs_path = os.path.abspath('.') file_path = abs_path + "/%s" % folder cookies_file_path = "%s/%s" % (file_path, name) if os.path.exists(cookies_file_path): if cookies_file_path.endswith('.txt'): os.remove(cookies_file_path) def wait_for_ready_state_complete(self, timeout=None): try: # If there's an alert, skip self.driver.switch_to.alert return except Exception: # If there's no alert, continue pass if not timeout: timeout = settings.EXTREME_TIMEOUT if self.timeout_multiplier and timeout == settings.EXTREME_TIMEOUT: timeout = self.__get_new_timeout(timeout) is_ready = js_utils.wait_for_ready_state_complete(self.driver, timeout) self.wait_for_angularjs(timeout=settings.MINI_TIMEOUT) if self.js_checking_on: self.assert_no_js_errors() if self.ad_block_on: # If the ad_block feature is enabled, then block ads for new URLs current_url = self.get_current_url() if not current_url == self.__last_page_load_url: time.sleep(0.02) self.ad_block() time.sleep(0.01) if self.is_element_present("iframe"): time.sleep(0.07) # iframe ads take slightly longer to load self.ad_block() # Do ad_block on slower-loading iframes self.__last_page_load_url = current_url return is_ready def wait_for_angularjs(self, timeout=None, **kwargs): if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) js_utils.wait_for_angularjs(self.driver, timeout, **kwargs) def sleep(self, seconds): if not sb_config.time_limit: time.sleep(seconds) else: start_ms = time.time() * 1000.0 stop_ms = start_ms + (seconds * 1000.0) for x in range(int(seconds * 5)): shared_utils.check_if_time_limit_exceeded() now_ms = time.time() * 1000.0 if now_ms >= stop_ms: break time.sleep(0.2) def activate_jquery(self): """ If "jQuery is not defined", use this method to activate it for use. This happens because jQuery is not always defined on web sites. """ js_utils.activate_jquery(self.driver) self.wait_for_ready_state_complete() def __are_quotes_escaped(self, string): return js_utils.are_quotes_escaped(string) def __escape_quotes_if_needed(self, string): return js_utils.escape_quotes_if_needed(string) def bring_to_front(self, selector, by=By.CSS_SELECTOR): """ Updates the Z-index of a page element to bring it into view. Useful when getting a WebDriverException, such as the one below: { Element is not clickable at point (#, #). Other element would receive the click: ... 
} """ if page_utils.is_xpath_selector(selector): by = By.XPATH self.wait_for_element_visible( selector, by=by, timeout=settings.SMALL_TIMEOUT) try: selector = self.convert_to_css_selector(selector, by=by) except Exception: # Don't run action if can't convert to CSS_Selector for JavaScript return selector = re.escape(selector) selector = self.__escape_quotes_if_needed(selector) script = ("""document.querySelector('%s').style.zIndex = '999999';""" % selector) self.execute_script(script) def highlight_click(self, selector, by=By.CSS_SELECTOR, loops=3, scroll=True): if not self.demo_mode: self.highlight(selector, by=by, loops=loops, scroll=scroll) self.click(selector, by=by) def highlight_update_text(self, selector, new_value, by=By.CSS_SELECTOR, loops=3, scroll=True): if not self.demo_mode: self.highlight(selector, by=by, loops=loops, scroll=scroll) self.update_text(selector, new_value, by=by) def highlight(self, selector, by=By.CSS_SELECTOR, loops=None, scroll=True): """ This method uses fancy JavaScript to highlight an element. Used during demo_mode. @Params selector - the selector of the element to find by - the type of selector to search by (Default: CSS) loops - # of times to repeat the highlight animation (Default: 4. Each loop lasts for about 0.18s) scroll - the option to scroll to the element first (Default: True) """ selector, by = self.__recalculate_selector(selector, by) element = self.wait_for_element_visible( selector, by=by, timeout=settings.SMALL_TIMEOUT) if not loops: loops = settings.HIGHLIGHTS if scroll: try: self.__slow_scroll_to_element(element) except (StaleElementReferenceException, ENI_Exception): self.wait_for_ready_state_complete() time.sleep(0.05) element = self.wait_for_element_visible( selector, by=by, timeout=settings.SMALL_TIMEOUT) self.__slow_scroll_to_element(element) try: selector = self.convert_to_css_selector(selector, by=by) except Exception: # Don't highlight if can't convert to CSS_SELECTOR return if self.highlights: loops = self.highlights if self.browser == 'ie': loops = 1 # Override previous setting because IE is slow loops = int(loops) o_bs = '' # original_box_shadow try: style = element.get_attribute('style') except (StaleElementReferenceException, ENI_Exception): self.wait_for_ready_state_complete() time.sleep(0.05) element = self.wait_for_element_visible( selector, by=By.CSS_SELECTOR, timeout=settings.SMALL_TIMEOUT) style = element.get_attribute('style') if style: if 'box-shadow: ' in style: box_start = style.find('box-shadow: ') box_end = style.find(';', box_start) + 1 original_box_shadow = style[box_start:box_end] o_bs = original_box_shadow if ":contains" not in selector and ":first" not in selector: selector = re.escape(selector) selector = self.__escape_quotes_if_needed(selector) self.__highlight_with_js(selector, loops, o_bs) else: selector = self.__make_css_match_first_element_only(selector) selector = re.escape(selector) selector = self.__escape_quotes_if_needed(selector) try: self.__highlight_with_jquery(selector, loops, o_bs) except Exception: pass # JQuery probably couldn't load. Skip highlighting. time.sleep(0.065) def __highlight_with_js(self, selector, loops, o_bs): js_utils.highlight_with_js(self.driver, selector, loops, o_bs) def __highlight_with_jquery(self, selector, loops, o_bs): js_utils.highlight_with_jquery(self.driver, selector, loops, o_bs) def press_up_arrow(self, selector="html", times=1, by=By.CSS_SELECTOR): """ Simulates pressing the UP Arrow on the keyboard. By default, "html" will be used as the CSS Selector target. 
You can specify how many times in-a-row the action happens. """ if times < 1: return element = self.wait_for_element_present(selector) self.__demo_mode_highlight_if_active(selector, by) if not self.demo_mode: self.__scroll_to_element(element, selector, by) for i in range(int(times)): try: element.send_keys(Keys.ARROW_UP) except Exception: self.wait_for_ready_state_complete() element = self.wait_for_element_visible(selector) element.send_keys(Keys.ARROW_UP) time.sleep(0.01) if self.slow_mode: time.sleep(0.1) def press_down_arrow(self, selector="html", times=1, by=By.CSS_SELECTOR): """ Simulates pressing the DOWN Arrow on the keyboard. By default, "html" will be used as the CSS Selector target. You can specify how many times in-a-row the action happens. """ if times < 1: return element = self.wait_for_element_present(selector) self.__demo_mode_highlight_if_active(selector, by) if not self.demo_mode: self.__scroll_to_element(element, selector, by) for i in range(int(times)): try: element.send_keys(Keys.ARROW_DOWN) except Exception: self.wait_for_ready_state_complete() element = self.wait_for_element_visible(selector) element.send_keys(Keys.ARROW_DOWN) time.sleep(0.01) if self.slow_mode: time.sleep(0.1) def press_left_arrow(self, selector="html", times=1, by=By.CSS_SELECTOR): """ Simulates pressing the LEFT Arrow on the keyboard. By default, "html" will be used as the CSS Selector target. You can specify how many times in-a-row the action happens. """ if times < 1: return element = self.wait_for_element_present(selector) self.__demo_mode_highlight_if_active(selector, by) if not self.demo_mode: self.__scroll_to_element(element, selector, by) for i in range(int(times)): try: element.send_keys(Keys.ARROW_LEFT) except Exception: self.wait_for_ready_state_complete() element = self.wait_for_element_visible(selector) element.send_keys(Keys.ARROW_LEFT) time.sleep(0.01) if self.slow_mode: time.sleep(0.1) def press_right_arrow(self, selector="html", times=1, by=By.CSS_SELECTOR): """ Simulates pressing the RIGHT Arrow on the keyboard. By default, "html" will be used as the CSS Selector target. You can specify how many times in-a-row the action happens. 
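        Example (the "#slider" selector is only a placeholder):
            self.press_right_arrow("#slider", times=2)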
""" if times < 1: return element = self.wait_for_element_present(selector) self.__demo_mode_highlight_if_active(selector, by) if not self.demo_mode: self.__scroll_to_element(element, selector, by) for i in range(int(times)): try: element.send_keys(Keys.ARROW_RIGHT) except Exception: self.wait_for_ready_state_complete() element = self.wait_for_element_visible(selector) element.send_keys(Keys.ARROW_RIGHT) time.sleep(0.01) if self.slow_mode: time.sleep(0.1) def scroll_to(self, selector, by=By.CSS_SELECTOR, timeout=None): ''' Fast scroll to destination ''' if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) if self.demo_mode or self.slow_mode: self.slow_scroll_to(selector, by=by, timeout=timeout) return element = self.wait_for_element_visible( selector, by=by, timeout=timeout) try: self.__scroll_to_element(element, selector, by) except (StaleElementReferenceException, ENI_Exception): self.wait_for_ready_state_complete() time.sleep(0.05) element = self.wait_for_element_visible( selector, by=by, timeout=timeout) self.__scroll_to_element(element, selector, by) def slow_scroll_to(self, selector, by=By.CSS_SELECTOR, timeout=None): ''' Slow motion scroll to destination ''' if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) selector, by = self.__recalculate_selector(selector, by) element = self.wait_for_element_visible( selector, by=by, timeout=timeout) try: self.__slow_scroll_to_element(element) except (StaleElementReferenceException, ENI_Exception): self.wait_for_ready_state_complete() time.sleep(0.05) element = self.wait_for_element_visible( selector, by=by, timeout=timeout) self.__slow_scroll_to_element(element) def scroll_to_top(self): """ Scroll to the top of the page. """ scroll_script = "window.scrollTo(0, 0);" try: self.execute_script(scroll_script) time.sleep(0.012) return True except Exception: return False def scroll_to_bottom(self): """ Scroll to the bottom of the page. """ scroll_script = "window.scrollTo(0, 10000);" try: self.execute_script(scroll_script) time.sleep(0.012) return True except Exception: return False def click_xpath(self, xpath): # Technically self.click() will automatically detect an xpath selector, # so self.click_xpath() is just a longer name for the same action. self.click(xpath, by=By.XPATH) def js_click(self, selector, by=By.CSS_SELECTOR, all_matches=False): """ Clicks an element using pure JS. Does not use jQuery. If "all_matches" is False, only the first match is clicked. """ selector, by = self.__recalculate_selector(selector, by) if by == By.LINK_TEXT: message = ( "Pure JavaScript doesn't support clicking by Link Text. " "You may want to use self.jquery_click() instead, which " "allows this with :contains(), assuming jQuery isn't blocked. 
" "For now, self.js_click() will use a regular WebDriver click.") logging.debug(message) self.click(selector, by=by) return element = self.wait_for_element_present( selector, by=by, timeout=settings.SMALL_TIMEOUT) if self.is_element_visible(selector, by=by): self.__demo_mode_highlight_if_active(selector, by) if not self.demo_mode: self.__scroll_to_element(element, selector, by) css_selector = self.convert_to_css_selector(selector, by=by) css_selector = re.escape(css_selector) css_selector = self.__escape_quotes_if_needed(css_selector) if not all_matches: self.__js_click(selector, by=by) # The real "magic" happens else: self.__js_click_all(selector, by=by) # The real "magic" happens self.wait_for_ready_state_complete() self.__demo_mode_pause_if_active() def js_click_all(self, selector, by=By.CSS_SELECTOR): """ Clicks all matching elements using pure JS. (No jQuery) """ self.js_click(selector, by=By.CSS_SELECTOR, all_matches=True) def jquery_click(self, selector, by=By.CSS_SELECTOR): """ Clicks an element using jQuery. Different from using pure JS. """ selector, by = self.__recalculate_selector(selector, by) self.wait_for_element_present( selector, by=by, timeout=settings.SMALL_TIMEOUT) if self.is_element_visible(selector, by=by): self.__demo_mode_highlight_if_active(selector, by) selector = self.convert_to_css_selector(selector, by=by) selector = self.__make_css_match_first_element_only(selector) click_script = """jQuery('%s')[0].click()""" % selector self.safe_execute_script(click_script) self.__demo_mode_pause_if_active() def jquery_click_all(self, selector, by=By.CSS_SELECTOR): """ Clicks all matching elements using jQuery. """ selector, by = self.__recalculate_selector(selector, by) self.wait_for_element_present( selector, by=by, timeout=settings.SMALL_TIMEOUT) if self.is_element_visible(selector, by=by): self.__demo_mode_highlight_if_active(selector, by) selector = self.convert_to_css_selector(selector, by=by) click_script = """jQuery('%s').click()""" % selector self.safe_execute_script(click_script) self.__demo_mode_pause_if_active() def hide_element(self, selector, by=By.CSS_SELECTOR): """ Hide the first element on the page that matches the selector. """ selector, by = self.__recalculate_selector(selector, by) selector = self.convert_to_css_selector(selector, by=by) selector = self.__make_css_match_first_element_only(selector) hide_script = """jQuery('%s').hide()""" % selector self.safe_execute_script(hide_script) def hide_elements(self, selector, by=By.CSS_SELECTOR): """ Hide all elements on the page that match the selector. """ selector, by = self.__recalculate_selector(selector, by) selector = self.convert_to_css_selector(selector, by=by) hide_script = """jQuery('%s').hide()""" % selector self.safe_execute_script(hide_script) def show_element(self, selector, by=By.CSS_SELECTOR): """ Show the first element on the page that matches the selector. """ selector, by = self.__recalculate_selector(selector, by) selector = self.convert_to_css_selector(selector, by=by) selector = self.__make_css_match_first_element_only(selector) show_script = """jQuery('%s').show(0)""" % selector self.safe_execute_script(show_script) def show_elements(self, selector, by=By.CSS_SELECTOR): """ Show all elements on the page that match the selector. 
""" selector, by = self.__recalculate_selector(selector, by) selector = self.convert_to_css_selector(selector, by=by) show_script = """jQuery('%s').show(0)""" % selector self.safe_execute_script(show_script) def remove_element(self, selector, by=By.CSS_SELECTOR): """ Remove the first element on the page that matches the selector. """ selector, by = self.__recalculate_selector(selector, by) selector = self.convert_to_css_selector(selector, by=by) selector = self.__make_css_match_first_element_only(selector) remove_script = """jQuery('%s').remove()""" % selector self.safe_execute_script(remove_script) def remove_elements(self, selector, by=By.CSS_SELECTOR): """ Remove all elements on the page that match the selector. """ selector, by = self.__recalculate_selector(selector, by) selector = self.convert_to_css_selector(selector, by=by) remove_script = """jQuery('%s').remove()""" % selector self.safe_execute_script(remove_script) def ad_block(self): """ Block ads that appear on the current web page. """ self.wait_for_ready_state_complete() from seleniumbase.config import ad_block_list for css_selector in ad_block_list.AD_BLOCK_LIST: css_selector = re.escape(css_selector) css_selector = self.__escape_quotes_if_needed(css_selector) script = ("""var $elements = document.querySelectorAll('%s'); var index = 0, length = $elements.length; for(; index < length; index++){ $elements[index].remove();}""" % css_selector) try: self.execute_script(script) except Exception: pass # Don't fail test if ad_blocking fails def block_ads(self): """ Same as ad_block() """ self.ad_block() def get_domain_url(self, url): return page_utils.get_domain_url(url) def get_beautiful_soup(self, source=None): """ BeautifulSoup is a toolkit for dissecting an HTML document and extracting what you need. It's great for screen-scraping! """ from bs4 import BeautifulSoup if not source: self.wait_for_ready_state_complete() source = self.get_page_source() soup = BeautifulSoup(source, "html.parser") return soup def get_unique_links(self): """ Get all unique links in the html of the page source. Page links include those obtained from: "a"->"href", "img"->"src", "link"->"href", and "script"->"src". """ page_url = self.get_current_url() soup = self.get_beautiful_soup(self.get_page_source()) links = page_utils._get_unique_links(page_url, soup) return links def get_link_status_code(self, link, allow_redirects=False, timeout=5): """ Get the status code of a link. If the timeout is exceeded, will return a 404. For a list of available status codes, see: https://en.wikipedia.org/wiki/List_of_HTTP_status_codes """ status_code = page_utils._get_link_status_code( link, allow_redirects=allow_redirects, timeout=timeout) return status_code def assert_link_status_code_is_not_404(self, link): status_code = str(self.get_link_status_code(link)) bad_link_str = 'Error: "%s" returned a 404!' % link self.assertNotEqual(status_code, "404", bad_link_str) def assert_no_404_errors(self, multithreaded=True): """ Assert no 404 errors from page links obtained from: "a"->"href", "img"->"src", "link"->"href", and "script"->"src". 
""" all_links = self.get_unique_links() links = [] for link in all_links: if "javascript:" not in link and "mailto:" not in link: links.append(link) if multithreaded: from multiprocessing.dummy import Pool as ThreadPool pool = ThreadPool(10) pool.map(self.assert_link_status_code_is_not_404, links) pool.close() pool.join() else: for link in links: self.assert_link_status_code_is_not_404(link) if self.demo_mode: messenger_post = ("ASSERT NO 404 ERRORS") self.__highlight_with_assert_success(messenger_post, "html") def print_unique_links_with_status_codes(self): """ Finds all unique links in the html of the page source and then prints out those links with their status codes. Format: ["link" -> "status_code"] (per line) Page links include those obtained from: "a"->"href", "img"->"src", "link"->"href", and "script"->"src". """ page_url = self.get_current_url() soup = self.get_beautiful_soup(self.get_page_source()) page_utils._print_unique_links_with_status_codes(page_url, soup) def __fix_unicode_conversion(self, text): """ Fixing Chinese characters when converting from PDF to HTML. """ if sys.version_info[0] < 3: # Update encoding for Python 2 users reload(sys) # noqa sys.setdefaultencoding('utf8') text = text.replace(u'\u2f8f', u'\u884c') text = text.replace(u'\u2f45', u'\u65b9') text = text.replace(u'\u2f08', u'\u4eba') text = text.replace(u'\u2f70', u'\u793a') return text def get_pdf_text(self, pdf, page=None, maxpages=None, password=None, codec='utf-8', wrap=False, nav=False, override=False): """ Gets text from a PDF file. PDF can be either a URL or a file path on the local file system. @Params pdf - The URL or file path of the PDF file. page - The page number (or a list of page numbers) of the PDF. If a page number is provided, looks only at that page. (1 is the first page, 2 is the second page, etc.) If no page number is provided, returns all PDF text. maxpages - Instead of providing a page number, you can provide the number of pages to use from the beginning. password - If the PDF is password-protected, enter it here. codec - The compression format for character encoding. (The default codec used by this method is 'utf-8'.) wrap - Replaces ' \n' with ' ' so that individual sentences from a PDF don't get broken up into seperate lines when getting converted into text format. nav - If PDF is a URL, navigates to the URL in the browser first. (Not needed because the PDF will be downloaded anyway.) override - If the PDF file to be downloaded already exists in the downloaded_files/ folder, that PDF will be used instead of downloading it again. """ import warnings with warnings.catch_warnings(): warnings.simplefilter("ignore", category=UserWarning) from pdfminer.high_level import extract_text if not password: password = '' if not maxpages: maxpages = 0 if not pdf.lower().endswith('.pdf'): raise Exception("%s is not a PDF file! (Expecting a .pdf)" % pdf) file_path = None if page_utils.is_valid_url(pdf): if nav: if self.get_current_url() != pdf: self.open(pdf) file_name = pdf.split('/')[-1] file_path = self.get_downloads_folder() + '/' + file_name if not os.path.exists(file_path): self.download_file(pdf) elif override: self.download_file(pdf) else: if not os.path.exists(pdf): raise Exception("%s is not a valid URL or file path!" 
% pdf) file_path = os.path.abspath(pdf) page_search = None # (Pages are delimited by '\x0c') if type(page) is list: pages = page page_search = [] for page in pages: page_search.append(page - 1) elif type(page) is int: page = page - 1 if page < 0: page = 0 page_search = [page] else: page_search = None pdf_text = extract_text( file_path, password='', page_numbers=page_search, maxpages=maxpages, caching=False, codec=codec) pdf_text = self.__fix_unicode_conversion(pdf_text) if wrap: pdf_text = pdf_text.replace(' \n', ' ') return pdf_text def assert_pdf_text(self, pdf, text, page=None, maxpages=None, password=None, codec='utf-8', wrap=True, nav=False, override=False): """ Asserts text in a PDF file. PDF can be either a URL or a file path on the local file system. @Params pdf - The URL or file path of the PDF file. text - The expected text to verify in the PDF. page - The page number of the PDF to use (optional). If a page number is provided, looks only at that page. (1 is the first page, 2 is the second page, etc.) If no page number is provided, looks at all the pages. maxpages - Instead of providing a page number, you can provide the number of pages to use from the beginning. password - If the PDF is password-protected, enter it here. codec - The compression format for character encoding. (The default codec used by this method is 'utf-8'.) wrap - Replaces ' \n' with ' ' so that individual sentences from a PDF don't get broken up into seperate lines when getting converted into text format. nav - If PDF is a URL, navigates to the URL in the browser first. (Not needed because the PDF will be downloaded anyway.) override - If the PDF file to be downloaded already exists in the downloaded_files/ folder, that PDF will be used instead of downloading it again. """ text = self.__fix_unicode_conversion(text) if not codec: codec = 'utf-8' pdf_text = self.get_pdf_text( pdf, page=page, maxpages=maxpages, password=password, codec=codec, wrap=wrap, nav=nav, override=override) if type(page) is int: if text not in pdf_text: raise Exception("PDF [%s] is missing expected text [%s] on " "page [%s]!" % (pdf, text, page)) else: if text not in pdf_text: raise Exception("PDF [%s] is missing expected text [%s]!" "" % (pdf, text)) return True def create_folder(self, folder): """ Creates a folder of the given name if it doesn't already exist. """ if folder.endswith("/"): folder = folder[:-1] if len(folder) < 1: raise Exception("Minimum folder name length = 1.") if not os.path.exists(folder): try: os.makedirs(folder) except Exception: pass def choose_file(self, selector, file_path, by=By.CSS_SELECTOR, timeout=None): """ This method is used to choose a file to upload to a website. It works by populating a file-chooser "input" field of type="file". A relative file_path will get converted into an absolute file_path. Example usage: self.choose_file('input[type="file"]', "my_dir/my_file.txt") """ if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) if page_utils.is_xpath_selector(selector): by = By.XPATH abs_path = os.path.abspath(file_path) self.add_text(selector, abs_path, by=by, timeout=timeout) def save_element_as_image_file(self, selector, file_name, folder=None): """ Take a screenshot of an element and save it as an image file. If no folder is specified, will save it to the current folder. 
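        Example (the selector, file name, and folder are placeholders):
            self.save_element_as_image_file("#logo", "logo_shot", "screenshots")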
""" element = self.wait_for_element_visible(selector) element_png = element.screenshot_as_png if len(file_name.split('.')[0]) < 1: raise Exception("Error: file_name length must be > 0.") if not file_name.endswith(".png"): file_name = file_name + ".png" image_file_path = None if folder: if folder.endswith("/"): folder = folder[:-1] if len(folder) > 0: self.create_folder(folder) image_file_path = "%s/%s" % (folder, file_name) if not image_file_path: image_file_path = file_name with open(image_file_path, "wb") as file: file.write(element_png) def download_file(self, file_url, destination_folder=None): """ Downloads the file from the url to the destination folder. If no destination folder is specified, the default one is used. (The default downloads folder = "./downloaded_files") """ if not destination_folder: destination_folder = constants.Files.DOWNLOADS_FOLDER if not os.path.exists(destination_folder): os.makedirs(destination_folder) page_utils._download_file_to(file_url, destination_folder) def save_file_as(self, file_url, new_file_name, destination_folder=None): """ Similar to self.download_file(), except that you get to rename the file being downloaded to whatever you want. """ if not destination_folder: destination_folder = constants.Files.DOWNLOADS_FOLDER page_utils._download_file_to( file_url, destination_folder, new_file_name) def save_data_as(self, data, file_name, destination_folder=None): """ Saves the data specified to a file of the name specified. If no destination folder is specified, the default one is used. (The default downloads folder = "./downloaded_files") """ if not destination_folder: destination_folder = constants.Files.DOWNLOADS_FOLDER page_utils._save_data_as(data, destination_folder, file_name) def get_downloads_folder(self): """ Returns the OS path of the Downloads Folder. (Works with Chrome and Firefox only, for now.) """ return download_helper.get_downloads_folder() def get_path_of_downloaded_file(self, file): """ Returns the OS path of the downloaded file. """ return os.path.join(self.get_downloads_folder(), file) def is_downloaded_file_present(self, file): """ Checks if the file exists in the Downloads Folder. """ return os.path.exists(self.get_path_of_downloaded_file(file)) def assert_downloaded_file(self, file, timeout=None): """ Asserts that the file exists in the Downloads Folder. """ if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) start_ms = time.time() * 1000.0 stop_ms = start_ms + (timeout * 1000.0) for x in range(int(timeout)): shared_utils.check_if_time_limit_exceeded() try: self.assertTrue( os.path.exists(self.get_path_of_downloaded_file(file)), "File [%s] was not found in the downloads folder [%s]!" "" % (file, self.get_downloads_folder())) if self.demo_mode: messenger_post = ("ASSERT DOWNLOADED FILE: [%s]" % file) js_utils.post_messenger_success_message( self.driver, messenger_post, self.message_duration) return except Exception: now_ms = time.time() * 1000.0 if now_ms >= stop_ms: break time.sleep(1) self.assertTrue( os.path.exists(self.get_path_of_downloaded_file(file)), "File [%s] was not found in the downloads folder [%s] " "after %s seconds! 
(Or the download didn't complete!)" "" % (file, self.get_downloads_folder(), timeout)) if self.demo_mode: messenger_post = ("ASSERT DOWNLOADED FILE: [%s]" % file) js_utils.post_messenger_success_message( self.driver, messenger_post, self.message_duration) def assert_true(self, expr, msg=None): """ Asserts that the expression is True. Will raise an exception if the statement if False. """ self.assertTrue(expr, msg=msg) def assert_false(self, expr, msg=None): """ Asserts that the expression is False. Will raise an exception if the statement if True. """ self.assertFalse(expr, msg=msg) def assert_equal(self, first, second, msg=None): """ Asserts that the two values are equal. Will raise an exception if the values are not equal. """ self.assertEqual(first, second, msg=msg) def assert_not_equal(self, first, second, msg=None): """ Asserts that the two values are not equal. Will raise an exception if the values are equal. """ self.assertNotEqual(first, second, msg=msg) def assert_raises(self, *args, **kwargs): """ Asserts that the following block of code raises an exception. Will raise an exception if the block of code has no exception. Usage Example => # Verify that the expected exception is raised. with self.assert_raises(Exception): raise Exception("Expected Exception!") """ self.assertRaises(*args, **kwargs) def assert_title(self, title): """ Asserts that the web page title matches the expected title. """ expected = title actual = self.get_page_title() self.assertEqual(expected, actual, "Expected page title [%s] " "does not match the actual page title [%s]!" "" % (expected, actual)) if self.demo_mode: messenger_post = ("ASSERT TITLE = {%s}" % title) self.__highlight_with_assert_success(messenger_post, "html") def assert_no_js_errors(self): """ Asserts that there are no JavaScript "SEVERE"-level page errors. Works ONLY for Chrome (non-headless) and Chrome-based browsers. Does NOT work on Firefox, Edge, IE, and some other browsers: * See https://github.com/SeleniumHQ/selenium/issues/1161 Based on the following Stack Overflow solution: * https://stackoverflow.com/a/41150512/7058266 """ time.sleep(0.1) # May take a moment for errors to appear after loads. try: browser_logs = self.driver.get_log('browser') except (ValueError, WebDriverException): # If unable to get browser logs, skip the assert and return. return messenger_library = "//cdnjs.cloudflare.com/ajax/libs/messenger" errors = [] for entry in browser_logs: if entry['level'] == 'SEVERE': if messenger_library not in entry['message']: # Add errors if not caused by SeleniumBase dependencies errors.append(entry) if len(errors) > 0: current_url = self.get_current_url() raise Exception( "JavaScript errors found on %s => %s" % (current_url, errors)) if self.demo_mode: if (self.browser == 'chrome' or self.browser == 'edge'): messenger_post = ("ASSERT NO JS ERRORS") self.__highlight_with_assert_success(messenger_post, "html") def __activate_html_inspector(self): self.wait_for_ready_state_complete() time.sleep(0.05) js_utils.activate_html_inspector(self.driver) def inspect_html(self): """ Inspects the Page HTML with HTML-Inspector. (https://github.com/philipwalton/html-inspector) (https://cdnjs.com/libraries/html-inspector) Prints the results and also returns them. 
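        Example (Chrome-only; results are also printed to the console):
            results = self.inspect_html()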
""" self.__activate_html_inspector() script = ("""HTMLInspector.inspect();""") self.execute_script(script) time.sleep(0.1) browser_logs = [] try: browser_logs = self.driver.get_log('browser') except (ValueError, WebDriverException): # If unable to get browser logs, skip the assert and return. return("(Unable to Inspect HTML! -> Only works on Chrome!)") messenger_library = "//cdnjs.cloudflare.com/ajax/libs/messenger" url = self.get_current_url() header = '\n* HTML Inspection Results: %s' % url results = [header] row_count = 0 for entry in browser_logs: message = entry['message'] if "0:6053 " in message: message = message.split("0:6053")[1] message = message.replace("\\u003C", "<") if message.startswith(' "') and message.count('"') == 2: message = message.split('"')[1] message = "X - " + message if messenger_library not in message: if message not in results: results.append(message) row_count += 1 if row_count > 0: results.append('* (See the Console output for details!)') else: results.append('* (No issues detected!)') results = '\n'.join(results) print(results) return(results) def get_google_auth_password(self, totp_key=None): """ Returns a time-based one-time password based on the Google Authenticator password algorithm. Works with Authy. If "totp_key" is not specified, defaults to using the one provided in seleniumbase/config/settings.py Google Auth passwords expire and change at 30-second intervals. If the fetched password expires in the next 1.5 seconds, waits for a new one before returning it (may take up to 1.5 seconds). See https://pyotp.readthedocs.io/en/latest/ for details. """ import pyotp if not totp_key: totp_key = settings.TOTP_KEY epoch_interval = time.time() / 30.0 cycle_lifespan = float(epoch_interval) - int(epoch_interval) if float(cycle_lifespan) > 0.95: # Password expires in the next 1.5 seconds. Wait for a new one. for i in range(30): time.sleep(0.05) epoch_interval = time.time() / 30.0 cycle_lifespan = float(epoch_interval) - int(epoch_interval) if not float(cycle_lifespan) > 0.95: # The new password cycle has begun break totp = pyotp.TOTP(totp_key) return str(totp.now()) def convert_xpath_to_css(self, xpath): return xpath_to_css.convert_xpath_to_css(xpath) def convert_to_css_selector(self, selector, by): """ This method converts a selector to a CSS_SELECTOR. jQuery commands require a CSS_SELECTOR for finding elements. This method should only be used for jQuery/JavaScript actions. Pure JavaScript doesn't support using a:contains("LINK_TEXT"). """ if by == By.CSS_SELECTOR: return selector elif by == By.ID: return '#%s' % selector elif by == By.CLASS_NAME: return '.%s' % selector elif by == By.NAME: return '[name="%s"]' % selector elif by == By.TAG_NAME: return selector elif by == By.XPATH: return self.convert_xpath_to_css(selector) elif by == By.LINK_TEXT: return 'a:contains("%s")' % selector elif by == By.PARTIAL_LINK_TEXT: return 'a:contains("%s")' % selector else: raise Exception( "Exception: Could not convert {%s}(by=%s) to CSS_SELECTOR!" % ( selector, by)) def set_value(self, selector, new_value, by=By.CSS_SELECTOR, timeout=None): """ This method uses JavaScript to update a text field. 
""" if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) if page_utils.is_xpath_selector(selector): by = By.XPATH orginal_selector = selector css_selector = self.convert_to_css_selector(selector, by=by) self.__demo_mode_highlight_if_active(orginal_selector, by) if not self.demo_mode: self.scroll_to(orginal_selector, by=by, timeout=timeout) value = re.escape(new_value) value = self.__escape_quotes_if_needed(value) css_selector = re.escape(css_selector) css_selector = self.__escape_quotes_if_needed(css_selector) script = ("""document.querySelector('%s').value='%s';""" % (css_selector, value)) self.execute_script(script) if new_value.endswith('\n'): element = self.wait_for_element_present( orginal_selector, by=by, timeout=timeout) element.send_keys(Keys.RETURN) if settings.WAIT_FOR_RSC_ON_PAGE_LOADS: self.wait_for_ready_state_complete() self.__demo_mode_pause_if_active() def js_update_text(self, selector, new_value, by=By.CSS_SELECTOR, timeout=None): """ Same as self.set_value() """ if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) self.set_value( selector, new_value, by=by, timeout=timeout) def jquery_update_text(self, selector, new_value, by=By.CSS_SELECTOR, timeout=None): """ This method uses jQuery to update a text field. If the new_value string ends with the newline character, WebDriver will finish the call, which simulates pressing {Enter/Return} after the text is entered. """ if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) if page_utils.is_xpath_selector(selector): by = By.XPATH element = self.wait_for_element_visible( selector, by=by, timeout=timeout) self.__demo_mode_highlight_if_active(selector, by) self.scroll_to(selector, by=by) selector = self.convert_to_css_selector(selector, by=by) selector = self.__make_css_match_first_element_only(selector) selector = self.__escape_quotes_if_needed(selector) new_value = re.escape(new_value) new_value = self.__escape_quotes_if_needed(new_value) update_text_script = """jQuery('%s').val('%s')""" % ( selector, new_value) self.safe_execute_script(update_text_script) if new_value.endswith('\n'): element.send_keys('\n') self.__demo_mode_pause_if_active() def set_time_limit(self, time_limit): if time_limit: try: sb_config.time_limit = float(time_limit) except Exception: sb_config.time_limit = None else: sb_config.time_limit = None if sb_config.time_limit and sb_config.time_limit > 0: sb_config.time_limit_ms = int(sb_config.time_limit * 1000.0) self.time_limit = sb_config.time_limit else: self.time_limit = None sb_config.time_limit = None sb_config.time_limit_ms = None def skip(self, reason=""): """ Mark the test as Skipped. 
""" self.skipTest(reason) ############ def add_css_link(self, css_link): js_utils.add_css_link(self.driver, css_link) def add_js_link(self, js_link): js_utils.add_js_link(self.driver, js_link) def add_css_style(self, css_style): js_utils.add_css_style(self.driver, css_style) def add_js_code_from_link(self, js_link): js_utils.add_js_code_from_link(self.driver, js_link) def add_js_code(self, js_code): js_utils.add_js_code(self.driver, js_code) def add_meta_tag(self, http_equiv=None, content=None): js_utils.add_meta_tag( self.driver, http_equiv=http_equiv, content=content) ############ def create_tour(self, name=None, theme=None): """ Creates a tour for a website. By default, the Shepherd JavaScript Library is used with the Shepherd "Light" / "Arrows" theme. @Params name - If creating multiple tours at the same time, use this to select the tour you wish to add steps to. theme - Sets the default theme for the tour. Choose from "light"/"arrows", "dark", "default", "square", and "square-dark". ("arrows" is used if None is selected.) Alternatively, you may use a different JavaScript Library as the theme. Those include "IntroJS", "Bootstrap", and "Hopscotch". """ if not name: name = "default" if theme: if theme.lower() == "bootstrap": self.create_bootstrap_tour(name) return elif theme.lower() == "hopscotch": self.create_hopscotch_tour(name) return elif theme.lower() == "intro": self.create_introjs_tour(name) return elif theme.lower() == "introjs": self.create_introjs_tour(name) return elif theme.lower() == "shepherd": self.create_shepherd_tour(name, theme="light") return else: self.create_shepherd_tour(name, theme) else: self.create_shepherd_tour(name, theme="light") def create_shepherd_tour(self, name=None, theme=None): """ Creates a Shepherd JS website tour. @Params name - If creating multiple tours at the same time, use this to select the tour you wish to add steps to. theme - Sets the default theme for the tour. Choose from "light"/"arrows", "dark", "default", "square", and "square-dark". ("light" is used if None is selected.) """ shepherd_theme = "shepherd-theme-arrows" if theme: if theme.lower() == "default": shepherd_theme = "shepherd-theme-default" elif theme.lower() == "dark": shepherd_theme = "shepherd-theme-dark" elif theme.lower() == "light": shepherd_theme = "shepherd-theme-arrows" elif theme.lower() == "arrows": shepherd_theme = "shepherd-theme-arrows" elif theme.lower() == "square": shepherd_theme = "shepherd-theme-square" elif theme.lower() == "square-dark": shepherd_theme = "shepherd-theme-square-dark" if not name: name = "default" new_tour = ( """ // Shepherd Tour var tour = new Shepherd.Tour({ defaults: { classes: '%s', scrollTo: true } }); var allButtons = { skip: { text: "Skip", action: tour.cancel, classes: 'shepherd-button-secondary tour-button-left' }, back: { text: "Back", action: tour.back, classes: 'shepherd-button-secondary' }, next: { text: "Next", action: tour.next, classes: 'shepherd-button-primary tour-button-right' }, }; var firstStepButtons = [allButtons.skip, allButtons.next]; var midTourButtons = [allButtons.back, allButtons.next]; """ % shepherd_theme) self._tour_steps[name] = [] self._tour_steps[name].append(new_tour) def create_bootstrap_tour(self, name=None): """ Creates a Bootstrap tour for a website. @Params name - If creating multiple tours at the same time, use this to select the tour you wish to add steps to. 
""" if not name: name = "default" new_tour = ( """ // Bootstrap Tour var tour = new Tour({ }); tour.addSteps([ """) self._tour_steps[name] = [] self._tour_steps[name].append(new_tour) def create_hopscotch_tour(self, name=None): """ Creates an Hopscotch tour for a website. @Params name - If creating multiple tours at the same time, use this to select the tour you wish to add steps to. """ if not name: name = "default" new_tour = ( """ // Hopscotch Tour var tour = { id: "hopscotch_tour", steps: [ """) self._tour_steps[name] = [] self._tour_steps[name].append(new_tour) def create_introjs_tour(self, name=None): """ Creates an IntroJS tour for a website. @Params name - If creating multiple tours at the same time, use this to select the tour you wish to add steps to. """ if not name: name = "default" new_tour = ( """ // IntroJS Tour function startIntro(){ var intro = introJs(); intro.setOptions({ steps: [ """) self._tour_steps[name] = [] self._tour_steps[name].append(new_tour) def add_tour_step(self, message, selector=None, name=None, title=None, theme=None, alignment=None, duration=None): """ Allows the user to add tour steps for a website. @Params message - The message to display. selector - The CSS Selector of the Element to attach to. name - If creating multiple tours at the same time, use this to select the tour you wish to add steps to. title - Additional header text that appears above the message. theme - (NON-Bootstrap Tours ONLY) The styling of the tour step. Choose from "light"/"arrows", "dark", "default", "square", and "square-dark". ("arrows" is used if None is selected.) alignment - Choose from "top", "bottom", "left", and "right". ("top" is the default alignment). duration - (Bootstrap Tours ONLY) The amount of time, in seconds, before automatically advancing to the next tour step. """ if not selector: selector = "html" if page_utils.is_xpath_selector(selector): selector = self.convert_to_css_selector(selector, By.XPATH) selector = self.__escape_quotes_if_needed(selector) if not name: name = "default" if name not in self._tour_steps: # By default, will create an IntroJS tour if no tours exist self.create_tour(name=name, theme="introjs") if not title: title = "" title = self.__escape_quotes_if_needed(title) if message: message = self.__escape_quotes_if_needed(message) else: message = "" if not alignment or ( alignment not in ["top", "bottom", "left", "right"]): if "Hopscotch" not in self._tour_steps[name][0]: alignment = "top" else: alignment = "bottom" if "Bootstrap" in self._tour_steps[name][0]: self.__add_bootstrap_tour_step( message, selector=selector, name=name, title=title, alignment=alignment, duration=duration) elif "Hopscotch" in self._tour_steps[name][0]: self.__add_hopscotch_tour_step( message, selector=selector, name=name, title=title, alignment=alignment) elif "IntroJS" in self._tour_steps[name][0]: self.__add_introjs_tour_step( message, selector=selector, name=name, title=title, alignment=alignment) else: self.__add_shepherd_tour_step( message, selector=selector, name=name, title=title, theme=theme, alignment=alignment) def __add_shepherd_tour_step(self, message, selector=None, name=None, title=None, theme=None, alignment=None): """ Allows the user to add tour steps for a website. @Params message - The message to display. selector - The CSS Selector of the Element to attach to. name - If creating multiple tours at the same time, use this to select the tour you wish to add steps to. title - Additional header text that appears above the message. 
theme - (NON-Bootstrap Tours ONLY) The styling of the tour step. Choose from "light"/"arrows", "dark", "default", "square", and "square-dark". ("arrows" is used if None is selected.) alignment - Choose from "top", "bottom", "left", and "right". ("top" is the default alignment). """ if theme == "default": shepherd_theme = "shepherd-theme-default" elif theme == "dark": shepherd_theme = "shepherd-theme-dark" elif theme == "light": shepherd_theme = "shepherd-theme-arrows" elif theme == "arrows": shepherd_theme = "shepherd-theme-arrows" elif theme == "square": shepherd_theme = "shepherd-theme-square" elif theme == "square-dark": shepherd_theme = "shepherd-theme-square-dark" else: shepherd_base_theme = re.search( r"[\S\s]+classes: '([\S\s]+)',[\S\s]+", self._tour_steps[name][0]).group(1) shepherd_theme = shepherd_base_theme shepherd_classes = shepherd_theme if selector == "html": shepherd_classes += " shepherd-orphan" buttons = "firstStepButtons" if len(self._tour_steps[name]) > 1: buttons = "midTourButtons" step = (""" tour.addStep('%s', { title: '%s', classes: '%s', text: '%s', attachTo: {element: '%s', on: '%s'}, buttons: %s, advanceOn: '.docs-link click' });""" % ( name, title, shepherd_classes, message, selector, alignment, buttons)) self._tour_steps[name].append(step) def __add_bootstrap_tour_step(self, message, selector=None, name=None, title=None, alignment=None, duration=None): """ Allows the user to add tour steps for a website. @Params message - The message to display. selector - The CSS Selector of the Element to attach to. name - If creating multiple tours at the same time, use this to select the tour you wish to add steps to. title - Additional header text that appears above the message. alignment - Choose from "top", "bottom", "left", and "right". ("top" is the default alignment). duration - (Bootstrap Tours ONLY) The amount of time, in seconds, before automatically advancing to the next tour step. """ if selector != "html": selector = self.__make_css_match_first_element_only(selector) element_row = "element: '%s'," % selector else: element_row = "" if not duration: duration = "0" else: duration = str(float(duration) * 1000.0) step = ("""{ %s title: '%s', content: '%s', orphan: true, placement: 'auto %s', smartPlacement: true, duration: %s, },""" % (element_row, title, message, alignment, duration)) self._tour_steps[name].append(step) def __add_hopscotch_tour_step(self, message, selector=None, name=None, title=None, alignment=None): """ Allows the user to add tour steps for a website. @Params message - The message to display. selector - The CSS Selector of the Element to attach to. name - If creating multiple tours at the same time, use this to select the tour you wish to add steps to. title - Additional header text that appears above the message. alignment - Choose from "top", "bottom", "left", and "right". ("bottom" is the default alignment). """ arrow_offset_row = None if not selector or selector == "html": selector = "head" alignment = "bottom" arrow_offset_row = "arrowOffset: '200'," else: arrow_offset_row = "" step = ("""{ target: '%s', title: '%s', content: '%s', %s showPrevButton: 'true', scrollDuration: '550', placement: '%s'}, """ % (selector, title, message, arrow_offset_row, alignment)) self._tour_steps[name].append(step) def __add_introjs_tour_step(self, message, selector=None, name=None, title=None, alignment=None): """ Allows the user to add tour steps for a website. @Params message - The message to display. selector - The CSS Selector of the Element to attach to. 
name - If creating multiple tours at the same time, use this to select the tour you wish to add steps to. title - Additional header text that appears above the message. alignment - Choose from "top", "bottom", "left", and "right". ("top" is the default alignment). """ if selector != "html": element_row = "element: '%s'," % selector else: element_row = "" if title: message = "<center><b>" + title + "</b></center><hr>" + message message = '<font size=\"3\" color=\"#33475B\">' + message + '</font>' step = ("""{%s intro: '%s', position: '%s'}, """ % (element_row, message, alignment)) self._tour_steps[name].append(step) def play_tour(self, name=None, interval=0): """ Plays a tour on the current website. @Params name - If creating multiple tours at the same time, use this to select the tour you wish to add steps to. interval - The delay time between autoplaying tour steps. If set to 0 (default), the tour is fully manual control. """ if self.headless: return # Tours should not run in headless mode. if not name: name = "default" if name not in self._tour_steps: raise Exception("Tour {%s} does not exist!" % name) if "Bootstrap" in self._tour_steps[name][0]: tour_helper.play_bootstrap_tour( self.driver, self._tour_steps, self.browser, self.message_duration, name=name, interval=interval) elif "Hopscotch" in self._tour_steps[name][0]: tour_helper.play_hopscotch_tour( self.driver, self._tour_steps, self.browser, self.message_duration, name=name, interval=interval) elif "IntroJS" in self._tour_steps[name][0]: tour_helper.play_introjs_tour( self.driver, self._tour_steps, self.browser, self.message_duration, name=name, interval=interval) else: # "Shepherd" tour_helper.play_shepherd_tour( self.driver, self._tour_steps, self.message_duration, name=name, interval=interval) def export_tour(self, name=None, filename="my_tour.js", url=None): """ Exports a tour as a JS file. You can call self.export_tour() anywhere where you would normally use self.play_tour() to play a tour. It will include necessary resources as well, such as jQuery. You'll be able to copy the tour directly into the Console of any web browser to play the tour outside of SeleniumBase runs. @Params name - If creating multiple tours at the same time, use this to select the tour you wish to add steps to. filename - The name of the JavaScript file that you wish to save the tour to. """ if not url: url = self.get_current_url() tour_helper.export_tour( self._tour_steps, name=name, filename=filename, url=url) def activate_jquery_confirm(self): """ See https://craftpip.github.io/jquery-confirm/ for usage. """ js_utils.activate_jquery_confirm(self.driver) self.wait_for_ready_state_complete() def activate_messenger(self): js_utils.activate_messenger(self.driver) self.wait_for_ready_state_complete() def set_messenger_theme(self, theme="default", location="default", max_messages="default"): """ Sets a theme for posting messages. Themes: ["flat", "future", "block", "air", "ice"] Locations: ["top_left", "top_center", "top_right", "bottom_left", "bottom_center", "bottom_right"] max_messages is the limit of concurrent messages to display. """ if not theme: theme = "default" # "future" if not location: location = "default" # "bottom_right" if not max_messages: max_messages = "default" # "8" js_utils.set_messenger_theme( self.driver, theme=theme, location=location, max_messages=max_messages) def post_message(self, message, duration=None, pause=True, style="info"): """ Post a message on the screen with Messenger. Arguments: message: The message to display. 
duration: The time until the message vanishes. (Default: 2.55s) pause: If True, the program waits until the message completes. style: "info", "success", or "error". You can also post messages by using => self.execute_script('Messenger().post("My Message")') """ if not duration: if not self.message_duration: duration = settings.DEFAULT_MESSAGE_DURATION else: duration = self.message_duration js_utils.post_message( self.driver, message, duration, style=style) if pause: duration = float(duration) + 0.15 time.sleep(float(duration)) def post_success_message(self, message, duration=None, pause=True): """ Post a success message on the screen with Messenger. Arguments: message: The success message to display. duration: The time until the message vanishes. (Default: 2.55s) pause: If True, the program waits until the message completes. """ if not duration: if not self.message_duration: duration = settings.DEFAULT_MESSAGE_DURATION else: duration = self.message_duration js_utils.post_message( self.driver, message, duration, style="success") if pause: duration = float(duration) + 0.15 time.sleep(float(duration)) def post_error_message(self, message, duration=None, pause=True): """ Post an error message on the screen with Messenger. Arguments: message: The error message to display. duration: The time until the message vanishes. (Default: 2.55s) pause: If True, the program waits until the message completes. """ if not duration: if not self.message_duration: duration = settings.DEFAULT_MESSAGE_DURATION else: duration = self.message_duration js_utils.post_message( self.driver, message, duration, style="error") if pause: duration = float(duration) + 0.15 time.sleep(float(duration)) ############ def generate_referral(self, start_page, destination_page): """ This method opens the start_page, creates a referral link there, and clicks on that link, which goes to the destination_page. (This generates real traffic for testing analytics software.) """ if not page_utils.is_valid_url(destination_page): raise Exception( "Exception: destination_page {%s} is not a valid URL!" % destination_page) if start_page: if not page_utils.is_valid_url(start_page): raise Exception( "Exception: start_page {%s} is not a valid URL! " "(Use an empty string or None to start from current page.)" % start_page) self.open(start_page) time.sleep(0.08) self.wait_for_ready_state_complete() referral_link = ('''<body>''' '''<a class='analytics referral test' href='%s' ''' '''style='font-family: Arial,sans-serif; ''' '''font-size: 30px; color: #18a2cd'>''' '''Magic Link Button</a></body>''' % destination_page) self.execute_script( '''document.body.outerHTML = \"%s\"''' % referral_link) self.click( "a.analytics.referral.test", timeout=2) # Clicks generated button time.sleep(0.15) try: self.click("html") time.sleep(0.08) except Exception: pass def generate_traffic(self, start_page, destination_page, loops=1): """ Similar to generate_referral(), but can do multiple loops. """ for loop in range(loops): self.generate_referral(start_page, destination_page) time.sleep(0.05) def generate_referral_chain(self, pages): """ Use this method to chain the action of creating button links on one website page that will take you to the next page. (When you want to create a referral to a website for traffic generation without increasing the bounce rate, you'll want to visit at least one additional page on that site with a button click.) 
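            A minimal usage sketch (the URLs are only illustrative examples):
                self.generate_referral_chain(
                    ["https://example.com/", "https://example.com/pricing"])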
""" if not type(pages) is tuple and not type(pages) is list: raise Exception( "Exception: Expecting a list of website pages for chaining!") if len(pages) < 2: raise Exception( "Exception: At least two website pages required for chaining!") for page in pages: # Find out if any of the web pages are invalid before continuing if not page_utils.is_valid_url(page): raise Exception( "Exception: Website page {%s} is not a valid URL!" % page) for page in pages: self.generate_referral(None, page) def generate_traffic_chain(self, pages, loops=1): """ Similar to generate_referral_chain(), but for multiple loops. """ for loop in range(loops): self.generate_referral_chain(pages) time.sleep(0.05) ############ def wait_for_element_present(self, selector, by=By.CSS_SELECTOR, timeout=None): """ Waits for an element to appear in the HTML of a page. The element does not need be visible (it may be hidden). """ if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) selector, by = self.__recalculate_selector(selector, by) return page_actions.wait_for_element_present( self.driver, selector, by, timeout) def wait_for_element_visible(self, selector, by=By.CSS_SELECTOR, timeout=None): """ Waits for an element to appear in the HTML of a page. The element must be visible (it cannot be hidden). """ if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) selector, by = self.__recalculate_selector(selector, by) return page_actions.wait_for_element_visible( self.driver, selector, by, timeout) def wait_for_element(self, selector, by=By.CSS_SELECTOR, timeout=None): """ The shorter version of wait_for_element_visible() """ if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) return self.wait_for_element_visible(selector, by=by, timeout=timeout) def get_element(self, selector, by=By.CSS_SELECTOR, timeout=None): """ Same as wait_for_element_present() - returns the element. The element does not need be visible (it may be hidden). """ if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) return self.wait_for_element_present(selector, by=by, timeout=timeout) def assert_element_present(self, selector, by=By.CSS_SELECTOR, timeout=None): """ Similar to wait_for_element_present(), but returns nothing. Waits for an element to appear in the HTML of a page. The element does not need be visible (it may be hidden). Returns True if successful. Default timeout = SMALL_TIMEOUT. """ if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) self.wait_for_element_present(selector, by=by, timeout=timeout) return True def find_element(self, selector, by=By.CSS_SELECTOR, timeout=None): """ Same as wait_for_element_visible() - returns the element """ if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) return self.wait_for_element_visible(selector, by=by, timeout=timeout) def assert_element(self, selector, by=By.CSS_SELECTOR, timeout=None): """ Similar to wait_for_element_visible(), but returns nothing. 
As above, will raise an exception if nothing can be found. Returns True if successful. Default timeout = SMALL_TIMEOUT. """ if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) self.wait_for_element_visible(selector, by=by, timeout=timeout) if self.demo_mode: selector, by = self.__recalculate_selector(selector, by) messenger_post = "ASSERT %s: %s" % (by, selector) self.__highlight_with_assert_success(messenger_post, selector, by) return True def assert_element_visible(self, selector, by=By.CSS_SELECTOR, timeout=None): """ Same as self.assert_element() As above, will raise an exception if nothing can be found. """ if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) self.assert_element(selector, by=by, timeout=timeout) return True ############ def wait_for_text_visible(self, text, selector="html", by=By.CSS_SELECTOR, timeout=None): if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) selector, by = self.__recalculate_selector(selector, by) return page_actions.wait_for_text_visible( self.driver, text, selector, by, timeout) def wait_for_exact_text_visible(self, text, selector="html", by=By.CSS_SELECTOR, timeout=None): if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) selector, by = self.__recalculate_selector(selector, by) return page_actions.wait_for_exact_text_visible( self.driver, text, selector, by, timeout) def wait_for_text(self, text, selector="html", by=By.CSS_SELECTOR, timeout=None): """ The shorter version of wait_for_text_visible() """ if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) return self.wait_for_text_visible( text, selector, by=by, timeout=timeout) def find_text(self, text, selector="html", by=By.CSS_SELECTOR, timeout=None): """ Same as wait_for_text_visible() - returns the element """ if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) return self.wait_for_text_visible( text, selector, by=by, timeout=timeout) def assert_text_visible(self, text, selector="html", by=By.CSS_SELECTOR, timeout=None): """ Same as assert_text() """ if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) return self.assert_text(text, selector, by=by, timeout=timeout) def assert_text(self, text, selector="html", by=By.CSS_SELECTOR, timeout=None): """ Similar to wait_for_text_visible() Raises an exception if the element or the text is not found. Returns True if successful. Default timeout = SMALL_TIMEOUT. 
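            A minimal usage sketch (the text and selector are illustrative):
                self.assert_text("Welcome", "h1")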
""" if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) self.wait_for_text_visible(text, selector, by=by, timeout=timeout) if self.demo_mode: selector, by = self.__recalculate_selector(selector, by) messenger_post = ("ASSERT TEXT {%s} in %s: %s" % (text, by, selector)) self.__highlight_with_assert_success(messenger_post, selector, by) return True def assert_exact_text(self, text, selector="html", by=By.CSS_SELECTOR, timeout=None): """ Similar to assert_text(), but the text must be exact, rather than exist as a subset of the full text. (Extra whitespace at the beginning or the end doesn't count.) Raises an exception if the element or the text is not found. Returns True if successful. Default timeout = SMALL_TIMEOUT. """ if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) self.wait_for_exact_text_visible( text, selector, by=by, timeout=timeout) if self.demo_mode: selector, by = self.__recalculate_selector(selector, by) messenger_post = ("ASSERT EXACT TEXT {%s} in %s: %s" % (text, by, selector)) self.__highlight_with_assert_success(messenger_post, selector, by) return True ############ def wait_for_link_text_present(self, link_text, timeout=None): if not timeout: timeout = settings.SMALL_TIMEOUT start_ms = time.time() * 1000.0 stop_ms = start_ms + (timeout * 1000.0) for x in range(int(timeout * 5)): shared_utils.check_if_time_limit_exceeded() try: if not self.is_link_text_present(link_text): raise Exception( "Link text {%s} was not found!" % link_text) return except Exception: now_ms = time.time() * 1000.0 if now_ms >= stop_ms: break time.sleep(0.2) raise Exception( "Link text {%s} was not present after %s seconds!" % ( link_text, timeout)) def wait_for_partial_link_text_present(self, link_text, timeout=None): if not timeout: timeout = settings.SMALL_TIMEOUT start_ms = time.time() * 1000.0 stop_ms = start_ms + (timeout * 1000.0) for x in range(int(timeout * 5)): shared_utils.check_if_time_limit_exceeded() try: if not self.is_partial_link_text_present(link_text): raise Exception( "Partial Link text {%s} was not found!" % link_text) return except Exception: now_ms = time.time() * 1000.0 if now_ms >= stop_ms: break time.sleep(0.2) raise Exception( "Partial Link text {%s} was not present after %s seconds!" 
% ( link_text, timeout)) def wait_for_link_text_visible(self, link_text, timeout=None): if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) return self.wait_for_element_visible( link_text, by=By.LINK_TEXT, timeout=timeout) def wait_for_link_text(self, link_text, timeout=None): """ The shorter version of wait_for_link_text_visible() """ if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) return self.wait_for_link_text_visible(link_text, timeout=timeout) def find_link_text(self, link_text, timeout=None): """ Same as wait_for_link_text_visible() - returns the element """ if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) return self.wait_for_link_text_visible(link_text, timeout=timeout) def assert_link_text(self, link_text, timeout=None): """ Similar to wait_for_link_text_visible(), but returns nothing. As above, will raise an exception if nothing can be found. Returns True if successful. Default timeout = SMALL_TIMEOUT. """ if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) self.wait_for_link_text_visible(link_text, timeout=timeout) if self.demo_mode: messenger_post = ("ASSERT LINK TEXT {%s}." % link_text) self.__highlight_with_assert_success( messenger_post, link_text, by=By.LINK_TEXT) return True def wait_for_partial_link_text(self, partial_link_text, timeout=None): if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) return self.wait_for_element_visible( partial_link_text, by=By.PARTIAL_LINK_TEXT, timeout=timeout) def find_partial_link_text(self, partial_link_text, timeout=None): """ Same as wait_for_partial_link_text() - returns the element """ if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) return self.wait_for_partial_link_text( partial_link_text, timeout=timeout) def assert_partial_link_text(self, partial_link_text, timeout=None): """ Similar to wait_for_partial_link_text(), but returns nothing. As above, will raise an exception if nothing can be found. Returns True if successful. Default timeout = SMALL_TIMEOUT. """ if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) self.wait_for_partial_link_text(partial_link_text, timeout=timeout) if self.demo_mode: messenger_post = ( "ASSERT PARTIAL LINK TEXT {%s}." % partial_link_text) self.__highlight_with_assert_success( messenger_post, partial_link_text, by=By.PARTIAL_LINK_TEXT) return True ############ def wait_for_element_absent(self, selector, by=By.CSS_SELECTOR, timeout=None): """ Waits for an element to no longer appear in the HTML of a page. A hidden element still counts as appearing in the page HTML. If an element with "hidden" status is acceptable, use wait_for_element_not_visible() instead. 
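            A minimal usage sketch (the selector and timeout are illustrative):
                self.wait_for_element_absent("#loading-spinner", timeout=10)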
""" if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) if page_utils.is_xpath_selector(selector): by = By.XPATH return page_actions.wait_for_element_absent( self.driver, selector, by, timeout) def assert_element_absent(self, selector, by=By.CSS_SELECTOR, timeout=None): """ Similar to wait_for_element_absent() - returns nothing. As above, will raise an exception if the element stays present. Returns True if successful. Default timeout = SMALL_TIMEOUT. """ if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) self.wait_for_element_absent(selector, by=by, timeout=timeout) return True ############ def wait_for_element_not_visible(self, selector, by=By.CSS_SELECTOR, timeout=None): """ Waits for an element to no longer be visible on a page. The element can be non-existant in the HTML or hidden on the page to qualify as not visible. """ if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) selector, by = self.__recalculate_selector(selector, by) return page_actions.wait_for_element_not_visible( self.driver, selector, by, timeout) def assert_element_not_visible(self, selector, by=By.CSS_SELECTOR, timeout=None): """ Similar to wait_for_element_not_visible() - returns nothing. As above, will raise an exception if the element stays visible. Returns True if successful. Default timeout = SMALL_TIMEOUT. """ if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) self.wait_for_element_not_visible(selector, by=by, timeout=timeout) return True ############ def wait_for_text_not_visible(self, text, selector="html", by=By.CSS_SELECTOR, timeout=None): if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) selector, by = self.__recalculate_selector(selector, by) return page_actions.wait_for_text_not_visible( self.driver, text, selector, by, timeout) def assert_text_not_visible(self, text, selector="html", by=By.CSS_SELECTOR, timeout=None): """ Similar to wait_for_text_not_visible() Raises an exception if the element or the text is not found. Returns True if successful. Default timeout = SMALL_TIMEOUT. 
""" if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) self.wait_for_text_not_visible(text, selector, by=by, timeout=timeout) ############ def wait_for_and_accept_alert(self, timeout=None): if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) return page_actions.wait_for_and_accept_alert(self.driver, timeout) def wait_for_and_dismiss_alert(self, timeout=None): if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) return page_actions.wait_for_and_dismiss_alert(self.driver, timeout) def wait_for_and_switch_to_alert(self, timeout=None): if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) return page_actions.wait_for_and_switch_to_alert(self.driver, timeout) ############ def __assert_eq(self, *args, **kwargs): """ Minified assert_equal() using only the list diff. """ minified_exception = None try: self.assertEqual(*args, **kwargs) except Exception as e: str_e = str(e) minified_exception = "\nAssertionError:\n" lines = str_e.split('\n') countdown = 3 countdown_on = False for line in lines: if countdown_on: minified_exception += line + '\n' countdown = countdown - 1 if countdown == 0: countdown_on = False elif line.startswith('F'): countdown_on = True countdown = 3 minified_exception += line + '\n' elif line.startswith('+') or line.startswith('-'): minified_exception += line + '\n' elif line.startswith('?'): minified_exception += line + '\n' elif line.strip().startswith('*'): minified_exception += line + '\n' if minified_exception: raise Exception(minified_exception) def check_window(self, name="default", level=0, baseline=False): """ *** Automated Visual Testing with SeleniumBase *** The first time a test calls self.check_window() for a unique "name" parameter provided, it will set a visual baseline, meaning that it creates a folder, saves the URL to a file, saves the current window screenshot to a file, and creates the following three files with the listed data saved: tags_level1.txt -> HTML tags from the window tags_level2.txt -> HTML tags + attributes from the window tags_level3.txt -> HTML tags + attributes/values from the window Baseline folders are named based on the test name and the name parameter passed to self.check_window(). The same test can store multiple baseline folders. If the baseline is being set/reset, the "level" doesn't matter. After the first run of self.check_window(), it will compare the HTML tags of the latest window to the one from the initial run. Here's how the level system works: * level=0 -> DRY RUN ONLY - Will perform a comparison to the baseline, and print out any differences that are found, but won't fail the test even if differences exist. * level=1 -> HTML tags are compared to tags_level1.txt * level=2 -> HTML tags are compared to tags_level1.txt and HTML tags/attributes are compared to tags_level2.txt * level=3 -> HTML tags are compared to tags_level1.txt and HTML tags + attributes are compared to tags_level2.txt and HTML tags + attributes/values are compared to tags_level3.txt As shown, Level-3 is the most strict, Level-1 is the least strict. 
If the comparisons from the latest window to the existing baseline don't match, the current test will fail, except for Level-0 tests. You can reset the visual baseline on the command line by using: --visual_baseline As long as "--visual_baseline" is used on the command line while running tests, the self.check_window() method cannot fail because it will rebuild the visual baseline rather than comparing the html tags of the latest run to the existing baseline. If there are any expected layout changes to a website that you're testing, you'll need to reset the baseline to prevent unnecessary failures. self.check_window() will fail with "Page Domain Mismatch Failure" if the page domain doesn't match the domain of the baseline. If you want to use self.check_window() to compare a web page to a later version of itself from within the same test run, you can add the parameter "baseline=True" to the first time you call self.check_window() in a test to use that as the baseline. This only makes sense if you're calling self.check_window() more than once with the same name parameter in the same test. Automated Visual Testing with self.check_window() is not very effective for websites that have dynamic content that changes the layout and structure of web pages. For those, you're much better off using regular SeleniumBase functional testing. Example usage: self.check_window(name="testing", level=0) self.check_window(name="xkcd_home", level=1) self.check_window(name="github_page", level=2) self.check_window(name="wikipedia_page", level=3) """ if level == "0": level = 0 if level == "1": level = 1 if level == "2": level = 2 if level == "3": level = 3 if level != 0 and level != 1 and level != 2 and level != 3: raise Exception('Parameter "level" must be set to 0, 1, 2, or 3!') if self.demo_mode: raise Exception( "WARNING: Using Demo Mode will break layout tests " "that use the check_window() method due to custom " "HTML edits being made on the page!\n" "Please rerun without using Demo Mode!") module = self.__class__.__module__ if '.' 
in module and len(module.split('.')[-1]) > 1: module = module.split('.')[-1] test_id = "%s.%s" % (module, self._testMethodName) if not name or len(name) < 1: name = "default" name = str(name) visual_helper.visual_baseline_folder_setup() baseline_dir = constants.VisualBaseline.STORAGE_FOLDER visual_baseline_path = baseline_dir + "/" + test_id + "/" + name page_url_file = visual_baseline_path + "/page_url.txt" screenshot_file = visual_baseline_path + "/screenshot.png" level_1_file = visual_baseline_path + "/tags_level_1.txt" level_2_file = visual_baseline_path + "/tags_level_2.txt" level_3_file = visual_baseline_path + "/tags_level_3.txt" set_baseline = False if baseline or self.visual_baseline: set_baseline = True if not os.path.exists(visual_baseline_path): set_baseline = True try: os.makedirs(visual_baseline_path) except Exception: pass # Only reachable during multi-threaded test runs if not os.path.exists(page_url_file): set_baseline = True if not os.path.exists(screenshot_file): set_baseline = True if not os.path.exists(level_1_file): set_baseline = True if not os.path.exists(level_2_file): set_baseline = True if not os.path.exists(level_3_file): set_baseline = True page_url = self.get_current_url() soup = self.get_beautiful_soup() html_tags = soup.body.find_all() level_1 = [[tag.name] for tag in html_tags] level_1 = json.loads(json.dumps(level_1)) # Tuples become lists level_2 = [[tag.name, sorted(tag.attrs.keys())] for tag in html_tags] level_2 = json.loads(json.dumps(level_2)) # Tuples become lists level_3 = [[tag.name, sorted(tag.attrs.items())] for tag in html_tags] level_3 = json.loads(json.dumps(level_3)) # Tuples become lists if set_baseline: self.save_screenshot("screenshot.png", visual_baseline_path) out_file = codecs.open(page_url_file, "w+") out_file.writelines(page_url) out_file.close() out_file = codecs.open(level_1_file, "w+") out_file.writelines(json.dumps(level_1)) out_file.close() out_file = codecs.open(level_2_file, "w+") out_file.writelines(json.dumps(level_2)) out_file.close() out_file = codecs.open(level_3_file, "w+") out_file.writelines(json.dumps(level_3)) out_file.close() if not set_baseline: f = open(page_url_file, 'r') page_url_data = f.read().strip() f.close() f = open(level_1_file, 'r') level_1_data = json.loads(f.read()) f.close() f = open(level_2_file, 'r') level_2_data = json.loads(f.read()) f.close() f = open(level_3_file, 'r') level_3_data = json.loads(f.read()) f.close() domain_fail = ( "\nPage Domain Mismatch Failure: " "Current Page Domain doesn't match the Page Domain of the " "Baseline! Can't compare two completely different sites! 
" "Run with --visual_baseline to reset the baseline!") level_1_failure = ( "\n*\n*** Exception: <Level 1> Visual Diff Failure:\n" "* HTML tags don't match the baseline!") level_2_failure = ( "\n*\n*** Exception: <Level 2> Visual Diff Failure:\n" "* HTML tag attribute names don't match the baseline!") level_3_failure = ( "\n*\n*** Exception: <Level 3> Visual Diff Failure:\n" "* HTML tag attribute values don't match the baseline!") page_domain = self.get_domain_url(page_url) page_data_domain = self.get_domain_url(page_url_data) unittest.TestCase.maxDiff = 1000 if level != 0: self.assertEqual(page_data_domain, page_domain, domain_fail) unittest.TestCase.maxDiff = None if level == 3: self.__assert_eq(level_3_data, level_3, level_3_failure) if level == 2: self.__assert_eq(level_2_data, level_2, level_2_failure) unittest.TestCase.maxDiff = 1000 if level == 1: self.__assert_eq(level_1_data, level_1, level_1_failure) unittest.TestCase.maxDiff = None if level == 0: try: unittest.TestCase.maxDiff = 1000 self.assertEqual( page_domain, page_data_domain, domain_fail) unittest.TestCase.maxDiff = None self.__assert_eq(level_3_data, level_3, level_3_failure) except Exception as e: print(e) # Level-0 Dry Run (Only print the differences) ############ def __get_new_timeout(self, timeout): """ When using --timeout_multiplier=#.# """ try: timeout_multiplier = float(self.timeout_multiplier) if timeout_multiplier <= 0.5: timeout_multiplier = 0.5 timeout = int(math.ceil(timeout_multiplier * timeout)) return timeout except Exception: # Wrong data type for timeout_multiplier (expecting int or float) return timeout ############ def __get_exception_message(self): """ This method extracts the message from an exception if there was an exception that occurred during the test, assuming that the exception was in a try/except block and not thrown. """ exception_info = sys.exc_info()[1] if hasattr(exception_info, 'msg'): exc_message = exception_info.msg elif hasattr(exception_info, 'message'): exc_message = exception_info.message else: exc_message = sys.exc_info() return exc_message def __get_improved_exception_message(self): """ If Chromedriver is out-of-date, make it clear! Given the high popularity of the following StackOverflow article: https://stackoverflow.com/questions/49162667/unknown-error- call-function-result-missing-value-for-selenium-send-keys-even ... the original error message was not helpful. Tell people directly. (Only expected when using driver.send_keys() with an old Chromedriver.) """ exc_message = self.__get_exception_message() maybe_using_old_chromedriver = False if "unknown error: call function result missing" in exc_message: maybe_using_old_chromedriver = True if self.browser == 'chrome' and maybe_using_old_chromedriver: update = ("Your version of ChromeDriver may be out-of-date! " "Please go to " "https://sites.google.com/a/chromium.org/chromedriver/ " "and download the latest version to your system PATH! " "Or use: ``seleniumbase install chromedriver`` . " "Original Exception Message: %s" % exc_message) exc_message = update return exc_message def __add_delayed_assert_failure(self): """ Add a delayed_assert failure into a list for future processing. """ current_url = self.driver.current_url message = self.__get_exception_message() self.__delayed_assert_failures.append( "CHECK #%s: (%s)\n %s" % ( self.__delayed_assert_count, current_url, message)) def delayed_assert_element(self, selector, by=By.CSS_SELECTOR, timeout=None): """ A non-terminating assertion for an element on a page. 
Failures will be saved until the process_delayed_asserts() method is called from inside a test, likely at the end of it. """ if not timeout: timeout = settings.MINI_TIMEOUT if self.timeout_multiplier and timeout == settings.MINI_TIMEOUT: timeout = self.__get_new_timeout(timeout) self.__delayed_assert_count += 1 try: url = self.get_current_url() if url == self.__last_url_of_delayed_assert: timeout = 1 else: self.__last_url_of_delayed_assert = url except Exception: pass try: self.wait_for_element_visible(selector, by=by, timeout=timeout) return True except Exception: self.__add_delayed_assert_failure() return False def delayed_assert_text(self, text, selector="html", by=By.CSS_SELECTOR, timeout=None): """ A non-terminating assertion for text from an element on a page. Failures will be saved until the process_delayed_asserts() method is called from inside a test, likely at the end of it. """ if not timeout: timeout = settings.MINI_TIMEOUT if self.timeout_multiplier and timeout == settings.MINI_TIMEOUT: timeout = self.__get_new_timeout(timeout) self.__delayed_assert_count += 1 try: url = self.get_current_url() if url == self.__last_url_of_delayed_assert: timeout = 1 else: self.__last_url_of_delayed_assert = url except Exception: pass try: self.wait_for_text_visible(text, selector, by=by, timeout=timeout) return True except Exception: self.__add_delayed_assert_failure() return False def process_delayed_asserts(self, print_only=False): """ To be used with any test that uses delayed_asserts, which are non-terminating verifications that only raise exceptions after this method is called. This is useful for pages with multiple elements to be checked when you want to find as many bugs as possible in a single test run before having all the exceptions get raised simultaneously. Might be more useful if this method is called after processing all the delayed asserts on a single html page so that the failure screenshot matches the location of the delayed asserts. If "print_only" is set to True, the exception won't get raised. """ if self.__delayed_assert_failures: exception_output = '' exception_output += "\n*** DELAYED ASSERTION FAILURES FOR: " exception_output += "%s\n" % self.id() all_failing_checks = self.__delayed_assert_failures self.__delayed_assert_failures = [] for tb in all_failing_checks: exception_output += "%s\n" % tb if print_only: print(exception_output) else: raise Exception(exception_output) ############ def __js_click(self, selector, by=By.CSS_SELECTOR): """ Clicks an element using pure JS. Does not use jQuery. """ selector, by = self.__recalculate_selector(selector, by) css_selector = self.convert_to_css_selector(selector, by=by) css_selector = re.escape(css_selector) css_selector = self.__escape_quotes_if_needed(css_selector) script = ("""var simulateClick = function (elem) { var evt = new MouseEvent('click', { bubbles: true, cancelable: true, view: window }); var canceled = !elem.dispatchEvent(evt); }; var someLink = document.querySelector('%s'); simulateClick(someLink);""" % css_selector) self.execute_script(script) def __js_click_all(self, selector, by=By.CSS_SELECTOR): """ Clicks all matching elements using pure JS. 
(No jQuery) """ selector, by = self.__recalculate_selector(selector, by) css_selector = self.convert_to_css_selector(selector, by=by) css_selector = re.escape(css_selector) css_selector = self.__escape_quotes_if_needed(css_selector) script = ("""var simulateClick = function (elem) { var evt = new MouseEvent('click', { bubbles: true, cancelable: true, view: window }); var canceled = !elem.dispatchEvent(evt); }; var $elements = document.querySelectorAll('%s'); var index = 0, length = $elements.length; for(; index < length; index++){ simulateClick($elements[index]);}""" % css_selector) self.execute_script(script) def __jquery_click(self, selector, by=By.CSS_SELECTOR): """ Clicks an element using jQuery. Different from using pure JS. """ selector, by = self.__recalculate_selector(selector, by) self.wait_for_element_present( selector, by=by, timeout=settings.SMALL_TIMEOUT) selector = self.convert_to_css_selector(selector, by=by) selector = self.__make_css_match_first_element_only(selector) click_script = """jQuery('%s')[0].click()""" % selector self.safe_execute_script(click_script) def __get_href_from_link_text(self, link_text, hard_fail=True): href = self.get_link_attribute(link_text, "href", hard_fail) if not href: return None if href.startswith('//'): link = "http:" + href elif href.startswith('/'): url = self.driver.current_url domain_url = self.get_domain_url(url) link = domain_url + href else: link = href return link def __click_dropdown_link_text(self, link_text, link_css): """ When a link may be hidden under a dropdown menu, use this. """ soup = self.get_beautiful_soup() drop_down_list = [] for item in soup.select('li[class]'): drop_down_list.append(item) csstype = link_css.split('[')[1].split('=')[0] for item in drop_down_list: item_text_list = item.text.split('\n') if link_text in item_text_list and csstype in item.decode(): dropdown_css = "" try: for css_class in item['class']: dropdown_css += '.' dropdown_css += css_class except Exception: continue dropdown_css = item.name + dropdown_css matching_dropdowns = self.find_visible_elements(dropdown_css) for dropdown in matching_dropdowns: # The same class names might be used for multiple dropdowns if dropdown.is_displayed(): try: try: page_actions.hover_element( self.driver, dropdown) except Exception: # If hovering fails, driver is likely outdated # Time to go directly to the hidden link text self.open(self.__get_href_from_link_text( link_text)) return True page_actions.hover_element_and_click( self.driver, dropdown, link_text, click_by=By.LINK_TEXT, timeout=0.12) return True except Exception: pass return False def __get_href_from_partial_link_text(self, link_text, hard_fail=True): href = self.get_partial_link_text_attribute( link_text, "href", hard_fail) if not href: return None if href.startswith('//'): link = "http:" + href elif href.startswith('/'): url = self.driver.current_url domain_url = self.get_domain_url(url) link = domain_url + href else: link = href return link def __click_dropdown_partial_link_text(self, link_text, link_css): """ When a partial link may be hidden under a dropdown, use this. """ soup = self.get_beautiful_soup() drop_down_list = [] for item in soup.select('li[class]'): drop_down_list.append(item) csstype = link_css.split('[')[1].split('=')[0] for item in drop_down_list: item_text_list = item.text.split('\n') if link_text in item_text_list and csstype in item.decode(): dropdown_css = "" try: for css_class in item['class']: dropdown_css += '.' 
dropdown_css += css_class except Exception: continue dropdown_css = item.name + dropdown_css matching_dropdowns = self.find_visible_elements(dropdown_css) for dropdown in matching_dropdowns: # The same class names might be used for multiple dropdowns if dropdown.is_displayed(): try: try: page_actions.hover_element( self.driver, dropdown) except Exception: # If hovering fails, driver is likely outdated # Time to go directly to the hidden link text self.open( self.__get_href_from_partial_link_text( link_text)) return True page_actions.hover_element_and_click( self.driver, dropdown, link_text, click_by=By.LINK_TEXT, timeout=0.12) return True except Exception: pass return False def __recalculate_selector(self, selector, by): # Try to determine the type of selector automatically if page_utils.is_xpath_selector(selector): by = By.XPATH if page_utils.is_link_text_selector(selector): selector = page_utils.get_link_text_from_selector(selector) by = By.LINK_TEXT if page_utils.is_partial_link_text_selector(selector): selector = page_utils.get_partial_link_text_from_selector(selector) by = By.PARTIAL_LINK_TEXT if page_utils.is_name_selector(selector): name = page_utils.get_name_from_selector(selector) selector = '[name="%s"]' % name by = By.CSS_SELECTOR return (selector, by) def __make_css_match_first_element_only(self, selector): # Only get the first match return page_utils.make_css_match_first_element_only(selector) def __demo_mode_pause_if_active(self, tiny=False): if self.demo_mode: wait_time = settings.DEFAULT_DEMO_MODE_TIMEOUT if self.demo_sleep: wait_time = float(self.demo_sleep) if not tiny: time.sleep(wait_time) else: time.sleep(wait_time / 3.4) elif self.slow_mode: self.__slow_mode_pause_if_active() def __slow_mode_pause_if_active(self): if self.slow_mode: wait_time = settings.DEFAULT_DEMO_MODE_TIMEOUT if self.demo_sleep: wait_time = float(self.demo_sleep) time.sleep(wait_time) def __demo_mode_scroll_if_active(self, selector, by): if self.demo_mode: self.slow_scroll_to(selector, by=by) def __demo_mode_highlight_if_active(self, selector, by): if self.demo_mode: # Includes self.slow_scroll_to(selector, by=by) by default self.highlight(selector, by=by) elif self.slow_mode: # Just do the slow scroll part of the highlight() method selector, by = self.__recalculate_selector(selector, by) element = self.wait_for_element_visible( selector, by=by, timeout=settings.SMALL_TIMEOUT) try: self.__slow_scroll_to_element(element) except (StaleElementReferenceException, ENI_Exception): self.wait_for_ready_state_complete() time.sleep(0.05) element = self.wait_for_element_visible( selector, by=by, timeout=settings.SMALL_TIMEOUT) self.__slow_scroll_to_element(element) def __scroll_to_element(self, element, selector=None, by=By.CSS_SELECTOR): success = js_utils.scroll_to_element(self.driver, element) if not success and selector: self.wait_for_ready_state_complete() element = page_actions.wait_for_element_visible( self.driver, selector, by, timeout=settings.SMALL_TIMEOUT) self.__demo_mode_pause_if_active(tiny=True) def __slow_scroll_to_element(self, element): js_utils.slow_scroll_to_element(self.driver, element, self.browser) def __highlight_with_assert_success( self, message, selector, by=By.CSS_SELECTOR): selector, by = self.__recalculate_selector(selector, by) element = self.wait_for_element_visible( selector, by=by, timeout=settings.SMALL_TIMEOUT) try: selector = self.convert_to_css_selector(selector, by=by) except Exception: # Don't highlight if can't convert to CSS_SELECTOR return try: 
self.__slow_scroll_to_element(element) except (StaleElementReferenceException, ENI_Exception): self.wait_for_ready_state_complete() time.sleep(0.05) element = self.wait_for_element_visible( selector, by=by, timeout=settings.SMALL_TIMEOUT) self.__slow_scroll_to_element(element) o_bs = '' # original_box_shadow try: style = element.get_attribute('style') except (StaleElementReferenceException, ENI_Exception): self.wait_for_ready_state_complete() time.sleep(0.05) element = self.wait_for_element_visible( selector, by=By.CSS_SELECTOR, timeout=settings.SMALL_TIMEOUT) style = element.get_attribute('style') if style: if 'box-shadow: ' in style: box_start = style.find('box-shadow: ') box_end = style.find(';', box_start) + 1 original_box_shadow = style[box_start:box_end] o_bs = original_box_shadow if ":contains" not in selector and ":first" not in selector: selector = re.escape(selector) selector = self.__escape_quotes_if_needed(selector) self.__highlight_with_js_2(message, selector, o_bs) else: selector = self.__make_css_match_first_element_only(selector) selector = re.escape(selector) selector = self.__escape_quotes_if_needed(selector) try: self.__highlight_with_jquery_2(message, selector, o_bs) except Exception: pass # JQuery probably couldn't load. Skip highlighting. time.sleep(0.065) def __highlight_with_js_2(self, message, selector, o_bs): js_utils.highlight_with_js_2( self.driver, message, selector, o_bs, self.message_duration) def __highlight_with_jquery_2(self, message, selector, o_bs): js_utils.highlight_with_jquery_2( self.driver, message, selector, o_bs, self.message_duration) ############ # Deprecated Methods (Replace these if they're still in your code!) @decorators.deprecated( "scroll_click() is deprecated. Use self.click() - It scrolls for you!") def scroll_click(self, selector, by=By.CSS_SELECTOR): # DEPRECATED - self.click() now scrolls to the element before clicking. # self.scroll_to(selector, by=by) # Redundant self.click(selector, by=by) @decorators.deprecated( "update_text_value() is deprecated. Use self.update_text() instead!") def update_text_value(self, selector, new_value, by=By.CSS_SELECTOR, timeout=None, retry=False): # DEPRECATED - self.update_text() should be used instead. if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) if page_utils.is_xpath_selector(selector): by = By.XPATH self.update_text( selector, new_value, by=by, timeout=timeout, retry=retry) @decorators.deprecated( "jquery_update_text_value() is deprecated. Use jquery_update_text()") def jquery_update_text_value(self, selector, new_value, by=By.CSS_SELECTOR, timeout=None): # DEPRECATED - self.jquery_update_text() should be used instead. if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) self.jquery_update_text(selector, new_value, by=by, timeout=timeout) @decorators.deprecated( "jq_format() is deprecated. Use re.escape() instead!") def jq_format(self, code): # DEPRECATED - re.escape() already performs the intended action! 
return js_utils._jq_format(code) ############ def setUp(self, masterqa_mode=False): """ Be careful if a subclass of BaseCase overrides setUp() You'll need to add the following line to the subclass setUp() method: super(SubClassOfBaseCase, self).setUp() """ self.masterqa_mode = masterqa_mode self.is_pytest = None try: # This raises an exception if the test is not coming from pytest self.is_pytest = sb_config.is_pytest except Exception: # Not using pytest (probably nosetests) self.is_pytest = False if self.is_pytest: # pytest-specific code test_id = self.__get_test_id() self.browser = sb_config.browser self.data = sb_config.data self.var1 = sb_config.var1 self.var2 = sb_config.var2 self.var3 = sb_config.var3 self.slow_mode = sb_config.slow_mode self.demo_mode = sb_config.demo_mode self.demo_sleep = sb_config.demo_sleep self.highlights = sb_config.highlights self.time_limit = sb_config.time_limit self.environment = sb_config.environment self.env = self.environment # Add a shortened version self.with_selenium = sb_config.with_selenium # Should be True self.headless = sb_config.headless self.headless_active = False self.headed = sb_config.headed self.start_page = sb_config.start_page self.log_path = sb_config.log_path self.with_testing_base = sb_config.with_testing_base self.with_basic_test_info = sb_config.with_basic_test_info self.with_screen_shots = sb_config.with_screen_shots self.with_page_source = sb_config.with_page_source self.with_db_reporting = sb_config.with_db_reporting self.with_s3_logging = sb_config.with_s3_logging self.servername = sb_config.servername self.port = sb_config.port self.proxy_string = sb_config.proxy_string self.user_agent = sb_config.user_agent self.mobile_emulator = sb_config.mobile_emulator self.device_metrics = sb_config.device_metrics self.cap_file = sb_config.cap_file self.cap_string = sb_config.cap_string self.settings_file = sb_config.settings_file self.database_env = sb_config.database_env self.message_duration = sb_config.message_duration self.js_checking_on = sb_config.js_checking_on self.ad_block_on = sb_config.ad_block_on self.verify_delay = sb_config.verify_delay self.disable_csp = sb_config.disable_csp self.enable_sync = sb_config.enable_sync self.use_auto_ext = sb_config.use_auto_ext self.no_sandbox = sb_config.no_sandbox self.disable_gpu = sb_config.disable_gpu self.incognito = sb_config.incognito self.guest_mode = sb_config.guest_mode self.devtools = sb_config.devtools self.user_data_dir = sb_config.user_data_dir self.extension_zip = sb_config.extension_zip self.extension_dir = sb_config.extension_dir self.maximize_option = sb_config.maximize_option self._reuse_session = sb_config.reuse_session self.save_screenshot_after_test = sb_config.save_screenshot self.visual_baseline = sb_config.visual_baseline self.timeout_multiplier = sb_config.timeout_multiplier self.pytest_html_report = sb_config.pytest_html_report self.report_on = False if self.pytest_html_report: self.report_on = True self.use_grid = False if self.servername != "localhost": # Use Selenium Grid (Use --server="127.0.0.1" for a local Grid) self.use_grid = True if self.with_db_reporting: from seleniumbase.core.application_manager import ( ApplicationManager) from seleniumbase.core.testcase_manager import ( ExecutionQueryPayload) import getpass self.execution_guid = str(uuid.uuid4()) self.testcase_guid = None self.execution_start_time = 0 self.case_start_time = 0 self.application = None self.testcase_manager = None self.error_handled = False self.testcase_manager = 
TestcaseManager(self.database_env) # exec_payload = ExecutionQueryPayload() exec_payload.execution_start_time = int(time.time() * 1000) self.execution_start_time = exec_payload.execution_start_time exec_payload.guid = self.execution_guid exec_payload.username = getpass.getuser() self.testcase_manager.insert_execution_data(exec_payload) # data_payload = TestcaseDataPayload() self.testcase_guid = str(uuid.uuid4()) data_payload.guid = self.testcase_guid data_payload.execution_guid = self.execution_guid if self.with_selenium: data_payload.browser = self.browser else: data_payload.browser = "N/A" data_payload.test_address = test_id application = ApplicationManager.generate_application_string( self._testMethodName) data_payload.env = application.split('.')[0] data_payload.start_time = application.split('.')[1] data_payload.state = constants.State.NOTRUN self.testcase_manager.insert_testcase_data(data_payload) self.case_start_time = int(time.time() * 1000) if self.headless: width = settings.HEADLESS_START_WIDTH height = settings.HEADLESS_START_HEIGHT try: # from pyvirtualdisplay import Display # Skip for own lib from seleniumbase.virtual_display.display import Display self.display = Display(visible=0, size=(width, height)) self.display.start() self.headless_active = True except Exception: # pyvirtualdisplay might not be necessary anymore because # Chrome and Firefox now have built-in headless displays pass else: # (Nosetests / Not Pytest) pass # Setup performed in plugins # Verify that SeleniumBase is installed successfully if not hasattr(self, "browser"): raise Exception("""SeleniumBase plugins DID NOT load!\n\n""" """*** Please REINSTALL SeleniumBase using: >\n""" """ >>> "pip install -r requirements.txt"\n""" """ >>> "python setup.py install" """) # Configure the test time limit (if used) self.set_time_limit(self.time_limit) # Set the start time for the test (in ms) sb_config.start_time_ms = int(time.time() * 1000.0) # Parse the settings file if self.settings_file: settings_parser.set_settings(self.settings_file) # Mobile Emulator device metrics: CSS Width, CSS Height, & Pixel-Ratio if self.device_metrics: metrics_string = self.device_metrics metrics_string = metrics_string.replace(' ', '') metrics_list = metrics_string.split(',') exception_string = ( 'Invalid input for Mobile Emulator device metrics!\n' 'Expecting a comma-separated string with three\n' 'integer values for Width, Height, and Pixel-Ratio.\n' 'Example: --metrics="411,731,3" ') if len(metrics_list) != 3: raise Exception(exception_string) try: self.__device_width = int(metrics_list[0]) self.__device_height = int(metrics_list[1]) self.__device_pixel_ratio = int(metrics_list[2]) self.mobile_emulator = True except Exception: raise Exception(exception_string) if self.mobile_emulator: if not self.user_agent: # Use the Pixel 3 user agent by default if not specified self.user_agent = ( "Mozilla/5.0 (Linux; Android 9; Pixel 3 XL) " "AppleWebKit/537.36 (KHTML, like Gecko) " "Chrome/76.0.3809.132 Mobile Safari/537.36") has_url = False if self._reuse_session: if not hasattr(sb_config, 'shared_driver'): sb_config.shared_driver = None if sb_config.shared_driver: try: self._default_driver = sb_config.shared_driver self.driver = sb_config.shared_driver self._drivers_list = [sb_config.shared_driver] url = self.get_current_url() if len(url) > 3: has_url = True except Exception: pass if self._reuse_session and sb_config.shared_driver and has_url: if self.start_page and len(self.start_page) >= 4: if page_utils.is_valid_url(self.start_page): 
self.open(self.start_page) else: new_start_page = "http://" + self.start_page if page_utils.is_valid_url(new_start_page): self.open(new_start_page) else: if self.get_current_url() != "data:,": self.open("data:,") else: # Launch WebDriver for both Pytest and Nosetests self.driver = self.get_new_driver(browser=self.browser, headless=self.headless, servername=self.servername, port=self.port, proxy=self.proxy_string, agent=self.user_agent, switch_to=True, cap_file=self.cap_file, cap_string=self.cap_string, disable_csp=self.disable_csp, enable_sync=self.enable_sync, use_auto_ext=self.use_auto_ext, no_sandbox=self.no_sandbox, disable_gpu=self.disable_gpu, incognito=self.incognito, guest_mode=self.guest_mode, devtools=self.devtools, user_data_dir=self.user_data_dir, extension_zip=self.extension_zip, extension_dir=self.extension_dir, is_mobile=self.mobile_emulator, d_width=self.__device_width, d_height=self.__device_height, d_p_r=self.__device_pixel_ratio) self._default_driver = self.driver if self._reuse_session: sb_config.shared_driver = self.driver def __set_last_page_screenshot(self): """ self.__last_page_screenshot is only for pytest html report logs self.__last_page_screenshot_png is for all screenshot log files """ if not self.__last_page_screenshot and ( not self.__last_page_screenshot_png): try: element = self.driver.find_element( by=By.TAG_NAME, value="body") if self.is_pytest and self.report_on: self.__last_page_screenshot_png = ( self.driver.get_screenshot_as_png()) self.__last_page_screenshot = element.screenshot_as_base64 else: self.__last_page_screenshot_png = element.screenshot_as_png except Exception: if not self.__last_page_screenshot: if self.is_pytest and self.report_on: try: self.__last_page_screenshot = ( self.driver.get_screenshot_as_base64()) except Exception: pass if not self.__last_page_screenshot_png: try: self.__last_page_screenshot_png = ( self.driver.get_screenshot_as_png()) except Exception: pass def __set_last_page_url(self): if not self.__last_page_url: try: self.__last_page_url = log_helper.get_last_page(self.driver) except Exception: self.__last_page_url = None def __set_last_page_source(self): if not self.__last_page_source: try: self.__last_page_source = ( log_helper.get_html_source_with_base_href( self.driver, self.driver.page_source)) except Exception: self.__last_page_source = None def __insert_test_result(self, state, err): data_payload = TestcaseDataPayload() data_payload.runtime = int(time.time() * 1000) - self.case_start_time data_payload.guid = self.testcase_guid data_payload.execution_guid = self.execution_guid data_payload.state = state if err: import traceback tb_string = traceback.format_exc() if "Message: " in tb_string: data_payload.message = "Message: " + tb_string.split( "Message: ")[-1] elif "Exception: " in tb_string: data_payload.message = tb_string.split("Exception: ")[-1] elif "Error: " in tb_string: data_payload.message = tb_string.split("Error: ")[-1] else: data_payload.message = "Unknown Error: See Stacktrace" self.testcase_manager.update_testcase_data(data_payload) def __add_pytest_html_extra(self): if not self.__added_pytest_html_extra: try: if self.with_selenium: if not self.__last_page_screenshot: self.__set_last_page_screenshot() self.__set_last_page_url() self.__set_last_page_source() if self.report_on: extra_url = {} extra_url['name'] = 'URL' extra_url['format'] = 'url' extra_url['content'] = self.get_current_url() extra_url['mime_type'] = None extra_url['extension'] = None extra_image = {} extra_image['name'] = 'Screenshot' 
extra_image['format'] = 'image' extra_image['content'] = self.__last_page_screenshot extra_image['mime_type'] = 'image/png' extra_image['extension'] = 'png' self.__added_pytest_html_extra = True self._html_report_extra.append(extra_url) self._html_report_extra.append(extra_image) except Exception: pass def __quit_all_drivers(self): if self._reuse_session and sb_config.shared_driver: if len(self._drivers_list) > 0: sb_config.shared_driver = self._drivers_list[0] self._default_driver = self._drivers_list[0] self.switch_to_default_driver() if len(self._drivers_list) > 1: self._drivers_list = self._drivers_list[1:] else: self._drivers_list = [] # Close all open browser windows self._drivers_list.reverse() # Last In, First Out for driver in self._drivers_list: try: driver.quit() except AttributeError: pass except Exception: pass self.driver = None self._default_driver = None self._drivers_list = [] def __has_exception(self): has_exception = False if sys.version_info[0] >= 3 and hasattr(self, '_outcome'): if hasattr(self._outcome, 'errors') and self._outcome.errors: has_exception = True else: has_exception = sys.exc_info()[1] is not None return has_exception def __get_test_id(self): test_id = "%s.%s.%s" % (self.__class__.__module__, self.__class__.__name__, self._testMethodName) return test_id def __create_log_path_as_needed(self, test_logpath): if not os.path.exists(test_logpath): try: os.makedirs(test_logpath) except Exception: pass # Only reachable during multi-threaded runs def save_teardown_screenshot(self): """ (Should ONLY be used at the start of custom tearDown() methods.) This method takes a screenshot of the current web page for a failing test (or when running your tests with --save-screenshot). That way your tearDown() method can navigate away from the last page where the test failed, and still get the correct screenshot before performing tearDown() steps on other pages. If this method is not included in your custom tearDown() method, a screenshot will still be taken after the last step of your tearDown(), where you should be calling "super(SubClassOfBaseCase, self).tearDown()" """ if self.__has_exception() or self.save_screenshot_after_test: test_id = self.__get_test_id() test_logpath = self.log_path + "/" + test_id self.__create_log_path_as_needed(test_logpath) self.__set_last_page_screenshot() self.__set_last_page_url() self.__set_last_page_source() if self.is_pytest: self.__add_pytest_html_extra() def tearDown(self): """ Be careful if a subclass of BaseCase overrides setUp() You'll need to add the following line to the subclass's tearDown(): super(SubClassOfBaseCase, self).tearDown() """ self.__slow_mode_pause_if_active() has_exception = self.__has_exception() if self.__delayed_assert_failures: print( "\nWhen using self.delayed_assert_*() methods in your tests, " "remember to call self.process_delayed_asserts() afterwards. 
" "Now calling in tearDown()...\nFailures Detected:") if not has_exception: self.process_delayed_asserts() else: self.process_delayed_asserts(print_only=True) if self.is_pytest: # pytest-specific code test_id = self.__get_test_id() try: with_selenium = self.with_selenium except Exception: sub_class_name = str( self.__class__.__bases__[0]).split('.')[-1].split("'")[0] sub_file_name = str(self.__class__.__bases__[0]).split('.')[-2] sub_file_name = sub_file_name + ".py" class_name = str(self.__class__).split('.')[-1].split("'")[0] file_name = str(self.__class__).split('.')[-2] + ".py" class_name_used = sub_class_name file_name_used = sub_file_name if sub_class_name == "BaseCase": class_name_used = class_name file_name_used = file_name fix_setup = "super(%s, self).setUp()" % class_name_used fix_teardown = "super(%s, self).tearDown()" % class_name_used message = ("You're overriding SeleniumBase's BaseCase setUp() " "method with your own setUp() method, which breaks " "SeleniumBase. You can fix this by going to your " "%s class located in your %s file and adding the " "following line of code AT THE BEGINNING of your " "setUp() method:\n%s\n\nAlso make sure " "you have added the following line of code AT THE " "END of your tearDown() method:\n%s\n" % (class_name_used, file_name_used, fix_setup, fix_teardown)) raise Exception(message) if with_selenium: # Save a screenshot if logging is on when an exception occurs if has_exception: self.__add_pytest_html_extra() if self.with_testing_base and not has_exception and ( self.save_screenshot_after_test): test_logpath = self.log_path + "/" + test_id self.__create_log_path_as_needed(test_logpath) if not self.__last_page_screenshot_png: self.__set_last_page_screenshot() self.__set_last_page_url() self.__set_last_page_source() log_helper.log_screenshot( test_logpath, self.driver, self.__last_page_screenshot_png) self.__add_pytest_html_extra() if self.with_testing_base and has_exception: test_logpath = self.log_path + "/" + test_id self.__create_log_path_as_needed(test_logpath) if ((not self.with_screen_shots) and ( not self.with_basic_test_info) and ( not self.with_page_source)): # Log everything if nothing specified (if testing_base) if not self.__last_page_screenshot_png: self.__set_last_page_screenshot() self.__set_last_page_url() self.__set_last_page_source() log_helper.log_screenshot( test_logpath, self.driver, self.__last_page_screenshot_png) log_helper.log_test_failure_data( self, test_logpath, self.driver, self.browser, self.__last_page_url) log_helper.log_page_source( test_logpath, self.driver, self.__last_page_source) else: if self.with_screen_shots: if not self.__last_page_screenshot_png: self.__set_last_page_screenshot() self.__set_last_page_url() self.__set_last_page_source() log_helper.log_screenshot( test_logpath, self.driver, self.__last_page_screenshot_png) if self.with_basic_test_info: log_helper.log_test_failure_data( self, test_logpath, self.driver, self.browser, self.__last_page_url) if self.with_page_source: log_helper.log_page_source( test_logpath, self.driver, self.__last_page_source) # (Pytest) Finally close all open browser windows self.__quit_all_drivers() if self.headless: if self.headless_active: try: self.display.stop() except AttributeError: pass except Exception: pass self.display = None if self.with_db_reporting: if has_exception: self.__insert_test_result(constants.State.ERROR, True) else: self.__insert_test_result(constants.State.PASS, False) runtime = int(time.time() * 1000) - self.execution_start_time 
self.testcase_manager.update_execution_data( self.execution_guid, runtime) if self.with_s3_logging and has_exception: """ If enabled, upload logs to S3 during test exceptions. """ from seleniumbase.core.s3_manager import S3LoggingBucket s3_bucket = S3LoggingBucket() guid = str(uuid.uuid4().hex) path = "%s/%s" % (self.log_path, test_id) uploaded_files = [] for logfile in os.listdir(path): logfile_name = "%s/%s/%s" % (guid, test_id, logfile.split(path)[-1]) s3_bucket.upload_file(logfile_name, "%s/%s" % (path, logfile)) uploaded_files.append(logfile_name) s3_bucket.save_uploaded_file_names(uploaded_files) index_file = s3_bucket.upload_index_file(test_id, guid) print("\n\n*** Log files uploaded: ***\n%s\n" % index_file) logging.info( "\n\n*** Log files uploaded: ***\n%s\n" % index_file) if self.with_db_reporting: self.testcase_manager = TestcaseManager(self.database_env) data_payload = TestcaseDataPayload() data_payload.guid = self.testcase_guid data_payload.logURL = index_file self.testcase_manager.update_testcase_log_url(data_payload) else: # (Nosetests) if has_exception: test_id = self.__get_test_id() test_logpath = self.log_path + "/" + test_id self.__create_log_path_as_needed(test_logpath) log_helper.log_test_failure_data( self, test_logpath, self.driver, self.browser, self.__last_page_url) if len(self._drivers_list) > 0: if not self.__last_page_screenshot_png: self.__set_last_page_screenshot() self.__set_last_page_url() self.__set_last_page_source() log_helper.log_screenshot( test_logpath, self.driver, self.__last_page_screenshot_png) log_helper.log_page_source( test_logpath, self.driver, self.__last_page_source) elif self.save_screenshot_after_test: test_id = self.__get_test_id() test_logpath = self.log_path + "/" + test_id self.__create_log_path_as_needed(test_logpath) if not self.__last_page_screenshot_png: self.__set_last_page_screenshot() self.__set_last_page_url() self.__set_last_page_source() log_helper.log_screenshot( test_logpath, self.driver, self.__last_page_screenshot_png) if self.report_on: self._last_page_screenshot = self.__last_page_screenshot_png try: self._last_page_url = self.get_current_url() except Exception: self._last_page_url = "(Error: Unknown URL)" # Finally close all open browser windows self.__quit_all_drivers()
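# A minimal sketch (not part of the original file) of the subclass pattern that the
# setUp()/tearDown() docstrings above describe: call the BaseCase super().setUp() first,
# call super().tearDown() last, and take the teardown screenshot at the start of a custom
# tearDown(). The class name MyTestClass and the URL are hypothetical placeholders.
from seleniumbase import BaseCase

class MyTestClass(BaseCase):
    def setUp(self):
        # Must run BaseCase's setUp() before any custom setup steps
        super(MyTestClass, self).setUp()
        # ... custom setup steps go here ...

    def tearDown(self):
        # Recommended at the start of a custom tearDown(), per save_teardown_screenshot()
        self.save_teardown_screenshot()
        # ... custom teardown steps go here ...
        # Must run BaseCase's tearDown() at the end
        super(MyTestClass, self).tearDown()

    def test_example(self):
        self.open("https://example.com")  # hypothetical URL
        self.click("a")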
avg_line_length: 46.286687
max_line_length: 79
alphanum_fraction: 0.587236
import codecs import json import logging import math import os import re import sys import time import urllib3 import unittest import uuid from selenium.common.exceptions import (StaleElementReferenceException, MoveTargetOutOfBoundsException, WebDriverException) from selenium.common import exceptions as selenium_exceptions from selenium.webdriver.common.by import By from selenium.webdriver.common.keys import Keys from selenium.webdriver.remote.remote_connection import LOGGER from selenium.webdriver.support.ui import Select from seleniumbase import config as sb_config from seleniumbase.common import decorators from seleniumbase.config import settings from seleniumbase.core.testcase_manager import TestcaseDataPayload from seleniumbase.core.testcase_manager import TestcaseManager from seleniumbase.core import download_helper from seleniumbase.core import log_helper from seleniumbase.core import settings_parser from seleniumbase.core import tour_helper from seleniumbase.core import visual_helper from seleniumbase.fixtures import constants from seleniumbase.fixtures import js_utils from seleniumbase.fixtures import page_actions from seleniumbase.fixtures import page_utils from seleniumbase.fixtures import shared_utils from seleniumbase.fixtures import xpath_to_css logging.getLogger("requests").setLevel(logging.ERROR) logging.getLogger("urllib3").setLevel(logging.ERROR) urllib3.disable_warnings() LOGGER.setLevel(logging.WARNING) ECI_Exception = selenium_exceptions.ElementClickInterceptedException ENI_Exception = selenium_exceptions.ElementNotInteractableException class BaseCase(unittest.TestCase): def __init__(self, *args, **kwargs): super(BaseCase, self).__init__(*args, **kwargs) self.driver = None self.environment = None self.env = None self.__last_url_of_delayed_assert = "data:," self.__last_page_load_url = "data:," self.__last_page_screenshot = None self.__last_page_screenshot_png = None self.__last_page_url = None self.__last_page_source = None self.__added_pytest_html_extra = None self.__delayed_assert_count = 0 self.__delayed_assert_failures = [] self.__device_width = None self.__device_height = None self.__device_pixel_ratio = None self._html_report_extra = [] self._default_driver = None self._drivers_list = [] self._tour_steps = {} def open(self, url): self.__last_page_load_url = None if url.startswith("://"): url = "https" + url self.driver.get(url) if settings.WAIT_FOR_RSC_ON_PAGE_LOADS: self.wait_for_ready_state_complete() self.__demo_mode_pause_if_active() def open_url(self, url): self.open(url) def get(self, url): self.open(url) def visit(self, url): self.open(url) def click(self, selector, by=By.CSS_SELECTOR, timeout=None, delay=0): if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) selector, by = self.__recalculate_selector(selector, by) if page_utils.is_link_text_selector(selector) or by == By.LINK_TEXT: if not self.is_link_text_visible(selector): self.click_link_text(selector, timeout=timeout) return if page_utils.is_partial_link_text_selector(selector) or ( by == By.PARTIAL_LINK_TEXT): if not self.is_partial_link_text_visible(selector): self.click_partial_link_text(selector, timeout=timeout) return element = page_actions.wait_for_element_visible( self.driver, selector, by, timeout=timeout) self.__demo_mode_highlight_if_active(selector, by) if not self.demo_mode: self.__scroll_to_element(element, selector, by) pre_action_url = self.driver.current_url if delay and delay > 0: 
time.sleep(delay) try: if self.browser == 'ie' and by == By.LINK_TEXT: self.__jquery_click(selector, by=by) elif self.browser == "safari": if by == By.LINK_TEXT: self.__jquery_click(selector, by=by) else: self.__js_click(selector, by=by) else: element.click() except (StaleElementReferenceException, ENI_Exception): self.wait_for_ready_state_complete() time.sleep(0.05) element = page_actions.wait_for_element_visible( self.driver, selector, by, timeout=timeout) if self.browser == "safari": if by == By.LINK_TEXT: self.__jquery_click(selector, by=by) else: self.__js_click(selector, by=by) else: element.click() except (WebDriverException, MoveTargetOutOfBoundsException): self.wait_for_ready_state_complete() try: self.__js_click(selector, by=by) except Exception: try: self.__jquery_click(selector, by=by) except Exception: element = page_actions.wait_for_element_visible( self.driver, selector, by, timeout=timeout) element.click() if settings.WAIT_FOR_RSC_ON_CLICKS: self.wait_for_ready_state_complete() if self.demo_mode: if self.driver.current_url != pre_action_url: self.__demo_mode_pause_if_active() else: self.__demo_mode_pause_if_active(tiny=True) elif self.slow_mode: self.__slow_mode_pause_if_active() def slow_click(self, selector, by=By.CSS_SELECTOR, timeout=None): if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) if not self.demo_mode: self.click(selector, by=by, timeout=timeout, delay=1.05) else: self.click(selector, by=by, timeout=timeout, delay=0.25) def double_click(self, selector, by=By.CSS_SELECTOR, timeout=None): from selenium.webdriver.common.action_chains import ActionChains if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) if page_utils.is_xpath_selector(selector): by = By.XPATH element = page_actions.wait_for_element_visible( self.driver, selector, by, timeout=timeout) self.__demo_mode_highlight_if_active(selector, by) if not self.demo_mode: self.__scroll_to_element(element, selector, by) pre_action_url = self.driver.current_url try: actions = ActionChains(self.driver) actions.move_to_element(element) actions.double_click(element) actions.perform() except (StaleElementReferenceException, ENI_Exception): self.wait_for_ready_state_complete() time.sleep(0.05) element = page_actions.wait_for_element_visible( self.driver, selector, by, timeout=timeout) actions = ActionChains(self.driver) actions.move_to_element(element) actions.double_click(element) actions.perform() if settings.WAIT_FOR_RSC_ON_CLICKS: self.wait_for_ready_state_complete() if self.demo_mode: if self.driver.current_url != pre_action_url: self.__demo_mode_pause_if_active() else: self.__demo_mode_pause_if_active(tiny=True) elif self.slow_mode: self.__slow_mode_pause_if_active() def click_chain(self, selectors_list, by=By.CSS_SELECTOR, timeout=None, spacing=0): if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) for selector in selectors_list: self.click(selector, by=by, timeout=timeout) if spacing > 0: time.sleep(spacing) def type(self, selector, text, by=By.CSS_SELECTOR, timeout=None, retry=False): if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) if page_utils.is_xpath_selector(selector): by = By.XPATH 
self.update_text(selector, text, by=by, timeout=timeout, retry=retry) def input(self, selector, text, by=By.CSS_SELECTOR, timeout=None, retry=False): if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) if page_utils.is_xpath_selector(selector): by = By.XPATH self.update_text(selector, text, by=by, timeout=timeout, retry=retry) def update_text(self, selector, new_value, by=By.CSS_SELECTOR, timeout=None, retry=False): if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) if page_utils.is_xpath_selector(selector): by = By.XPATH element = self.wait_for_element_visible( selector, by=by, timeout=timeout) self.__demo_mode_highlight_if_active(selector, by) if not self.demo_mode: self.__scroll_to_element(element, selector, by) try: element.clear() except (StaleElementReferenceException, ENI_Exception): self.wait_for_ready_state_complete() time.sleep(0.06) element = self.wait_for_element_visible( selector, by=by, timeout=timeout) try: element.clear() except Exception: pass except Exception: pass # Clearing the text field first isn't critical self.__demo_mode_pause_if_active(tiny=True) pre_action_url = self.driver.current_url if type(new_value) is int or type(new_value) is float: new_value = str(new_value) try: if not new_value.endswith('\n'): element.send_keys(new_value) if settings.WAIT_FOR_RSC_ON_PAGE_LOADS: self.wait_for_ready_state_complete() else: new_value = new_value[:-1] element.send_keys(new_value) element.send_keys(Keys.RETURN) if settings.WAIT_FOR_RSC_ON_PAGE_LOADS: self.wait_for_ready_state_complete() except (StaleElementReferenceException, ENI_Exception): self.wait_for_ready_state_complete() time.sleep(0.06) element = self.wait_for_element_visible( selector, by=by, timeout=timeout) element.clear() if not new_value.endswith('\n'): element.send_keys(new_value) else: new_value = new_value[:-1] element.send_keys(new_value) element.send_keys(Keys.RETURN) if settings.WAIT_FOR_RSC_ON_PAGE_LOADS: self.wait_for_ready_state_complete() except Exception: exc_message = self.__get_improved_exception_message() raise Exception(exc_message) if (retry and element.get_attribute('value') != new_value and ( not new_value.endswith('\n'))): logging.debug('update_text() is falling back to JavaScript!') self.set_value(selector, new_value, by=by) if self.demo_mode: if self.driver.current_url != pre_action_url: self.__demo_mode_pause_if_active() else: self.__demo_mode_pause_if_active(tiny=True) elif self.slow_mode: self.__slow_mode_pause_if_active() def add_text(self, selector, text, by=By.CSS_SELECTOR, timeout=None): if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) if page_utils.is_xpath_selector(selector): by = By.XPATH element = self.wait_for_element_visible( selector, by=by, timeout=timeout) self.__demo_mode_highlight_if_active(selector, by) if not self.demo_mode: self.__scroll_to_element(element, selector, by) pre_action_url = self.driver.current_url try: if not text.endswith('\n'): element.send_keys(text) else: text = text[:-1] element.send_keys(text) element.send_keys(Keys.RETURN) if settings.WAIT_FOR_RSC_ON_PAGE_LOADS: self.wait_for_ready_state_complete() except (StaleElementReferenceException, ENI_Exception): self.wait_for_ready_state_complete() time.sleep(0.06) element = 
self.wait_for_element_visible( selector, by=by, timeout=timeout) if not text.endswith('\n'): element.send_keys(text) else: text = text[:-1] element.send_keys(text) element.send_keys(Keys.RETURN) if settings.WAIT_FOR_RSC_ON_PAGE_LOADS: self.wait_for_ready_state_complete() except Exception: exc_message = self.__get_improved_exception_message() raise Exception(exc_message) if self.demo_mode: if self.driver.current_url != pre_action_url: self.__demo_mode_pause_if_active() else: self.__demo_mode_pause_if_active(tiny=True) elif self.slow_mode: self.__slow_mode_pause_if_active() def send_keys(self, selector, text, by=By.CSS_SELECTOR, timeout=None): if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) if page_utils.is_xpath_selector(selector): by = By.XPATH self.add_text(selector, text, by=by, timeout=timeout) def submit(self, selector, by=By.CSS_SELECTOR): if page_utils.is_xpath_selector(selector): by = By.XPATH element = self.wait_for_element_visible( selector, by=by, timeout=settings.SMALL_TIMEOUT) element.submit() self.__demo_mode_pause_if_active() def refresh_page(self): self.__last_page_load_url = None self.driver.refresh() self.wait_for_ready_state_complete() def refresh(self): self.refresh_page() def get_current_url(self): current_url = self.driver.current_url if "%" in current_url and sys.version_info[0] >= 3: try: from urllib.parse import unquote current_url = unquote(current_url, errors='strict') except Exception: pass return current_url def get_page_source(self): self.wait_for_ready_state_complete() return self.driver.page_source def get_page_title(self): self.wait_for_ready_state_complete() self.wait_for_element_present("title", timeout=settings.SMALL_TIMEOUT) time.sleep(0.03) return self.driver.title def get_title(self): return self.get_page_title() def go_back(self): self.__last_page_load_url = None self.driver.back() if self.browser == "safari": self.driver.refresh() self.wait_for_ready_state_complete() self.__demo_mode_pause_if_active() def go_forward(self): self.__last_page_load_url = None self.driver.forward() if self.browser == "safari": self.driver.refresh() self.wait_for_ready_state_complete() self.__demo_mode_pause_if_active() def is_element_present(self, selector, by=By.CSS_SELECTOR): selector, by = self.__recalculate_selector(selector, by) return page_actions.is_element_present(self.driver, selector, by) def is_element_visible(self, selector, by=By.CSS_SELECTOR): selector, by = self.__recalculate_selector(selector, by) return page_actions.is_element_visible(self.driver, selector, by) def is_text_visible(self, text, selector="html", by=By.CSS_SELECTOR): self.wait_for_ready_state_complete() time.sleep(0.01) selector, by = self.__recalculate_selector(selector, by) return page_actions.is_text_visible(self.driver, text, selector, by) def is_link_text_visible(self, link_text): self.wait_for_ready_state_complete() time.sleep(0.01) return page_actions.is_element_visible(self.driver, link_text, by=By.LINK_TEXT) def is_partial_link_text_visible(self, partial_link_text): self.wait_for_ready_state_complete() time.sleep(0.01) return page_actions.is_element_visible(self.driver, partial_link_text, by=By.PARTIAL_LINK_TEXT) def is_link_text_present(self, link_text): soup = self.get_beautiful_soup() html_links = soup.find_all('a') for html_link in html_links: if html_link.text.strip() == link_text.strip(): return True return False def is_partial_link_text_present(self, link_text): soup = 
self.get_beautiful_soup() html_links = soup.find_all('a') for html_link in html_links: if link_text.strip() in html_link.text.strip(): return True return False def get_link_attribute(self, link_text, attribute, hard_fail=True): soup = self.get_beautiful_soup() html_links = soup.find_all('a') for html_link in html_links: if html_link.text.strip() == link_text.strip(): if html_link.has_attr(attribute): attribute_value = html_link.get(attribute) return attribute_value if hard_fail: raise Exception( 'Unable to find attribute {%s} from link text {%s}!' % (attribute, link_text)) else: return None if hard_fail: raise Exception("Link text {%s} was not found!" % link_text) else: return None def get_link_text_attribute(self, link_text, attribute, hard_fail=True): return self.get_link_attribute(link_text, attribute, hard_fail) def get_partial_link_text_attribute(self, link_text, attribute, hard_fail=True): soup = self.get_beautiful_soup() html_links = soup.find_all('a') for html_link in html_links: if link_text.strip() in html_link.text.strip(): if html_link.has_attr(attribute): attribute_value = html_link.get(attribute) return attribute_value if hard_fail: raise Exception( 'Unable to find attribute {%s} from ' 'partial link text {%s}!' % (attribute, link_text)) else: return None if hard_fail: raise Exception( "Partial Link text {%s} was not found!" % link_text) else: return None def click_link_text(self, link_text, timeout=None): if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) if self.browser == 'phantomjs': if self.is_link_text_visible(link_text): element = self.wait_for_link_text_visible( link_text, timeout=timeout) element.click() return self.open(self.__get_href_from_link_text(link_text)) return if self.browser == "safari": self.__jquery_click(link_text, by=By.LINK_TEXT) return if not self.is_link_text_present(link_text): self.wait_for_link_text_present(link_text, timeout=timeout) pre_action_url = self.get_current_url() try: element = self.wait_for_link_text_visible( link_text, timeout=0.2) self.__demo_mode_highlight_if_active(link_text, by=By.LINK_TEXT) try: element.click() except (StaleElementReferenceException, ENI_Exception): self.wait_for_ready_state_complete() time.sleep(0.05) element = self.wait_for_link_text_visible( link_text, timeout=timeout) element.click() except Exception: found_css = False text_id = self.get_link_attribute(link_text, "id", False) if text_id: link_css = '[id="%s"]' % link_text found_css = True if not found_css: href = self.__get_href_from_link_text(link_text, False) if href: if href.startswith('/') or page_utils.is_valid_url(href): link_css = '[href="%s"]' % href found_css = True if not found_css: ngclick = self.get_link_attribute(link_text, "ng-click", False) if ngclick: link_css = '[ng-click="%s"]' % ngclick found_css = True if not found_css: onclick = self.get_link_attribute(link_text, "onclick", False) if onclick: link_css = '[onclick="%s"]' % onclick found_css = True success = False if found_css: if self.is_element_visible(link_css): self.click(link_css) success = True else: success = self.__click_dropdown_link_text( link_text, link_css) if not success: element = self.wait_for_link_text_visible( link_text, timeout=settings.MINI_TIMEOUT) element.click() if settings.WAIT_FOR_RSC_ON_CLICKS: self.wait_for_ready_state_complete() if self.demo_mode: if self.driver.current_url != pre_action_url: self.__demo_mode_pause_if_active() else: 
self.__demo_mode_pause_if_active(tiny=True) elif self.slow_mode: self.__slow_mode_pause_if_active() def click_link(self, link_text, timeout=None): if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) self.click_link_text(link_text, timeout=timeout) def click_partial_link_text(self, partial_link_text, timeout=None): if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) if self.browser == 'phantomjs': if self.is_partial_link_text_visible(partial_link_text): element = self.wait_for_partial_link_text(partial_link_text) element.click() return soup = self.get_beautiful_soup() html_links = soup.fetch('a') for html_link in html_links: if partial_link_text in html_link.text: for html_attribute in html_link.attrs: if html_attribute[0] == 'href': href = html_attribute[1] if href.startswith('//'): link = "http:" + href elif href.startswith('/'): url = self.driver.current_url domain_url = self.get_domain_url(url) link = domain_url + href else: link = href self.open(link) return raise Exception( 'Could not parse link from partial link_text ' '{%s}' % partial_link_text) raise Exception( "Partial link text {%s} was not found!" % partial_link_text) if not self.is_partial_link_text_present(partial_link_text): self.wait_for_partial_link_text_present( partial_link_text, timeout=timeout) pre_action_url = self.get_current_url() try: element = self.wait_for_partial_link_text( partial_link_text, timeout=0.2) self.__demo_mode_highlight_if_active( partial_link_text, by=By.LINK_TEXT) try: element.click() except (StaleElementReferenceException, ENI_Exception): self.wait_for_ready_state_complete() time.sleep(0.05) element = self.wait_for_partial_link_text( partial_link_text, timeout=timeout) element.click() except Exception: found_css = False text_id = self.get_partial_link_text_attribute( partial_link_text, "id", False) if text_id: link_css = '[id="%s"]' % partial_link_text found_css = True if not found_css: href = self.__get_href_from_partial_link_text( partial_link_text, False) if href: if href.startswith('/') or page_utils.is_valid_url(href): link_css = '[href="%s"]' % href found_css = True if not found_css: ngclick = self.get_partial_link_text_attribute( partial_link_text, "ng-click", False) if ngclick: link_css = '[ng-click="%s"]' % ngclick found_css = True if not found_css: onclick = self.get_partial_link_text_attribute( partial_link_text, "onclick", False) if onclick: link_css = '[onclick="%s"]' % onclick found_css = True success = False if found_css: if self.is_element_visible(link_css): self.click(link_css) success = True else: success = self.__click_dropdown_partial_link_text( partial_link_text, link_css) if not success: element = self.wait_for_link_text_visible( partial_link_text, timeout=settings.MINI_TIMEOUT) element.click() if settings.WAIT_FOR_RSC_ON_CLICKS: self.wait_for_ready_state_complete() if self.demo_mode: if self.driver.current_url != pre_action_url: self.__demo_mode_pause_if_active() else: self.__demo_mode_pause_if_active(tiny=True) elif self.slow_mode: self.__slow_mode_pause_if_active() def get_text(self, selector, by=By.CSS_SELECTOR, timeout=None): if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) if page_utils.is_xpath_selector(selector): by = By.XPATH 
self.wait_for_ready_state_complete() time.sleep(0.01) element = page_actions.wait_for_element_visible( self.driver, selector, by, timeout) try: element_text = element.text except (StaleElementReferenceException, ENI_Exception): self.wait_for_ready_state_complete() time.sleep(0.06) element = page_actions.wait_for_element_visible( self.driver, selector, by, timeout) element_text = element.text return element_text def get_attribute(self, selector, attribute, by=By.CSS_SELECTOR, timeout=None, hard_fail=True): if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) selector, by = self.__recalculate_selector(selector, by) self.wait_for_ready_state_complete() time.sleep(0.01) element = page_actions.wait_for_element_present( self.driver, selector, by, timeout) try: attribute_value = element.get_attribute(attribute) except (StaleElementReferenceException, ENI_Exception): self.wait_for_ready_state_complete() time.sleep(0.06) element = page_actions.wait_for_element_present( self.driver, selector, by, timeout) attribute_value = element.get_attribute(attribute) if attribute_value is not None: return attribute_value else: if hard_fail: raise Exception("Element {%s} has no attribute {%s}!" % ( selector, attribute)) else: return None def set_attribute(self, selector, attribute, value, by=By.CSS_SELECTOR, timeout=None): if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) selector, by = self.__recalculate_selector(selector, by) if self.is_element_visible(selector, by=by): try: self.scroll_to(selector, by=by, timeout=timeout) except Exception: pass attribute = re.escape(attribute) attribute = self.__escape_quotes_if_needed(attribute) value = re.escape(value) value = self.__escape_quotes_if_needed(value) css_selector = self.convert_to_css_selector(selector, by=by) css_selector = re.escape(css_selector) css_selector = self.__escape_quotes_if_needed(css_selector) script = ("""document.querySelector('%s').setAttribute('%s','%s');""" % (css_selector, attribute, value)) self.execute_script(script) def set_attributes(self, selector, attribute, value, by=By.CSS_SELECTOR): selector, by = self.__recalculate_selector(selector, by) attribute = re.escape(attribute) attribute = self.__escape_quotes_if_needed(attribute) value = re.escape(value) value = self.__escape_quotes_if_needed(value) css_selector = self.convert_to_css_selector(selector, by=by) css_selector = re.escape(css_selector) css_selector = self.__escape_quotes_if_needed(css_selector) script = ("""var $elements = document.querySelectorAll('%s'); var index = 0, length = $elements.length; for(; index < length; index++){ $elements[index].setAttribute('%s','%s');}""" % (css_selector, attribute, value)) try: self.execute_script(script) except Exception: pass def set_attribute_all(self, selector, attribute, value, by=By.CSS_SELECTOR): self.set_attributes(selector, attribute, value, by=by) def remove_attribute(self, selector, attribute, by=By.CSS_SELECTOR, timeout=None): if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) selector, by = self.__recalculate_selector(selector, by) if self.is_element_visible(selector, by=by): try: self.scroll_to(selector, by=by, timeout=timeout) except Exception: pass attribute = re.escape(attribute) attribute = 
self.__escape_quotes_if_needed(attribute) css_selector = self.convert_to_css_selector(selector, by=by) css_selector = re.escape(css_selector) css_selector = self.__escape_quotes_if_needed(css_selector) script = ("""document.querySelector('%s').removeAttribute('%s');""" % (css_selector, attribute)) self.execute_script(script) def remove_attributes(self, selector, attribute, by=By.CSS_SELECTOR): selector, by = self.__recalculate_selector(selector, by) attribute = re.escape(attribute) attribute = self.__escape_quotes_if_needed(attribute) css_selector = self.convert_to_css_selector(selector, by=by) css_selector = re.escape(css_selector) css_selector = self.__escape_quotes_if_needed(css_selector) script = ("""var $elements = document.querySelectorAll('%s'); var index = 0, length = $elements.length; for(; index < length; index++){ $elements[index].removeAttribute('%s');}""" % (css_selector, attribute)) try: self.execute_script(script) except Exception: pass def get_property_value(self, selector, property, by=By.CSS_SELECTOR, timeout=None): if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) selector, by = self.__recalculate_selector(selector, by) self.wait_for_ready_state_complete() page_actions.wait_for_element_present( self.driver, selector, by, timeout) try: selector = self.convert_to_css_selector(selector, by=by) except Exception: raise Exception( "Exception: Could not convert {%s}(by=%s) to CSS_SELECTOR!" % ( selector, by)) selector = re.escape(selector) selector = self.__escape_quotes_if_needed(selector) script = ("""var $elm = document.querySelector('%s'); $val = window.getComputedStyle($elm).getPropertyValue('%s'); return $val;""" % (selector, property)) value = self.execute_script(script) if value is not None: return value else: return "" def get_image_url(self, selector, by=By.CSS_SELECTOR, timeout=None): if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) return self.get_attribute(selector, attribute='src', by=by, timeout=timeout) def find_elements(self, selector, by=By.CSS_SELECTOR, limit=0): selector, by = self.__recalculate_selector(selector, by) self.wait_for_ready_state_complete() time.sleep(0.05) elements = self.driver.find_elements(by=by, value=selector) if limit and limit > 0 and len(elements) > limit: elements = elements[:limit] return elements def find_visible_elements(self, selector, by=By.CSS_SELECTOR, limit=0): selector, by = self.__recalculate_selector(selector, by) self.wait_for_ready_state_complete() time.sleep(0.05) v_elems = page_actions.find_visible_elements(self.driver, selector, by) if limit and limit > 0 and len(v_elems) > limit: v_elems = v_elems[:limit] return v_elems def click_visible_elements(self, selector, by=By.CSS_SELECTOR, limit=0): selector, by = self.__recalculate_selector(selector, by) self.wait_for_element_present( selector, by=by, timeout=settings.SMALL_TIMEOUT) elements = self.find_elements(selector, by=by) if self.browser == "safari": if not limit: limit = 0 num_elements = len(elements) if num_elements == 0: raise Exception( "No matching elements found for selector {%s}!" 
% selector) elif num_elements < limit or limit == 0: limit = num_elements selector, by = self.__recalculate_selector(selector, by) css_selector = self.convert_to_css_selector(selector, by=by) last_css_chunk = css_selector.split(' ')[-1] if ":" in last_css_chunk: self.__js_click_all(css_selector) self.wait_for_ready_state_complete() return else: for i in range(1, limit+1): new_selector = css_selector + ":nth-of-type(%s)" % str(i) if self.is_element_visible(new_selector): self.__js_click(new_selector) self.wait_for_ready_state_complete() return click_count = 0 for element in elements: if limit and limit > 0 and click_count >= limit: return try: if element.is_displayed(): self.__scroll_to_element(element) element.click() click_count += 1 self.wait_for_ready_state_complete() except ECI_Exception: continue # ElementClickInterceptedException (Overlay likely) except (StaleElementReferenceException, ENI_Exception): self.wait_for_ready_state_complete() time.sleep(0.03) try: if element.is_displayed(): self.__scroll_to_element(element) element.click() click_count += 1 self.wait_for_ready_state_complete() except (StaleElementReferenceException, ENI_Exception): return # Probably on new page / Elements are all stale def click_nth_visible_element(self, selector, number, by=By.CSS_SELECTOR): elements = self.find_visible_elements(selector, by=by) if len(elements) < number: raise Exception("Not enough matching {%s} elements of type {%s} to" " click number %s!" % (selector, by, number)) number = number - 1 if number < 0: number = 0 element = elements[number] self.wait_for_ready_state_complete() try: self.__scroll_to_element(element) element.click() except (StaleElementReferenceException, ENI_Exception): self.wait_for_ready_state_complete() time.sleep(0.05) self.__scroll_to_element(element) element.click() def click_if_visible(self, selector, by=By.CSS_SELECTOR): self.wait_for_ready_state_complete() if self.is_element_visible(selector, by=by): self.click(selector, by=by) def is_checked(self, selector, by=By.CSS_SELECTOR, timeout=None): if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) selector, by = self.__recalculate_selector(selector, by) kind = self.get_attribute(selector, "type", by=by, timeout=timeout) if kind != "checkbox" and kind != "radio": raise Exception("Expecting a checkbox or a radio button element!") is_checked = self.get_attribute( selector, "checked", by=by, timeout=timeout, hard_fail=False) if is_checked: return True else: # (NoneType) return False def is_selected(self, selector, by=By.CSS_SELECTOR, timeout=None): return self.is_checked(selector, by=by, timeout=timeout) def check_if_unchecked(self, selector, by=By.CSS_SELECTOR): selector, by = self.__recalculate_selector(selector, by) if not self.is_checked(selector, by=by): if self.is_element_visible(selector, by=by): self.click(selector, by=by) else: selector = self.convert_to_css_selector(selector, by=by) self.js_click(selector, by=By.CSS_SELECTOR) def select_if_unselected(self, selector, by=By.CSS_SELECTOR): self.check_if_unchecked(selector, by=by) def uncheck_if_checked(self, selector, by=By.CSS_SELECTOR): selector, by = self.__recalculate_selector(selector, by) if self.is_checked(selector, by=by): if self.is_element_visible(selector, by=by): self.click(selector, by=by) else: selector = self.convert_to_css_selector(selector, by=by) self.js_click(selector, by=By.CSS_SELECTOR) def unselect_if_selected(self, selector, 
by=By.CSS_SELECTOR): self.uncheck_if_checked(selector, by=by) def is_element_in_an_iframe(self, selector, by=By.CSS_SELECTOR): selector, by = self.__recalculate_selector(selector, by) if self.is_element_present(selector, by=by): return False soup = self.get_beautiful_soup() iframe_list = soup.select('iframe') for iframe in iframe_list: iframe_identifier = None if iframe.has_attr('name') and len(iframe['name']) > 0: iframe_identifier = iframe['name'] elif iframe.has_attr('id') and len(iframe['id']) > 0: iframe_identifier = iframe['id'] elif iframe.has_attr('class') and len(iframe['class']) > 0: iframe_class = " ".join(iframe["class"]) iframe_identifier = '[class="%s"]' % iframe_class else: continue self.switch_to_frame(iframe_identifier) if self.is_element_present(selector, by=by): self.switch_to_default_content() return True self.switch_to_default_content() return False def switch_to_frame_of_element(self, selector, by=By.CSS_SELECTOR): selector, by = self.__recalculate_selector(selector, by) if self.is_element_present(selector, by=by): return None soup = self.get_beautiful_soup() iframe_list = soup.select('iframe') for iframe in iframe_list: iframe_identifier = None if iframe.has_attr('name') and len(iframe['name']) > 0: iframe_identifier = iframe['name'] elif iframe.has_attr('id') and len(iframe['id']) > 0: iframe_identifier = iframe['id'] elif iframe.has_attr('class') and len(iframe['class']) > 0: iframe_class = " ".join(iframe["class"]) iframe_identifier = '[class="%s"]' % iframe_class else: continue try: self.switch_to_frame(iframe_identifier, timeout=1) if self.is_element_present(selector, by=by): return iframe_identifier except Exception: pass self.switch_to_default_content() try: self.switch_to_frame(selector, timeout=1) return selector except Exception: if self.is_element_present(selector, by=by): return "" raise Exception("Could not switch to iframe containing " "element {%s}!" 
% selector) def hover_on_element(self, selector, by=By.CSS_SELECTOR): selector, by = self.__recalculate_selector(selector, by) if page_utils.is_xpath_selector(selector): selector = self.convert_to_css_selector(selector, By.XPATH) by = By.CSS_SELECTOR self.wait_for_element_visible( selector, by=by, timeout=settings.SMALL_TIMEOUT) self.__demo_mode_highlight_if_active(selector, by) self.scroll_to(selector, by=by) time.sleep(0.05) # Settle down from scrolling before hovering return page_actions.hover_on_element(self.driver, selector) def hover_and_click(self, hover_selector, click_selector, hover_by=By.CSS_SELECTOR, click_by=By.CSS_SELECTOR, timeout=None): if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) hover_selector, hover_by = self.__recalculate_selector( hover_selector, hover_by) hover_selector = self.convert_to_css_selector( hover_selector, hover_by) hover_by = By.CSS_SELECTOR click_selector, click_by = self.__recalculate_selector( click_selector, click_by) dropdown_element = self.wait_for_element_visible( hover_selector, by=hover_by, timeout=timeout) self.__demo_mode_highlight_if_active(hover_selector, hover_by) self.scroll_to(hover_selector, by=hover_by) pre_action_url = self.driver.current_url outdated_driver = False element = None try: if self.browser == "safari": # Use the workaround for hover-clicking on Safari raise Exception("This Exception will be caught.") page_actions.hover_element(self.driver, dropdown_element) except Exception: outdated_driver = True element = self.wait_for_element_present( click_selector, click_by, timeout) if click_by == By.LINK_TEXT: self.open(self.__get_href_from_link_text(click_selector)) elif click_by == By.PARTIAL_LINK_TEXT: self.open(self.__get_href_from_partial_link_text( click_selector)) else: self.js_click(click_selector, click_by) if not outdated_driver: element = page_actions.hover_and_click( self.driver, hover_selector, click_selector, hover_by, click_by, timeout) if self.demo_mode: if self.driver.current_url != pre_action_url: self.__demo_mode_pause_if_active() else: self.__demo_mode_pause_if_active(tiny=True) elif self.slow_mode: self.__slow_mode_pause_if_active() return element def hover_and_double_click(self, hover_selector, click_selector, hover_by=By.CSS_SELECTOR, click_by=By.CSS_SELECTOR, timeout=None): if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) hover_selector, hover_by = self.__recalculate_selector( hover_selector, hover_by) hover_selector = self.convert_to_css_selector( hover_selector, hover_by) click_selector, click_by = self.__recalculate_selector( click_selector, click_by) dropdown_element = self.wait_for_element_visible( hover_selector, by=hover_by, timeout=timeout) self.__demo_mode_highlight_if_active(hover_selector, hover_by) self.scroll_to(hover_selector, by=hover_by) pre_action_url = self.driver.current_url outdated_driver = False element = None try: page_actions.hover_element(self.driver, dropdown_element) except Exception: outdated_driver = True element = self.wait_for_element_present( click_selector, click_by, timeout) if click_by == By.LINK_TEXT: self.open(self.__get_href_from_link_text(click_selector)) elif click_by == By.PARTIAL_LINK_TEXT: self.open(self.__get_href_from_partial_link_text( click_selector)) else: self.js_click(click_selector, click_by) if not outdated_driver: element = 
page_actions.hover_element_and_double_click( self.driver, dropdown_element, click_selector, click_by=By.CSS_SELECTOR, timeout=timeout) if self.demo_mode: if self.driver.current_url != pre_action_url: self.__demo_mode_pause_if_active() else: self.__demo_mode_pause_if_active(tiny=True) elif self.slow_mode: self.__slow_mode_pause_if_active() return element def __select_option(self, dropdown_selector, option, dropdown_by=By.CSS_SELECTOR, option_by="text", timeout=None): if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) if page_utils.is_xpath_selector(dropdown_selector): dropdown_by = By.XPATH self.wait_for_ready_state_complete() element = self.wait_for_element_present( dropdown_selector, by=dropdown_by, timeout=timeout) if self.is_element_visible(dropdown_selector, by=dropdown_by): self.__demo_mode_highlight_if_active( dropdown_selector, dropdown_by) pre_action_url = self.driver.current_url try: if option_by == "index": Select(element).select_by_index(option) elif option_by == "value": Select(element).select_by_value(option) else: Select(element).select_by_visible_text(option) except (StaleElementReferenceException, ENI_Exception): self.wait_for_ready_state_complete() time.sleep(0.05) element = self.wait_for_element_present( dropdown_selector, by=dropdown_by, timeout=timeout) if option_by == "index": Select(element).select_by_index(option) elif option_by == "value": Select(element).select_by_value(option) else: Select(element).select_by_visible_text(option) if settings.WAIT_FOR_RSC_ON_CLICKS: self.wait_for_ready_state_complete() if self.demo_mode: if self.driver.current_url != pre_action_url: self.__demo_mode_pause_if_active() else: self.__demo_mode_pause_if_active(tiny=True) elif self.slow_mode: self.__slow_mode_pause_if_active() def select_option_by_text(self, dropdown_selector, option, dropdown_by=By.CSS_SELECTOR, timeout=None): if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) self.__select_option(dropdown_selector, option, dropdown_by=dropdown_by, option_by="text", timeout=timeout) def select_option_by_index(self, dropdown_selector, option, dropdown_by=By.CSS_SELECTOR, timeout=None): if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) self.__select_option(dropdown_selector, option, dropdown_by=dropdown_by, option_by="index", timeout=timeout) def select_option_by_value(self, dropdown_selector, option, dropdown_by=By.CSS_SELECTOR, timeout=None): if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) self.__select_option(dropdown_selector, option, dropdown_by=dropdown_by, option_by="value", timeout=timeout) def load_html_string(self, html_string, new_page=True): soup = self.get_beautiful_soup(html_string) scripts = soup.findAll("script") for script in scripts: html_string = html_string.replace(str(script), "") soup = self.get_beautiful_soup(html_string) found_head = False found_body = False html_head = None html_body = None if soup.head and len(str(soup.head)) > 12: found_head = True html_head = str(soup.head) html_head = re.escape(html_head) html_head = self.__escape_quotes_if_needed(html_head) html_head = html_head.replace('\\ ', ' ') if soup.body and len(str(soup.body)) > 12: 
found_body = True html_body = str(soup.body) html_body = re.escape(html_body) html_body = self.__escape_quotes_if_needed(html_body) html_body = html_body.replace('\\ ', ' ') html_string = re.escape(html_string) html_string = self.__escape_quotes_if_needed(html_string) html_string = html_string.replace('\\ ', ' ') if new_page: self.open("data:text/html,") inner_head = '''document.getElementsByTagName("head")[0].innerHTML''' inner_body = '''document.getElementsByTagName("body")[0].innerHTML''' if not found_body: self.execute_script( '''%s = \"%s\"''' % (inner_body, html_string)) elif found_body and not found_head: self.execute_script( '''%s = \"%s\"''' % (inner_body, html_body)) elif found_body and found_head: self.execute_script( '''%s = \"%s\"''' % (inner_head, html_head)) self.execute_script( '''%s = \"%s\"''' % (inner_body, html_body)) else: raise Exception("Logic Error!") for script in scripts: js_code = script.string js_code_lines = js_code.split('\n') new_lines = [] for line in js_code_lines: line = line.strip() new_lines.append(line) js_code = '\n'.join(new_lines) js_utils.add_js_code(self.driver, js_code) def load_html_file(self, html_file, new_page=True): if len(html_file) < 6 or not html_file.endswith(".html"): raise Exception('Expecting a ".html" file!') abs_path = os.path.abspath('.') file_path = abs_path + "/%s" % html_file f = open(file_path, 'r') html_string = f.read().strip() f.close() self.load_html_string(html_string, new_page) def open_html_file(self, html_file): if len(html_file) < 6 or not html_file.endswith(".html"): raise Exception('Expecting a ".html" file!') abs_path = os.path.abspath('.') file_path = abs_path + "/%s" % html_file self.open("file://" + file_path) def execute_script(self, script): return self.driver.execute_script(script) def execute_async_script(self, script, timeout=None): if not timeout: timeout = settings.EXTREME_TIMEOUT return js_utils.execute_async_script(self.driver, script, timeout) def safe_execute_script(self, script): try: self.execute_script(script) except Exception: # The likely reason this fails is because: "jQuery is not defined" self.activate_jquery() # It's a good thing we can define it here self.execute_script(script) def set_window_rect(self, x, y, width, height): self.driver.set_window_rect(x, y, width, height) self.__demo_mode_pause_if_active() def set_window_size(self, width, height): self.driver.set_window_size(width, height) self.__demo_mode_pause_if_active() def maximize_window(self): self.driver.maximize_window() self.__demo_mode_pause_if_active() def switch_to_frame(self, frame, timeout=None): if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) page_actions.switch_to_frame(self.driver, frame, timeout) def switch_to_default_content(self): self.driver.switch_to.default_content() def open_new_window(self, switch_to=True): self.driver.execute_script("window.open('');") time.sleep(0.01) if switch_to: self.switch_to_window(len(self.driver.window_handles) - 1) def switch_to_window(self, window, timeout=None): if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) page_actions.switch_to_window(self.driver, window, timeout) def switch_to_default_window(self): self.switch_to_window(0) def get_new_driver(self, browser=None, headless=None, servername=None, port=None, proxy=None, agent=None, switch_to=True, cap_file=None, 
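    # Sketch of the HTML-loading, scripting, and window helpers defined above, as
    # they might be called inside a BaseCase test method (the HTML string, script,
    # and window size are illustrative placeholders):
    #
    #     self.load_html_string("<h1>Hello</h1><p>Rendered without a server.</p>")
    #     title = self.execute_script("return document.title;")
    #     self.open_new_window()           # opens and switches to a second window
    #     self.switch_to_default_window()  # back to the first window handle
    #     self.set_window_size(1280, 800)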
cap_string=None, disable_csp=None, enable_sync=None, use_auto_ext=None, no_sandbox=None, disable_gpu=None, incognito=None, guest_mode=None, devtools=None, user_data_dir=None, extension_zip=None, extension_dir=None, is_mobile=False, d_width=None, d_height=None, d_p_r=None): if self.browser == "remote" and self.servername == "localhost": raise Exception('Cannot use "remote" browser driver on localhost!' ' Did you mean to connect to a remote Grid server' ' such as BrowserStack or Sauce Labs? In that' ' case, you must specify the "server" and "port"' ' parameters on the command line! ' 'Example: ' '--server=user:key@hub.browserstack.com --port=80') browserstack_ref = ( 'https://browserstack.com/automate/capabilities') sauce_labs_ref = ( 'https://wiki.saucelabs.com/display/DOCS/Platform+Configurator#/') if self.browser == "remote" and not (self.cap_file or self.cap_string): raise Exception('Need to specify a desired capabilities file when ' 'using "--browser=remote". Add "--cap_file=FILE". ' 'File should be in the Python format used by: ' '%s OR ' '%s ' 'See SeleniumBase/examples/sample_cap_file_BS.py ' 'and SeleniumBase/examples/sample_cap_file_SL.py' % (browserstack_ref, sauce_labs_ref)) if browser is None: browser = self.browser browser_name = browser if headless is None: headless = self.headless if servername is None: servername = self.servername if port is None: port = self.port use_grid = False if servername != "localhost": use_grid = True proxy_string = proxy if proxy_string is None: proxy_string = self.proxy_string user_agent = agent if user_agent is None: user_agent = self.user_agent if disable_csp is None: disable_csp = self.disable_csp if enable_sync is None: enable_sync = self.enable_sync if use_auto_ext is None: use_auto_ext = self.use_auto_ext if no_sandbox is None: no_sandbox = self.no_sandbox if disable_gpu is None: disable_gpu = self.disable_gpu if incognito is None: incognito = self.incognito if guest_mode is None: guest_mode = self.guest_mode if devtools is None: devtools = self.devtools if user_data_dir is None: user_data_dir = self.user_data_dir if extension_zip is None: extension_zip = self.extension_zip if extension_dir is None: extension_dir = self.extension_dir test_id = self.__get_test_id() if cap_file is None: cap_file = self.cap_file if cap_string is None: cap_string = self.cap_string if is_mobile is None: is_mobile = False if d_width is None: d_width = self.__device_width if d_height is None: d_height = self.__device_height if d_p_r is None: d_p_r = self.__device_pixel_ratio valid_browsers = constants.ValidBrowsers.valid_browsers if browser_name not in valid_browsers: raise Exception("Browser: {%s} is not a valid browser option. 
" "Valid options = {%s}" % (browser, valid_browsers)) from seleniumbase.core import browser_launcher new_driver = browser_launcher.get_driver(browser_name=browser_name, headless=headless, use_grid=use_grid, servername=servername, port=port, proxy_string=proxy_string, user_agent=user_agent, cap_file=cap_file, cap_string=cap_string, disable_csp=disable_csp, enable_sync=enable_sync, use_auto_ext=use_auto_ext, no_sandbox=no_sandbox, disable_gpu=disable_gpu, incognito=incognito, guest_mode=guest_mode, devtools=devtools, user_data_dir=user_data_dir, extension_zip=extension_zip, extension_dir=extension_dir, test_id=test_id, mobile_emulator=is_mobile, device_width=d_width, device_height=d_height, device_pixel_ratio=d_p_r) self._drivers_list.append(new_driver) if switch_to: self.driver = new_driver if self.headless: width = settings.HEADLESS_START_WIDTH height = settings.HEADLESS_START_HEIGHT try: self.driver.set_window_size(width, height) self.wait_for_ready_state_complete() except Exception: # get safely through setUp() so that # WebDrivers can get closed during tearDown(). pass else: if self.browser == 'chrome' or self.browser == 'edge': width = settings.CHROME_START_WIDTH height = settings.CHROME_START_HEIGHT try: if self.maximize_option: self.driver.maximize_window() else: self.driver.set_window_size(width, height) self.wait_for_ready_state_complete() except Exception: pass # Keep existing browser resolution elif self.browser == 'firefox': pass # No changes elif self.browser == 'safari': if self.maximize_option: try: self.driver.maximize_window() self.wait_for_ready_state_complete() except Exception: pass # Keep existing browser resolution else: try: self.driver.set_window_rect(10, 30, 945, 630) except Exception: pass if self.start_page and len(self.start_page) >= 4: if page_utils.is_valid_url(self.start_page): self.open(self.start_page) else: new_start_page = "http://" + self.start_page if page_utils.is_valid_url(new_start_page): self.open(new_start_page) return new_driver def switch_to_driver(self, driver): self.driver = driver def switch_to_default_driver(self): self.driver = self._default_driver def save_screenshot(self, name, folder=None): return page_actions.save_screenshot(self.driver, name, folder) def save_page_source(self, name, folder=None): return page_actions.save_page_source(self.driver, name, folder) def save_cookies(self, name="cookies.txt"): cookies = self.driver.get_cookies() json_cookies = json.dumps(cookies) if name.endswith('/'): raise Exception("Invalid filename for Cookies!") if '/' in name: name = name.split('/')[-1] if len(name) < 1: raise Exception("Filename for Cookies is too short!") if not name.endswith(".txt"): name = name + ".txt" folder = constants.SavedCookies.STORAGE_FOLDER abs_path = os.path.abspath('.') file_path = abs_path + "/%s" % folder if not os.path.exists(file_path): os.makedirs(file_path) cookies_file_path = "%s/%s" % (file_path, name) cookies_file = codecs.open(cookies_file_path, "w+") cookies_file.writelines(json_cookies) cookies_file.close() def load_cookies(self, name="cookies.txt"): if name.endswith('/'): raise Exception("Invalid filename for Cookies!") if '/' in name: name = name.split('/')[-1] if len(name) < 1: raise Exception("Filename for Cookies is too short!") if not name.endswith(".txt"): name = name + ".txt" folder = constants.SavedCookies.STORAGE_FOLDER abs_path = os.path.abspath('.') file_path = abs_path + "/%s" % folder cookies_file_path = "%s/%s" % (file_path, name) f = open(cookies_file_path, 'r') json_cookies = f.read().strip() 
f.close() cookies = json.loads(json_cookies) for cookie in cookies: if 'expiry' in cookie: del cookie['expiry'] self.driver.add_cookie(cookie) def delete_all_cookies(self): self.driver.delete_all_cookies() def delete_saved_cookies(self, name="cookies.txt"): if name.endswith('/'): raise Exception("Invalid filename for Cookies!") if '/' in name: name = name.split('/')[-1] if len(name) < 1: raise Exception("Filename for Cookies is too short!") if not name.endswith(".txt"): name = name + ".txt" folder = constants.SavedCookies.STORAGE_FOLDER abs_path = os.path.abspath('.') file_path = abs_path + "/%s" % folder cookies_file_path = "%s/%s" % (file_path, name) if os.path.exists(cookies_file_path): if cookies_file_path.endswith('.txt'): os.remove(cookies_file_path) def wait_for_ready_state_complete(self, timeout=None): try: # If there's an alert, skip self.driver.switch_to.alert return except Exception: pass if not timeout: timeout = settings.EXTREME_TIMEOUT if self.timeout_multiplier and timeout == settings.EXTREME_TIMEOUT: timeout = self.__get_new_timeout(timeout) is_ready = js_utils.wait_for_ready_state_complete(self.driver, timeout) self.wait_for_angularjs(timeout=settings.MINI_TIMEOUT) if self.js_checking_on: self.assert_no_js_errors() if self.ad_block_on: # If the ad_block feature is enabled, then block ads for new URLs current_url = self.get_current_url() if not current_url == self.__last_page_load_url: time.sleep(0.02) self.ad_block() time.sleep(0.01) if self.is_element_present("iframe"): time.sleep(0.07) # iframe ads take slightly longer to load self.ad_block() # Do ad_block on slower-loading iframes self.__last_page_load_url = current_url return is_ready def wait_for_angularjs(self, timeout=None, **kwargs): if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) js_utils.wait_for_angularjs(self.driver, timeout, **kwargs) def sleep(self, seconds): if not sb_config.time_limit: time.sleep(seconds) else: start_ms = time.time() * 1000.0 stop_ms = start_ms + (seconds * 1000.0) for x in range(int(seconds * 5)): shared_utils.check_if_time_limit_exceeded() now_ms = time.time() * 1000.0 if now_ms >= stop_ms: break time.sleep(0.2) def activate_jquery(self): js_utils.activate_jquery(self.driver) self.wait_for_ready_state_complete() def __are_quotes_escaped(self, string): return js_utils.are_quotes_escaped(string) def __escape_quotes_if_needed(self, string): return js_utils.escape_quotes_if_needed(string) def bring_to_front(self, selector, by=By.CSS_SELECTOR): if page_utils.is_xpath_selector(selector): by = By.XPATH self.wait_for_element_visible( selector, by=by, timeout=settings.SMALL_TIMEOUT) try: selector = self.convert_to_css_selector(selector, by=by) except Exception: # Don't run action if can't convert to CSS_Selector for JavaScript return selector = re.escape(selector) selector = self.__escape_quotes_if_needed(selector) script = ("""document.querySelector('%s').style.zIndex = '999999';""" % selector) self.execute_script(script) def highlight_click(self, selector, by=By.CSS_SELECTOR, loops=3, scroll=True): if not self.demo_mode: self.highlight(selector, by=by, loops=loops, scroll=scroll) self.click(selector, by=by) def highlight_update_text(self, selector, new_value, by=By.CSS_SELECTOR, loops=3, scroll=True): if not self.demo_mode: self.highlight(selector, by=by, loops=loops, scroll=scroll) self.update_text(selector, new_value, by=by) def highlight(self, selector, by=By.CSS_SELECTOR, loops=None, 
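    # Sketch of the page-readiness and highlighting helpers above, as used inside
    # a BaseCase test method (selectors are hypothetical):
    #
    #     self.wait_for_ready_state_complete()    # document.readyState == "complete"
    #     self.activate_jquery()                  # inject jQuery if it is missing
    #     self.bring_to_front("div.modal")        # raise the element's z-index
    #     self.highlight_click("#submit-button")  # flash the element, then click it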
scroll=True): selector, by = self.__recalculate_selector(selector, by) element = self.wait_for_element_visible( selector, by=by, timeout=settings.SMALL_TIMEOUT) if not loops: loops = settings.HIGHLIGHTS if scroll: try: self.__slow_scroll_to_element(element) except (StaleElementReferenceException, ENI_Exception): self.wait_for_ready_state_complete() time.sleep(0.05) element = self.wait_for_element_visible( selector, by=by, timeout=settings.SMALL_TIMEOUT) self.__slow_scroll_to_element(element) try: selector = self.convert_to_css_selector(selector, by=by) except Exception: # Don't highlight if can't convert to CSS_SELECTOR return if self.highlights: loops = self.highlights if self.browser == 'ie': loops = 1 # Override previous setting because IE is slow loops = int(loops) o_bs = '' # original_box_shadow try: style = element.get_attribute('style') except (StaleElementReferenceException, ENI_Exception): self.wait_for_ready_state_complete() time.sleep(0.05) element = self.wait_for_element_visible( selector, by=By.CSS_SELECTOR, timeout=settings.SMALL_TIMEOUT) style = element.get_attribute('style') if style: if 'box-shadow: ' in style: box_start = style.find('box-shadow: ') box_end = style.find(';', box_start) + 1 original_box_shadow = style[box_start:box_end] o_bs = original_box_shadow if ":contains" not in selector and ":first" not in selector: selector = re.escape(selector) selector = self.__escape_quotes_if_needed(selector) self.__highlight_with_js(selector, loops, o_bs) else: selector = self.__make_css_match_first_element_only(selector) selector = re.escape(selector) selector = self.__escape_quotes_if_needed(selector) try: self.__highlight_with_jquery(selector, loops, o_bs) except Exception: pass # JQuery probably couldn't load. Skip highlighting. time.sleep(0.065) def __highlight_with_js(self, selector, loops, o_bs): js_utils.highlight_with_js(self.driver, selector, loops, o_bs) def __highlight_with_jquery(self, selector, loops, o_bs): js_utils.highlight_with_jquery(self.driver, selector, loops, o_bs) def press_up_arrow(self, selector="html", times=1, by=By.CSS_SELECTOR): if times < 1: return element = self.wait_for_element_present(selector) self.__demo_mode_highlight_if_active(selector, by) if not self.demo_mode: self.__scroll_to_element(element, selector, by) for i in range(int(times)): try: element.send_keys(Keys.ARROW_UP) except Exception: self.wait_for_ready_state_complete() element = self.wait_for_element_visible(selector) element.send_keys(Keys.ARROW_UP) time.sleep(0.01) if self.slow_mode: time.sleep(0.1) def press_down_arrow(self, selector="html", times=1, by=By.CSS_SELECTOR): if times < 1: return element = self.wait_for_element_present(selector) self.__demo_mode_highlight_if_active(selector, by) if not self.demo_mode: self.__scroll_to_element(element, selector, by) for i in range(int(times)): try: element.send_keys(Keys.ARROW_DOWN) except Exception: self.wait_for_ready_state_complete() element = self.wait_for_element_visible(selector) element.send_keys(Keys.ARROW_DOWN) time.sleep(0.01) if self.slow_mode: time.sleep(0.1) def press_left_arrow(self, selector="html", times=1, by=By.CSS_SELECTOR): if times < 1: return element = self.wait_for_element_present(selector) self.__demo_mode_highlight_if_active(selector, by) if not self.demo_mode: self.__scroll_to_element(element, selector, by) for i in range(int(times)): try: element.send_keys(Keys.ARROW_LEFT) except Exception: self.wait_for_ready_state_complete() element = self.wait_for_element_visible(selector) 
element.send_keys(Keys.ARROW_LEFT) time.sleep(0.01) if self.slow_mode: time.sleep(0.1) def press_right_arrow(self, selector="html", times=1, by=By.CSS_SELECTOR): if times < 1: return element = self.wait_for_element_present(selector) self.__demo_mode_highlight_if_active(selector, by) if not self.demo_mode: self.__scroll_to_element(element, selector, by) for i in range(int(times)): try: element.send_keys(Keys.ARROW_RIGHT) except Exception: self.wait_for_ready_state_complete() element = self.wait_for_element_visible(selector) element.send_keys(Keys.ARROW_RIGHT) time.sleep(0.01) if self.slow_mode: time.sleep(0.1) def scroll_to(self, selector, by=By.CSS_SELECTOR, timeout=None): if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) if self.demo_mode or self.slow_mode: self.slow_scroll_to(selector, by=by, timeout=timeout) return element = self.wait_for_element_visible( selector, by=by, timeout=timeout) try: self.__scroll_to_element(element, selector, by) except (StaleElementReferenceException, ENI_Exception): self.wait_for_ready_state_complete() time.sleep(0.05) element = self.wait_for_element_visible( selector, by=by, timeout=timeout) self.__scroll_to_element(element, selector, by) def slow_scroll_to(self, selector, by=By.CSS_SELECTOR, timeout=None): if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) selector, by = self.__recalculate_selector(selector, by) element = self.wait_for_element_visible( selector, by=by, timeout=timeout) try: self.__slow_scroll_to_element(element) except (StaleElementReferenceException, ENI_Exception): self.wait_for_ready_state_complete() time.sleep(0.05) element = self.wait_for_element_visible( selector, by=by, timeout=timeout) self.__slow_scroll_to_element(element) def scroll_to_top(self): scroll_script = "window.scrollTo(0, 0);" try: self.execute_script(scroll_script) time.sleep(0.012) return True except Exception: return False def scroll_to_bottom(self): scroll_script = "window.scrollTo(0, 10000);" try: self.execute_script(scroll_script) time.sleep(0.012) return True except Exception: return False def click_xpath(self, xpath): self.click(xpath, by=By.XPATH) def js_click(self, selector, by=By.CSS_SELECTOR, all_matches=False): selector, by = self.__recalculate_selector(selector, by) if by == By.LINK_TEXT: message = ( "Pure JavaScript doesn't support clicking by Link Text. " "You may want to use self.jquery_click() instead, which " "allows this with :contains(), assuming jQuery isn't blocked. 
" "For now, self.js_click() will use a regular WebDriver click.") logging.debug(message) self.click(selector, by=by) return element = self.wait_for_element_present( selector, by=by, timeout=settings.SMALL_TIMEOUT) if self.is_element_visible(selector, by=by): self.__demo_mode_highlight_if_active(selector, by) if not self.demo_mode: self.__scroll_to_element(element, selector, by) css_selector = self.convert_to_css_selector(selector, by=by) css_selector = re.escape(css_selector) css_selector = self.__escape_quotes_if_needed(css_selector) if not all_matches: self.__js_click(selector, by=by) else: self.__js_click_all(selector, by=by) self.wait_for_ready_state_complete() self.__demo_mode_pause_if_active() def js_click_all(self, selector, by=By.CSS_SELECTOR): self.js_click(selector, by=By.CSS_SELECTOR, all_matches=True) def jquery_click(self, selector, by=By.CSS_SELECTOR): selector, by = self.__recalculate_selector(selector, by) self.wait_for_element_present( selector, by=by, timeout=settings.SMALL_TIMEOUT) if self.is_element_visible(selector, by=by): self.__demo_mode_highlight_if_active(selector, by) selector = self.convert_to_css_selector(selector, by=by) selector = self.__make_css_match_first_element_only(selector) click_script = """jQuery('%s')[0].click()""" % selector self.safe_execute_script(click_script) self.__demo_mode_pause_if_active() def jquery_click_all(self, selector, by=By.CSS_SELECTOR): selector, by = self.__recalculate_selector(selector, by) self.wait_for_element_present( selector, by=by, timeout=settings.SMALL_TIMEOUT) if self.is_element_visible(selector, by=by): self.__demo_mode_highlight_if_active(selector, by) selector = self.convert_to_css_selector(selector, by=by) click_script = """jQuery('%s').click()""" % selector self.safe_execute_script(click_script) self.__demo_mode_pause_if_active() def hide_element(self, selector, by=By.CSS_SELECTOR): selector, by = self.__recalculate_selector(selector, by) selector = self.convert_to_css_selector(selector, by=by) selector = self.__make_css_match_first_element_only(selector) hide_script = """jQuery('%s').hide()""" % selector self.safe_execute_script(hide_script) def hide_elements(self, selector, by=By.CSS_SELECTOR): selector, by = self.__recalculate_selector(selector, by) selector = self.convert_to_css_selector(selector, by=by) hide_script = """jQuery('%s').hide()""" % selector self.safe_execute_script(hide_script) def show_element(self, selector, by=By.CSS_SELECTOR): selector, by = self.__recalculate_selector(selector, by) selector = self.convert_to_css_selector(selector, by=by) selector = self.__make_css_match_first_element_only(selector) show_script = """jQuery('%s').show(0)""" % selector self.safe_execute_script(show_script) def show_elements(self, selector, by=By.CSS_SELECTOR): selector, by = self.__recalculate_selector(selector, by) selector = self.convert_to_css_selector(selector, by=by) show_script = """jQuery('%s').show(0)""" % selector self.safe_execute_script(show_script) def remove_element(self, selector, by=By.CSS_SELECTOR): selector, by = self.__recalculate_selector(selector, by) selector = self.convert_to_css_selector(selector, by=by) selector = self.__make_css_match_first_element_only(selector) remove_script = """jQuery('%s').remove()""" % selector self.safe_execute_script(remove_script) def remove_elements(self, selector, by=By.CSS_SELECTOR): selector, by = self.__recalculate_selector(selector, by) selector = self.convert_to_css_selector(selector, by=by) remove_script = """jQuery('%s').remove()""" % selector 
self.safe_execute_script(remove_script) def ad_block(self): self.wait_for_ready_state_complete() from seleniumbase.config import ad_block_list for css_selector in ad_block_list.AD_BLOCK_LIST: css_selector = re.escape(css_selector) css_selector = self.__escape_quotes_if_needed(css_selector) script = ("""var $elements = document.querySelectorAll('%s'); var index = 0, length = $elements.length; for(; index < length; index++){ $elements[index].remove();}""" % css_selector) try: self.execute_script(script) except Exception: pass def block_ads(self): self.ad_block() def get_domain_url(self, url): return page_utils.get_domain_url(url) def get_beautiful_soup(self, source=None): from bs4 import BeautifulSoup if not source: self.wait_for_ready_state_complete() source = self.get_page_source() soup = BeautifulSoup(source, "html.parser") return soup def get_unique_links(self): page_url = self.get_current_url() soup = self.get_beautiful_soup(self.get_page_source()) links = page_utils._get_unique_links(page_url, soup) return links def get_link_status_code(self, link, allow_redirects=False, timeout=5): status_code = page_utils._get_link_status_code( link, allow_redirects=allow_redirects, timeout=timeout) return status_code def assert_link_status_code_is_not_404(self, link): status_code = str(self.get_link_status_code(link)) bad_link_str = 'Error: "%s" returned a 404!' % link self.assertNotEqual(status_code, "404", bad_link_str) def assert_no_404_errors(self, multithreaded=True): all_links = self.get_unique_links() links = [] for link in all_links: if "javascript:" not in link and "mailto:" not in link: links.append(link) if multithreaded: from multiprocessing.dummy import Pool as ThreadPool pool = ThreadPool(10) pool.map(self.assert_link_status_code_is_not_404, links) pool.close() pool.join() else: for link in links: self.assert_link_status_code_is_not_404(link) if self.demo_mode: messenger_post = ("ASSERT NO 404 ERRORS") self.__highlight_with_assert_success(messenger_post, "html") def print_unique_links_with_status_codes(self): page_url = self.get_current_url() soup = self.get_beautiful_soup(self.get_page_source()) page_utils._print_unique_links_with_status_codes(page_url, soup) def __fix_unicode_conversion(self, text): if sys.version_info[0] < 3: # Update encoding for Python 2 users reload(sys) # noqa sys.setdefaultencoding('utf8') text = text.replace(u'\u2f8f', u'\u884c') text = text.replace(u'\u2f45', u'\u65b9') text = text.replace(u'\u2f08', u'\u4eba') text = text.replace(u'\u2f70', u'\u793a') return text def get_pdf_text(self, pdf, page=None, maxpages=None, password=None, codec='utf-8', wrap=False, nav=False, override=False): import warnings with warnings.catch_warnings(): warnings.simplefilter("ignore", category=UserWarning) from pdfminer.high_level import extract_text if not password: password = '' if not maxpages: maxpages = 0 if not pdf.lower().endswith('.pdf'): raise Exception("%s is not a PDF file! (Expecting a .pdf)" % pdf) file_path = None if page_utils.is_valid_url(pdf): if nav: if self.get_current_url() != pdf: self.open(pdf) file_name = pdf.split('/')[-1] file_path = self.get_downloads_folder() + '/' + file_name if not os.path.exists(file_path): self.download_file(pdf) elif override: self.download_file(pdf) else: if not os.path.exists(pdf): raise Exception("%s is not a valid URL or file path!" 
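    # Sketch of the ad-blocking and link-audit helpers above inside a BaseCase
    # test method (the URL is a placeholder):
    #
    #     self.open("https://example.test/")
    #     self.ad_block()                  # remove elements matching the ad-block list
    #     links = self.get_unique_links()  # unique hrefs scraped from the page
    #     self.assert_no_404_errors()      # multithreaded status-code check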
% pdf) file_path = os.path.abspath(pdf) page_search = None # (Pages are delimited by '\x0c') if type(page) is list: pages = page page_search = [] for page in pages: page_search.append(page - 1) elif type(page) is int: page = page - 1 if page < 0: page = 0 page_search = [page] else: page_search = None pdf_text = extract_text( file_path, password='', page_numbers=page_search, maxpages=maxpages, caching=False, codec=codec) pdf_text = self.__fix_unicode_conversion(pdf_text) if wrap: pdf_text = pdf_text.replace(' \n', ' ') return pdf_text def assert_pdf_text(self, pdf, text, page=None, maxpages=None, password=None, codec='utf-8', wrap=True, nav=False, override=False): text = self.__fix_unicode_conversion(text) if not codec: codec = 'utf-8' pdf_text = self.get_pdf_text( pdf, page=page, maxpages=maxpages, password=password, codec=codec, wrap=wrap, nav=nav, override=override) if type(page) is int: if text not in pdf_text: raise Exception("PDF [%s] is missing expected text [%s] on " "page [%s]!" % (pdf, text, page)) else: if text not in pdf_text: raise Exception("PDF [%s] is missing expected text [%s]!" "" % (pdf, text)) return True def create_folder(self, folder): if folder.endswith("/"): folder = folder[:-1] if len(folder) < 1: raise Exception("Minimum folder name length = 1.") if not os.path.exists(folder): try: os.makedirs(folder) except Exception: pass def choose_file(self, selector, file_path, by=By.CSS_SELECTOR, timeout=None): if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) if page_utils.is_xpath_selector(selector): by = By.XPATH abs_path = os.path.abspath(file_path) self.add_text(selector, abs_path, by=by, timeout=timeout) def save_element_as_image_file(self, selector, file_name, folder=None): element = self.wait_for_element_visible(selector) element_png = element.screenshot_as_png if len(file_name.split('.')[0]) < 1: raise Exception("Error: file_name length must be > 0.") if not file_name.endswith(".png"): file_name = file_name + ".png" image_file_path = None if folder: if folder.endswith("/"): folder = folder[:-1] if len(folder) > 0: self.create_folder(folder) image_file_path = "%s/%s" % (folder, file_name) if not image_file_path: image_file_path = file_name with open(image_file_path, "wb") as file: file.write(element_png) def download_file(self, file_url, destination_folder=None): if not destination_folder: destination_folder = constants.Files.DOWNLOADS_FOLDER if not os.path.exists(destination_folder): os.makedirs(destination_folder) page_utils._download_file_to(file_url, destination_folder) def save_file_as(self, file_url, new_file_name, destination_folder=None): if not destination_folder: destination_folder = constants.Files.DOWNLOADS_FOLDER page_utils._download_file_to( file_url, destination_folder, new_file_name) def save_data_as(self, data, file_name, destination_folder=None): if not destination_folder: destination_folder = constants.Files.DOWNLOADS_FOLDER page_utils._save_data_as(data, destination_folder, file_name) def get_downloads_folder(self): return download_helper.get_downloads_folder() def get_path_of_downloaded_file(self, file): return os.path.join(self.get_downloads_folder(), file) def is_downloaded_file_present(self, file): return os.path.exists(self.get_path_of_downloaded_file(file)) def assert_downloaded_file(self, file, timeout=None): if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = 
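    # Sketch of the PDF and file helpers above inside a BaseCase test method
    # (paths, URLs, and expected text are placeholders):
    #
    #     text = self.get_pdf_text("reports/q3.pdf", page=1)
    #     self.assert_pdf_text("reports/q3.pdf", "Quarterly Totals", page=1)
    #     self.choose_file("input#file-upload", "data/sample.csv")
    #     self.download_file("https://example.test/archive.zip")
    #     self.assert_downloaded_file("archive.zip")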
self.__get_new_timeout(timeout) start_ms = time.time() * 1000.0 stop_ms = start_ms + (timeout * 1000.0) for x in range(int(timeout)): shared_utils.check_if_time_limit_exceeded() try: self.assertTrue( os.path.exists(self.get_path_of_downloaded_file(file)), "File [%s] was not found in the downloads folder [%s]!" "" % (file, self.get_downloads_folder())) if self.demo_mode: messenger_post = ("ASSERT DOWNLOADED FILE: [%s]" % file) js_utils.post_messenger_success_message( self.driver, messenger_post, self.message_duration) return except Exception: now_ms = time.time() * 1000.0 if now_ms >= stop_ms: break time.sleep(1) self.assertTrue( os.path.exists(self.get_path_of_downloaded_file(file)), "File [%s] was not found in the downloads folder [%s] " "after %s seconds! (Or the download didn't complete!)" "" % (file, self.get_downloads_folder(), timeout)) if self.demo_mode: messenger_post = ("ASSERT DOWNLOADED FILE: [%s]" % file) js_utils.post_messenger_success_message( self.driver, messenger_post, self.message_duration) def assert_true(self, expr, msg=None): self.assertTrue(expr, msg=msg) def assert_false(self, expr, msg=None): self.assertFalse(expr, msg=msg) def assert_equal(self, first, second, msg=None): self.assertEqual(first, second, msg=msg) def assert_not_equal(self, first, second, msg=None): self.assertNotEqual(first, second, msg=msg) def assert_raises(self, *args, **kwargs): self.assertRaises(*args, **kwargs) def assert_title(self, title): expected = title actual = self.get_page_title() self.assertEqual(expected, actual, "Expected page title [%s] " "does not match the actual page title [%s]!" "" % (expected, actual)) if self.demo_mode: messenger_post = ("ASSERT TITLE = {%s}" % title) self.__highlight_with_assert_success(messenger_post, "html") def assert_no_js_errors(self): time.sleep(0.1) try: browser_logs = self.driver.get_log('browser') except (ValueError, WebDriverException): return messenger_library = "//cdnjs.cloudflare.com/ajax/libs/messenger" errors = [] for entry in browser_logs: if entry['level'] == 'SEVERE': if messenger_library not in entry['message']: errors.append(entry) if len(errors) > 0: current_url = self.get_current_url() raise Exception( "JavaScript errors found on %s => %s" % (current_url, errors)) if self.demo_mode: if (self.browser == 'chrome' or self.browser == 'edge'): messenger_post = ("ASSERT NO JS ERRORS") self.__highlight_with_assert_success(messenger_post, "html") def __activate_html_inspector(self): self.wait_for_ready_state_complete() time.sleep(0.05) js_utils.activate_html_inspector(self.driver) def inspect_html(self): self.__activate_html_inspector() script = ("""HTMLInspector.inspect();""") self.execute_script(script) time.sleep(0.1) browser_logs = [] try: browser_logs = self.driver.get_log('browser') except (ValueError, WebDriverException): return("(Unable to Inspect HTML! 
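    # Sketch of the basic assertion helpers above inside a BaseCase test method
    # (the expected title is a placeholder):
    #
    #     self.assert_title("Dashboard")   # exact page-title match
    #     self.assert_no_js_errors()       # fails on SEVERE browser console entries
    #     self.assert_equal(self.get_page_title(), "Dashboard")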
-> Only works on Chrome!)") messenger_library = "//cdnjs.cloudflare.com/ajax/libs/messenger" url = self.get_current_url() header = '\n* HTML Inspection Results: %s' % url results = [header] row_count = 0 for entry in browser_logs: message = entry['message'] if "0:6053 " in message: message = message.split("0:6053")[1] message = message.replace("\\u003C", "<") if message.startswith(' "') and message.count('"') == 2: message = message.split('"')[1] message = "X - " + message if messenger_library not in message: if message not in results: results.append(message) row_count += 1 if row_count > 0: results.append('* (See the Console output for details!)') else: results.append('* (No issues detected!)') results = '\n'.join(results) print(results) return(results) def get_google_auth_password(self, totp_key=None): import pyotp if not totp_key: totp_key = settings.TOTP_KEY epoch_interval = time.time() / 30.0 cycle_lifespan = float(epoch_interval) - int(epoch_interval) if float(cycle_lifespan) > 0.95: # Password expires in the next 1.5 seconds. Wait for a new one. for i in range(30): time.sleep(0.05) epoch_interval = time.time() / 30.0 cycle_lifespan = float(epoch_interval) - int(epoch_interval) if not float(cycle_lifespan) > 0.95: # The new password cycle has begun break totp = pyotp.TOTP(totp_key) return str(totp.now()) def convert_xpath_to_css(self, xpath): return xpath_to_css.convert_xpath_to_css(xpath) def convert_to_css_selector(self, selector, by): if by == By.CSS_SELECTOR: return selector elif by == By.ID: return '#%s' % selector elif by == By.CLASS_NAME: return '.%s' % selector elif by == By.NAME: return '[name="%s"]' % selector elif by == By.TAG_NAME: return selector elif by == By.XPATH: return self.convert_xpath_to_css(selector) elif by == By.LINK_TEXT: return 'a:contains("%s")' % selector elif by == By.PARTIAL_LINK_TEXT: return 'a:contains("%s")' % selector else: raise Exception( "Exception: Could not convert {%s}(by=%s) to CSS_SELECTOR!" 
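    # Sketch of the inspection, TOTP, and selector-conversion helpers above inside
    # a BaseCase test method (the TOTP secret shown is a placeholder; a real key
    # must be valid base32):
    #
    #     self.inspect_html()                                   # prints HTML issues (Chrome)
    #     otp = self.get_google_auth_password("JBSWY3DPEHPK3PXP")
    #     css = self.convert_xpath_to_css("//div[@id='main']")  # XPath -> CSS selector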
% ( selector, by)) def set_value(self, selector, new_value, by=By.CSS_SELECTOR, timeout=None): if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) if page_utils.is_xpath_selector(selector): by = By.XPATH orginal_selector = selector css_selector = self.convert_to_css_selector(selector, by=by) self.__demo_mode_highlight_if_active(orginal_selector, by) if not self.demo_mode: self.scroll_to(orginal_selector, by=by, timeout=timeout) value = re.escape(new_value) value = self.__escape_quotes_if_needed(value) css_selector = re.escape(css_selector) css_selector = self.__escape_quotes_if_needed(css_selector) script = ("""document.querySelector('%s').value='%s';""" % (css_selector, value)) self.execute_script(script) if new_value.endswith('\n'): element = self.wait_for_element_present( orginal_selector, by=by, timeout=timeout) element.send_keys(Keys.RETURN) if settings.WAIT_FOR_RSC_ON_PAGE_LOADS: self.wait_for_ready_state_complete() self.__demo_mode_pause_if_active() def js_update_text(self, selector, new_value, by=By.CSS_SELECTOR, timeout=None): if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) self.set_value( selector, new_value, by=by, timeout=timeout) def jquery_update_text(self, selector, new_value, by=By.CSS_SELECTOR, timeout=None): if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) if page_utils.is_xpath_selector(selector): by = By.XPATH element = self.wait_for_element_visible( selector, by=by, timeout=timeout) self.__demo_mode_highlight_if_active(selector, by) self.scroll_to(selector, by=by) selector = self.convert_to_css_selector(selector, by=by) selector = self.__make_css_match_first_element_only(selector) selector = self.__escape_quotes_if_needed(selector) new_value = re.escape(new_value) new_value = self.__escape_quotes_if_needed(new_value) update_text_script = """jQuery('%s').val('%s')""" % ( selector, new_value) self.safe_execute_script(update_text_script) if new_value.endswith('\n'): element.send_keys('\n') self.__demo_mode_pause_if_active() def set_time_limit(self, time_limit): if time_limit: try: sb_config.time_limit = float(time_limit) except Exception: sb_config.time_limit = None else: sb_config.time_limit = None if sb_config.time_limit and sb_config.time_limit > 0: sb_config.time_limit_ms = int(sb_config.time_limit * 1000.0) self.time_limit = sb_config.time_limit else: self.time_limit = None sb_config.time_limit = None sb_config.time_limit_ms = None def skip(self, reason=""): self.skipTest(reason) ############ def add_css_link(self, css_link): js_utils.add_css_link(self.driver, css_link) def add_js_link(self, js_link): js_utils.add_js_link(self.driver, js_link) def add_css_style(self, css_style): js_utils.add_css_style(self.driver, css_style) def add_js_code_from_link(self, js_link): js_utils.add_js_code_from_link(self.driver, js_link) def add_js_code(self, js_code): js_utils.add_js_code(self.driver, js_code) def add_meta_tag(self, http_equiv=None, content=None): js_utils.add_meta_tag( self.driver, http_equiv=http_equiv, content=content) ############ def create_tour(self, name=None, theme=None): if not name: name = "default" if theme: if theme.lower() == "bootstrap": self.create_bootstrap_tour(name) return elif theme.lower() == "hopscotch": 
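    # Sketch of the value-setting helpers above inside a BaseCase test method
    # (selectors and values are placeholders; a trailing "\n" presses RETURN):
    #
    #     self.set_value("input#email", "ada@example.test")
    #     self.js_update_text("input#search", "seleniumbase\n")
    #     self.jquery_update_text("textarea#notes", "Draft text")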
self.create_hopscotch_tour(name) return elif theme.lower() == "intro": self.create_introjs_tour(name) return elif theme.lower() == "introjs": self.create_introjs_tour(name) return elif theme.lower() == "shepherd": self.create_shepherd_tour(name, theme="light") return else: self.create_shepherd_tour(name, theme) else: self.create_shepherd_tour(name, theme="light") def create_shepherd_tour(self, name=None, theme=None): shepherd_theme = "shepherd-theme-arrows" if theme: if theme.lower() == "default": shepherd_theme = "shepherd-theme-default" elif theme.lower() == "dark": shepherd_theme = "shepherd-theme-dark" elif theme.lower() == "light": shepherd_theme = "shepherd-theme-arrows" elif theme.lower() == "arrows": shepherd_theme = "shepherd-theme-arrows" elif theme.lower() == "square": shepherd_theme = "shepherd-theme-square" elif theme.lower() == "square-dark": shepherd_theme = "shepherd-theme-square-dark" if not name: name = "default" new_tour = ( """ // Shepherd Tour var tour = new Shepherd.Tour({ defaults: { classes: '%s', scrollTo: true } }); var allButtons = { skip: { text: "Skip", action: tour.cancel, classes: 'shepherd-button-secondary tour-button-left' }, back: { text: "Back", action: tour.back, classes: 'shepherd-button-secondary' }, next: { text: "Next", action: tour.next, classes: 'shepherd-button-primary tour-button-right' }, }; var firstStepButtons = [allButtons.skip, allButtons.next]; var midTourButtons = [allButtons.back, allButtons.next]; """ % shepherd_theme) self._tour_steps[name] = [] self._tour_steps[name].append(new_tour) def create_bootstrap_tour(self, name=None): if not name: name = "default" new_tour = ( """ // Bootstrap Tour var tour = new Tour({ }); tour.addSteps([ """) self._tour_steps[name] = [] self._tour_steps[name].append(new_tour) def create_hopscotch_tour(self, name=None): if not name: name = "default" new_tour = ( """ // Hopscotch Tour var tour = { id: "hopscotch_tour", steps: [ """) self._tour_steps[name] = [] self._tour_steps[name].append(new_tour) def create_introjs_tour(self, name=None): if not name: name = "default" new_tour = ( """ // IntroJS Tour function startIntro(){ var intro = introJs(); intro.setOptions({ steps: [ """) self._tour_steps[name] = [] self._tour_steps[name].append(new_tour) def add_tour_step(self, message, selector=None, name=None, title=None, theme=None, alignment=None, duration=None): if not selector: selector = "html" if page_utils.is_xpath_selector(selector): selector = self.convert_to_css_selector(selector, By.XPATH) selector = self.__escape_quotes_if_needed(selector) if not name: name = "default" if name not in self._tour_steps: # By default, will create an IntroJS tour if no tours exist self.create_tour(name=name, theme="introjs") if not title: title = "" title = self.__escape_quotes_if_needed(title) if message: message = self.__escape_quotes_if_needed(message) else: message = "" if not alignment or ( alignment not in ["top", "bottom", "left", "right"]): if "Hopscotch" not in self._tour_steps[name][0]: alignment = "top" else: alignment = "bottom" if "Bootstrap" in self._tour_steps[name][0]: self.__add_bootstrap_tour_step( message, selector=selector, name=name, title=title, alignment=alignment, duration=duration) elif "Hopscotch" in self._tour_steps[name][0]: self.__add_hopscotch_tour_step( message, selector=selector, name=name, title=title, alignment=alignment) elif "IntroJS" in self._tour_steps[name][0]: self.__add_introjs_tour_step( message, selector=selector, name=name, title=title, alignment=alignment) else: 
self.__add_shepherd_tour_step( message, selector=selector, name=name, title=title, theme=theme, alignment=alignment) def __add_shepherd_tour_step(self, message, selector=None, name=None, title=None, theme=None, alignment=None): if theme == "default": shepherd_theme = "shepherd-theme-default" elif theme == "dark": shepherd_theme = "shepherd-theme-dark" elif theme == "light": shepherd_theme = "shepherd-theme-arrows" elif theme == "arrows": shepherd_theme = "shepherd-theme-arrows" elif theme == "square": shepherd_theme = "shepherd-theme-square" elif theme == "square-dark": shepherd_theme = "shepherd-theme-square-dark" else: shepherd_base_theme = re.search( r"[\S\s]+classes: '([\S\s]+)',[\S\s]+", self._tour_steps[name][0]).group(1) shepherd_theme = shepherd_base_theme shepherd_classes = shepherd_theme if selector == "html": shepherd_classes += " shepherd-orphan" buttons = "firstStepButtons" if len(self._tour_steps[name]) > 1: buttons = "midTourButtons" step = (""" tour.addStep('%s', { title: '%s', classes: '%s', text: '%s', attachTo: {element: '%s', on: '%s'}, buttons: %s, advanceOn: '.docs-link click' });""" % ( name, title, shepherd_classes, message, selector, alignment, buttons)) self._tour_steps[name].append(step) def __add_bootstrap_tour_step(self, message, selector=None, name=None, title=None, alignment=None, duration=None): if selector != "html": selector = self.__make_css_match_first_element_only(selector) element_row = "element: '%s'," % selector else: element_row = "" if not duration: duration = "0" else: duration = str(float(duration) * 1000.0) step = ("""{ %s title: '%s', content: '%s', orphan: true, placement: 'auto %s', smartPlacement: true, duration: %s, },""" % (element_row, title, message, alignment, duration)) self._tour_steps[name].append(step) def __add_hopscotch_tour_step(self, message, selector=None, name=None, title=None, alignment=None): arrow_offset_row = None if not selector or selector == "html": selector = "head" alignment = "bottom" arrow_offset_row = "arrowOffset: '200'," else: arrow_offset_row = "" step = ("""{ target: '%s', title: '%s', content: '%s', %s showPrevButton: 'true', scrollDuration: '550', placement: '%s'}, """ % (selector, title, message, arrow_offset_row, alignment)) self._tour_steps[name].append(step) def __add_introjs_tour_step(self, message, selector=None, name=None, title=None, alignment=None): if selector != "html": element_row = "element: '%s'," % selector else: element_row = "" if title: message = "<center><b>" + title + "</b></center><hr>" + message message = '<font size=\"3\" color=\"#33475B\">' + message + '</font>' step = ("""{%s intro: '%s', position: '%s'}, """ % (element_row, message, alignment)) self._tour_steps[name].append(step) def play_tour(self, name=None, interval=0): if self.headless: return # Tours should not run in headless mode. if not name: name = "default" if name not in self._tour_steps: raise Exception("Tour {%s} does not exist!" 
% name) if "Bootstrap" in self._tour_steps[name][0]: tour_helper.play_bootstrap_tour( self.driver, self._tour_steps, self.browser, self.message_duration, name=name, interval=interval) elif "Hopscotch" in self._tour_steps[name][0]: tour_helper.play_hopscotch_tour( self.driver, self._tour_steps, self.browser, self.message_duration, name=name, interval=interval) elif "IntroJS" in self._tour_steps[name][0]: tour_helper.play_introjs_tour( self.driver, self._tour_steps, self.browser, self.message_duration, name=name, interval=interval) else: # "Shepherd" tour_helper.play_shepherd_tour( self.driver, self._tour_steps, self.message_duration, name=name, interval=interval) def export_tour(self, name=None, filename="my_tour.js", url=None): if not url: url = self.get_current_url() tour_helper.export_tour( self._tour_steps, name=name, filename=filename, url=url) def activate_jquery_confirm(self): js_utils.activate_jquery_confirm(self.driver) self.wait_for_ready_state_complete() def activate_messenger(self): js_utils.activate_messenger(self.driver) self.wait_for_ready_state_complete() def set_messenger_theme(self, theme="default", location="default", max_messages="default"): if not theme: theme = "default" # "future" if not location: location = "default" # "bottom_right" if not max_messages: max_messages = "default" # "8" js_utils.set_messenger_theme( self.driver, theme=theme, location=location, max_messages=max_messages) def post_message(self, message, duration=None, pause=True, style="info"): if not duration: if not self.message_duration: duration = settings.DEFAULT_MESSAGE_DURATION else: duration = self.message_duration js_utils.post_message( self.driver, message, duration, style=style) if pause: duration = float(duration) + 0.15 time.sleep(float(duration)) def post_success_message(self, message, duration=None, pause=True): if not duration: if not self.message_duration: duration = settings.DEFAULT_MESSAGE_DURATION else: duration = self.message_duration js_utils.post_message( self.driver, message, duration, style="success") if pause: duration = float(duration) + 0.15 time.sleep(float(duration)) def post_error_message(self, message, duration=None, pause=True): if not duration: if not self.message_duration: duration = settings.DEFAULT_MESSAGE_DURATION else: duration = self.message_duration js_utils.post_message( self.driver, message, duration, style="error") if pause: duration = float(duration) + 0.15 time.sleep(float(duration)) ############ def generate_referral(self, start_page, destination_page): if not page_utils.is_valid_url(destination_page): raise Exception( "Exception: destination_page {%s} is not a valid URL!" % destination_page) if start_page: if not page_utils.is_valid_url(start_page): raise Exception( "Exception: start_page {%s} is not a valid URL! 
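    # Sketch of the tour and messenger helpers above inside a BaseCase test method
    # (selectors and messages are placeholders; tours are skipped in headless mode):
    #
    #     self.create_tour(theme="bootstrap")
    #     self.add_tour_step("Welcome to the demo!", title="Start")
    #     self.add_tour_step("Type your query here.", selector="#search-box")
    #     self.play_tour()
    #     self.export_tour(filename="demo_tour.js")
    #     self.post_message("Tour finished.", duration=3)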
" "(Use an empty string or None to start from current page.)" % start_page) self.open(start_page) time.sleep(0.08) self.wait_for_ready_state_complete() referral_link = ('''<body>''' '''<a class='analytics referral test' href='%s' ''' '''style='font-family: Arial,sans-serif; ''' '''font-size: 30px; color: #18a2cd'>''' '''Magic Link Button</a></body>''' % destination_page) self.execute_script( '''document.body.outerHTML = \"%s\"''' % referral_link) self.click( "a.analytics.referral.test", timeout=2) # Clicks generated button time.sleep(0.15) try: self.click("html") time.sleep(0.08) except Exception: pass def generate_traffic(self, start_page, destination_page, loops=1): for loop in range(loops): self.generate_referral(start_page, destination_page) time.sleep(0.05) def generate_referral_chain(self, pages): if not type(pages) is tuple and not type(pages) is list: raise Exception( "Exception: Expecting a list of website pages for chaining!") if len(pages) < 2: raise Exception( "Exception: At least two website pages required for chaining!") for page in pages: # Find out if any of the web pages are invalid before continuing if not page_utils.is_valid_url(page): raise Exception( "Exception: Website page {%s} is not a valid URL!" % page) for page in pages: self.generate_referral(None, page) def generate_traffic_chain(self, pages, loops=1): for loop in range(loops): self.generate_referral_chain(pages) time.sleep(0.05) ############ def wait_for_element_present(self, selector, by=By.CSS_SELECTOR, timeout=None): if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) selector, by = self.__recalculate_selector(selector, by) return page_actions.wait_for_element_present( self.driver, selector, by, timeout) def wait_for_element_visible(self, selector, by=By.CSS_SELECTOR, timeout=None): if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) selector, by = self.__recalculate_selector(selector, by) return page_actions.wait_for_element_visible( self.driver, selector, by, timeout) def wait_for_element(self, selector, by=By.CSS_SELECTOR, timeout=None): if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) return self.wait_for_element_visible(selector, by=by, timeout=timeout) def get_element(self, selector, by=By.CSS_SELECTOR, timeout=None): if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) return self.wait_for_element_present(selector, by=by, timeout=timeout) def assert_element_present(self, selector, by=By.CSS_SELECTOR, timeout=None): if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) self.wait_for_element_present(selector, by=by, timeout=timeout) return True def find_element(self, selector, by=By.CSS_SELECTOR, timeout=None): if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) return self.wait_for_element_visible(selector, by=by, timeout=timeout) def assert_element(self, selector, by=By.CSS_SELECTOR, timeout=None): if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and 
timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) self.wait_for_element_visible(selector, by=by, timeout=timeout) if self.demo_mode: selector, by = self.__recalculate_selector(selector, by) messenger_post = "ASSERT %s: %s" % (by, selector) self.__highlight_with_assert_success(messenger_post, selector, by) return True def assert_element_visible(self, selector, by=By.CSS_SELECTOR, timeout=None): if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) self.assert_element(selector, by=by, timeout=timeout) return True ############ def wait_for_text_visible(self, text, selector="html", by=By.CSS_SELECTOR, timeout=None): if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) selector, by = self.__recalculate_selector(selector, by) return page_actions.wait_for_text_visible( self.driver, text, selector, by, timeout) def wait_for_exact_text_visible(self, text, selector="html", by=By.CSS_SELECTOR, timeout=None): if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) selector, by = self.__recalculate_selector(selector, by) return page_actions.wait_for_exact_text_visible( self.driver, text, selector, by, timeout) def wait_for_text(self, text, selector="html", by=By.CSS_SELECTOR, timeout=None): if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) return self.wait_for_text_visible( text, selector, by=by, timeout=timeout) def find_text(self, text, selector="html", by=By.CSS_SELECTOR, timeout=None): if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) return self.wait_for_text_visible( text, selector, by=by, timeout=timeout) def assert_text_visible(self, text, selector="html", by=By.CSS_SELECTOR, timeout=None): if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) return self.assert_text(text, selector, by=by, timeout=timeout) def assert_text(self, text, selector="html", by=By.CSS_SELECTOR, timeout=None): if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) self.wait_for_text_visible(text, selector, by=by, timeout=timeout) if self.demo_mode: selector, by = self.__recalculate_selector(selector, by) messenger_post = ("ASSERT TEXT {%s} in %s: %s" % (text, by, selector)) self.__highlight_with_assert_success(messenger_post, selector, by) return True def assert_exact_text(self, text, selector="html", by=By.CSS_SELECTOR, timeout=None): if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) self.wait_for_exact_text_visible( text, selector, by=by, timeout=timeout) if self.demo_mode: selector, by = self.__recalculate_selector(selector, by) messenger_post = ("ASSERT EXACT TEXT {%s} in %s: %s" % (text, by, selector)) self.__highlight_with_assert_success(messenger_post, selector, by) return True ############ def wait_for_link_text_present(self, link_text, timeout=None): if not timeout: 
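    # Sketch of the wait/assert helpers above inside a BaseCase test method
    # (selectors and expected strings are placeholders):
    #
    #     self.wait_for_element("div#content")             # waits until visible
    #     self.assert_element("img.logo")                  # raises if not visible in time
    #     self.assert_text("Welcome back", "div#content")  # substring match
    #     self.assert_exact_text("Welcome back, Ada!", "h1.greeting")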
timeout = settings.SMALL_TIMEOUT start_ms = time.time() * 1000.0 stop_ms = start_ms + (timeout * 1000.0) for x in range(int(timeout * 5)): shared_utils.check_if_time_limit_exceeded() try: if not self.is_link_text_present(link_text): raise Exception( "Link text {%s} was not found!" % link_text) return except Exception: now_ms = time.time() * 1000.0 if now_ms >= stop_ms: break time.sleep(0.2) raise Exception( "Link text {%s} was not present after %s seconds!" % ( link_text, timeout)) def wait_for_partial_link_text_present(self, link_text, timeout=None): if not timeout: timeout = settings.SMALL_TIMEOUT start_ms = time.time() * 1000.0 stop_ms = start_ms + (timeout * 1000.0) for x in range(int(timeout * 5)): shared_utils.check_if_time_limit_exceeded() try: if not self.is_partial_link_text_present(link_text): raise Exception( "Partial Link text {%s} was not found!" % link_text) return except Exception: now_ms = time.time() * 1000.0 if now_ms >= stop_ms: break time.sleep(0.2) raise Exception( "Partial Link text {%s} was not present after %s seconds!" % ( link_text, timeout)) def wait_for_link_text_visible(self, link_text, timeout=None): if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) return self.wait_for_element_visible( link_text, by=By.LINK_TEXT, timeout=timeout) def wait_for_link_text(self, link_text, timeout=None): if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) return self.wait_for_link_text_visible(link_text, timeout=timeout) def find_link_text(self, link_text, timeout=None): if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) return self.wait_for_link_text_visible(link_text, timeout=timeout) def assert_link_text(self, link_text, timeout=None): if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) self.wait_for_link_text_visible(link_text, timeout=timeout) if self.demo_mode: messenger_post = ("ASSERT LINK TEXT {%s}." % link_text) self.__highlight_with_assert_success( messenger_post, link_text, by=By.LINK_TEXT) return True def wait_for_partial_link_text(self, partial_link_text, timeout=None): if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) return self.wait_for_element_visible( partial_link_text, by=By.PARTIAL_LINK_TEXT, timeout=timeout) def find_partial_link_text(self, partial_link_text, timeout=None): if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) return self.wait_for_partial_link_text( partial_link_text, timeout=timeout) def assert_partial_link_text(self, partial_link_text, timeout=None): if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) self.wait_for_partial_link_text(partial_link_text, timeout=timeout) if self.demo_mode: messenger_post = ( "ASSERT PARTIAL LINK TEXT {%s}." 
% partial_link_text) self.__highlight_with_assert_success( messenger_post, partial_link_text, by=By.PARTIAL_LINK_TEXT) return True ############ def wait_for_element_absent(self, selector, by=By.CSS_SELECTOR, timeout=None): if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) if page_utils.is_xpath_selector(selector): by = By.XPATH return page_actions.wait_for_element_absent( self.driver, selector, by, timeout) def assert_element_absent(self, selector, by=By.CSS_SELECTOR, timeout=None): if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) self.wait_for_element_absent(selector, by=by, timeout=timeout) return True ############ def wait_for_element_not_visible(self, selector, by=By.CSS_SELECTOR, timeout=None): if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) selector, by = self.__recalculate_selector(selector, by) return page_actions.wait_for_element_not_visible( self.driver, selector, by, timeout) def assert_element_not_visible(self, selector, by=By.CSS_SELECTOR, timeout=None): if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) self.wait_for_element_not_visible(selector, by=by, timeout=timeout) return True ############ def wait_for_text_not_visible(self, text, selector="html", by=By.CSS_SELECTOR, timeout=None): if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) selector, by = self.__recalculate_selector(selector, by) return page_actions.wait_for_text_not_visible( self.driver, text, selector, by, timeout) def assert_text_not_visible(self, text, selector="html", by=By.CSS_SELECTOR, timeout=None): if not timeout: timeout = settings.SMALL_TIMEOUT if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT: timeout = self.__get_new_timeout(timeout) self.wait_for_text_not_visible(text, selector, by=by, timeout=timeout) ############ def wait_for_and_accept_alert(self, timeout=None): if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) return page_actions.wait_for_and_accept_alert(self.driver, timeout) def wait_for_and_dismiss_alert(self, timeout=None): if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) return page_actions.wait_for_and_dismiss_alert(self.driver, timeout) def wait_for_and_switch_to_alert(self, timeout=None): if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) return page_actions.wait_for_and_switch_to_alert(self.driver, timeout) ############ def __assert_eq(self, *args, **kwargs): minified_exception = None try: self.assertEqual(*args, **kwargs) except Exception as e: str_e = str(e) minified_exception = "\nAssertionError:\n" lines = str_e.split('\n') countdown = 3 countdown_on = False for line in lines: if countdown_on: minified_exception += line + '\n' countdown = countdown - 1 if countdown == 0: countdown_on = False elif line.startswith('F'): 
countdown_on = True countdown = 3 minified_exception += line + '\n' elif line.startswith('+') or line.startswith('-'): minified_exception += line + '\n' elif line.startswith('?'): minified_exception += line + '\n' elif line.strip().startswith('*'): minified_exception += line + '\n' if minified_exception: raise Exception(minified_exception) def check_window(self, name="default", level=0, baseline=False): if level == "0": level = 0 if level == "1": level = 1 if level == "2": level = 2 if level == "3": level = 3 if level != 0 and level != 1 and level != 2 and level != 3: raise Exception('Parameter "level" must be set to 0, 1, 2, or 3!') if self.demo_mode: raise Exception( "WARNING: Using Demo Mode will break layout tests " "that use the check_window() method due to custom " "HTML edits being made on the page!\n" "Please rerun without using Demo Mode!") module = self.__class__.__module__ if '.' in module and len(module.split('.')[-1]) > 1: module = module.split('.')[-1] test_id = "%s.%s" % (module, self._testMethodName) if not name or len(name) < 1: name = "default" name = str(name) visual_helper.visual_baseline_folder_setup() baseline_dir = constants.VisualBaseline.STORAGE_FOLDER visual_baseline_path = baseline_dir + "/" + test_id + "/" + name page_url_file = visual_baseline_path + "/page_url.txt" screenshot_file = visual_baseline_path + "/screenshot.png" level_1_file = visual_baseline_path + "/tags_level_1.txt" level_2_file = visual_baseline_path + "/tags_level_2.txt" level_3_file = visual_baseline_path + "/tags_level_3.txt" set_baseline = False if baseline or self.visual_baseline: set_baseline = True if not os.path.exists(visual_baseline_path): set_baseline = True try: os.makedirs(visual_baseline_path) except Exception: pass # Only reachable during multi-threaded test runs if not os.path.exists(page_url_file): set_baseline = True if not os.path.exists(screenshot_file): set_baseline = True if not os.path.exists(level_1_file): set_baseline = True if not os.path.exists(level_2_file): set_baseline = True if not os.path.exists(level_3_file): set_baseline = True page_url = self.get_current_url() soup = self.get_beautiful_soup() html_tags = soup.body.find_all() level_1 = [[tag.name] for tag in html_tags] level_1 = json.loads(json.dumps(level_1)) # Tuples become lists level_2 = [[tag.name, sorted(tag.attrs.keys())] for tag in html_tags] level_2 = json.loads(json.dumps(level_2)) # Tuples become lists level_3 = [[tag.name, sorted(tag.attrs.items())] for tag in html_tags] level_3 = json.loads(json.dumps(level_3)) # Tuples become lists if set_baseline: self.save_screenshot("screenshot.png", visual_baseline_path) out_file = codecs.open(page_url_file, "w+") out_file.writelines(page_url) out_file.close() out_file = codecs.open(level_1_file, "w+") out_file.writelines(json.dumps(level_1)) out_file.close() out_file = codecs.open(level_2_file, "w+") out_file.writelines(json.dumps(level_2)) out_file.close() out_file = codecs.open(level_3_file, "w+") out_file.writelines(json.dumps(level_3)) out_file.close() if not set_baseline: f = open(page_url_file, 'r') page_url_data = f.read().strip() f.close() f = open(level_1_file, 'r') level_1_data = json.loads(f.read()) f.close() f = open(level_2_file, 'r') level_2_data = json.loads(f.read()) f.close() f = open(level_3_file, 'r') level_3_data = json.loads(f.read()) f.close() domain_fail = ( "\nPage Domain Mismatch Failure: " "Current Page Domain doesn't match the Page Domain of the " "Baseline! Can't compare two completely different sites! 
" "Run with --visual_baseline to reset the baseline!") level_1_failure = ( "\n*\n*** Exception: <Level 1> Visual Diff Failure:\n" "* HTML tags don't match the baseline!") level_2_failure = ( "\n*\n*** Exception: <Level 2> Visual Diff Failure:\n" "* HTML tag attribute names don't match the baseline!") level_3_failure = ( "\n*\n*** Exception: <Level 3> Visual Diff Failure:\n" "* HTML tag attribute values don't match the baseline!") page_domain = self.get_domain_url(page_url) page_data_domain = self.get_domain_url(page_url_data) unittest.TestCase.maxDiff = 1000 if level != 0: self.assertEqual(page_data_domain, page_domain, domain_fail) unittest.TestCase.maxDiff = None if level == 3: self.__assert_eq(level_3_data, level_3, level_3_failure) if level == 2: self.__assert_eq(level_2_data, level_2, level_2_failure) unittest.TestCase.maxDiff = 1000 if level == 1: self.__assert_eq(level_1_data, level_1, level_1_failure) unittest.TestCase.maxDiff = None if level == 0: try: unittest.TestCase.maxDiff = 1000 self.assertEqual( page_domain, page_data_domain, domain_fail) unittest.TestCase.maxDiff = None self.__assert_eq(level_3_data, level_3, level_3_failure) except Exception as e: print(e) # Level-0 Dry Run (Only print the differences) ############ def __get_new_timeout(self, timeout): try: timeout_multiplier = float(self.timeout_multiplier) if timeout_multiplier <= 0.5: timeout_multiplier = 0.5 timeout = int(math.ceil(timeout_multiplier * timeout)) return timeout except Exception: # Wrong data type for timeout_multiplier (expecting int or float) return timeout ############ def __get_exception_message(self): exception_info = sys.exc_info()[1] if hasattr(exception_info, 'msg'): exc_message = exception_info.msg elif hasattr(exception_info, 'message'): exc_message = exception_info.message else: exc_message = sys.exc_info() return exc_message def __get_improved_exception_message(self): exc_message = self.__get_exception_message() maybe_using_old_chromedriver = False if "unknown error: call function result missing" in exc_message: maybe_using_old_chromedriver = True if self.browser == 'chrome' and maybe_using_old_chromedriver: update = ("Your version of ChromeDriver may be out-of-date! " "Please go to " "https://sites.google.com/a/chromium.org/chromedriver/ " "and download the latest version to your system PATH! " "Or use: ``seleniumbase install chromedriver`` . 
" "Original Exception Message: %s" % exc_message) exc_message = update return exc_message def __add_delayed_assert_failure(self): current_url = self.driver.current_url message = self.__get_exception_message() self.__delayed_assert_failures.append( "CHECK #%s: (%s)\n %s" % ( self.__delayed_assert_count, current_url, message)) def delayed_assert_element(self, selector, by=By.CSS_SELECTOR, timeout=None): if not timeout: timeout = settings.MINI_TIMEOUT if self.timeout_multiplier and timeout == settings.MINI_TIMEOUT: timeout = self.__get_new_timeout(timeout) self.__delayed_assert_count += 1 try: url = self.get_current_url() if url == self.__last_url_of_delayed_assert: timeout = 1 else: self.__last_url_of_delayed_assert = url except Exception: pass try: self.wait_for_element_visible(selector, by=by, timeout=timeout) return True except Exception: self.__add_delayed_assert_failure() return False def delayed_assert_text(self, text, selector="html", by=By.CSS_SELECTOR, timeout=None): if not timeout: timeout = settings.MINI_TIMEOUT if self.timeout_multiplier and timeout == settings.MINI_TIMEOUT: timeout = self.__get_new_timeout(timeout) self.__delayed_assert_count += 1 try: url = self.get_current_url() if url == self.__last_url_of_delayed_assert: timeout = 1 else: self.__last_url_of_delayed_assert = url except Exception: pass try: self.wait_for_text_visible(text, selector, by=by, timeout=timeout) return True except Exception: self.__add_delayed_assert_failure() return False def process_delayed_asserts(self, print_only=False): if self.__delayed_assert_failures: exception_output = '' exception_output += "\n*** DELAYED ASSERTION FAILURES FOR: " exception_output += "%s\n" % self.id() all_failing_checks = self.__delayed_assert_failures self.__delayed_assert_failures = [] for tb in all_failing_checks: exception_output += "%s\n" % tb if print_only: print(exception_output) else: raise Exception(exception_output) ############ def __js_click(self, selector, by=By.CSS_SELECTOR): selector, by = self.__recalculate_selector(selector, by) css_selector = self.convert_to_css_selector(selector, by=by) css_selector = re.escape(css_selector) css_selector = self.__escape_quotes_if_needed(css_selector) script = ("""var simulateClick = function (elem) { var evt = new MouseEvent('click', { bubbles: true, cancelable: true, view: window }); var canceled = !elem.dispatchEvent(evt); }; var someLink = document.querySelector('%s'); simulateClick(someLink);""" % css_selector) self.execute_script(script) def __js_click_all(self, selector, by=By.CSS_SELECTOR): selector, by = self.__recalculate_selector(selector, by) css_selector = self.convert_to_css_selector(selector, by=by) css_selector = re.escape(css_selector) css_selector = self.__escape_quotes_if_needed(css_selector) script = ("""var simulateClick = function (elem) { var evt = new MouseEvent('click', { bubbles: true, cancelable: true, view: window }); var canceled = !elem.dispatchEvent(evt); }; var $elements = document.querySelectorAll('%s'); var index = 0, length = $elements.length; for(; index < length; index++){ simulateClick($elements[index]);}""" % css_selector) self.execute_script(script) def __jquery_click(self, selector, by=By.CSS_SELECTOR): selector, by = self.__recalculate_selector(selector, by) self.wait_for_element_present( selector, by=by, timeout=settings.SMALL_TIMEOUT) selector = self.convert_to_css_selector(selector, by=by) selector = self.__make_css_match_first_element_only(selector) click_script = """jQuery('%s')[0].click()""" % selector 
self.safe_execute_script(click_script) def __get_href_from_link_text(self, link_text, hard_fail=True): href = self.get_link_attribute(link_text, "href", hard_fail) if not href: return None if href.startswith('//'): link = "http:" + href elif href.startswith('/'): url = self.driver.current_url domain_url = self.get_domain_url(url) link = domain_url + href else: link = href return link def __click_dropdown_link_text(self, link_text, link_css): soup = self.get_beautiful_soup() drop_down_list = [] for item in soup.select('li[class]'): drop_down_list.append(item) csstype = link_css.split('[')[1].split('=')[0] for item in drop_down_list: item_text_list = item.text.split('\n') if link_text in item_text_list and csstype in item.decode(): dropdown_css = "" try: for css_class in item['class']: dropdown_css += '.' dropdown_css += css_class except Exception: continue dropdown_css = item.name + dropdown_css matching_dropdowns = self.find_visible_elements(dropdown_css) for dropdown in matching_dropdowns: # The same class names might be used for multiple dropdowns if dropdown.is_displayed(): try: try: page_actions.hover_element( self.driver, dropdown) except Exception: # If hovering fails, driver is likely outdated # Time to go directly to the hidden link text self.open(self.__get_href_from_link_text( link_text)) return True page_actions.hover_element_and_click( self.driver, dropdown, link_text, click_by=By.LINK_TEXT, timeout=0.12) return True except Exception: pass return False def __get_href_from_partial_link_text(self, link_text, hard_fail=True): href = self.get_partial_link_text_attribute( link_text, "href", hard_fail) if not href: return None if href.startswith('//'): link = "http:" + href elif href.startswith('/'): url = self.driver.current_url domain_url = self.get_domain_url(url) link = domain_url + href else: link = href return link def __click_dropdown_partial_link_text(self, link_text, link_css): soup = self.get_beautiful_soup() drop_down_list = [] for item in soup.select('li[class]'): drop_down_list.append(item) csstype = link_css.split('[')[1].split('=')[0] for item in drop_down_list: item_text_list = item.text.split('\n') if link_text in item_text_list and csstype in item.decode(): dropdown_css = "" try: for css_class in item['class']: dropdown_css += '.' 
dropdown_css += css_class except Exception: continue dropdown_css = item.name + dropdown_css matching_dropdowns = self.find_visible_elements(dropdown_css) for dropdown in matching_dropdowns: # The same class names might be used for multiple dropdowns if dropdown.is_displayed(): try: try: page_actions.hover_element( self.driver, dropdown) except Exception: # If hovering fails, driver is likely outdated # Time to go directly to the hidden link text self.open( self.__get_href_from_partial_link_text( link_text)) return True page_actions.hover_element_and_click( self.driver, dropdown, link_text, click_by=By.LINK_TEXT, timeout=0.12) return True except Exception: pass return False def __recalculate_selector(self, selector, by): # Try to determine the type of selector automatically if page_utils.is_xpath_selector(selector): by = By.XPATH if page_utils.is_link_text_selector(selector): selector = page_utils.get_link_text_from_selector(selector) by = By.LINK_TEXT if page_utils.is_partial_link_text_selector(selector): selector = page_utils.get_partial_link_text_from_selector(selector) by = By.PARTIAL_LINK_TEXT if page_utils.is_name_selector(selector): name = page_utils.get_name_from_selector(selector) selector = '[name="%s"]' % name by = By.CSS_SELECTOR return (selector, by) def __make_css_match_first_element_only(self, selector): # Only get the first match return page_utils.make_css_match_first_element_only(selector) def __demo_mode_pause_if_active(self, tiny=False): if self.demo_mode: wait_time = settings.DEFAULT_DEMO_MODE_TIMEOUT if self.demo_sleep: wait_time = float(self.demo_sleep) if not tiny: time.sleep(wait_time) else: time.sleep(wait_time / 3.4) elif self.slow_mode: self.__slow_mode_pause_if_active() def __slow_mode_pause_if_active(self): if self.slow_mode: wait_time = settings.DEFAULT_DEMO_MODE_TIMEOUT if self.demo_sleep: wait_time = float(self.demo_sleep) time.sleep(wait_time) def __demo_mode_scroll_if_active(self, selector, by): if self.demo_mode: self.slow_scroll_to(selector, by=by) def __demo_mode_highlight_if_active(self, selector, by): if self.demo_mode: # Includes self.slow_scroll_to(selector, by=by) by default self.highlight(selector, by=by) elif self.slow_mode: # Just do the slow scroll part of the highlight() method selector, by = self.__recalculate_selector(selector, by) element = self.wait_for_element_visible( selector, by=by, timeout=settings.SMALL_TIMEOUT) try: self.__slow_scroll_to_element(element) except (StaleElementReferenceException, ENI_Exception): self.wait_for_ready_state_complete() time.sleep(0.05) element = self.wait_for_element_visible( selector, by=by, timeout=settings.SMALL_TIMEOUT) self.__slow_scroll_to_element(element) def __scroll_to_element(self, element, selector=None, by=By.CSS_SELECTOR): success = js_utils.scroll_to_element(self.driver, element) if not success and selector: self.wait_for_ready_state_complete() element = page_actions.wait_for_element_visible( self.driver, selector, by, timeout=settings.SMALL_TIMEOUT) self.__demo_mode_pause_if_active(tiny=True) def __slow_scroll_to_element(self, element): js_utils.slow_scroll_to_element(self.driver, element, self.browser) def __highlight_with_assert_success( self, message, selector, by=By.CSS_SELECTOR): selector, by = self.__recalculate_selector(selector, by) element = self.wait_for_element_visible( selector, by=by, timeout=settings.SMALL_TIMEOUT) try: selector = self.convert_to_css_selector(selector, by=by) except Exception: # Don't highlight if can't convert to CSS_SELECTOR return try: 
self.__slow_scroll_to_element(element) except (StaleElementReferenceException, ENI_Exception): self.wait_for_ready_state_complete() time.sleep(0.05) element = self.wait_for_element_visible( selector, by=by, timeout=settings.SMALL_TIMEOUT) self.__slow_scroll_to_element(element) o_bs = '' # original_box_shadow try: style = element.get_attribute('style') except (StaleElementReferenceException, ENI_Exception): self.wait_for_ready_state_complete() time.sleep(0.05) element = self.wait_for_element_visible( selector, by=By.CSS_SELECTOR, timeout=settings.SMALL_TIMEOUT) style = element.get_attribute('style') if style: if 'box-shadow: ' in style: box_start = style.find('box-shadow: ') box_end = style.find(';', box_start) + 1 original_box_shadow = style[box_start:box_end] o_bs = original_box_shadow if ":contains" not in selector and ":first" not in selector: selector = re.escape(selector) selector = self.__escape_quotes_if_needed(selector) self.__highlight_with_js_2(message, selector, o_bs) else: selector = self.__make_css_match_first_element_only(selector) selector = re.escape(selector) selector = self.__escape_quotes_if_needed(selector) try: self.__highlight_with_jquery_2(message, selector, o_bs) except Exception: pass # JQuery probably couldn't load. Skip highlighting. time.sleep(0.065) def __highlight_with_js_2(self, message, selector, o_bs): js_utils.highlight_with_js_2( self.driver, message, selector, o_bs, self.message_duration) def __highlight_with_jquery_2(self, message, selector, o_bs): js_utils.highlight_with_jquery_2( self.driver, message, selector, o_bs, self.message_duration) ############ # Deprecated Methods (Replace these if they're still in your code!) @decorators.deprecated( "scroll_click() is deprecated. Use self.click() - It scrolls for you!") def scroll_click(self, selector, by=By.CSS_SELECTOR): # DEPRECATED - self.click() now scrolls to the element before clicking. # self.scroll_to(selector, by=by) # Redundant self.click(selector, by=by) @decorators.deprecated( "update_text_value() is deprecated. Use self.update_text() instead!") def update_text_value(self, selector, new_value, by=By.CSS_SELECTOR, timeout=None, retry=False): # DEPRECATED - self.update_text() should be used instead. if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) if page_utils.is_xpath_selector(selector): by = By.XPATH self.update_text( selector, new_value, by=by, timeout=timeout, retry=retry) @decorators.deprecated( "jquery_update_text_value() is deprecated. Use jquery_update_text()") def jquery_update_text_value(self, selector, new_value, by=By.CSS_SELECTOR, timeout=None): # DEPRECATED - self.jquery_update_text() should be used instead. if not timeout: timeout = settings.LARGE_TIMEOUT if self.timeout_multiplier and timeout == settings.LARGE_TIMEOUT: timeout = self.__get_new_timeout(timeout) self.jquery_update_text(selector, new_value, by=by, timeout=timeout) @decorators.deprecated( "jq_format() is deprecated. Use re.escape() instead!") def jq_format(self, code): # DEPRECATED - re.escape() already performs the intended action! 
return js_utils._jq_format(code) ############ def setUp(self, masterqa_mode=False): self.masterqa_mode = masterqa_mode self.is_pytest = None try: # This raises an exception if the test is not coming from pytest self.is_pytest = sb_config.is_pytest except Exception: # Not using pytest (probably nosetests) self.is_pytest = False if self.is_pytest: # pytest-specific code test_id = self.__get_test_id() self.browser = sb_config.browser self.data = sb_config.data self.var1 = sb_config.var1 self.var2 = sb_config.var2 self.var3 = sb_config.var3 self.slow_mode = sb_config.slow_mode self.demo_mode = sb_config.demo_mode self.demo_sleep = sb_config.demo_sleep self.highlights = sb_config.highlights self.time_limit = sb_config.time_limit self.environment = sb_config.environment self.env = self.environment # Add a shortened version self.with_selenium = sb_config.with_selenium # Should be True self.headless = sb_config.headless self.headless_active = False self.headed = sb_config.headed self.start_page = sb_config.start_page self.log_path = sb_config.log_path self.with_testing_base = sb_config.with_testing_base self.with_basic_test_info = sb_config.with_basic_test_info self.with_screen_shots = sb_config.with_screen_shots self.with_page_source = sb_config.with_page_source self.with_db_reporting = sb_config.with_db_reporting self.with_s3_logging = sb_config.with_s3_logging self.servername = sb_config.servername self.port = sb_config.port self.proxy_string = sb_config.proxy_string self.user_agent = sb_config.user_agent self.mobile_emulator = sb_config.mobile_emulator self.device_metrics = sb_config.device_metrics self.cap_file = sb_config.cap_file self.cap_string = sb_config.cap_string self.settings_file = sb_config.settings_file self.database_env = sb_config.database_env self.message_duration = sb_config.message_duration self.js_checking_on = sb_config.js_checking_on self.ad_block_on = sb_config.ad_block_on self.verify_delay = sb_config.verify_delay self.disable_csp = sb_config.disable_csp self.enable_sync = sb_config.enable_sync self.use_auto_ext = sb_config.use_auto_ext self.no_sandbox = sb_config.no_sandbox self.disable_gpu = sb_config.disable_gpu self.incognito = sb_config.incognito self.guest_mode = sb_config.guest_mode self.devtools = sb_config.devtools self.user_data_dir = sb_config.user_data_dir self.extension_zip = sb_config.extension_zip self.extension_dir = sb_config.extension_dir self.maximize_option = sb_config.maximize_option self._reuse_session = sb_config.reuse_session self.save_screenshot_after_test = sb_config.save_screenshot self.visual_baseline = sb_config.visual_baseline self.timeout_multiplier = sb_config.timeout_multiplier self.pytest_html_report = sb_config.pytest_html_report self.report_on = False if self.pytest_html_report: self.report_on = True self.use_grid = False if self.servername != "localhost": # Use Selenium Grid (Use --server="127.0.0.1" for a local Grid) self.use_grid = True if self.with_db_reporting: from seleniumbase.core.application_manager import ( ApplicationManager) from seleniumbase.core.testcase_manager import ( ExecutionQueryPayload) import getpass self.execution_guid = str(uuid.uuid4()) self.testcase_guid = None self.execution_start_time = 0 self.case_start_time = 0 self.application = None self.testcase_manager = None self.error_handled = False self.testcase_manager = TestcaseManager(self.database_env) # exec_payload = ExecutionQueryPayload() exec_payload.execution_start_time = int(time.time() * 1000) self.execution_start_time = exec_payload.execution_start_time 
exec_payload.guid = self.execution_guid exec_payload.username = getpass.getuser() self.testcase_manager.insert_execution_data(exec_payload) # data_payload = TestcaseDataPayload() self.testcase_guid = str(uuid.uuid4()) data_payload.guid = self.testcase_guid data_payload.execution_guid = self.execution_guid if self.with_selenium: data_payload.browser = self.browser else: data_payload.browser = "N/A" data_payload.test_address = test_id application = ApplicationManager.generate_application_string( self._testMethodName) data_payload.env = application.split('.')[0] data_payload.start_time = application.split('.')[1] data_payload.state = constants.State.NOTRUN self.testcase_manager.insert_testcase_data(data_payload) self.case_start_time = int(time.time() * 1000) if self.headless: width = settings.HEADLESS_START_WIDTH height = settings.HEADLESS_START_HEIGHT try: # from pyvirtualdisplay import Display # Skip for own lib from seleniumbase.virtual_display.display import Display self.display = Display(visible=0, size=(width, height)) self.display.start() self.headless_active = True except Exception: # pyvirtualdisplay might not be necessary anymore because # Chrome and Firefox now have built-in headless displays pass else: # (Nosetests / Not Pytest) pass # Setup performed in plugins # Verify that SeleniumBase is installed successfully if not hasattr(self, "browser"): raise Exception("""SeleniumBase plugins DID NOT load!\n\n""" """*** Please REINSTALL SeleniumBase using: >\n""" """ >>> "pip install -r requirements.txt"\n""" """ >>> "python setup.py install" """) # Configure the test time limit (if used) self.set_time_limit(self.time_limit) # Set the start time for the test (in ms) sb_config.start_time_ms = int(time.time() * 1000.0) # Parse the settings file if self.settings_file: settings_parser.set_settings(self.settings_file) # Mobile Emulator device metrics: CSS Width, CSS Height, & Pixel-Ratio if self.device_metrics: metrics_string = self.device_metrics metrics_string = metrics_string.replace(' ', '') metrics_list = metrics_string.split(',') exception_string = ( 'Invalid input for Mobile Emulator device metrics!\n' 'Expecting a comma-separated string with three\n' 'integer values for Width, Height, and Pixel-Ratio.\n' 'Example: --metrics="411,731,3" ') if len(metrics_list) != 3: raise Exception(exception_string) try: self.__device_width = int(metrics_list[0]) self.__device_height = int(metrics_list[1]) self.__device_pixel_ratio = int(metrics_list[2]) self.mobile_emulator = True except Exception: raise Exception(exception_string) if self.mobile_emulator: if not self.user_agent: # Use the Pixel 3 user agent by default if not specified self.user_agent = ( "Mozilla/5.0 (Linux; Android 9; Pixel 3 XL) " "AppleWebKit/537.36 (KHTML, like Gecko) " "Chrome/76.0.3809.132 Mobile Safari/537.36") has_url = False if self._reuse_session: if not hasattr(sb_config, 'shared_driver'): sb_config.shared_driver = None if sb_config.shared_driver: try: self._default_driver = sb_config.shared_driver self.driver = sb_config.shared_driver self._drivers_list = [sb_config.shared_driver] url = self.get_current_url() if len(url) > 3: has_url = True except Exception: pass if self._reuse_session and sb_config.shared_driver and has_url: if self.start_page and len(self.start_page) >= 4: if page_utils.is_valid_url(self.start_page): self.open(self.start_page) else: new_start_page = "http://" + self.start_page if page_utils.is_valid_url(new_start_page): self.open(new_start_page) else: if self.get_current_url() != "data:,": 
self.open("data:,") else: # Launch WebDriver for both Pytest and Nosetests self.driver = self.get_new_driver(browser=self.browser, headless=self.headless, servername=self.servername, port=self.port, proxy=self.proxy_string, agent=self.user_agent, switch_to=True, cap_file=self.cap_file, cap_string=self.cap_string, disable_csp=self.disable_csp, enable_sync=self.enable_sync, use_auto_ext=self.use_auto_ext, no_sandbox=self.no_sandbox, disable_gpu=self.disable_gpu, incognito=self.incognito, guest_mode=self.guest_mode, devtools=self.devtools, user_data_dir=self.user_data_dir, extension_zip=self.extension_zip, extension_dir=self.extension_dir, is_mobile=self.mobile_emulator, d_width=self.__device_width, d_height=self.__device_height, d_p_r=self.__device_pixel_ratio) self._default_driver = self.driver if self._reuse_session: sb_config.shared_driver = self.driver def __set_last_page_screenshot(self): if not self.__last_page_screenshot and ( not self.__last_page_screenshot_png): try: element = self.driver.find_element( by=By.TAG_NAME, value="body") if self.is_pytest and self.report_on: self.__last_page_screenshot_png = ( self.driver.get_screenshot_as_png()) self.__last_page_screenshot = element.screenshot_as_base64 else: self.__last_page_screenshot_png = element.screenshot_as_png except Exception: if not self.__last_page_screenshot: if self.is_pytest and self.report_on: try: self.__last_page_screenshot = ( self.driver.get_screenshot_as_base64()) except Exception: pass if not self.__last_page_screenshot_png: try: self.__last_page_screenshot_png = ( self.driver.get_screenshot_as_png()) except Exception: pass def __set_last_page_url(self): if not self.__last_page_url: try: self.__last_page_url = log_helper.get_last_page(self.driver) except Exception: self.__last_page_url = None def __set_last_page_source(self): if not self.__last_page_source: try: self.__last_page_source = ( log_helper.get_html_source_with_base_href( self.driver, self.driver.page_source)) except Exception: self.__last_page_source = None def __insert_test_result(self, state, err): data_payload = TestcaseDataPayload() data_payload.runtime = int(time.time() * 1000) - self.case_start_time data_payload.guid = self.testcase_guid data_payload.execution_guid = self.execution_guid data_payload.state = state if err: import traceback tb_string = traceback.format_exc() if "Message: " in tb_string: data_payload.message = "Message: " + tb_string.split( "Message: ")[-1] elif "Exception: " in tb_string: data_payload.message = tb_string.split("Exception: ")[-1] elif "Error: " in tb_string: data_payload.message = tb_string.split("Error: ")[-1] else: data_payload.message = "Unknown Error: See Stacktrace" self.testcase_manager.update_testcase_data(data_payload) def __add_pytest_html_extra(self): if not self.__added_pytest_html_extra: try: if self.with_selenium: if not self.__last_page_screenshot: self.__set_last_page_screenshot() self.__set_last_page_url() self.__set_last_page_source() if self.report_on: extra_url = {} extra_url['name'] = 'URL' extra_url['format'] = 'url' extra_url['content'] = self.get_current_url() extra_url['mime_type'] = None extra_url['extension'] = None extra_image = {} extra_image['name'] = 'Screenshot' extra_image['format'] = 'image' extra_image['content'] = self.__last_page_screenshot extra_image['mime_type'] = 'image/png' extra_image['extension'] = 'png' self.__added_pytest_html_extra = True self._html_report_extra.append(extra_url) self._html_report_extra.append(extra_image) except Exception: pass def __quit_all_drivers(self): 
if self._reuse_session and sb_config.shared_driver: if len(self._drivers_list) > 0: sb_config.shared_driver = self._drivers_list[0] self._default_driver = self._drivers_list[0] self.switch_to_default_driver() if len(self._drivers_list) > 1: self._drivers_list = self._drivers_list[1:] else: self._drivers_list = [] # Close all open browser windows self._drivers_list.reverse() # Last In, First Out for driver in self._drivers_list: try: driver.quit() except AttributeError: pass except Exception: pass self.driver = None self._default_driver = None self._drivers_list = [] def __has_exception(self): has_exception = False if sys.version_info[0] >= 3 and hasattr(self, '_outcome'): if hasattr(self._outcome, 'errors') and self._outcome.errors: has_exception = True else: has_exception = sys.exc_info()[1] is not None return has_exception def __get_test_id(self): test_id = "%s.%s.%s" % (self.__class__.__module__, self.__class__.__name__, self._testMethodName) return test_id def __create_log_path_as_needed(self, test_logpath): if not os.path.exists(test_logpath): try: os.makedirs(test_logpath) except Exception: pass # Only reachable during multi-threaded runs def save_teardown_screenshot(self): if self.__has_exception() or self.save_screenshot_after_test: test_id = self.__get_test_id() test_logpath = self.log_path + "/" + test_id self.__create_log_path_as_needed(test_logpath) self.__set_last_page_screenshot() self.__set_last_page_url() self.__set_last_page_source() if self.is_pytest: self.__add_pytest_html_extra() def tearDown(self): self.__slow_mode_pause_if_active() has_exception = self.__has_exception() if self.__delayed_assert_failures: print( "\nWhen using self.delayed_assert_*() methods in your tests, " "remember to call self.process_delayed_asserts() afterwards. " "Now calling in tearDown()...\nFailures Detected:") if not has_exception: self.process_delayed_asserts() else: self.process_delayed_asserts(print_only=True) if self.is_pytest: # pytest-specific code test_id = self.__get_test_id() try: with_selenium = self.with_selenium except Exception: sub_class_name = str( self.__class__.__bases__[0]).split('.')[-1].split("'")[0] sub_file_name = str(self.__class__.__bases__[0]).split('.')[-2] sub_file_name = sub_file_name + ".py" class_name = str(self.__class__).split('.')[-1].split("'")[0] file_name = str(self.__class__).split('.')[-2] + ".py" class_name_used = sub_class_name file_name_used = sub_file_name if sub_class_name == "BaseCase": class_name_used = class_name file_name_used = file_name fix_setup = "super(%s, self).setUp()" % class_name_used fix_teardown = "super(%s, self).tearDown()" % class_name_used message = ("You're overriding SeleniumBase's BaseCase setUp() " "method with your own setUp() method, which breaks " "SeleniumBase. 
You can fix this by going to your " "%s class located in your %s file and adding the " "following line of code AT THE BEGINNING of your " "setUp() method:\n%s\n\nAlso make sure " "you have added the following line of code AT THE " "END of your tearDown() method:\n%s\n" % (class_name_used, file_name_used, fix_setup, fix_teardown)) raise Exception(message) if with_selenium: # Save a screenshot if logging is on when an exception occurs if has_exception: self.__add_pytest_html_extra() if self.with_testing_base and not has_exception and ( self.save_screenshot_after_test): test_logpath = self.log_path + "/" + test_id self.__create_log_path_as_needed(test_logpath) if not self.__last_page_screenshot_png: self.__set_last_page_screenshot() self.__set_last_page_url() self.__set_last_page_source() log_helper.log_screenshot( test_logpath, self.driver, self.__last_page_screenshot_png) self.__add_pytest_html_extra() if self.with_testing_base and has_exception: test_logpath = self.log_path + "/" + test_id self.__create_log_path_as_needed(test_logpath) if ((not self.with_screen_shots) and ( not self.with_basic_test_info) and ( not self.with_page_source)): # Log everything if nothing specified (if testing_base) if not self.__last_page_screenshot_png: self.__set_last_page_screenshot() self.__set_last_page_url() self.__set_last_page_source() log_helper.log_screenshot( test_logpath, self.driver, self.__last_page_screenshot_png) log_helper.log_test_failure_data( self, test_logpath, self.driver, self.browser, self.__last_page_url) log_helper.log_page_source( test_logpath, self.driver, self.__last_page_source) else: if self.with_screen_shots: if not self.__last_page_screenshot_png: self.__set_last_page_screenshot() self.__set_last_page_url() self.__set_last_page_source() log_helper.log_screenshot( test_logpath, self.driver, self.__last_page_screenshot_png) if self.with_basic_test_info: log_helper.log_test_failure_data( self, test_logpath, self.driver, self.browser, self.__last_page_url) if self.with_page_source: log_helper.log_page_source( test_logpath, self.driver, self.__last_page_source) # (Pytest) Finally close all open browser windows self.__quit_all_drivers() if self.headless: if self.headless_active: try: self.display.stop() except AttributeError: pass except Exception: pass self.display = None if self.with_db_reporting: if has_exception: self.__insert_test_result(constants.State.ERROR, True) else: self.__insert_test_result(constants.State.PASS, False) runtime = int(time.time() * 1000) - self.execution_start_time self.testcase_manager.update_execution_data( self.execution_guid, runtime) if self.with_s3_logging and has_exception: from seleniumbase.core.s3_manager import S3LoggingBucket s3_bucket = S3LoggingBucket() guid = str(uuid.uuid4().hex) path = "%s/%s" % (self.log_path, test_id) uploaded_files = [] for logfile in os.listdir(path): logfile_name = "%s/%s/%s" % (guid, test_id, logfile.split(path)[-1]) s3_bucket.upload_file(logfile_name, "%s/%s" % (path, logfile)) uploaded_files.append(logfile_name) s3_bucket.save_uploaded_file_names(uploaded_files) index_file = s3_bucket.upload_index_file(test_id, guid) print("\n\n*** Log files uploaded: ***\n%s\n" % index_file) logging.info( "\n\n*** Log files uploaded: ***\n%s\n" % index_file) if self.with_db_reporting: self.testcase_manager = TestcaseManager(self.database_env) data_payload = TestcaseDataPayload() data_payload.guid = self.testcase_guid data_payload.logURL = index_file self.testcase_manager.update_testcase_log_url(data_payload) else: # (Nosetests) if 
has_exception: test_id = self.__get_test_id() test_logpath = self.log_path + "/" + test_id self.__create_log_path_as_needed(test_logpath) log_helper.log_test_failure_data( self, test_logpath, self.driver, self.browser, self.__last_page_url) if len(self._drivers_list) > 0: if not self.__last_page_screenshot_png: self.__set_last_page_screenshot() self.__set_last_page_url() self.__set_last_page_source() log_helper.log_screenshot( test_logpath, self.driver, self.__last_page_screenshot_png) log_helper.log_page_source( test_logpath, self.driver, self.__last_page_source) elif self.save_screenshot_after_test: test_id = self.__get_test_id() test_logpath = self.log_path + "/" + test_id self.__create_log_path_as_needed(test_logpath) if not self.__last_page_screenshot_png: self.__set_last_page_screenshot() self.__set_last_page_url() self.__set_last_page_source() log_helper.log_screenshot( test_logpath, self.driver, self.__last_page_screenshot_png) if self.report_on: self._last_page_screenshot = self.__last_page_screenshot_png try: self._last_page_url = self.get_current_url() except Exception: self._last_page_url = "(Error: Unknown URL)" # Finally close all open browser windows self.__quit_all_drivers()
true
true
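check_window(), defined in the SeleniumBase BaseCase excerpt above, compares the current page against a stored visual baseline: level=1 checks only the HTML tag names, level=2 adds the attribute names, level=3 also compares attribute values, and level=0 is a dry run that merely prints the level-3 differences. The baseline itself is recorded on the first run, or whenever the test is run with --visual_baseline. A minimal usage sketch follows; the test class, URL, and baseline name are hypothetical, and it assumes SeleniumBase is installed so that BaseCase can be imported.

from seleniumbase import BaseCase

class VisualLayoutTest(BaseCase):              # hypothetical test class
    def test_home_page_layout(self):
        self.open("https://example.com/")      # hypothetical URL
        # Fail if tag names or attribute names drift from the saved baseline.
        self.check_window(name="home_page", level=2)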
f711a692c39f30b542a6842996b85b4cb4415fc1
2,095
py
Python
checkov/common/util/suppression.py
pmalkki/checkov
b6cdf386dd976fe27c16fed6d550756a678a5d7b
[ "Apache-2.0" ]
null
null
null
checkov/common/util/suppression.py
pmalkki/checkov
b6cdf386dd976fe27c16fed6d550756a678a5d7b
[ "Apache-2.0" ]
null
null
null
checkov/common/util/suppression.py
pmalkki/checkov
b6cdf386dd976fe27c16fed6d550756a678a5d7b
[ "Apache-2.0" ]
null
null
null
from __future__ import annotations import re from checkov.common.bridgecrew.integration_features.features.policy_metadata_integration import ( integration as metadata_integration, ) from checkov.common.comment.enum import COMMENT_REGEX from checkov.common.models.enums import CheckResult from checkov.common.typing import _CheckResult, _SkippedCheck def collect_suppressions_for_report(code_lines: list[tuple[int, str]]) -> dict[str, _CheckResult]: """Searches for suppressions in a config block to be used in a report""" suppressions = {} for _, line in code_lines: skip_search = re.search(COMMENT_REGEX, line) if skip_search: check_result: _CheckResult = { "result": CheckResult.SKIPPED, "suppress_comment": skip_search.group(3)[1:] if skip_search.group(3) else "No comment provided", } suppressions[skip_search.group(2)] = check_result return suppressions def collect_suppressions_for_context(code_lines: list[tuple[int, int | str]]) -> list[_SkippedCheck]: """Searches for suppressions in a config block to be used in a context""" skipped_checks = [] bc_id_mapping = metadata_integration.bc_to_ckv_id_mapping for _, line in code_lines: skip_search = re.search(COMMENT_REGEX, str(line)) if skip_search: skipped_check: _SkippedCheck = { "id": skip_search.group(2), "suppress_comment": skip_search.group(3)[1:] if skip_search.group(3) else "No comment provided", } # No matter which ID was used to skip, save the pair of IDs in the appropriate fields if bc_id_mapping and skipped_check["id"] in bc_id_mapping: skipped_check["bc_id"] = skipped_check["id"] skipped_check["id"] = bc_id_mapping[skipped_check["id"]] elif metadata_integration.check_metadata: skipped_check["bc_id"] = metadata_integration.get_bc_id(skipped_check["id"]) skipped_checks.append(skipped_check) return skipped_checks
40.288462
112
0.685442
from __future__ import annotations import re from checkov.common.bridgecrew.integration_features.features.policy_metadata_integration import ( integration as metadata_integration, ) from checkov.common.comment.enum import COMMENT_REGEX from checkov.common.models.enums import CheckResult from checkov.common.typing import _CheckResult, _SkippedCheck def collect_suppressions_for_report(code_lines: list[tuple[int, str]]) -> dict[str, _CheckResult]: suppressions = {} for _, line in code_lines: skip_search = re.search(COMMENT_REGEX, line) if skip_search: check_result: _CheckResult = { "result": CheckResult.SKIPPED, "suppress_comment": skip_search.group(3)[1:] if skip_search.group(3) else "No comment provided", } suppressions[skip_search.group(2)] = check_result return suppressions def collect_suppressions_for_context(code_lines: list[tuple[int, int | str]]) -> list[_SkippedCheck]: skipped_checks = [] bc_id_mapping = metadata_integration.bc_to_ckv_id_mapping for _, line in code_lines: skip_search = re.search(COMMENT_REGEX, str(line)) if skip_search: skipped_check: _SkippedCheck = { "id": skip_search.group(2), "suppress_comment": skip_search.group(3)[1:] if skip_search.group(3) else "No comment provided", } if bc_id_mapping and skipped_check["id"] in bc_id_mapping: skipped_check["bc_id"] = skipped_check["id"] skipped_check["id"] = bc_id_mapping[skipped_check["id"]] elif metadata_integration.check_metadata: skipped_check["bc_id"] = metadata_integration.get_bc_id(skipped_check["id"]) skipped_checks.append(skipped_check) return skipped_checks
true
true
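collect_suppressions_for_report() above scans (line_number, line_text) pairs for checkov skip comments, using COMMENT_REGEX so that group(2) yields the check ID and group(3) the optional ':comment' suffix, and returns a mapping from check ID to a SKIPPED result; collect_suppressions_for_context() does the same but additionally records the paired bridgecrew/checkov IDs. A self-contained sketch of that scanning logic follows; it uses a stand-in pattern approximating checkov's usual '# checkov:skip=<CHECK_ID>:<comment>' syntax and a plain string instead of CheckResult.SKIPPED, so both are assumptions rather than checkov's real internals.

import re

# Stand-in for checkov's COMMENT_REGEX: group(2) is the check ID,
# group(3) the optional ":<comment>" suffix (an approximation of the real pattern).
SKIP_COMMENT = re.compile(r"(checkov:skip=) *([A-Za-z_\d]+)(:[^\n]*)?")

def collect_suppressions(code_lines):
    suppressions = {}
    for _, line in code_lines:
        match = SKIP_COMMENT.search(line)
        if match:
            suppressions[match.group(2)] = {
                "result": "SKIPPED",  # the real code stores CheckResult.SKIPPED
                "suppress_comment": match.group(3)[1:] if match.group(3) else "No comment provided",
            }
    return suppressions

print(collect_suppressions([
    (1, 'resource "aws_s3_bucket" "b" {'),
    (2, '  # checkov:skip=CKV_AWS_18:Access logging not needed here'),
]))
# {'CKV_AWS_18': {'result': 'SKIPPED', 'suppress_comment': 'Access logging not needed here'}}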
f711a6a58c605dd7610cf6b3c4665eb78ee51dff
1,823
py
Python
src/tests/TestAccidental.py
ytyaru/Python.MusicTheory.Pitch.NoteNumber.201709171322
6b69d8f05c60b4b844c300114ac04023d4909f1e
[ "CC0-1.0" ]
null
null
null
src/tests/TestAccidental.py
ytyaru/Python.MusicTheory.Pitch.NoteNumber.201709171322
6b69d8f05c60b4b844c300114ac04023d4909f1e
[ "CC0-1.0" ]
null
null
null
src/tests/TestAccidental.py
ytyaru/Python.MusicTheory.Pitch.NoteNumber.201709171322
6b69d8f05c60b4b844c300114ac04023d4909f1e
[ "CC0-1.0" ]
null
null
null
import unittest from MusicTheory.pitch.Accidental import Accidental import Framework.ConstMeta """ Degreeのテスト。 """ class TestAccidental(unittest.TestCase): def test_Accidentals(self): self.assertEqual(Accidental.Accidentals, {'♯': 1, '#': 1, '+': 1, '♭': -1, 'b': -1, '-': -1}) def test_Accidentals_NotSet(self): with self.assertRaises(Framework.ConstMeta.ConstMeta.ConstError) as e: Accidental.Accidentals = 'some value.' self.assertEqual('readonly。再代入禁止です。', str(e.exception)) def test_Get(self): for count in range(1, 4): for name, interval in Accidental.Accidentals.items(): if not name: continue with self.subTest(accidenta=name, count=count): self.assertEqual(Accidental.Get(name * count), interval * count) def test_Get_None(self): self.assertEqual(Accidental.Get(None), 0) def test_Get_Blank(self): self.assertEqual(Accidental.Get(''), 0) def test_Get_int(self): with self.assertRaises(TypeError) as e: Accidental.Get(100) self.assertIn('引数accidentalは文字列型にしてください。', str(e.exception)) def test_Get_NotSameChars(self): with self.assertRaises(ValueError) as e: Accidental.Get('無効な文字') self.assertIn('引数accidentalは同じ文字のみ連続使用を許されます。異なる文字を混在させることはできません。', str(e.exception)) def test_Get_Invalid(self): with self.assertRaises(ValueError) as e: Accidental.Get('無無無') self.assertIn('引数accidentalに使える文字は次のものだけです。', str(e.exception)) def test_Get_Valid_NotSameChars(self): with self.assertRaises(ValueError) as e: Accidental.Get('+-') self.assertIn('引数accidentalは同じ文字のみ連続使用を許されます。異なる文字を混在させることはできません。', str(e.exception)) if __name__ == '__main__': unittest.main()
43.404762
101
0.667032
import unittest from MusicTheory.pitch.Accidental import Accidental import Framework.ConstMeta class TestAccidental(unittest.TestCase): def test_Accidentals(self): self.assertEqual(Accidental.Accidentals, {'♯': 1, '#': 1, '+': 1, '♭': -1, 'b': -1, '-': -1}) def test_Accidentals_NotSet(self): with self.assertRaises(Framework.ConstMeta.ConstMeta.ConstError) as e: Accidental.Accidentals = 'some value.' self.assertEqual('readonly。再代入禁止です。', str(e.exception)) def test_Get(self): for count in range(1, 4): for name, interval in Accidental.Accidentals.items(): if not name: continue with self.subTest(accidenta=name, count=count): self.assertEqual(Accidental.Get(name * count), interval * count) def test_Get_None(self): self.assertEqual(Accidental.Get(None), 0) def test_Get_Blank(self): self.assertEqual(Accidental.Get(''), 0) def test_Get_int(self): with self.assertRaises(TypeError) as e: Accidental.Get(100) self.assertIn('引数accidentalは文字列型にしてください。', str(e.exception)) def test_Get_NotSameChars(self): with self.assertRaises(ValueError) as e: Accidental.Get('無効な文字') self.assertIn('引数accidentalは同じ文字のみ連続使用を許されます。異なる文字を混在させることはできません。', str(e.exception)) def test_Get_Invalid(self): with self.assertRaises(ValueError) as e: Accidental.Get('無無無') self.assertIn('引数accidentalに使える文字は次のものだけです。', str(e.exception)) def test_Get_Valid_NotSameChars(self): with self.assertRaises(ValueError) as e: Accidental.Get('+-') self.assertIn('引数accidentalは同じ文字のみ連続使用を許されます。異なる文字を混在させることはできません。', str(e.exception)) if __name__ == '__main__': unittest.main()
true
true
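The unittest module above exercises MusicTheory's Accidental class; its assertion strings are Japanese and pin down the contract of Accidental.Get(): '引数accidentalは文字列型にしてください。' means the argument must be a string (TypeError), '引数accidentalは同じ文字のみ連続使用を許されます。異なる文字を混在させることはできません。' means only runs of one repeated character are allowed (ValueError), and '引数accidentalに使える文字は次のものだけです。' means only the listed accidental characters may be used (ValueError); None or '' yields 0, and each '♯'/'#'/'+' adds +1 while each '♭'/'b'/'-' adds -1. The stand-in implementation below is inferred from those tests, with English messages, and is a sketch of the expected behaviour rather than the project's actual Accidental code.

ACCIDENTALS = {'♯': 1, '#': 1, '+': 1, '♭': -1, 'b': -1, '-': -1}

def get_accidental_offset(accidental):
    """Semitone offset implied by an accidental run such as '##' or 'bbb'."""
    if accidental is None or accidental == '':
        return 0
    if not isinstance(accidental, str):
        raise TypeError("accidental must be a string")
    if len(set(accidental)) != 1:
        raise ValueError("accidental may only repeat a single character")
    char = accidental[0]
    if char not in ACCIDENTALS:
        raise ValueError("accidental may only use: " + ", ".join(ACCIDENTALS))
    return ACCIDENTALS[char] * len(accidental)

assert get_accidental_offset('♯♯') == 2
assert get_accidental_offset('bbb') == -3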
f711a8d72b834822ba5d3941177f22987ee79647
1,172
py
Python
framework/parsers/ucXception_fi_parser.py
ucx-code/ucXception
6b1f4fe4aa53a28e87584d07f540095c20ee50e9
[ "BSD-3-Clause" ]
2
2020-08-11T10:54:56.000Z
2021-03-22T14:54:19.000Z
framework/parsers/ucXception_fi_parser.py
ucx-code/ucXception
6b1f4fe4aa53a28e87584d07f540095c20ee50e9
[ "BSD-3-Clause" ]
null
null
null
framework/parsers/ucXception_fi_parser.py
ucx-code/ucXception
6b1f4fe4aa53a28e87584d07f540095c20ee50e9
[ "BSD-3-Clause" ]
null
null
null
import numpy as np


def map_reg_to_text(reg_code):
    reg_dict = ("rip", "rsp", "rax", "rbx", "rcx", "rdx", "cs", "ss", "eflags", "rbp",
                "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15", "rsi", "rdi",
                "orig_rax", "fs_base", "gs_base", "ds", "es", "fs", "gs")
    return reg_dict[reg_code]


class ucXception_fi_parser:

    def parse(self, inj_time, reg, bit, chosen_thread, stdout, stderr):
        row = {}
        row["inj_time"] = inj_time
        row["reg"] = map_reg_to_text(reg)
        row["reg_d"] = np.int32(reg)
        row["bit"] = np.int32(bit)
        row["pid"] = np.int32(chosen_thread)

        # Get the values of the old and new registers
        prefix = "none"
        for line in stdout.split("\n")[:-1]:
            if "Old register values" in line:
                prefix = "old_"
            elif "New register values" in line:
                prefix = "new_"
            else:
                (reg_name, reg_val) = line.split(": ")
                reg_name = reg_name.rstrip().lower()
                reg_val = "0x%s" % reg_val.rstrip()
                # print(reg_name, reg_val, type(reg_val))
                row[prefix + reg_name] = reg_val
                # Also store the register value in decimal. Use the builtin int():
                # np.int was only an alias for int and was removed in NumPy 1.24,
                # and np.int64 cannot parse a base-16 string directly.
                row[prefix + reg_name + "_d"] = int(reg_val, 16)
        return row
31.675676
108
0.617747
import numpy as np


def map_reg_to_text(reg_code):
    reg_dict = ("rip", "rsp", "rax", "rbx", "rcx", "rdx", "cs", "ss", "eflags", "rbp",
                "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15", "rsi", "rdi",
                "orig_rax", "fs_base", "gs_base", "ds", "es", "fs", "gs")
    return reg_dict[reg_code]


class ucXception_fi_parser:

    def parse(self, inj_time, reg, bit, chosen_thread, stdout, stderr):
        row = {}
        row["inj_time"] = inj_time
        row["reg"] = map_reg_to_text(reg)
        row["reg_d"] = np.int32(reg)
        row["bit"] = np.int32(bit)
        row["pid"] = np.int32(chosen_thread)
        prefix = "none"
        for line in stdout.split("\n")[:-1]:
            if "Old register values" in line:
                prefix = "old_"
            elif "New register values" in line:
                prefix = "new_"
            else:
                (reg_name, reg_val) = line.split(": ")
                reg_name = reg_name.rstrip().lower()
                reg_val = "0x%s" % reg_val.rstrip()
                row[prefix + reg_name] = reg_val
                row[prefix + reg_name + "_d"] = int(reg_val, 16)
        return row
true
true
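A short usage sketch for the fault-injection parser above: the register-dump format (a section headed 'Old register values' or 'New register values', followed by 'NAME: HEXVALUE' lines) is inferred from the parse() loop, so the exact ucXception output layout is an assumption, and the import path simply mirrors the file's location in the repository.

from framework.parsers.ucXception_fi_parser import ucXception_fi_parser  # assumes the repo root is on sys.path

# Hypothetical stdout in the shape parse() expects.
fake_stdout = (
    "Old register values\n"
    "RAX: 00000000000000ff\n"
    "New register values\n"
    "RAX: 00000000000000fe\n"
)

parser = ucXception_fi_parser()
row = parser.parse(inj_time=1.5, reg=2, bit=0, chosen_thread=1234,
                   stdout=fake_stdout, stderr="")
print(row["reg"], row["old_rax_d"], row["new_rax_d"])   # -> rax 255 254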
f711aa1005ae5c787a4ebea5165a9b641bd45ab8
1,341
py
Python
src/databricks/azext_databricks/__init__.py
haroonf/azure-cli-extensions
61c044d34c224372f186934fa7c9313f1cd3a525
[ "MIT" ]
207
2017-11-29T06:59:41.000Z
2022-03-31T10:00:53.000Z
src/databricks/azext_databricks/__init__.py
haroonf/azure-cli-extensions
61c044d34c224372f186934fa7c9313f1cd3a525
[ "MIT" ]
4,061
2017-10-27T23:19:56.000Z
2022-03-31T23:18:30.000Z
src/databricks/azext_databricks/__init__.py
haroonf/azure-cli-extensions
61c044d34c224372f186934fa7c9313f1cd3a525
[ "MIT" ]
802
2017-10-11T17:36:26.000Z
2022-03-31T22:24:32.000Z
# -------------------------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # -------------------------------------------------------------------------------------------- from azure.cli.core import AzCommandsLoader from azext_databricks._help import helps # pylint: disable=unused-import class DatabricksClientCommandsLoader(AzCommandsLoader): def __init__(self, cli_ctx=None): from azure.cli.core.commands import CliCommandType from azext_databricks._client_factory import cf_databricks databricks_custom = CliCommandType( operations_tmpl='azext_databricks.custom#{}', client_factory=cf_databricks) super().__init__(cli_ctx=cli_ctx, custom_command_type=databricks_custom) def load_command_table(self, args): from azext_databricks.commands import load_command_table load_command_table(self, args) return self.command_table def load_arguments(self, command): from azext_databricks._params import load_arguments load_arguments(self, command) COMMAND_LOADER_CLS = DatabricksClientCommandsLoader
40.636364
95
0.621178
from azure.cli.core import AzCommandsLoader from azext_databricks._help import helps class DatabricksClientCommandsLoader(AzCommandsLoader): def __init__(self, cli_ctx=None): from azure.cli.core.commands import CliCommandType from azext_databricks._client_factory import cf_databricks databricks_custom = CliCommandType( operations_tmpl='azext_databricks.custom#{}', client_factory=cf_databricks) super().__init__(cli_ctx=cli_ctx, custom_command_type=databricks_custom) def load_command_table(self, args): from azext_databricks.commands import load_command_table load_command_table(self, args) return self.command_table def load_arguments(self, command): from azext_databricks._params import load_arguments load_arguments(self, command) COMMAND_LOADER_CLS = DatabricksClientCommandsLoader
true
true
f711aad73b54817b0d66ca271a5e4890b106d034
6,744
py
Python
home/views.py
IkshulDureja/Book-Attic
7c20e94a072012b5a89a5d231e8ae1e8d7732efe
[ "MIT" ]
2
2020-11-10T04:51:37.000Z
2020-12-25T12:21:56.000Z
home/views.py
IkshulDureja/Book-Attic
7c20e94a072012b5a89a5d231e8ae1e8d7732efe
[ "MIT" ]
null
null
null
home/views.py
IkshulDureja/Book-Attic
7c20e94a072012b5a89a5d231e8ae1e8d7732efe
[ "MIT" ]
null
null
null
from django.shortcuts import render,HttpResponse,redirect from .models import books from django.contrib.auth.models import User from django.contrib.auth import login,logout,authenticate from django.contrib.auth.decorators import login_required from django.contrib import messages from . forms import sellbookform, ContactForm from .models import Order,TrackUpdate from django.core.mail import send_mail def loginsignup(request): return render(request,'home/loginlink.html') def home(request): allProds = [] book = books.objects.all() categories = books.objects.values('category') ca = {item['category'] for item in categories} cats = list(ca) for cat in cats: prod = books.objects.filter(category = cat) allProds.append([prod,range(len(prod))]) params = {'books':book, 'cats':cats, 'allProds':allProds} return render(request,'home/home.html',params) def handleSignup(request): if request.method =='POST': username = request.POST['username'] email = request.POST['signupemail'] fname = request.POST['fname'] lname = request.POST['lname'] pass1 = request.POST['pass1'] pass2 = request.POST['pass2'] if len(username) > 25: messages.error(request, "User name must be under 25 Characters") return redirect('/') if pass1 != pass2: messages.error(request, "Password do not match") return redirect('/') myuser = User.objects.create_user(username=username,email=email,password=pass2) myuser.first_name = fname myuser.last_name = lname myuser.save() messages.success(request,'Your account has been created Successfully ') return redirect('/') else: return HttpResponse('NOT ALLOWED') def handleLogin(request): loginusername = request.POST['loginusername'] loginpass = request.POST['loginpass'] user = authenticate(username=loginusername,password=loginpass) if user is not None: login(request,user) messages.success(request,"Successfully Logged In ") return redirect('/') else: messages.error(request,"Please Enter the username or password correctly!") return redirect('/') @login_required(login_url='/loginsignup') def handleLogout(request): logout(request) messages.success(request,"Successfully logged out") return redirect('/') @login_required(login_url='/loginsignup') def sellbook(request): context ={'form': sellbookform()} return render(request, "home/sellbook.html", context) @login_required(login_url='/loginsignup') def savebook(request): sellername = request.user.username book_name = request.POST.get('book_name') category = request.POST.get('category') price = request.POST.get('price') image = request.FILES['image'] pickuplocation = request.POST.get('pickuplocation') slug = book_name.replace(" ", "+") + "+by+" + str(sellername) newbook = books.objects.create(sellername=sellername, book_name = book_name, category = category, price= price,image= image,pickuplocation = pickuplocation, slug= slug) try: newbook.save() messages.success(request,'Your post has been added successfully, Thank you for your great effort.') except: messages.error(request,"Sorry! 
unable to Process..") return redirect('/') @login_required(login_url='/loginsignup') def checkout(request): if request.method=="POST": items_json = request.POST.get('itemsJson', '') name = request.POST.get('name', '') email = request.POST.get('email', '') address = request.POST.get('address1', '') + " " + request.POST.get('address2', '') city = request.POST.get('city', '') state = request.POST.get('state', '') zip_code = request.POST.get('zip_code', '') phone = request.POST.get('phone', '') order = Order(items_json=items_json, name=name, email=email, address=address, city=city, state=state, zip_code=zip_code, phone=phone) order.save() updateorder = TrackUpdate(order_id=order.order_id,update="Your Order Is Placed") updateorder.save() thank = True id = order.order_id return render(request, 'home/checkout.html', {'thank':thank, 'id': id}) return render(request, 'home/checkout.html') def TrackOrder(request): if request.method == "POST": order_id = request.POST.get('order_id') updates = TrackUpdate.objects.filter(order_id=order_id) context = {'updates':updates} return render(request,'home/updatepage.html',context) return render(request,'home/trackorder.html') def search(request): searchquery = request.GET['search'] if len(searchquery)>200: allposts = Post.objects.none() if len(searchquery)<4: allposts = Post.objects.none() messages.error(request,'Please enter more than 4 characters') redirect('/') else: allpoststitle = books.objects.filter(book_name__icontains=searchquery) allpostscontent = books.objects.filter(category__icontains=searchquery) allposts = allpoststitle.union(allpostscontent) context = {'allposts':allposts,'search':searchquery} return render(request,'home/search.html',context) return render(request,'home/search.html') #def contact(request): # if request.method == "POST": # form_name=request.POST['from_name'] # form_lastname=request.POST['from_lastname'] # form_email=request.POST['from_email'] # form_phone=request.POST['from_phone'] # form_message=request.POST['from_message'] # # #send an Email # send_mail( # form_name,#subject # form_message,#message # form_email,#email from # ['gkaur2_be19@thapar.edu'],#email to # ) # # return render(request, 'home/contact.html', {'form_name':form_name}) # else: # return render(request, 'home/contact.html', {}) def contact(request): name='' email='' comment='' form= ContactForm(request.POST or None) if form.is_valid(): name= form.cleaned_data.get("name") email= form.cleaned_data.get("email") comment=form.cleaned_data.get("comment") subject= "A Visitor's Comment" comment= name + " with the email, " + email + ", sent the following message:\n\n" + comment; send_mail(subject, comment, email , ['gkaur2_be19@thapar.edu']) context= {'name': name} return render(request, 'home/contact.html', context) else: context= {'form': form} return render(request, 'home/contact.html', context)
36.852459
172
0.654953
from django.shortcuts import render,HttpResponse,redirect from .models import books from django.contrib.auth.models import User from django.contrib.auth import login,logout,authenticate from django.contrib.auth.decorators import login_required from django.contrib import messages from . forms import sellbookform, ContactForm from .models import Order,TrackUpdate from django.core.mail import send_mail def loginsignup(request): return render(request,'home/loginlink.html') def home(request): allProds = [] book = books.objects.all() categories = books.objects.values('category') ca = {item['category'] for item in categories} cats = list(ca) for cat in cats: prod = books.objects.filter(category = cat) allProds.append([prod,range(len(prod))]) params = {'books':book, 'cats':cats, 'allProds':allProds} return render(request,'home/home.html',params) def handleSignup(request): if request.method =='POST': username = request.POST['username'] email = request.POST['signupemail'] fname = request.POST['fname'] lname = request.POST['lname'] pass1 = request.POST['pass1'] pass2 = request.POST['pass2'] if len(username) > 25: messages.error(request, "User name must be under 25 Characters") return redirect('/') if pass1 != pass2: messages.error(request, "Password do not match") return redirect('/') myuser = User.objects.create_user(username=username,email=email,password=pass2) myuser.first_name = fname myuser.last_name = lname myuser.save() messages.success(request,'Your account has been created Successfully ') return redirect('/') else: return HttpResponse('NOT ALLOWED') def handleLogin(request): loginusername = request.POST['loginusername'] loginpass = request.POST['loginpass'] user = authenticate(username=loginusername,password=loginpass) if user is not None: login(request,user) messages.success(request,"Successfully Logged In ") return redirect('/') else: messages.error(request,"Please Enter the username or password correctly!") return redirect('/') @login_required(login_url='/loginsignup') def handleLogout(request): logout(request) messages.success(request,"Successfully logged out") return redirect('/') @login_required(login_url='/loginsignup') def sellbook(request): context ={'form': sellbookform()} return render(request, "home/sellbook.html", context) @login_required(login_url='/loginsignup') def savebook(request): sellername = request.user.username book_name = request.POST.get('book_name') category = request.POST.get('category') price = request.POST.get('price') image = request.FILES['image'] pickuplocation = request.POST.get('pickuplocation') slug = book_name.replace(" ", "+") + "+by+" + str(sellername) newbook = books.objects.create(sellername=sellername, book_name = book_name, category = category, price= price,image= image,pickuplocation = pickuplocation, slug= slug) try: newbook.save() messages.success(request,'Your post has been added successfully, Thank you for your great effort.') except: messages.error(request,"Sorry! 
unable to Process..") return redirect('/') @login_required(login_url='/loginsignup') def checkout(request): if request.method=="POST": items_json = request.POST.get('itemsJson', '') name = request.POST.get('name', '') email = request.POST.get('email', '') address = request.POST.get('address1', '') + " " + request.POST.get('address2', '') city = request.POST.get('city', '') state = request.POST.get('state', '') zip_code = request.POST.get('zip_code', '') phone = request.POST.get('phone', '') order = Order(items_json=items_json, name=name, email=email, address=address, city=city, state=state, zip_code=zip_code, phone=phone) order.save() updateorder = TrackUpdate(order_id=order.order_id,update="Your Order Is Placed") updateorder.save() thank = True id = order.order_id return render(request, 'home/checkout.html', {'thank':thank, 'id': id}) return render(request, 'home/checkout.html') def TrackOrder(request): if request.method == "POST": order_id = request.POST.get('order_id') updates = TrackUpdate.objects.filter(order_id=order_id) context = {'updates':updates} return render(request,'home/updatepage.html',context) return render(request,'home/trackorder.html') def search(request): searchquery = request.GET['search'] if len(searchquery)>200: allposts = Post.objects.none() if len(searchquery)<4: allposts = Post.objects.none() messages.error(request,'Please enter more than 4 characters') redirect('/') else: allpoststitle = books.objects.filter(book_name__icontains=searchquery) allpostscontent = books.objects.filter(category__icontains=searchquery) allposts = allpoststitle.union(allpostscontent) context = {'allposts':allposts,'search':searchquery} return render(request,'home/search.html',context) return render(request,'home/search.html') mail='' comment='' form= ContactForm(request.POST or None) if form.is_valid(): name= form.cleaned_data.get("name") email= form.cleaned_data.get("email") comment=form.cleaned_data.get("comment") subject= "A Visitor's Comment" comment= name + " with the email, " + email + ", sent the following message:\n\n" + comment; send_mail(subject, comment, email , ['gkaur2_be19@thapar.edu']) context= {'name': name} return render(request, 'home/contact.html', context) else: context= {'form': form} return render(request, 'home/contact.html', context)
true
true
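The contact view in the views.py record above reads "name", "email" and "comment" out of form.cleaned_data, but the ContactForm it imports from .forms is not included in the record. A minimal sketch of what such a form could look like (an assumption for illustration, not the repository's actual forms.py):

from django import forms

class ContactForm(forms.Form):
    # Field names mirror the cleaned_data lookups made by the contact view.
    name = forms.CharField(max_length=100)
    email = forms.EmailField()
    comment = forms.CharField(widget=forms.Textarea)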
f711aba641513f7bce1193773cb3f004e8197fbf
2,614
py
Python
demo/predict.py
lyuyangh/Cross-Attention-VizWiz-VQA
853bfe480dac5bd1363f60c6b17e25134acdc2fa
[ "MIT" ]
10
2021-07-25T12:44:34.000Z
2022-03-23T04:07:12.000Z
demo/predict.py
lyuyangh/Cross-Attention-VizWiz-VQA
853bfe480dac5bd1363f60c6b17e25134acdc2fa
[ "MIT" ]
null
null
null
demo/predict.py
lyuyangh/Cross-Attention-VizWiz-VQA
853bfe480dac5bd1363f60c6b17e25134acdc2fa
[ "MIT" ]
5
2021-07-25T12:44:35.000Z
2022-03-26T16:51:44.000Z
import datetime
import json
import os
import sys
import time

import torch
import torch.nn as nn
from torch.autograd import Variable
from torch.utils.data import DataLoader
from utils.flags import FLAGS

sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from model.vqa_model import ModelParams, VQAModel
import demo.demo_dataset as dataset
import demo.visualize as visualize


class Inference:
    def __init__(self):
        self.model = self._load_model()
        self.demo_data = dataset.VQAFeatureDataset()

    def _get_answer(self, p, dataloader):
        _m, idx = p.max(1)
        return dataloader.dataset.label2ans[idx.item()]

    def _load_model(self):
        data_params = json.load(open(FLAGS.data_params_path))
        model_params = ModelParams(
            add_self_attention=FLAGS.add_self_attention,
            fusion_method=FLAGS.fusion_method,
            question_sequence_length=dataset.MAX_QUES_SEQ_LEN,
            number_of_objects=dataset.NO_OBJECTS,
            word_embedding_dimension=data_params["word_feat_dimension"],
            object_embedding_dimension=data_params["image_feat_dimension"],
            vocabulary_size=data_params["vocabulary_size"],
            num_ans_candidates=data_params["number_of_answer_candidiates"],
        )
        model = VQAModel(
            glove_path=FLAGS.glove_path,
            model_params=model_params,
            hidden_dimension=FLAGS.hidden_dimension,
        ).cuda()
        FLAGS.snapshot_path = (
            "/home/rachana/Documents/vizwiz/save_folder/self_cross_3/final"
        )
        model_path = FLAGS.snapshot_path
        print("loading %s" % model_path)
        model_data = torch.load(model_path)
        model = nn.DataParallel(model).cuda()
        model.load_state_dict(model_data.get("model_state", model_data))
        model.train(False)
        return model

    def get_prediction(self, image_id, question, batch_size=1):
        self.demo_data.set_input(image_id, question)
        demo_data_loader = DataLoader(
            self.demo_data,
            batch_size,
            shuffle=False,
            num_workers=1,
        )
        visual_feature, bboxes, question = iter(demo_data_loader).next()
        visual_feature = Variable(visual_feature).cuda()
        bboxes = Variable(bboxes).cuda()
        question = Variable(question).cuda()
        pred, i_att, q_att = self.model(visual_feature, question)
        answer = self._get_answer(pred.data, demo_data_loader)
        return (
            answer,
            i_att,
            q_att,
            bboxes,
        )
33.512821
76
0.662586
import datetime import json import os import sys import time import torch import torch.nn as nn from torch.autograd import Variable from torch.utils.data import DataLoader from utils.flags import FLAGS sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) from model.vqa_model import ModelParams, VQAModel import demo.demo_dataset as dataset import demo.visualize as visualize class Inference: def __init__(self): self.model = self._load_model() self.demo_data = dataset.VQAFeatureDataset() def _get_answer(self, p, dataloader): _m, idx = p.max(1) return dataloader.dataset.label2ans[idx.item()] def _load_model(self): data_params = json.load(open(FLAGS.data_params_path)) model_params = ModelParams( add_self_attention=FLAGS.add_self_attention, fusion_method=FLAGS.fusion_method, question_sequence_length=dataset.MAX_QUES_SEQ_LEN, number_of_objects=dataset.NO_OBJECTS, word_embedding_dimension=data_params["word_feat_dimension"], object_embedding_dimension=data_params["image_feat_dimension"], vocabulary_size=data_params["vocabulary_size"], num_ans_candidates=data_params["number_of_answer_candidiates"], ) model = VQAModel( glove_path=FLAGS.glove_path, model_params=model_params, hidden_dimension=FLAGS.hidden_dimension, ).cuda() FLAGS.snapshot_path = ( "/home/rachana/Documents/vizwiz/save_folder/self_cross_3/final" ) model_path = FLAGS.snapshot_path print("loading %s" % model_path) model_data = torch.load(model_path) model = nn.DataParallel(model).cuda() model.load_state_dict(model_data.get("model_state", model_data)) model.train(False) return model def get_prediction(self, image_id, question, batch_size=1): self.demo_data.set_input(image_id, question) demo_data_loader = DataLoader( self.demo_data, batch_size, shuffle=False, num_workers=1, ) visual_feature, bboxes, question = iter(demo_data_loader).next() visual_feature = Variable(visual_feature).cuda() bboxes = Variable(bboxes).cuda() question = Variable(question).cuda() pred, i_att, q_att = self.model(visual_feature, question) answer = self._get_answer(pred.data, demo_data_loader) return ( answer, i_att, q_att, bboxes, )
true
true
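A minimal usage sketch for the Inference class in the demo/predict.py record above. It assumes the repository is on PYTHONPATH, a CUDA device is available, and precomputed features exist for the chosen image; the image id and question below are made up for illustration:

from demo.predict import Inference

engine = Inference()  # loads the snapshot configured via FLAGS.snapshot_path
answer, image_attention, question_attention, bboxes = engine.get_prediction(
    image_id="VizWiz_val_00000001",  # hypothetical identifier
    question="What color is this shirt?",
)
print(answer)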
f711ad435cefa630d1bf8fbb0d00e964ba895374
1,568
py
Python
chpt9/piechart.py
GDG-Buea/learn-python
9dfe8caa4b57489cf4249bf7e64856062a0b93c2
[ "Apache-2.0" ]
null
null
null
chpt9/piechart.py
GDG-Buea/learn-python
9dfe8caa4b57489cf4249bf7e64856062a0b93c2
[ "Apache-2.0" ]
2
2018-05-21T09:39:00.000Z
2018-05-27T15:59:15.000Z
chpt9/piechart.py
GDG-Buea/learn-python
9dfe8caa4b57489cf4249bf7e64856062a0b93c2
[ "Apache-2.0" ]
2
2018-05-19T14:59:56.000Z
2018-05-19T15:25:48.000Z
# This program uses a pie chart to display the percentages of the overall grade represented by a project's,
# quizzes, the midterm exam, and the final exam, 20 percent of the grade and its value is displayed in red,
# quizzes are 10 percent and are displayed in blue, the midterm exam is 30 percent and is displayed in green and
# the final exam is 40 percent and is displayed in orange.

from tkinter import * # Import tkinter
import math

radius = 100
width = 300
height = 300


class PieChart:
    def __init__(self):
        window = Tk() # Create a window
        window.title("Pie Chart") # Set a title

        self.canvas = Canvas(window, bg="white", width=width, height=height)
        self.canvas.pack()

        self.drawAPie(0, 360 * 0.2, "red", "Project -- 20%")
        self.drawAPie(360 * 0.2, 360 * 0.1, "blue", "Quizzes -- 10%")
        self.drawAPie(360 * 0.2 + 360 * 0.1, 360 * 0.3, "green", "Midterm -- 30%")
        self.drawAPie(360 * 0.2 + 360 * 0.1 + 360 * 0.3, 360 * 0.4, "orange", "Final -- 40%")

        window.mainloop() # Create an event loop

    def drawAPie(self, start, extent, color, title):
        self.canvas.create_arc(width / 2 - radius, height / 2 - radius, width / 2 + radius, height / 2 + radius,
                               start=start, extent=extent, fill=color)
        x = width / 2 + radius * math.cos(math.radians(extent / 2 + start))
        y = height / 2 - radius * math.sin(math.radians(extent / 2 + start))
        self.canvas.create_text(x, y, text=title)


PieChart()
37.333333
113
0.609694
# quizzes, the midterm exam, and the final exam, 20 percent of the grade and its value is displayed in red, # quizzes are 10 percent and are displayed in blue, the midterm exam is 30 percent and is displayed in green and # the final exam is 40 percent and is displayed in orange. from tkinter import * # Import tkinter import math radius = 100 width = 300 height = 300 class PieChart: def __init__(self): window = Tk() # Create a window window.title("Pie Chart") # Set a title self.canvas = Canvas(window, bg="white", width=width, height=height) self.canvas.pack() self.drawAPie(0, 360 * 0.2, "red", "Project -- 20%") self.drawAPie(360 * 0.2, 360 * 0.1, "blue", "Quizzes -- 10%") self.drawAPie(360 * 0.2 + 360 * 0.1, 360 * 0.3, "green", "Midterm -- 30%") self.drawAPie(360 * 0.2 + 360 * 0.1 + 360 * 0.3, 360 * 0.4, "orange", "Final -- 40%") window.mainloop() # Create an event loop def drawAPie(self, start, extent, color, title): self.canvas.create_arc(width / 2 - radius, height / 2 - radius, width / 2 + radius, height / 2 + radius, start=start, extent=extent, fill=color) x = width / 2 + radius * math.cos(math.radians(extent / 2 + start)) y = height / 2 - radius * math.sin(math.radians(extent / 2 + start)) self.canvas.create_text(x, y, text=title) PieChart()
true
true
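PieChart.__init__ above hard-codes each wedge's start angle as a running sum of the previous extents. A small illustrative helper (not part of the original file) showing how those (start, extent) pairs could be derived from a table of weights instead:

def pie_segments(weights):
    """Yield (label, start_angle, extent_angle) tuples for a pie chart."""
    start = 0.0
    for label, fraction in weights.items():
        extent = 360 * fraction
        yield label, start, extent
        start += extent

if __name__ == "__main__":
    grade_weights = {"Project": 0.2, "Quizzes": 0.1, "Midterm": 0.3, "Final": 0.4}
    for label, start, extent in pie_segments(grade_weights):
        print(label, start, extent)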
f711ad60a1c4f5370c2e0d7919222e6d32720773
3,308
py
Python
app/auth.py
n0nvme/yatb
3d2e7d7e3b2c694e4ae8a662becbe0984cf03210
[ "Apache-2.0" ]
null
null
null
app/auth.py
n0nvme/yatb
3d2e7d7e3b2c694e4ae8a662becbe0984cf03210
[ "Apache-2.0" ]
null
null
null
app/auth.py
n0nvme/yatb
3d2e7d7e3b2c694e4ae8a662becbe0984cf03210
[ "Apache-2.0" ]
null
null
null
import uuid
from typing import Optional, Union
from datetime import datetime, timedelta

from fastapi import Request, HTTPException, status, Depends
from fastapi.security import OAuth2
from fastapi.openapi.models import OAuthFlows as OAuthFlowsModel
from fastapi.security.utils import get_authorization_scheme_param
from jose import JWTError, jwt

from . import schema, db
from .config import settings


class OAuth2PasswordBearerWithCookie(OAuth2):
    def __init__(
        self,
        tokenUrl: str,
        scheme_name: str = None,
        scopes: dict = None,
        auto_error: bool = True,
    ):
        if not scopes:
            scopes = {}
        flows = OAuthFlowsModel(password={"tokenUrl": tokenUrl, "scopes": scopes})
        super().__init__(flows=flows, scheme_name=scheme_name, auto_error=auto_error)

    async def __call__(self, request: Request) -> Optional[str]:
        authorization: Optional[str] = request.cookies.get("access_token")
        if not authorization:
            raise HTTPException(
                status_code=status.HTTP_401_UNAUTHORIZED,
                detail="No cookie",
                headers={"WWW-Authenticate": "Bearer"},
            )
        scheme, param = get_authorization_scheme_param(authorization)
        if scheme.lower() != "bearer":
            raise HTTPException(
                status_code=status.HTTP_401_UNAUTHORIZED,
                detail="Not authenticated",
                headers={"WWW-Authenticate": "Bearer"},
            )
        return param


oauth2_scheme = OAuth2PasswordBearerWithCookie(tokenUrl="/api/users/login")


async def authenticate_user(username: str, password: str) -> Optional[schema.User]:
    user = await db.get_user(username)
    if not user:
        return None
    if user.password_hash is None or password != user.password_hash:
        return None
    return user


def create_access_token(data: dict, expires_delta: Optional[timedelta] = None):
    to_encode = data.copy()
    if expires_delta:
        expire = datetime.utcnow() + expires_delta
    else:
        expire = datetime.utcnow() + timedelta(minutes=15)
    to_encode.update({"exp": expire})
    encoded_jwt = jwt.encode(to_encode, settings.JWT_SECRET_KEY, algorithm=settings.JWT_ALGORITHM)
    return encoded_jwt


def create_user_token(user: schema.User):
    access_token_expires = timedelta(minutes=settings.JWT_ACCESS_TOKEN_EXPIRE_MINUTES)
    access_token = create_access_token(data={"user_id": str(user.user_id)}, expires_delta=access_token_expires)
    return access_token


async def get_current_user(token: str = Depends(oauth2_scheme)):
    credentials_exception = HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="Could not validate credentials",
        headers={"WWW-Authenticate": "Bearer"},
    )
    user_id: Union[str, uuid.UUID, None] = None
    try:
        payload = jwt.decode(token, settings.JWT_SECRET_KEY, algorithms=[settings.JWT_ALGORITHM])  # no "alg:none"
        user_id = payload.get("user_id")
        if user_id is None:
            raise credentials_exception
    except JWTError:
        raise credentials_exception
    user_id = uuid.UUID(user_id)
    user = await db.get_user_uuid(uuid=user_id)
    if user is None:
        raise credentials_exception
    return user
34.103093
114
0.685006
import uuid from typing import Optional, Union from datetime import datetime, timedelta from fastapi import Request, HTTPException, status, Depends from fastapi.security import OAuth2 from fastapi.openapi.models import OAuthFlows as OAuthFlowsModel from fastapi.security.utils import get_authorization_scheme_param from jose import JWTError, jwt from . import schema, db from .config import settings class OAuth2PasswordBearerWithCookie(OAuth2): def __init__( self, tokenUrl: str, scheme_name: str = None, scopes: dict = None, auto_error: bool = True, ): if not scopes: scopes = {} flows = OAuthFlowsModel(password={"tokenUrl": tokenUrl, "scopes": scopes}) super().__init__(flows=flows, scheme_name=scheme_name, auto_error=auto_error) async def __call__(self, request: Request) -> Optional[str]: authorization: Optional[str] = request.cookies.get("access_token") if not authorization: raise HTTPException( status_code=status.HTTP_401_UNAUTHORIZED, detail="No cookie", headers={"WWW-Authenticate": "Bearer"}, ) scheme, param = get_authorization_scheme_param(authorization) if scheme.lower() != "bearer": raise HTTPException( status_code=status.HTTP_401_UNAUTHORIZED, detail="Not authenticated", headers={"WWW-Authenticate": "Bearer"}, ) return param oauth2_scheme = OAuth2PasswordBearerWithCookie(tokenUrl="/api/users/login") async def authenticate_user(username: str, password: str) -> Optional[schema.User]: user = await db.get_user(username) if not user: return None if user.password_hash is None or password != user.password_hash: return None return user def create_access_token(data: dict, expires_delta: Optional[timedelta] = None): to_encode = data.copy() if expires_delta: expire = datetime.utcnow() + expires_delta else: expire = datetime.utcnow() + timedelta(minutes=15) to_encode.update({"exp": expire}) encoded_jwt = jwt.encode(to_encode, settings.JWT_SECRET_KEY, algorithm=settings.JWT_ALGORITHM) return encoded_jwt def create_user_token(user: schema.User): access_token_expires = timedelta(minutes=settings.JWT_ACCESS_TOKEN_EXPIRE_MINUTES) access_token = create_access_token(data={"user_id": str(user.user_id)}, expires_delta=access_token_expires) return access_token async def get_current_user(token: str = Depends(oauth2_scheme)): credentials_exception = HTTPException( status_code=status.HTTP_401_UNAUTHORIZED, detail="Could not validate credentials", headers={"WWW-Authenticate": "Bearer"}, ) user_id: Union[str, uuid.UUID, None] = None try: payload = jwt.decode(token, settings.JWT_SECRET_KEY, algorithms=[settings.JWT_ALGORITHM]) user_id = payload.get("user_id") if user_id is None: raise credentials_exception except JWTError: raise credentials_exception user_id = uuid.UUID(user_id) user = await db.get_user_uuid(uuid=user_id) if user is None: raise credentials_exception return user
true
true
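The cookie-based scheme in the app/auth.py record above expects an "access_token" cookie holding "Bearer <jwt>". A hedged sketch of a login route that would set that cookie using authenticate_user and create_user_token; the router, path and import names are assumptions for illustration, not code from the repository:

from fastapi import APIRouter, Depends, HTTPException, Response, status
from fastapi.security import OAuth2PasswordRequestForm

from app import auth  # the module shown in the record above (assumed import path)

router = APIRouter()

@router.post("/api/users/login")
async def login(response: Response, form_data: OAuth2PasswordRequestForm = Depends()):
    user = await auth.authenticate_user(form_data.username, form_data.password)
    if user is None:
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Bad credentials")
    token = auth.create_user_token(user)
    # The scheme prefix matters: get_authorization_scheme_param() expects "Bearer <token>".
    response.set_cookie(key="access_token", value=f"Bearer {token}", httponly=True)
    return {"ok": True}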
f711aea97d18c0191ecb308c8e856ac112eecb8b
511
py
Python
algoexpert/branchSums.py
mahendra-rk/coding-interview-prep
3b932d724846132a4560547809d63bd8ec67e123
[ "MIT" ]
null
null
null
algoexpert/branchSums.py
mahendra-rk/coding-interview-prep
3b932d724846132a4560547809d63bd8ec67e123
[ "MIT" ]
null
null
null
algoexpert/branchSums.py
mahendra-rk/coding-interview-prep
3b932d724846132a4560547809d63bd8ec67e123
[ "MIT" ]
null
null
null
class BinaryTree:
    def __init__(self, value):
        self.value = value
        self.left = None
        self.right = None

def branchSums(root, sum=0, sumsList=None):
    if root is None:
        return
    if sumsList is None: #refer https://stackoverflow.com/a/60202340/6699913
        sumsList = []
    sum += root.value
    if root.left is None and root.right is None:
        sumsList.append(sum)

    branchSums(root.left, sum, sumsList)
    branchSums(root.right, sum, sumsList)
    return sumsList
26.894737
76
0.639922
class BinaryTree: def __init__(self, value): self.value = value self.left = None self.right = None def branchSums(root, sum=0, sumsList=None): if root is None: return if sumsList is None: sumsList = [] sum += root.value if root.left is None and root.right is None: sumsList.append(sum) branchSums(root.left, sum, sumsList) branchSums(root.right, sum, sumsList) return sumsList
true
true
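A short usage sketch for the branchSums record above, assuming the module is importable as branchSums:

from branchSums import BinaryTree, branchSums

# Build a small tree:  1 -> (2 -> 4, 3)
root = BinaryTree(1)
root.left = BinaryTree(2)
root.right = BinaryTree(3)
root.left.left = BinaryTree(4)

print(branchSums(root))  # [7, 4]: root-to-leaf paths 1+2+4 and 1+3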
f711b05eacbea0ffeb1d684c65680d31e02a5e89
27,154
py
Python
nnunet/experiment_planning/experiment_planner_baseline_3DUNet.py
Janetteeeeeeee/nnUNet
db654c445aa5ced436dbf842d432dbbcdc01f4b5
[ "Apache-2.0" ]
null
null
null
nnunet/experiment_planning/experiment_planner_baseline_3DUNet.py
Janetteeeeeeee/nnUNet
db654c445aa5ced436dbf842d432dbbcdc01f4b5
[ "Apache-2.0" ]
null
null
null
nnunet/experiment_planning/experiment_planner_baseline_3DUNet.py
Janetteeeeeeee/nnUNet
db654c445aa5ced436dbf842d432dbbcdc01f4b5
[ "Apache-2.0" ]
null
null
null
# Copyright 2020 Division of Medical Image Computing, German Cancer Research Center (DKFZ), Heidelberg, Germany # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import shutil from collections import OrderedDict from copy import deepcopy import nnunet import numpy as np from batchgenerators.utilities.file_and_folder_operations import * from nnunet.configuration import default_num_threads from nnunet.experiment_planning.DatasetAnalyzer import DatasetAnalyzer from nnunet.experiment_planning.common_utils import get_pool_and_conv_props_poolLateV2 from nnunet.experiment_planning.utils import create_lists_from_splitted_dataset from nnunet.network_architecture.generic_UNet import Generic_UNet from nnunet.paths import * from nnunet.preprocessing.cropping import get_case_identifier_from_npz from nnunet.training.model_restore import recursive_find_python_class class ExperimentPlanner(object): def __init__(self, folder_with_cropped_data, preprocessed_output_folder): self.folder_with_cropped_data = folder_with_cropped_data self.preprocessed_output_folder = preprocessed_output_folder self.list_of_cropped_npz_files = subfiles(self.folder_with_cropped_data, True, None, ".npz", True) self.preprocessor_name = "GenericPreprocessor" assert isfile(join(self.folder_with_cropped_data, "dataset_properties.pkl")), \ "folder_with_cropped_data must contain dataset_properties.pkl" self.dataset_properties = load_pickle(join(self.folder_with_cropped_data, "dataset_properties.pkl")) self.plans_per_stage = OrderedDict() self.plans = OrderedDict() self.plans_fname = join(self.preprocessed_output_folder, "nnUNetPlans" + "fixed_plans_3D.pkl") self.data_identifier = default_data_identifier self.transpose_forward = [0, 1, 2] self.transpose_backward = [0, 1, 2] self.unet_base_num_features = Generic_UNet.BASE_NUM_FEATURES_3D self.unet_max_num_filters = 320 self.unet_max_numpool = 999 self.unet_min_batch_size = 2 self.unet_featuremap_min_edge_length = 4 self.target_spacing_percentile = 50 self.anisotropy_threshold = 3 self.how_much_of_a_patient_must_the_network_see_at_stage0 = 4 # 1/4 of a patient self.batch_size_covers_max_percent_of_dataset = 0.05 # all samples in the batch together cannot cover more # than 5% of the entire dataset self.conv_per_stage = 2 def get_target_spacing(self): spacings = self.dataset_properties['all_spacings'] # target = np.median(np.vstack(spacings), 0) # if target spacing is very anisotropic we may want to not downsample the axis with the worst spacing # uncomment after mystery task submission """worst_spacing_axis = np.argmax(target) if max(target) > (2.5 * min(target)): spacings_of_that_axis = np.vstack(spacings)[:, worst_spacing_axis] target_spacing_of_that_axis = np.percentile(spacings_of_that_axis, 5) target[worst_spacing_axis] = target_spacing_of_that_axis""" target = np.percentile(np.vstack(spacings), self.target_spacing_percentile, 0) return target def save_my_plans(self): with open(self.plans_fname, 'wb') as f: pickle.dump(self.plans, f) def load_my_plans(self): self.plans = load_pickle(self.plans_fname) 
self.plans_per_stage = self.plans['plans_per_stage'] self.dataset_properties = self.plans['dataset_properties'] self.transpose_forward = self.plans['transpose_forward'] self.transpose_backward = self.plans['transpose_backward'] def determine_postprocessing(self): pass """ Spoiler: This is unused, postprocessing was removed. Ignore it. :return: print("determining postprocessing...") props_per_patient = self.dataset_properties['segmentation_props_per_patient'] all_region_keys = [i for k in props_per_patient.keys() for i in props_per_patient[k]['only_one_region'].keys()] all_region_keys = list(set(all_region_keys)) only_keep_largest_connected_component = OrderedDict() for r in all_region_keys: all_results = [props_per_patient[k]['only_one_region'][r] for k in props_per_patient.keys()] only_keep_largest_connected_component[tuple(r)] = all(all_results) print("Postprocessing: only_keep_largest_connected_component", only_keep_largest_connected_component) all_classes = self.dataset_properties['all_classes'] classes = [i for i in all_classes if i > 0] props_per_patient = self.dataset_properties['segmentation_props_per_patient'] min_size_per_class = OrderedDict() for c in classes: all_num_voxels = [] for k in props_per_patient.keys(): all_num_voxels.append(props_per_patient[k]['volume_per_class'][c]) if len(all_num_voxels) > 0: min_size_per_class[c] = np.percentile(all_num_voxels, 1) * MIN_SIZE_PER_CLASS_FACTOR else: min_size_per_class[c] = np.inf min_region_size_per_class = OrderedDict() for c in classes: region_sizes = [l for k in props_per_patient for l in props_per_patient[k]['region_volume_per_class'][c]] if len(region_sizes) > 0: min_region_size_per_class[c] = min(region_sizes) # we don't need that line but better safe than sorry, right? min_region_size_per_class[c] = min(min_region_size_per_class[c], min_size_per_class[c]) else: min_region_size_per_class[c] = 0 print("Postprocessing: min_size_per_class", min_size_per_class) print("Postprocessing: min_region_size_per_class", min_region_size_per_class) return only_keep_largest_connected_component, min_size_per_class, min_region_size_per_class """ def get_properties_for_stage(self, current_spacing, original_spacing, original_shape, num_cases, num_modalities, num_classes): """ Computation of input patch size starts out with the new median shape (in voxels) of a dataset. This is opposed to prior experiments where I based it on the median size in mm. The rationale behind this is that for some organ of interest the acquisition method will most likely be chosen such that the field of view and voxel resolution go hand in hand to show the doctor what they need to see. This assumption may be violated for some modalities with anisotropy (cine MRI) but we will have t live with that. In future experiments I will try to 1) base input patch size match aspect ratio of input size in mm (instead of voxels) and 2) to try to enforce that we see the same 'distance' in all directions (try to maintain equal size in mm of patch) The patches created here attempt keep the aspect ratio of the new_median_shape :param current_spacing: :param original_spacing: :param original_shape: :param num_cases: :return: """ new_median_shape = np.round(original_spacing / current_spacing * original_shape).astype(int) dataset_num_voxels = np.prod(new_median_shape) * num_cases # the next line is what we had before as a default. The patch size had the same aspect ratio as the median shape of a patient. 
We swapped t # input_patch_size = new_median_shape # compute how many voxels are one mm input_patch_size = 1 / np.array(current_spacing) # normalize voxels per mm input_patch_size /= input_patch_size.mean() # create an isotropic patch of size 512x512x512mm input_patch_size *= 1 / min(input_patch_size) * 512 # to get a starting value input_patch_size = np.round(input_patch_size).astype(int) # clip it to the median shape of the dataset because patches larger then that make not much sense input_patch_size = [min(i, j) for i, j in zip(input_patch_size, new_median_shape)] network_num_pool_per_axis, pool_op_kernel_sizes, conv_kernel_sizes, new_shp, \ shape_must_be_divisible_by = get_pool_and_conv_props_poolLateV2(input_patch_size, self.unet_featuremap_min_edge_length, self.unet_max_numpool, current_spacing) ref = Generic_UNet.use_this_for_batch_size_computation_3D here = Generic_UNet.compute_approx_vram_consumption(new_shp, network_num_pool_per_axis, self.unet_base_num_features, self.unet_max_num_filters, num_modalities, num_classes, pool_op_kernel_sizes, conv_per_stage=self.conv_per_stage) while here > ref: axis_to_be_reduced = np.argsort(new_shp / new_median_shape)[-1] tmp = deepcopy(new_shp) tmp[axis_to_be_reduced] -= shape_must_be_divisible_by[axis_to_be_reduced] _, _, _, _, shape_must_be_divisible_by_new = \ get_pool_and_conv_props_poolLateV2(tmp, self.unet_featuremap_min_edge_length, self.unet_max_numpool, current_spacing) new_shp[axis_to_be_reduced] -= shape_must_be_divisible_by_new[axis_to_be_reduced] # we have to recompute numpool now: network_num_pool_per_axis, pool_op_kernel_sizes, conv_kernel_sizes, new_shp, \ shape_must_be_divisible_by = get_pool_and_conv_props_poolLateV2(new_shp, self.unet_featuremap_min_edge_length, self.unet_max_numpool, current_spacing) here = Generic_UNet.compute_approx_vram_consumption(new_shp, network_num_pool_per_axis, self.unet_base_num_features, self.unet_max_num_filters, num_modalities, num_classes, pool_op_kernel_sizes, conv_per_stage=self.conv_per_stage) # print(new_shp) input_patch_size = new_shp batch_size = Generic_UNet.DEFAULT_BATCH_SIZE_3D # This is what works with 128**3 batch_size = int(np.floor(max(ref / here, 1) * batch_size)) # check if batch size is too large max_batch_size = np.round(self.batch_size_covers_max_percent_of_dataset * dataset_num_voxels / np.prod(input_patch_size, dtype=np.int64)).astype(int) max_batch_size = max(max_batch_size, self.unet_min_batch_size) batch_size = max(1, min(batch_size, max_batch_size)) do_dummy_2D_data_aug = (max(input_patch_size) / input_patch_size[ 0]) > self.anisotropy_threshold plan = { 'batch_size': batch_size, 'num_pool_per_axis': network_num_pool_per_axis, 'patch_size': input_patch_size, 'median_patient_size_in_voxels': new_median_shape, 'current_spacing': current_spacing, 'original_spacing': original_spacing, 'do_dummy_2D_data_aug': do_dummy_2D_data_aug, 'pool_op_kernel_sizes': pool_op_kernel_sizes, 'conv_kernel_sizes': conv_kernel_sizes, } return plan def plan_experiment(self): use_nonzero_mask_for_normalization = self.determine_whether_to_use_mask_for_norm() print("Are we using the nonzero mask for normalizaion?", use_nonzero_mask_for_normalization) spacings = self.dataset_properties['all_spacings'] sizes = self.dataset_properties['all_sizes'] all_classes = self.dataset_properties['all_classes'] modalities = self.dataset_properties['modalities'] num_modalities = len(list(modalities.keys())) target_spacing = self.get_target_spacing() new_shapes = [np.array(i) / target_spacing * np.array(j) for i, j 
in zip(spacings, sizes)] max_spacing_axis = np.argmax(target_spacing) remaining_axes = [i for i in list(range(3)) if i != max_spacing_axis] self.transpose_forward = [max_spacing_axis] + remaining_axes self.transpose_backward = [np.argwhere(np.array(self.transpose_forward) == i)[0][0] for i in range(3)] # we base our calculations on the median shape of the datasets median_shape = np.median(np.vstack(new_shapes), 0) print("the median shape of the dataset is ", median_shape) max_shape = np.max(np.vstack(new_shapes), 0) print("the max shape in the dataset is ", max_shape) min_shape = np.min(np.vstack(new_shapes), 0) print("the min shape in the dataset is ", min_shape) print("we don't want feature maps smaller than ", self.unet_featuremap_min_edge_length, " in the bottleneck") # how many stages will the image pyramid have? self.plans_per_stage = list() target_spacing_transposed = np.array(target_spacing)[self.transpose_forward] median_shape_transposed = np.array(median_shape)[self.transpose_forward] print("the transposed median shape of the dataset is ", median_shape_transposed) print("generating configuration for 3d_fullres") self.plans_per_stage.append(self.get_properties_for_stage(target_spacing_transposed, target_spacing_transposed, median_shape_transposed, len(self.list_of_cropped_npz_files), num_modalities, len(all_classes) + 1)) # thanks Zakiyi (https://github.com/MIC-DKFZ/nnUNet/issues/61) for spotting this bug :-) # if np.prod(self.plans_per_stage[-1]['median_patient_size_in_voxels'], dtype=np.int64) / \ # architecture_input_voxels < HOW_MUCH_OF_A_PATIENT_MUST_THE_NETWORK_SEE_AT_STAGE0: architecture_input_voxels_here = np.prod(self.plans_per_stage[-1]['patch_size'], dtype=np.int64) if np.prod(median_shape) / architecture_input_voxels_here < \ self.how_much_of_a_patient_must_the_network_see_at_stage0: more = False else: more = True if more: print("generating configuration for 3d_lowres") # if we are doing more than one stage then we want the lowest stage to have exactly # HOW_MUCH_OF_A_PATIENT_MUST_THE_NETWORK_SEE_AT_STAGE0 (this is 4 by default so the number of voxels in the # median shape of the lowest stage must be 4 times as much as the network can process at once (128x128x128 by # default). Problem is that we are downsampling higher resolution axes before we start downsampling the # out-of-plane axis. 
We could probably/maybe do this analytically but I am lazy, so here # we do it the dumb way lowres_stage_spacing = deepcopy(target_spacing) num_voxels = np.prod(median_shape, dtype=np.float64) while num_voxels > self.how_much_of_a_patient_must_the_network_see_at_stage0 * architecture_input_voxels_here: max_spacing = max(lowres_stage_spacing) if np.any((max_spacing / lowres_stage_spacing) > 2): lowres_stage_spacing[(max_spacing / lowres_stage_spacing) > 2] \ *= 1.01 else: lowres_stage_spacing *= 1.01 num_voxels = np.prod(target_spacing / lowres_stage_spacing * median_shape, dtype=np.float64) lowres_stage_spacing_transposed = np.array(lowres_stage_spacing)[self.transpose_forward] new = self.get_properties_for_stage(lowres_stage_spacing_transposed, target_spacing_transposed, median_shape_transposed, len(self.list_of_cropped_npz_files), num_modalities, len(all_classes) + 1) architecture_input_voxels_here = np.prod(new['patch_size'], dtype=np.int64) if 2 * np.prod(new['median_patient_size_in_voxels'], dtype=np.int64) < np.prod( self.plans_per_stage[0]['median_patient_size_in_voxels'], dtype=np.int64): self.plans_per_stage.append(new) self.plans_per_stage = self.plans_per_stage[::-1] self.plans_per_stage = {i: self.plans_per_stage[i] for i in range(len(self.plans_per_stage))} # convert to dict print(self.plans_per_stage) print("transpose forward", self.transpose_forward) print("transpose backward", self.transpose_backward) normalization_schemes = self.determine_normalization_scheme() only_keep_largest_connected_component, min_size_per_class, min_region_size_per_class = None, None, None # removed training data based postprocessing. This is deprecated # these are independent of the stage plans = {'num_stages': len(list(self.plans_per_stage.keys())), 'num_modalities': num_modalities, 'modalities': modalities, 'normalization_schemes': normalization_schemes, 'dataset_properties': self.dataset_properties, 'list_of_npz_files': self.list_of_cropped_npz_files, 'original_spacings': spacings, 'original_sizes': sizes, 'preprocessed_data_folder': self.preprocessed_output_folder, 'num_classes': len(all_classes), 'all_classes': all_classes, 'base_num_features': self.unet_base_num_features, 'use_mask_for_norm': use_nonzero_mask_for_normalization, 'keep_only_largest_region': only_keep_largest_connected_component, 'min_region_size_per_class': min_region_size_per_class, 'min_size_per_class': min_size_per_class, 'transpose_forward': self.transpose_forward, 'transpose_backward': self.transpose_backward, 'data_identifier': self.data_identifier, 'plans_per_stage': self.plans_per_stage, 'preprocessor_name': self.preprocessor_name, 'conv_per_stage': self.conv_per_stage, } self.plans = plans self.save_my_plans() def determine_normalization_scheme(self): schemes = OrderedDict() modalities = self.dataset_properties['modalities'] num_modalities = len(list(modalities.keys())) for i in range(num_modalities): if modalities[i] == "CT" or modalities[i] == 'ct': schemes[i] = "CT" elif modalities[i] == 'noNorm': schemes[i] = "noNorm" else: schemes[i] = "nonCT" return schemes def save_properties_of_cropped(self, case_identifier, properties): with open(join(self.folder_with_cropped_data, "%s.pkl" % case_identifier), 'wb') as f: pickle.dump(properties, f) def load_properties_of_cropped(self, case_identifier): with open(join(self.folder_with_cropped_data, "%s.pkl" % case_identifier), 'rb') as f: properties = pickle.load(f) return properties def determine_whether_to_use_mask_for_norm(self): # only use the nonzero mask for normalization 
of the cropping based on it resulted in a decrease in # image size (this is an indication that the data is something like brats/isles and then we want to # normalize in the brain region only) modalities = self.dataset_properties['modalities'] num_modalities = len(list(modalities.keys())) use_nonzero_mask_for_norm = OrderedDict() for i in range(num_modalities): if "CT" in modalities[i]: use_nonzero_mask_for_norm[i] = False else: all_size_reductions = [] for k in self.dataset_properties['size_reductions'].keys(): all_size_reductions.append(self.dataset_properties['size_reductions'][k]) if np.median(all_size_reductions) < 3 / 4.: print("using nonzero mask for normalization") use_nonzero_mask_for_norm[i] = True else: print("not using nonzero mask for normalization") use_nonzero_mask_for_norm[i] = False for c in self.list_of_cropped_npz_files: case_identifier = get_case_identifier_from_npz(c) properties = self.load_properties_of_cropped(case_identifier) properties['use_nonzero_mask_for_norm'] = use_nonzero_mask_for_norm self.save_properties_of_cropped(case_identifier, properties) use_nonzero_mask_for_normalization = use_nonzero_mask_for_norm return use_nonzero_mask_for_normalization def write_normalization_scheme_to_patients(self): """ This is used for test set preprocessing :return: """ for c in self.list_of_cropped_npz_files: case_identifier = get_case_identifier_from_npz(c) properties = self.load_properties_of_cropped(case_identifier) properties['use_nonzero_mask_for_norm'] = self.plans['use_mask_for_norm'] self.save_properties_of_cropped(case_identifier, properties) def run_preprocessing(self, num_threads): if os.path.isdir(join(self.preprocessed_output_folder, "gt_segmentations")): shutil.rmtree(join(self.preprocessed_output_folder, "gt_segmentations")) shutil.copytree(join(self.folder_with_cropped_data, "gt_segmentations"), join(self.preprocessed_output_folder, "gt_segmentations")) normalization_schemes = self.plans['normalization_schemes'] use_nonzero_mask_for_normalization = self.plans['use_mask_for_norm'] intensityproperties = self.plans['dataset_properties']['intensityproperties'] preprocessor_class = recursive_find_python_class([join(nnunet.__path__[0], "preprocessing")], self.preprocessor_name, current_module="nnunet.preprocessing") assert preprocessor_class is not None preprocessor = preprocessor_class(normalization_schemes, use_nonzero_mask_for_normalization, self.transpose_forward, intensityproperties) target_spacings = [i["current_spacing"] for i in self.plans_per_stage.values()] if self.plans['num_stages'] > 1 and not isinstance(num_threads, (list, tuple)): num_threads = (default_num_threads, num_threads) elif self.plans['num_stages'] == 1 and isinstance(num_threads, (list, tuple)): num_threads = num_threads[-1] preprocessor.run(target_spacings, self.folder_with_cropped_data, self.preprocessed_output_folder, self.plans['data_identifier'], num_threads) def maybe_mkdir_p(directory): directory = os.path.abspath(directory) splits = directory.split("\\")[1:] base = directory.split('\\')[0] for i in range(0, len(splits)): if not os.path.isdir(join(base, join("\\", *splits[:i+1]))): try: os.mkdir(join(base, join("\\", *splits[:i+1]))) except FileExistsError: # this can sometimes happen when two jobs try to create the same directory at the same time, # especially on network drives. 
print("WARNING: Folder %s already existed and does not need to be created" % directory) if __name__ == "__main__": import argparse parser = argparse.ArgumentParser() parser.add_argument("-t", "--task_ids", nargs="+", help="list of int") parser.add_argument("-p", action="store_true", help="set this if you actually want to run the preprocessing. If " "this is not set then this script will only create the plans file") parser.add_argument("-tl", type=int, required=False, default=8, help="num_threads_lowres") parser.add_argument("-tf", type=int, required=False, default=8, help="num_threads_fullres") args = parser.parse_args() task_ids = args.task_ids run_preprocessing = args.p tl = args.tl tf = args.tf tasks = [] for i in task_ids: i = int(i) candidates = subdirs(nnUNet_cropped_data, prefix="Task%03.0d" % i, join=False) assert len(candidates) == 1 tasks.append(candidates[0]) for t in tasks: try: print("\n\n\n", t) cropped_out_dir = os.path.join(nnUNet_cropped_data, t) preprocessing_output_dir_this_task = os.path.join(preprocessing_output_dir, t) splitted_4d_output_dir_task = os.path.join(nnUNet_raw_data, t) lists, modalities = create_lists_from_splitted_dataset(splitted_4d_output_dir_task) dataset_analyzer = DatasetAnalyzer(cropped_out_dir, overwrite=False) _ = dataset_analyzer.analyze_dataset() # this will write output files that will be used by the ExperimentPlanner maybe_mkdir_p(preprocessing_output_dir_this_task) shutil.copy(join(cropped_out_dir, "dataset_properties.pkl"), preprocessing_output_dir_this_task) shutil.copy(join(nnUNet_raw_data, t, "dataset.json"), preprocessing_output_dir_this_task) threads = (tl, tf) print("number of threads: ", threads, "\n") exp_planner = ExperimentPlanner(cropped_out_dir, preprocessing_output_dir_this_task) exp_planner.plan_experiment() if run_preprocessing: exp_planner.run_preprocessing(threads) except Exception as e: print(e)
53.558185
147
0.653348
import shutil from collections import OrderedDict from copy import deepcopy import nnunet import numpy as np from batchgenerators.utilities.file_and_folder_operations import * from nnunet.configuration import default_num_threads from nnunet.experiment_planning.DatasetAnalyzer import DatasetAnalyzer from nnunet.experiment_planning.common_utils import get_pool_and_conv_props_poolLateV2 from nnunet.experiment_planning.utils import create_lists_from_splitted_dataset from nnunet.network_architecture.generic_UNet import Generic_UNet from nnunet.paths import * from nnunet.preprocessing.cropping import get_case_identifier_from_npz from nnunet.training.model_restore import recursive_find_python_class class ExperimentPlanner(object): def __init__(self, folder_with_cropped_data, preprocessed_output_folder): self.folder_with_cropped_data = folder_with_cropped_data self.preprocessed_output_folder = preprocessed_output_folder self.list_of_cropped_npz_files = subfiles(self.folder_with_cropped_data, True, None, ".npz", True) self.preprocessor_name = "GenericPreprocessor" assert isfile(join(self.folder_with_cropped_data, "dataset_properties.pkl")), \ "folder_with_cropped_data must contain dataset_properties.pkl" self.dataset_properties = load_pickle(join(self.folder_with_cropped_data, "dataset_properties.pkl")) self.plans_per_stage = OrderedDict() self.plans = OrderedDict() self.plans_fname = join(self.preprocessed_output_folder, "nnUNetPlans" + "fixed_plans_3D.pkl") self.data_identifier = default_data_identifier self.transpose_forward = [0, 1, 2] self.transpose_backward = [0, 1, 2] self.unet_base_num_features = Generic_UNet.BASE_NUM_FEATURES_3D self.unet_max_num_filters = 320 self.unet_max_numpool = 999 self.unet_min_batch_size = 2 self.unet_featuremap_min_edge_length = 4 self.target_spacing_percentile = 50 self.anisotropy_threshold = 3 self.how_much_of_a_patient_must_the_network_see_at_stage0 = 4 self.batch_size_covers_max_percent_of_dataset = 0.05 self.conv_per_stage = 2 def get_target_spacing(self): spacings = self.dataset_properties['all_spacings'] target = np.percentile(np.vstack(spacings), self.target_spacing_percentile, 0) return target def save_my_plans(self): with open(self.plans_fname, 'wb') as f: pickle.dump(self.plans, f) def load_my_plans(self): self.plans = load_pickle(self.plans_fname) self.plans_per_stage = self.plans['plans_per_stage'] self.dataset_properties = self.plans['dataset_properties'] self.transpose_forward = self.plans['transpose_forward'] self.transpose_backward = self.plans['transpose_backward'] def determine_postprocessing(self): pass def get_properties_for_stage(self, current_spacing, original_spacing, original_shape, num_cases, num_modalities, num_classes): new_median_shape = np.round(original_spacing / current_spacing * original_shape).astype(int) dataset_num_voxels = np.prod(new_median_shape) * num_cases input_patch_size = 1 / np.array(current_spacing) input_patch_size /= input_patch_size.mean() input_patch_size *= 1 / min(input_patch_size) * 512 input_patch_size = np.round(input_patch_size).astype(int) input_patch_size = [min(i, j) for i, j in zip(input_patch_size, new_median_shape)] network_num_pool_per_axis, pool_op_kernel_sizes, conv_kernel_sizes, new_shp, \ shape_must_be_divisible_by = get_pool_and_conv_props_poolLateV2(input_patch_size, self.unet_featuremap_min_edge_length, self.unet_max_numpool, current_spacing) ref = Generic_UNet.use_this_for_batch_size_computation_3D here = Generic_UNet.compute_approx_vram_consumption(new_shp, network_num_pool_per_axis, 
self.unet_base_num_features, self.unet_max_num_filters, num_modalities, num_classes, pool_op_kernel_sizes, conv_per_stage=self.conv_per_stage) while here > ref: axis_to_be_reduced = np.argsort(new_shp / new_median_shape)[-1] tmp = deepcopy(new_shp) tmp[axis_to_be_reduced] -= shape_must_be_divisible_by[axis_to_be_reduced] _, _, _, _, shape_must_be_divisible_by_new = \ get_pool_and_conv_props_poolLateV2(tmp, self.unet_featuremap_min_edge_length, self.unet_max_numpool, current_spacing) new_shp[axis_to_be_reduced] -= shape_must_be_divisible_by_new[axis_to_be_reduced] network_num_pool_per_axis, pool_op_kernel_sizes, conv_kernel_sizes, new_shp, \ shape_must_be_divisible_by = get_pool_and_conv_props_poolLateV2(new_shp, self.unet_featuremap_min_edge_length, self.unet_max_numpool, current_spacing) here = Generic_UNet.compute_approx_vram_consumption(new_shp, network_num_pool_per_axis, self.unet_base_num_features, self.unet_max_num_filters, num_modalities, num_classes, pool_op_kernel_sizes, conv_per_stage=self.conv_per_stage) input_patch_size = new_shp batch_size = Generic_UNet.DEFAULT_BATCH_SIZE_3D batch_size = int(np.floor(max(ref / here, 1) * batch_size)) max_batch_size = np.round(self.batch_size_covers_max_percent_of_dataset * dataset_num_voxels / np.prod(input_patch_size, dtype=np.int64)).astype(int) max_batch_size = max(max_batch_size, self.unet_min_batch_size) batch_size = max(1, min(batch_size, max_batch_size)) do_dummy_2D_data_aug = (max(input_patch_size) / input_patch_size[ 0]) > self.anisotropy_threshold plan = { 'batch_size': batch_size, 'num_pool_per_axis': network_num_pool_per_axis, 'patch_size': input_patch_size, 'median_patient_size_in_voxels': new_median_shape, 'current_spacing': current_spacing, 'original_spacing': original_spacing, 'do_dummy_2D_data_aug': do_dummy_2D_data_aug, 'pool_op_kernel_sizes': pool_op_kernel_sizes, 'conv_kernel_sizes': conv_kernel_sizes, } return plan def plan_experiment(self): use_nonzero_mask_for_normalization = self.determine_whether_to_use_mask_for_norm() print("Are we using the nonzero mask for normalizaion?", use_nonzero_mask_for_normalization) spacings = self.dataset_properties['all_spacings'] sizes = self.dataset_properties['all_sizes'] all_classes = self.dataset_properties['all_classes'] modalities = self.dataset_properties['modalities'] num_modalities = len(list(modalities.keys())) target_spacing = self.get_target_spacing() new_shapes = [np.array(i) / target_spacing * np.array(j) for i, j in zip(spacings, sizes)] max_spacing_axis = np.argmax(target_spacing) remaining_axes = [i for i in list(range(3)) if i != max_spacing_axis] self.transpose_forward = [max_spacing_axis] + remaining_axes self.transpose_backward = [np.argwhere(np.array(self.transpose_forward) == i)[0][0] for i in range(3)] median_shape = np.median(np.vstack(new_shapes), 0) print("the median shape of the dataset is ", median_shape) max_shape = np.max(np.vstack(new_shapes), 0) print("the max shape in the dataset is ", max_shape) min_shape = np.min(np.vstack(new_shapes), 0) print("the min shape in the dataset is ", min_shape) print("we don't want feature maps smaller than ", self.unet_featuremap_min_edge_length, " in the bottleneck") # how many stages will the image pyramid have? 
self.plans_per_stage = list() target_spacing_transposed = np.array(target_spacing)[self.transpose_forward] median_shape_transposed = np.array(median_shape)[self.transpose_forward] print("the transposed median shape of the dataset is ", median_shape_transposed) print("generating configuration for 3d_fullres") self.plans_per_stage.append(self.get_properties_for_stage(target_spacing_transposed, target_spacing_transposed, median_shape_transposed, len(self.list_of_cropped_npz_files), num_modalities, len(all_classes) + 1)) # thanks Zakiyi (https://github.com/MIC-DKFZ/nnUNet/issues/61) for spotting this bug :-) # if np.prod(self.plans_per_stage[-1]['median_patient_size_in_voxels'], dtype=np.int64) / \ # architecture_input_voxels < HOW_MUCH_OF_A_PATIENT_MUST_THE_NETWORK_SEE_AT_STAGE0: architecture_input_voxels_here = np.prod(self.plans_per_stage[-1]['patch_size'], dtype=np.int64) if np.prod(median_shape) / architecture_input_voxels_here < \ self.how_much_of_a_patient_must_the_network_see_at_stage0: more = False else: more = True if more: print("generating configuration for 3d_lowres") # if we are doing more than one stage then we want the lowest stage to have exactly # HOW_MUCH_OF_A_PATIENT_MUST_THE_NETWORK_SEE_AT_STAGE0 (this is 4 by default so the number of voxels in the # median shape of the lowest stage must be 4 times as much as the network can process at once (128x128x128 by # default). Problem is that we are downsampling higher resolution axes before we start downsampling the # out-of-plane axis. We could probably/maybe do this analytically but I am lazy, so here # we do it the dumb way lowres_stage_spacing = deepcopy(target_spacing) num_voxels = np.prod(median_shape, dtype=np.float64) while num_voxels > self.how_much_of_a_patient_must_the_network_see_at_stage0 * architecture_input_voxels_here: max_spacing = max(lowres_stage_spacing) if np.any((max_spacing / lowres_stage_spacing) > 2): lowres_stage_spacing[(max_spacing / lowres_stage_spacing) > 2] \ *= 1.01 else: lowres_stage_spacing *= 1.01 num_voxels = np.prod(target_spacing / lowres_stage_spacing * median_shape, dtype=np.float64) lowres_stage_spacing_transposed = np.array(lowres_stage_spacing)[self.transpose_forward] new = self.get_properties_for_stage(lowres_stage_spacing_transposed, target_spacing_transposed, median_shape_transposed, len(self.list_of_cropped_npz_files), num_modalities, len(all_classes) + 1) architecture_input_voxels_here = np.prod(new['patch_size'], dtype=np.int64) if 2 * np.prod(new['median_patient_size_in_voxels'], dtype=np.int64) < np.prod( self.plans_per_stage[0]['median_patient_size_in_voxels'], dtype=np.int64): self.plans_per_stage.append(new) self.plans_per_stage = self.plans_per_stage[::-1] self.plans_per_stage = {i: self.plans_per_stage[i] for i in range(len(self.plans_per_stage))} # convert to dict print(self.plans_per_stage) print("transpose forward", self.transpose_forward) print("transpose backward", self.transpose_backward) normalization_schemes = self.determine_normalization_scheme() only_keep_largest_connected_component, min_size_per_class, min_region_size_per_class = None, None, None # removed training data based postprocessing. 
This is deprecated # these are independent of the stage plans = {'num_stages': len(list(self.plans_per_stage.keys())), 'num_modalities': num_modalities, 'modalities': modalities, 'normalization_schemes': normalization_schemes, 'dataset_properties': self.dataset_properties, 'list_of_npz_files': self.list_of_cropped_npz_files, 'original_spacings': spacings, 'original_sizes': sizes, 'preprocessed_data_folder': self.preprocessed_output_folder, 'num_classes': len(all_classes), 'all_classes': all_classes, 'base_num_features': self.unet_base_num_features, 'use_mask_for_norm': use_nonzero_mask_for_normalization, 'keep_only_largest_region': only_keep_largest_connected_component, 'min_region_size_per_class': min_region_size_per_class, 'min_size_per_class': min_size_per_class, 'transpose_forward': self.transpose_forward, 'transpose_backward': self.transpose_backward, 'data_identifier': self.data_identifier, 'plans_per_stage': self.plans_per_stage, 'preprocessor_name': self.preprocessor_name, 'conv_per_stage': self.conv_per_stage, } self.plans = plans self.save_my_plans() def determine_normalization_scheme(self): schemes = OrderedDict() modalities = self.dataset_properties['modalities'] num_modalities = len(list(modalities.keys())) for i in range(num_modalities): if modalities[i] == "CT" or modalities[i] == 'ct': schemes[i] = "CT" elif modalities[i] == 'noNorm': schemes[i] = "noNorm" else: schemes[i] = "nonCT" return schemes def save_properties_of_cropped(self, case_identifier, properties): with open(join(self.folder_with_cropped_data, "%s.pkl" % case_identifier), 'wb') as f: pickle.dump(properties, f) def load_properties_of_cropped(self, case_identifier): with open(join(self.folder_with_cropped_data, "%s.pkl" % case_identifier), 'rb') as f: properties = pickle.load(f) return properties def determine_whether_to_use_mask_for_norm(self): # only use the nonzero mask for normalization of the cropping based on it resulted in a decrease in # image size (this is an indication that the data is something like brats/isles and then we want to # normalize in the brain region only) modalities = self.dataset_properties['modalities'] num_modalities = len(list(modalities.keys())) use_nonzero_mask_for_norm = OrderedDict() for i in range(num_modalities): if "CT" in modalities[i]: use_nonzero_mask_for_norm[i] = False else: all_size_reductions = [] for k in self.dataset_properties['size_reductions'].keys(): all_size_reductions.append(self.dataset_properties['size_reductions'][k]) if np.median(all_size_reductions) < 3 / 4.: print("using nonzero mask for normalization") use_nonzero_mask_for_norm[i] = True else: print("not using nonzero mask for normalization") use_nonzero_mask_for_norm[i] = False for c in self.list_of_cropped_npz_files: case_identifier = get_case_identifier_from_npz(c) properties = self.load_properties_of_cropped(case_identifier) properties['use_nonzero_mask_for_norm'] = use_nonzero_mask_for_norm self.save_properties_of_cropped(case_identifier, properties) use_nonzero_mask_for_normalization = use_nonzero_mask_for_norm return use_nonzero_mask_for_normalization def write_normalization_scheme_to_patients(self): for c in self.list_of_cropped_npz_files: case_identifier = get_case_identifier_from_npz(c) properties = self.load_properties_of_cropped(case_identifier) properties['use_nonzero_mask_for_norm'] = self.plans['use_mask_for_norm'] self.save_properties_of_cropped(case_identifier, properties) def run_preprocessing(self, num_threads): if os.path.isdir(join(self.preprocessed_output_folder, 
"gt_segmentations")): shutil.rmtree(join(self.preprocessed_output_folder, "gt_segmentations")) shutil.copytree(join(self.folder_with_cropped_data, "gt_segmentations"), join(self.preprocessed_output_folder, "gt_segmentations")) normalization_schemes = self.plans['normalization_schemes'] use_nonzero_mask_for_normalization = self.plans['use_mask_for_norm'] intensityproperties = self.plans['dataset_properties']['intensityproperties'] preprocessor_class = recursive_find_python_class([join(nnunet.__path__[0], "preprocessing")], self.preprocessor_name, current_module="nnunet.preprocessing") assert preprocessor_class is not None preprocessor = preprocessor_class(normalization_schemes, use_nonzero_mask_for_normalization, self.transpose_forward, intensityproperties) target_spacings = [i["current_spacing"] for i in self.plans_per_stage.values()] if self.plans['num_stages'] > 1 and not isinstance(num_threads, (list, tuple)): num_threads = (default_num_threads, num_threads) elif self.plans['num_stages'] == 1 and isinstance(num_threads, (list, tuple)): num_threads = num_threads[-1] preprocessor.run(target_spacings, self.folder_with_cropped_data, self.preprocessed_output_folder, self.plans['data_identifier'], num_threads) def maybe_mkdir_p(directory): directory = os.path.abspath(directory) splits = directory.split("\\")[1:] base = directory.split('\\')[0] for i in range(0, len(splits)): if not os.path.isdir(join(base, join("\\", *splits[:i+1]))): try: os.mkdir(join(base, join("\\", *splits[:i+1]))) except FileExistsError: # this can sometimes happen when two jobs try to create the same directory at the same time, # especially on network drives. print("WARNING: Folder %s already existed and does not need to be created" % directory) if __name__ == "__main__": import argparse parser = argparse.ArgumentParser() parser.add_argument("-t", "--task_ids", nargs="+", help="list of int") parser.add_argument("-p", action="store_true", help="set this if you actually want to run the preprocessing. If " "this is not set then this script will only create the plans file") parser.add_argument("-tl", type=int, required=False, default=8, help="num_threads_lowres") parser.add_argument("-tf", type=int, required=False, default=8, help="num_threads_fullres") args = parser.parse_args() task_ids = args.task_ids run_preprocessing = args.p tl = args.tl tf = args.tf tasks = [] for i in task_ids: i = int(i) candidates = subdirs(nnUNet_cropped_data, prefix="Task%03.0d" % i, join=False) assert len(candidates) == 1 tasks.append(candidates[0]) for t in tasks: try: print("\n\n\n", t) cropped_out_dir = os.path.join(nnUNet_cropped_data, t) preprocessing_output_dir_this_task = os.path.join(preprocessing_output_dir, t) splitted_4d_output_dir_task = os.path.join(nnUNet_raw_data, t) lists, modalities = create_lists_from_splitted_dataset(splitted_4d_output_dir_task) dataset_analyzer = DatasetAnalyzer(cropped_out_dir, overwrite=False) _ = dataset_analyzer.analyze_dataset() # this will write output files that will be used by the ExperimentPlanner maybe_mkdir_p(preprocessing_output_dir_this_task) shutil.copy(join(cropped_out_dir, "dataset_properties.pkl"), preprocessing_output_dir_this_task) shutil.copy(join(nnUNet_raw_data, t, "dataset.json"), preprocessing_output_dir_this_task) threads = (tl, tf) print("number of threads: ", threads, "\n") exp_planner = ExperimentPlanner(cropped_out_dir, preprocessing_output_dir_this_task) exp_planner.plan_experiment() if run_preprocessing: exp_planner.run_preprocessing(threads) except Exception as e: print(e)
true
true
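Illustrative sketch (not part of the record above): the determine_whether_to_use_mask_for_norm() method in the preceding content decides, per non-CT modality, whether to normalize inside the nonzero mask by comparing the median cropping size reduction against 3/4. The helper below re-implements just that decision rule in isolation; the modality names and size-reduction values are made-up examples.

import numpy as np


def use_nonzero_mask(modalities, size_reductions):
    # modalities: {index: modality name}; size_reductions: {case id: size reduction after cropping}
    decision = {}
    median_reduction = np.median(list(size_reductions.values()))
    for i, modality in modalities.items():
        if "CT" in modality:
            # CT modalities never use the nonzero mask in the code above
            decision[i] = False
        else:
            # use the nonzero mask only if cropping shrank the images enough
            decision[i] = bool(median_reduction < 3 / 4.)
    return decision


print(use_nonzero_mask({0: "MRI"}, {"case_0": 0.5, "case_1": 0.9}))  # {0: True}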
f711b083c504df10c30064706b2da45b8171ea7e
2,438
py
Python
zipline/data/dataset.py
michaeljohnbennett/zipline
29321af1b472d72b759a71c9f5ba341109fc0e6d
[ "Apache-2.0" ]
1
2015-10-22T03:38:19.000Z
2015-10-22T03:38:19.000Z
zipline/data/dataset.py
michaeljohnbennett/zipline
29321af1b472d72b759a71c9f5ba341109fc0e6d
[ "Apache-2.0" ]
null
null
null
zipline/data/dataset.py
michaeljohnbennett/zipline
29321af1b472d72b759a71c9f5ba341109fc0e6d
[ "Apache-2.0" ]
null
null
null
""" dataset.py """ from six import ( iteritems, with_metaclass, ) from zipline.modelling.term import Term class Column(object): """ An abstract column of data, not yet associated with a dataset. """ def __init__(self, dtype): self.dtype = dtype def bind(self, dataset, name): """ Bind a column to a concrete dataset. """ return BoundColumn(dtype=self.dtype, dataset=dataset, name=name) class BoundColumn(Term): """ A Column of data that's been concretely bound to a particular dataset. """ def __new__(cls, dtype, dataset, name): return super(BoundColumn, cls).__new__( cls, inputs=(), window_length=0, domain=dataset.domain, dtype=dtype, dataset=dataset, name=name, ) def _init(self, dataset, name, *args, **kwargs): self._dataset = dataset self._name = name return super(BoundColumn, self)._init(*args, **kwargs) @classmethod def static_identity(cls, dataset, name, *args, **kwargs): return ( super(BoundColumn, cls).static_identity(*args, **kwargs), dataset, name, ) @property def dataset(self): return self._dataset @property def name(self): return self._name @property def qualname(self): """ Fully qualified of this column. """ return '.'.join([self.dataset.__name__, self.name]) def __repr__(self): return "{qualname}::{dtype}".format( qualname=self.qualname, dtype=self.dtype.__name__, ) class DataSetMeta(type): """ Metaclass for DataSets Supplies name and dataset information to Column attributes. """ def __new__(mcls, name, bases, dict_): newtype = type.__new__(mcls, name, bases, dict_) _columns = [] for maybe_colname, maybe_column in iteritems(dict_): if isinstance(maybe_column, Column): bound_column = maybe_column.bind(newtype, maybe_colname) setattr(newtype, maybe_colname, bound_column) _columns.append(bound_column) newtype._columns = _columns return newtype @property def columns(self): return self._columns class DataSet(with_metaclass(DataSetMeta)): domain = None
23.442308
74
0.586957
from six import (
    iteritems,
    with_metaclass,
)

from zipline.modelling.term import Term


class Column(object):

    def __init__(self, dtype):
        self.dtype = dtype

    def bind(self, dataset, name):
        return BoundColumn(dtype=self.dtype, dataset=dataset, name=name)


class BoundColumn(Term):

    def __new__(cls, dtype, dataset, name):
        return super(BoundColumn, cls).__new__(
            cls,
            inputs=(),
            window_length=0,
            domain=dataset.domain,
            dtype=dtype,
            dataset=dataset,
            name=name,
        )

    def _init(self, dataset, name, *args, **kwargs):
        self._dataset = dataset
        self._name = name
        return super(BoundColumn, self)._init(*args, **kwargs)

    @classmethod
    def static_identity(cls, dataset, name, *args, **kwargs):
        return (
            super(BoundColumn, cls).static_identity(*args, **kwargs),
            dataset,
            name,
        )

    @property
    def dataset(self):
        return self._dataset

    @property
    def name(self):
        return self._name

    @property
    def qualname(self):
        return '.'.join([self.dataset.__name__, self.name])

    def __repr__(self):
        return "{qualname}::{dtype}".format(
            qualname=self.qualname,
            dtype=self.dtype.__name__,
        )


class DataSetMeta(type):

    def __new__(mcls, name, bases, dict_):
        newtype = type.__new__(mcls, name, bases, dict_)
        _columns = []
        for maybe_colname, maybe_column in iteritems(dict_):
            if isinstance(maybe_column, Column):
                bound_column = maybe_column.bind(newtype, maybe_colname)
                setattr(newtype, maybe_colname, bound_column)
                _columns.append(bound_column)
        newtype._columns = _columns
        return newtype

    @property
    def columns(self):
        return self._columns


class DataSet(with_metaclass(DataSetMeta)):
    domain = None
true
true
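A minimal, dependency-free sketch of the Column/DataSetMeta binding pattern from the dataset.py record above: the metaclass walks the class namespace, replaces every plain Column with a version bound to the new dataset class, and collects the results in _columns. The MyData class and its price/volume columns are hypothetical, and a plain tuple stands in for the BoundColumn Term that the real module constructs.

class Column(object):
    def __init__(self, dtype):
        self.dtype = dtype

    def bind(self, dataset, name):
        # the real module returns a BoundColumn Term; a tuple stands in here
        return (dataset.__name__, name, self.dtype)


class DataSetMeta(type):
    def __new__(mcls, name, bases, dict_):
        newtype = type.__new__(mcls, name, bases, dict_)
        _columns = []
        for colname, maybe_column in dict_.items():
            if isinstance(maybe_column, Column):
                bound = maybe_column.bind(newtype, colname)
                setattr(newtype, colname, bound)
                _columns.append(bound)
        newtype._columns = _columns
        return newtype


class MyData(metaclass=DataSetMeta):
    price = Column(dtype=float)
    volume = Column(dtype=int)


print(MyData._columns)  # [('MyData', 'price', <class 'float'>), ('MyData', 'volume', <class 'int'>)]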
f711b087c93d9720507df164842f895b8535e578
1,165
py
Python
setup.py
zhnlk/nbcrawler
38152727d2e94f73392a8c0f392d118023072e2e
[ "MIT" ]
null
null
null
setup.py
zhnlk/nbcrawler
38152727d2e94f73392a8c0f392d118023072e2e
[ "MIT" ]
null
null
null
setup.py
zhnlk/nbcrawler
38152727d2e94f73392a8c0f392d118023072e2e
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*-
__author__ = 'zhnlk'

import os

from setuptools import setup

import cores


def getSubpackages(name):
    """Get the names of all sub-modules under the given module"""
    splist = []
    for dirpath, _dirnames, _filenames in os.walk(name):
        if os.path.isfile(os.path.join(dirpath, '__init__.py')):
            splist.append(".".join(dirpath.split(os.sep)))
    return splist


setup(
    name='nbcrawler',
    version=cores.__version__,
    author=cores.__author__,
    author_email='tomleader0828@gmail.com',
    license='MIT',
    url='http://github.com/zhnlk/nbcrawler',
    description='A crawler framework for NewBanker',
    long_description=__doc__,
    keywords='crawler newbanker spider distribute ',
    classifiers=['Development Status :: 4 - Beta',
                 'Operating System :: OS Independent',
                 'Programming Language :: Python :: 2',
                 'Programming Language :: Python :: 2.7',
                 'Topic :: Software Development :: Libraries',
                 'Programming Language :: Python :: Implementation :: CPython',
                 'License :: OSI Approved :: MIT License'],
    packages=getSubpackages('vnpy'),
)
29.871795
79
0.619742
__author__ = 'zhnlk'

import os

from setuptools import setup

import cores


def getSubpackages(name):
    splist = []
    for dirpath, _dirnames, _filenames in os.walk(name):
        if os.path.isfile(os.path.join(dirpath, '__init__.py')):
            splist.append(".".join(dirpath.split(os.sep)))
    return splist


setup(
    name='nbcrawler',
    version=cores.__version__,
    author=cores.__author__,
    author_email='tomleader0828@gmail.com',
    license='MIT',
    url='http://github.com/zhnlk/nbcrawler',
    description='A crawler framework for NewBanker',
    long_description=__doc__,
    keywords='crawler newbanker spider distribute ',
    classifiers=['Development Status :: 4 - Beta',
                 'Operating System :: OS Independent',
                 'Programming Language :: Python :: 2',
                 'Programming Language :: Python :: 2.7',
                 'Topic :: Software Development :: Libraries',
                 'Programming Language :: Python :: Implementation :: CPython',
                 'License :: OSI Approved :: MIT License'],
    packages=getSubpackages('vnpy'),
)
true
true
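The getSubpackages() helper in the setup.py record above treats every walked directory that contains an __init__.py as a package and turns its path into a dotted name. Below is a self-contained check of that logic; the "pkg" layout built in a temporary directory is hypothetical.

import os
import tempfile


def get_subpackages(name):
    # same discovery logic as getSubpackages() in the record above
    splist = []
    for dirpath, _dirnames, _filenames in os.walk(name):
        if os.path.isfile(os.path.join(dirpath, '__init__.py')):
            splist.append(".".join(dirpath.split(os.sep)))
    return splist


with tempfile.TemporaryDirectory() as tmp:
    os.makedirs(os.path.join(tmp, "pkg", "sub"))
    os.makedirs(os.path.join(tmp, "pkg", "data"))      # no __init__.py -> not a package
    open(os.path.join(tmp, "pkg", "__init__.py"), "w").close()
    open(os.path.join(tmp, "pkg", "sub", "__init__.py"), "w").close()
    old_cwd = os.getcwd()
    os.chdir(tmp)
    try:
        print(get_subpackages("pkg"))                  # ['pkg', 'pkg.sub']
    finally:
        os.chdir(old_cwd)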
f711b0bb3f6d53da7f1abb4833845da364b2fe94
40
py
Python
rdkit/sping/Pyart/__init__.py
kazuyaujihara/rdkit
06027dcd05674787b61f27ba46ec0d42a6037540
[ "BSD-3-Clause" ]
1,609
2015-01-05T02:41:13.000Z
2022-03-30T21:57:24.000Z
rdkit/sping/Pyart/__init__.py
kazuyaujihara/rdkit
06027dcd05674787b61f27ba46ec0d42a6037540
[ "BSD-3-Clause" ]
3,412
2015-01-06T12:13:33.000Z
2022-03-31T17:25:41.000Z
rdkit/sping/Pyart/__init__.py
kazuyaujihara/rdkit
06027dcd05674787b61f27ba46ec0d42a6037540
[ "BSD-3-Clause" ]
811
2015-01-11T03:33:48.000Z
2022-03-28T11:57:49.000Z
# sping:: pyart
from pidPyart import *
10
22
0.7
from pidPyart import *
true
true
f711b16ed81ea95da9f1a3eb15d120593368ad8a
29,162
py
Python
randomvars/tests/test__continuous.py
echasnovski/randomvars
15417b0e3ecd27f185b70471102c158f60d51c28
[ "MIT" ]
null
null
null
randomvars/tests/test__continuous.py
echasnovski/randomvars
15417b0e3ecd27f185b70471102c158f60d51c28
[ "MIT" ]
null
null
null
randomvars/tests/test__continuous.py
echasnovski/randomvars
15417b0e3ecd27f185b70471102c158f60d51c28
[ "MIT" ]
null
null
null
# pylint: disable=missing-function-docstring """Tests for '_continuous.py' file""" import numpy as np from numpy.testing import assert_array_equal, assert_array_almost_equal import scipy.stats.distributions as distrs from scipy.stats.kde import gaussian_kde from scipy.integrate import quad import pytest from randomvars._continuous import Cont from randomvars.tests.commontests import ( DECIMAL, _test_equal_rand, _test_equal_seq, _test_from_rv_rand, _test_from_sample_rand, _test_input_coercion, _test_log_fun, _test_one_value_input, _test_rvs_method, declass, h, ) from randomvars.options import config DISTRIBUTIONS_COMMON = { "beta": distrs.beta(a=10, b=20), "chi_sq": distrs.chi2(df=10), "expon": distrs.expon(), "f": distrs.f(dfn=20, dfd=20), "gamma": distrs.gamma(a=10), "laplace": distrs.laplace(), "lognorm": distrs.lognorm(s=0.5), "norm": distrs.norm(), "norm2": distrs.norm(loc=10), "norm3": distrs.norm(scale=0.1), "norm4": distrs.norm(scale=10), "norm5": distrs.norm(loc=10, scale=0.1), "t": distrs.t(df=10), "uniform": distrs.uniform(), "uniform2": distrs.uniform(loc=10, scale=0.1), "weibull_max": distrs.weibull_max(c=2), "weibull_min": distrs.weibull_min(c=2), } DISTRIBUTIONS_INF_DENSITY = { "inf_beta_both": distrs.beta(a=0.4, b=0.6), "inf_beta_left": distrs.beta(a=0.5, b=2), "inf_beta_right": distrs.beta(a=2, b=0.5), "inf_chi_sq": distrs.chi2(df=1), "inf_weibull_max": distrs.weibull_max(c=0.5), "inf_weibull_min": distrs.weibull_min(c=0.5), } DISTRIBUTIONS_HEAVY_TAILS = { "heavy_cauchy": distrs.cauchy(), "heavy_lognorm": distrs.lognorm(s=1), "heavy_t": distrs.t(df=2), } DISTRIBUTIONS = { **DISTRIBUTIONS_COMMON, **DISTRIBUTIONS_HEAVY_TAILS, **DISTRIBUTIONS_INF_DENSITY, } def augment_grid(x, n_inner_points): test_arr = [ np.linspace(x[i], x[i + 1], n_inner_points + 1, endpoint=False) for i in np.arange(len(x) - 1) ] test_arr.append([x[-1]]) return np.concatenate(test_arr) def from_sample_cdf_max_error(x): rv = Cont.from_sample(x) density = config.estimator_cont(x) x_grid = augment_grid(rv.x, 10) # Efficient way of computing `quad(density, -np.inf, x_grid)` x_grid_ext = np.concatenate([[-np.inf], x_grid]) cdf_intervals = np.array( [ quad(density, x_l, x_r)[0] for x_l, x_r in zip(x_grid_ext[:-1], x_grid_ext[1:]) ] ) cdf_grid = np.cumsum(cdf_intervals) err = cdf_grid - rv.cdf(x_grid) return np.max(np.abs(err)) def circle_fun(x, low, high): x = np.array(x) center = 0.5 * (high + low) radius = 0.5 * (high - low) res = np.zeros_like(x) center_dist = np.abs(x - center) is_in = center_dist <= radius res[is_in] = np.sqrt(radius ** 2 - center_dist[is_in] ** 2) return res def make_circ_density(intervals): """Construct circular density Density looks like half-circles with diameters lying in elements of `intervals`. Total integral is equal to 1. Parameters ---------- intervals : iterable with elements being 2-element iterables Iterable of intervals with non-zero density. Returns ------- density : callable Function which returns density values. 
""" def density(x): res = np.zeros_like(x) tot_integral = 0 for low, high in intervals: res += circle_fun(x, low, high) # There is only half of circle tot_integral += np.pi * (high - low) ** 2 / 8 return res / tot_integral return density class TestCont: """Regression tests for `Cont` class""" def test_init_errors(self): def check_one_input(def_args, var): with pytest.raises(TypeError, match=f"`{var}`.*numpy array"): def_args[var] = {"a": None} Cont(**def_args) with pytest.raises(TypeError, match=f"`{var}`.*float"): def_args[var] = ["a", "a"] Cont(**def_args) with pytest.raises(TypeError, match=f"`{var}`.*finite values"): def_args[var] = [0, np.nan] Cont(**def_args) with pytest.raises(TypeError, match=f"`{var}`.*finite values"): def_args[var] = [0, np.inf] Cont(**def_args) with pytest.raises(ValueError, match=f"`{var}`.*1d array"): def_args[var] = [[0, 1]] Cont(**def_args) check_one_input({"y": [1, 1]}, "x") check_one_input({"x": [0, 1]}, "y") with pytest.raises(ValueError, match="[Ll]engths.*match"): Cont([0, 1], [1, 1, 1]) with pytest.raises(ValueError, match="two"): Cont([1], [1]) with pytest.warns(UserWarning, match="`x`.*not sorted.*`x` and `y`"): rv = Cont([1, 0], [0, 2]) rv_ref = Cont([0, 1], [2, 0]) _test_equal_rand(rv, rv_ref) with pytest.raises(ValueError, match="`y`.*negative"): Cont([0, 1], [1, -1]) with pytest.raises(ValueError, match="`y`.*no positive"): Cont([0, 1], [0, 0]) def test_init(self): x_ref = np.array([0, 1, 2]) y_ref = np.array([0, 1, 0]) rv_ref = Cont(x_ref, y_ref) # Simple case with non-numpy input rv_1 = Cont(x=x_ref.tolist(), y=y_ref.tolist()) _test_equal_rand(rv_1, rv_ref) # Check if `y` is normalized rv_2 = Cont(x=x_ref, y=10 * y_ref) _test_equal_rand(rv_2, rv_ref) # Check if `x` and `y` are rearranged if not sorted with pytest.warns(UserWarning, match="`x`.*not sorted"): rv_3 = Cont(x=x_ref[[1, 0, 2]], y=y_ref[[1, 0, 2]]) _test_equal_rand(rv_3, rv_ref) # Check if duplicated values are removed from `x` with pytest.warns(UserWarning, match="duplicated"): # First pair of xy-grid is taken among duplicates rv_4 = Cont(x=x_ref[[0, 1, 1, 2]], y=y_ref[[0, 1, 2, 2]]) _test_equal_rand(rv_4, rv_ref) def test_str(self): rv = Cont([0, 2, 4], [0, 1, 0]) assert str(rv) == "Continuous RV with 2 intervals (support: [0.0, 4.0])" # Uses singular noun with one interval rv = Cont([0, 1], [1, 1]) assert str(rv) == "Continuous RV with 1 interval (support: [0.0, 1.0])" def test_properties(self): x = np.arange(11) y = np.repeat(0.1, 11) rv = Cont(x, y) assert list(rv.params.keys()) == ["x", "y"] assert_array_equal(rv.params["x"], x) assert_array_equal(rv.params["y"], y) assert_array_equal(rv.x, x) assert_array_equal(rv.y, y) assert rv.a == 0.0 assert rv.b == 10.0 def test_support(self): rv = Cont([0.5, 1.5, 4.5], [0, 0.5, 0]) assert rv.support() == (0.5, 4.5) def test_compress(self): # Zero tails ## Left tail _test_equal_rand( Cont([0, 1, 2, 3], [0, 0, 0, 2]).compress(), Cont([2, 3], [0, 2]) ) _test_equal_rand( Cont([0, 1, 2, 3], [0, 0, 1, 0]).compress(), Cont([1, 2, 3], [0, 1, 0]) ) ## Right tail _test_equal_rand( Cont([0, 1, 2, 3], [2, 0, 0, 0]).compress(), Cont([0, 1], [2, 0]) ) _test_equal_rand( Cont([0, 1, 2, 3], [0, 1, 0, 0]).compress(), Cont([0, 1, 2], [0, 1, 0]) ) ## Both tails _test_equal_rand( Cont([0, 1, 2, 3, 4], [0, 0, 1, 0, 0]).compress(), Cont([1, 2, 3], [0, 1, 0]), ) # Extra linearity ## Non-zero slope _test_equal_rand( Cont([0, 1, 2, 3, 4], [0.5, 0.25, 0, 0.25, 0.5]).compress(), Cont([0, 2, 4], [0.5, 0, 0.5]), ) ## Zero slope, non-zero y _test_equal_rand( Cont([0, 
1, 2], [0.5, 0.5, 0.5]).compress(), Cont([0, 2], [0.5, 0.5]) ) ## Zero slope, zero y, outside of tails _test_equal_rand( Cont([0, 1, 2, 3, 4], [1, 0, 0, 0, 1]).compress(), Cont([0, 1, 3, 4], [1, 0, 0, 1]), ) # All features _test_equal_rand( Cont(np.arange(14), [0, 0, 0, 1, 2, 2, 2, 1, 0, 0, 0, 1, 0, 0]).compress(), Cont([2, 4, 6, 8, 10, 11, 12], [0, 2, 2, 0, 0, 1, 0]), ) # If nothing to compress, self should be returned rv = Cont([0, 1], [1, 1]) assert rv.compress() is rv def test_ground(self): w = config.small_width # Basic usage rv = Cont([0, 1], [1, 1]) _test_equal_rand( rv.ground(), Cont([-w, 0, w, 1 - w, 1, 1 + w], [0, 0.5, 1, 1, 0.5, 0]) ) # Argument `direction` _test_equal_rand( rv.ground(direction="both"), Cont([-w, 0, w, 1 - w, 1, 1 + w], [0, 0.5, 1, 1, 0.5, 0]), ) _test_equal_rand( rv.ground(direction="left"), Cont([-w, 0, w, 1], [0, 0.5, 1, 1]) ) _test_equal_rand( rv.ground(direction="right"), Cont([0, 1 - w, 1, 1 + w], [1, 1, 0.5, 0]) ) _test_equal_rand(rv.ground(direction="none"), rv) # Argument `w` w2 = 0.1 _test_equal_rand( rv.ground(w=w2, direction="both"), Cont([-w2, 0, w2, 1 - w2, 1, 1 + w2], [0, 0.5, 1, 1, 0.5, 0]), ) # Close neighbors rv2 = Cont([0, 0.25 * w, 0.5, 1 - 0.1 * w, 1], [1, 1, 1, 1, 1]) rv2_grounded = rv2.ground(direction="both") ## Check that only outer points were added assert_array_equal(rv2_grounded.x[1:-1], rv2.x) ## Check that grounded actually happend assert_array_equal(rv2_grounded.y[[0, -1]], 0.0) ## Check that non-edge x-values havae same y-values assert_array_equal(rv2_grounded.pdf(rv2.x[1:-1]), rv2.pdf(rv2.x[1:-1])) def test_ground_options(self): rv = Cont([0, 1], [1, 1]) with config.context({"small_width": 0.1}): w = config.small_width _test_equal_rand( rv.ground(), Cont([-w, 0, w, 1 - w, 1, 1 + w], [0, 0.5, 1, 1, 0.5, 0]) ) def test_ground_errors(self): rv = Cont([0, 1], [1, 1]) with pytest.raises(ValueError, match="one of"): rv.ground(direction="aaa") def test__coeffs_by_ind(self): # All coefficients are returned if no `ind` is specified rv = Cont([0, 1, 2], [0, 1, 0]) inter, slope = rv._coeffs_by_ind() assert_array_equal(inter, [0, 2]) assert_array_equal(slope, [1, -1]) def test__grid_by_ind(self): # All grid elements are returned if no `ind` is specified rv = Cont([0, 1, 2], [0, 1, 0]) x_out, y_out, p_out = rv._grid_by_ind() x_ref, y_ref = rv.x, rv.y assert_array_equal(x_out, x_ref) assert_array_equal(y_out, y_ref) def test_pdf_coeffs(self): rv = Cont([0, 1, 2], [0, 1, 0]) x = np.array([-1, 0, 0.5, 1, 1.5, 2, 2.5]) with pytest.raises(ValueError, match="one of"): rv.pdf_coeffs(x, side="a") _test_equal_seq( rv.pdf_coeffs(x), (np.array([0, 0, 0, 2, 2, 2, 0]), np.array([0, 1, 1, -1, -1, -1, 0])), ) _test_equal_seq( rv.pdf_coeffs(x, side="left"), (np.array([0, 0, 0, 0, 2, 2, 0]), np.array([0, 1, 1, 1, -1, -1, 0])), ) _test_equal_seq( rv.pdf_coeffs(np.array([-np.inf, np.nan, np.inf])), (np.array([0, np.nan, 0]), np.array([0, np.nan, 0])), ) def test_from_rv_basic(self): uniform = distrs.uniform norm = distrs.norm # Basic usage rv_unif = Cont.from_rv(uniform) rv_unif_test = Cont(x=[0, 1], y=[1, 1]) _test_equal_rand(rv_unif, rv_unif_test, decimal=DECIMAL) # Objects of `Rand` class should be `convert()`ed _test_from_rv_rand(cls=Cont, to_class="Cont") # Forced support edges rv_right = Cont.from_rv(uniform, supp=(0.5, None)) rv_right_test = Cont([0.5, 1], [2, 2]) _test_equal_rand(rv_right, rv_right_test, decimal=DECIMAL) rv_left = Cont.from_rv(uniform, supp=(None, 0.5)) rv_left_test = Cont([0, 0.5], [2, 2]) _test_equal_rand(rv_left, rv_left_test, 
decimal=DECIMAL) rv_mid = Cont.from_rv(uniform, supp=(0.25, 0.75)) rv_mid_test = Cont([0.25, 0.75], [2, 2]) _test_equal_rand(rv_mid, rv_mid_test, decimal=DECIMAL) def test_from_rv_errors(self): # Absence of either `cdf` or `ppf` method should result intro error class Tmp: pass tmp1 = Tmp() tmp1.ppf = lambda x: np.where((0 <= x) & (x <= 1), 1, 0) with pytest.raises(ValueError, match="cdf"): Cont.from_rv(tmp1) tmp2 = Tmp() tmp2.cdf = lambda x: np.where((0 <= x) & (x <= 1), 1, 0) with pytest.raises(ValueError, match="ppf"): Cont.from_rv(tmp2) def test_from_rv_options(self): norm = distrs.norm # Finite support detection and usage of `small_prob` option with config.context({"small_prob": 1e-6}): rv_norm = Cont.from_rv(norm) assert_array_almost_equal( rv_norm.support(), norm.ppf([1e-6, 1 - 1e-6]), decimal=DECIMAL ) with config.context({"small_prob": 1e-6}): rv_norm_right = Cont.from_rv(norm, supp=(-1, None)) assert_array_almost_equal( rv_norm_right.support(), [-1, norm.ppf(1 - 1e-6)], decimal=DECIMAL ) with config.context({"small_prob": 1e-6}): rv_norm_left = Cont.from_rv(norm, supp=(None, 1)) assert_array_almost_equal( rv_norm_left.support(), [norm.ppf(1e-6), 1], decimal=DECIMAL ) # Usage of `n_grid` option with config.context({"n_grid": 11}): rv_norm_small = Cont.from_rv(norm) assert len(rv_norm_small.x) <= 20 # Usage of `cdf_tolerance` option with config.context({"cdf_tolerance": 1e-4}): rv_norm_1 = Cont.from_rv(norm) with config.context({"cdf_tolerance": 1e-1}): rv_norm_2 = Cont.from_rv(norm) ## Increasing CDF tolerance should lead to decrease of density grid assert len(rv_norm_1.x) > len(rv_norm_2.x) def test_from_sample_basic(self): norm = distrs.norm() rng = np.random.default_rng(101) x = norm.rvs(100, random_state=rng) rv = Cont.from_sample(x) assert isinstance(rv, Cont) def test_from_sample_errors(self): with pytest.raises(TypeError, match="numpy array with float"): Cont.from_sample(["a"]) with pytest.raises(ValueError, match="1d"): Cont.from_sample([[1], [2]]) def test_from_sample_options(self): norm = distrs.norm() rng = np.random.default_rng(101) x = norm.rvs(100, random_state=rng) # "estimator_cont" def uniform_estimator(x): x_min, x_max = x.min(), x.max() def res(x): return np.where((x >= x_min) & (x <= x_max), 1 / (x_max - x_min), 0) return res with config.context({"estimator_cont": uniform_estimator}): rv = Cont.from_sample(x) assert len(rv.y) == 2 assert np.allclose(rv.y, rv.y[0], atol=1e-13) # "estimator_cont" which returns allowed classes ## `Rand` class should be forwarded to `from_rv()` method _test_from_sample_rand( cls=Cont, sample=x, estimator_option="estimator_cont", ) ## "Scipy" distribution should be forwarded to `Cont.from_rv()` rv_norm = distrs.norm() with config.context({"estimator_cont": lambda x: rv_norm}): rv = Cont.from_sample(np.asarray([0, 1, 2])) rv_ref = Cont.from_rv(rv_norm) _test_equal_rand(rv, rv_ref) # "density_mincoverage" with config.context({"density_mincoverage": 0.0}): rv = Cont.from_sample(x) ## With minimal density mincoverage output range should be equal to ## sample range assert_array_equal(rv.x[[0, -1]], [x.min(), x.max()]) # "n_grid" with config.context({"n_grid": 11}): rv = Cont.from_sample(x) assert len(rv.x) <= 22 # "cdf_tolerance" with config.context({"cdf_tolerance": 2.0}): rv = Cont.from_sample(x) ## With very high CDF tolerance downgridding should result into grid ## with three elements. That is because CDF is approximated with ## simplest quadratic spline with single segment. That requires three ## knots. 
assert len(rv.x) == 3 @pytest.mark.slow def test_from_sample_single_value(self): """How well `from_sample()` handles single unique value in sample Main problem here is how density range is initialized during estimation. """ zero_vec = np.zeros(10) # Default density estimator can't handle situation with single unique # sample value (gives `LinAlgError: singular matrix`). # Case when sample width is zero but density is not zero density_centered_interval = make_circ_density([(-1, 1)]) with config.context({"estimator_cont": lambda x: density_centered_interval}): assert from_sample_cdf_max_error(zero_vec) <= 1e-4 # Case when both sample width and density are zero density_shifted_interval = make_circ_density([(10, 20)]) with config.context({"estimator_cont": lambda x: density_shifted_interval}): # Here currently the problem is that support is estimated way to # wide with very small (~1e-9) non-zero density outside of [10, # 20]. However, CDFs are still close. assert from_sample_cdf_max_error(zero_vec) <= 2e-4 def test_pdf(self): rv = Cont([0, 1, 3], [0.5, 0.5, 0]) # Regular checks x = np.array([-1, 0, 0.5, 1, 2, 3, 4]) assert_array_equal(rv.pdf(x), np.array([0, 0.5, 0.5, 0.5, 0.25, 0, 0])) # Coercion of not ndarray input _test_input_coercion(rv.pdf, x) # Input around edges x = np.array([0 - 1e-10, 0 + 1e-10, 3 - 1e-10, 3 + 1e-10]) assert_array_almost_equal( rv.pdf(x), np.array([0, 0.5, 0.25e-10, 0]), decimal=DECIMAL ) # Bad input x = np.array([-np.inf, np.nan, np.inf]) assert_array_equal(rv.pdf(x), np.array([0, np.nan, 0])) # Dirac-like random variable rv_dirac = Cont([10 - h, 10, 10 + h], [0, 1, 0]) x = np.array([10 - h, 10 - 0.5e-8, 10, 10 + 0.5e-8, 10 + h]) ## Accuracy is of order of 10 due to extreme magnitudes of values assert_array_almost_equal( rv_dirac.pdf(x), np.array([0, 0.5e8, 1e8, 0.5e8, 0]), decimal=-1 ) # Broadcasting x = np.array([[-1, 0.5], [2, 4]]) assert_array_equal(rv.pdf(x), np.array([[0.0, 0.5], [0.25, 0.0]])) # One value input _test_one_value_input(rv.pdf, 0.5) _test_one_value_input(rv.pdf, -1) _test_one_value_input(rv.pdf, np.nan) def test_logpdf(self): rv = Cont([0, 1, 3], [0.5, 0.5, 0]) _test_log_fun(rv.logpdf, rv.pdf, x_ref=[-1, 0.1, 3, np.inf, np.nan]) def test_pmf(self): rv = Cont([0, 1, 3], [0.5, 0.5, 0]) with pytest.raises(AttributeError, match=r"Use `pdf\(\)`"): rv.pmf(0) def test_logpmf(self): rv = Cont([0, 1, 3], [0.5, 0.5, 0]) with pytest.raises(AttributeError, match=r"Use `logpdf\(\)`"): rv.logpmf(0) def test_cdf(self): rv_1 = Cont([0, 1, 2], [0, 1, 0]) # Regular checks x = np.array([-1, 0, 0.5, 1, 1.5, 2, 3]) assert_array_equal(rv_1.cdf(x), np.array([0, 0, 0.125, 0.5, 0.875, 1, 1])) # Coercion of not ndarray input _test_input_coercion(rv_1.cdf, x) # Bad input x = np.array([-np.inf, np.nan, np.inf]) assert_array_equal(rv_1.cdf(x), np.array([0, np.nan, 1])) # Dirac-like random variable rv_dirac = Cont([10 - h, 10, 10 + h], [0, 1, 0]) x = np.array([10 - h, 10 - 0.5e-8, 10, 10 + 0.5e-8, 10 + h]) assert_array_almost_equal( rv_dirac.cdf(x), np.array([0, 0.125, 0.5, 0.875, 1]), decimal=DECIMAL ) # Broadcasting x = np.array([[-1, 0.5], [2, 4]]) assert_array_equal(rv_1.cdf(x), np.array([[0.0, 0.125], [1.0, 1.0]])) # One value input _test_one_value_input(rv_1.cdf, 0.5) _test_one_value_input(rv_1.cdf, -1) _test_one_value_input(rv_1.cdf, np.nan) def test_logcdf(self): rv = Cont([0, 1, 3], [0.5, 0.5, 0]) _test_log_fun(rv.logcdf, rv.cdf, x_ref=[-1, 0.1, 3, np.inf, np.nan]) def test_sf(self): rv = Cont([0, 1, 3], [0.5, 0.5, 0]) x_ref = [-1, 0.1, 3, np.inf, np.nan] 
assert_array_equal(rv.sf(x_ref), 1 - rv.cdf(x_ref)) def test_logsf(self): rv = Cont([0, 1, 3], [0.5, 0.5, 0]) _test_log_fun(rv.logsf, rv.sf, x_ref=[-1, 0.1, 3, np.inf, np.nan]) def test_ppf(self): # `ppf()` method should be inverse to `cdf()` for every sensible input rv_1 = Cont([0, 1, 2], [0, 1, 0]) # Regular checks q = np.array([0, 0.125, 0.5, 0.875, 1]) assert_array_equal(rv_1.ppf(q), np.array([0, 0.5, 1, 1.5, 2])) # Coercion of not ndarray input _test_input_coercion(rv_1.ppf, q) # Bad input q = np.array([-np.inf, -h, np.nan, 1 + h, np.inf]) assert_array_equal( rv_1.ppf(q), np.array([np.nan, np.nan, np.nan, np.nan, np.nan]) ) # Dirac-like random variable rv_dirac = Cont([10 - h, 10, 10 + h], [0, 1, 0]) q = np.array([0, 0.125, 0.5, 0.875, 1]) assert_array_almost_equal( rv_dirac.ppf(q), np.array([10 - h, 10 - 0.5e-8, 10, 10 + 0.5e-8, 10 + h]), decimal=DECIMAL, ) # Broadcasting q = np.array([[0, 0.5], [0.0, 1.0]]) assert_array_equal(rv_1.ppf(q), np.array([[0.0, 1.0], [0.0, 2.0]])) # One value input _test_one_value_input(rv_1.ppf, 0.25) _test_one_value_input(rv_1.ppf, -1) _test_one_value_input(rv_1.ppf, np.nan) # Should return the smallest x-value in case of zero-density interval(s) rv_zero_density = Cont([0, 1, 2, 3, 4, 5, 6], [0, 0.5, 0, 0, 0, 0.5, 0]) assert rv_zero_density.ppf(0.5) == 2 def test_isf(self): rv = Cont([0, 1, 2], [0, 1, 0]) # Regular checks q_ref = np.array([0, 0.125, 0.5, 0.875, 1]) assert_array_equal(rv.sf(rv.isf(q_ref)), q_ref) def test_rvs(self): rv_1 = Cont([0, 1, 2], [0, 1, 0]) _test_rvs_method(rv_1) def test__cdf_spline(self): rv = Cont([0, 1, 2], [0, 1, 0]) x = [-10, 0, 0.5, 1, 1.5, 2, 10] assert_array_equal(rv._cdf_spline(x), rv.cdf(x)) def test_integrate_cdf(self): rv = Cont([0, 1, 2], [0, 1, 0]) assert np.allclose(rv.integrate_cdf(-10, 10), quad(rv.cdf, -10, 10)[0]) def test_convert(self): import randomvars._boolean as bool import randomvars._discrete as disc import randomvars._mixture as mixt rv = Cont([0, 1, 2], [0, 1, 0]) # By default and supplying `None` should return self assert rv.convert() is rv assert rv.convert(None) is rv # Converting to Bool should result into boolean with probability of # `False` being 0 (because probability of continuous RV being exactly # zero is 0). 
out_bool = rv.convert("Bool") assert isinstance(out_bool, bool.Bool) assert out_bool.prob_true == 1.0 # Converting to own class should return self out_cont = rv.convert("Cont") assert out_cont is rv # Converting to Disc should result into discrete RV with the same `x` # values as in input's xy-grid out_disc = rv.convert("Disc") assert isinstance(out_disc, disc.Disc) assert_array_equal(out_disc.x, rv.x) # Converting to Mixt should result into degenerate mixture with only # continuous component out_mixt = rv.convert("Mixt") assert isinstance(out_mixt, mixt.Mixt) assert out_mixt.cont is rv assert out_mixt.weight_cont == 1.0 # Any other target class should result into error with pytest.raises(ValueError, match="one of"): rv.convert("aaa") class TestFromRVAccuracy: """Accuracy of `Cont.from_rv()`""" # Output of `from_rv()` should have CDF that differs from original CDF by # no more than `thres` @pytest.mark.slow @pytest.mark.parametrize( "distr_dict,thres", [ (DISTRIBUTIONS_COMMON, 1e-4), (DISTRIBUTIONS_INF_DENSITY, 1e-3), (DISTRIBUTIONS_HEAVY_TAILS, 5e-3), ], ) def test_cdf_maxerror(self, distr_dict, thres): test_passed = { name: TestFromRVAccuracy.from_rv_cdf_maxerror(distr) <= thres for name, distr in distr_dict.items() } assert all(test_passed.values()) def test_detected_support(self): """Test correct trimming of zero tails""" rv_ref = Cont([0, 1, 2, 3, 4], [0, 0, 1, 0, 0]) rv_out = Cont.from_rv(declass(rv_ref)) _test_equal_rand(rv_out, rv_ref.compress(), decimal=4) @staticmethod def from_rv_cdf_maxerror(rv_base, n_inner_points=10, **kwargs): rv_test = Cont.from_rv(rv_base, **kwargs) x_grid = augment_grid(rv_test.x, n_inner_points) err = rv_base.cdf(x_grid) - rv_test.cdf(x_grid) return np.max(np.abs(err)) class TestFromSampleAccuracy: """Accuracy of `Cont.from_sample()`""" # Output of `from_sample()` should differ from original density estimate by # no more than `thres` (with default density estimator) @pytest.mark.slow @pytest.mark.parametrize( "distr_dict,thres", [ (DISTRIBUTIONS_COMMON, 1e-4), (DISTRIBUTIONS_INF_DENSITY, 1.5e-4), (DISTRIBUTIONS_HEAVY_TAILS, 1e-4), ], ) def test_close_cdf(self, distr_dict, thres): rng = np.random.default_rng(101) test_passed = { name: TestFromSampleAccuracy.simulated_cdf_error(distr, rng) <= thres for name, distr in distr_dict.items() } assert all(test_passed.values()) @pytest.mark.slow def test_density_range(self): density_mincoverage = config.density_mincoverage estimator_cont = config.estimator_cont rng = np.random.default_rng(101) def generate_density_coverage(distr): x = distr.rvs(size=100, random_state=rng) density = estimator_cont(x) rv = Cont.from_sample(x) return quad(density, rv.x[0], rv.x[-1])[0] test_passed = { distr_name: generate_density_coverage(distr) >= density_mincoverage for distr_name, distr in DISTRIBUTIONS.items() } assert all(test_passed.values()) @staticmethod def simulated_cdf_error(distr, rng): x = distr.rvs(size=100, random_state=rng) # Testing with `gaussian_kde` as the most used density estimator. This # also enables to use rather fast way of computing CDF of estimated # density via `integrate_box_1d` method. with config.context({"estimator_cont": gaussian_kde}): rv = Cont.from_sample(x) density = config.estimator_cont(x) x_grid = augment_grid(rv.x, 10) # Interestingly enough, direct computation with `-np.inf` as left # integration limit is both accurate and more efficient than computing # integrals for each segment and then use `np.cumsum()`. 
Probably this # is because integration of gaussian curves with infinite left limit is # done directly through gaussian CDF. cdf_grid = np.array( [density.integrate_box_1d(-np.inf, cur_x) for cur_x in x_grid] ) err = cdf_grid - rv.cdf(x_grid) return np.max(np.abs(err)) def test__extend_range(): def extra_estimator(x): x_min, x_max = x.min(), x.max() prob_height = 1 / (x_max - x_min + 1) def res(x): return np.where( ((x_min < x) & (x < x_max)) | ((x_max + 1 < x) & (x < x_max + 2)), prob_height, 0, ) return res norm = distrs.norm() rng = np.random.default_rng(101) x = norm.rvs(100, random_state=rng) with config.context({"estimator_cont": extra_estimator}): rv = Cont.from_sample(x) assert (rv.x[0] <= x.min()) and (rv.x[-1] >= x.max())
33.791425
87
0.56145
import numpy as np from numpy.testing import assert_array_equal, assert_array_almost_equal import scipy.stats.distributions as distrs from scipy.stats.kde import gaussian_kde from scipy.integrate import quad import pytest from randomvars._continuous import Cont from randomvars.tests.commontests import ( DECIMAL, _test_equal_rand, _test_equal_seq, _test_from_rv_rand, _test_from_sample_rand, _test_input_coercion, _test_log_fun, _test_one_value_input, _test_rvs_method, declass, h, ) from randomvars.options import config DISTRIBUTIONS_COMMON = { "beta": distrs.beta(a=10, b=20), "chi_sq": distrs.chi2(df=10), "expon": distrs.expon(), "f": distrs.f(dfn=20, dfd=20), "gamma": distrs.gamma(a=10), "laplace": distrs.laplace(), "lognorm": distrs.lognorm(s=0.5), "norm": distrs.norm(), "norm2": distrs.norm(loc=10), "norm3": distrs.norm(scale=0.1), "norm4": distrs.norm(scale=10), "norm5": distrs.norm(loc=10, scale=0.1), "t": distrs.t(df=10), "uniform": distrs.uniform(), "uniform2": distrs.uniform(loc=10, scale=0.1), "weibull_max": distrs.weibull_max(c=2), "weibull_min": distrs.weibull_min(c=2), } DISTRIBUTIONS_INF_DENSITY = { "inf_beta_both": distrs.beta(a=0.4, b=0.6), "inf_beta_left": distrs.beta(a=0.5, b=2), "inf_beta_right": distrs.beta(a=2, b=0.5), "inf_chi_sq": distrs.chi2(df=1), "inf_weibull_max": distrs.weibull_max(c=0.5), "inf_weibull_min": distrs.weibull_min(c=0.5), } DISTRIBUTIONS_HEAVY_TAILS = { "heavy_cauchy": distrs.cauchy(), "heavy_lognorm": distrs.lognorm(s=1), "heavy_t": distrs.t(df=2), } DISTRIBUTIONS = { **DISTRIBUTIONS_COMMON, **DISTRIBUTIONS_HEAVY_TAILS, **DISTRIBUTIONS_INF_DENSITY, } def augment_grid(x, n_inner_points): test_arr = [ np.linspace(x[i], x[i + 1], n_inner_points + 1, endpoint=False) for i in np.arange(len(x) - 1) ] test_arr.append([x[-1]]) return np.concatenate(test_arr) def from_sample_cdf_max_error(x): rv = Cont.from_sample(x) density = config.estimator_cont(x) x_grid = augment_grid(rv.x, 10) x_grid_ext = np.concatenate([[-np.inf], x_grid]) cdf_intervals = np.array( [ quad(density, x_l, x_r)[0] for x_l, x_r in zip(x_grid_ext[:-1], x_grid_ext[1:]) ] ) cdf_grid = np.cumsum(cdf_intervals) err = cdf_grid - rv.cdf(x_grid) return np.max(np.abs(err)) def circle_fun(x, low, high): x = np.array(x) center = 0.5 * (high + low) radius = 0.5 * (high - low) res = np.zeros_like(x) center_dist = np.abs(x - center) is_in = center_dist <= radius res[is_in] = np.sqrt(radius ** 2 - center_dist[is_in] ** 2) return res def make_circ_density(intervals): def density(x): res = np.zeros_like(x) tot_integral = 0 for low, high in intervals: res += circle_fun(x, low, high) tot_integral += np.pi * (high - low) ** 2 / 8 return res / tot_integral return density class TestCont: def test_init_errors(self): def check_one_input(def_args, var): with pytest.raises(TypeError, match=f"`{var}`.*numpy array"): def_args[var] = {"a": None} Cont(**def_args) with pytest.raises(TypeError, match=f"`{var}`.*float"): def_args[var] = ["a", "a"] Cont(**def_args) with pytest.raises(TypeError, match=f"`{var}`.*finite values"): def_args[var] = [0, np.nan] Cont(**def_args) with pytest.raises(TypeError, match=f"`{var}`.*finite values"): def_args[var] = [0, np.inf] Cont(**def_args) with pytest.raises(ValueError, match=f"`{var}`.*1d array"): def_args[var] = [[0, 1]] Cont(**def_args) check_one_input({"y": [1, 1]}, "x") check_one_input({"x": [0, 1]}, "y") with pytest.raises(ValueError, match="[Ll]engths.*match"): Cont([0, 1], [1, 1, 1]) with pytest.raises(ValueError, match="two"): Cont([1], [1]) with pytest.warns(UserWarning, 
match="`x`.*not sorted.*`x` and `y`"): rv = Cont([1, 0], [0, 2]) rv_ref = Cont([0, 1], [2, 0]) _test_equal_rand(rv, rv_ref) with pytest.raises(ValueError, match="`y`.*negative"): Cont([0, 1], [1, -1]) with pytest.raises(ValueError, match="`y`.*no positive"): Cont([0, 1], [0, 0]) def test_init(self): x_ref = np.array([0, 1, 2]) y_ref = np.array([0, 1, 0]) rv_ref = Cont(x_ref, y_ref) rv_1 = Cont(x=x_ref.tolist(), y=y_ref.tolist()) _test_equal_rand(rv_1, rv_ref) rv_2 = Cont(x=x_ref, y=10 * y_ref) _test_equal_rand(rv_2, rv_ref) with pytest.warns(UserWarning, match="`x`.*not sorted"): rv_3 = Cont(x=x_ref[[1, 0, 2]], y=y_ref[[1, 0, 2]]) _test_equal_rand(rv_3, rv_ref) with pytest.warns(UserWarning, match="duplicated"): rv_4 = Cont(x=x_ref[[0, 1, 1, 2]], y=y_ref[[0, 1, 2, 2]]) _test_equal_rand(rv_4, rv_ref) def test_str(self): rv = Cont([0, 2, 4], [0, 1, 0]) assert str(rv) == "Continuous RV with 2 intervals (support: [0.0, 4.0])" rv = Cont([0, 1], [1, 1]) assert str(rv) == "Continuous RV with 1 interval (support: [0.0, 1.0])" def test_properties(self): x = np.arange(11) y = np.repeat(0.1, 11) rv = Cont(x, y) assert list(rv.params.keys()) == ["x", "y"] assert_array_equal(rv.params["x"], x) assert_array_equal(rv.params["y"], y) assert_array_equal(rv.x, x) assert_array_equal(rv.y, y) assert rv.a == 0.0 assert rv.b == 10.0 def test_support(self): rv = Cont([0.5, 1.5, 4.5], [0, 0.5, 0]) assert rv.support() == (0.5, 4.5) def test_compress(self): est_equal_rand( Cont([0, 1, 2, 3], [0, 0, 0, 2]).compress(), Cont([2, 3], [0, 2]) ) _test_equal_rand( Cont([0, 1, 2, 3], [0, 0, 1, 0]).compress(), Cont([1, 2, 3], [0, 1, 0]) ) st_equal_rand( Cont([0, 1, 2, 3], [2, 0, 0, 0]).compress(), Cont([0, 1], [2, 0]) ) _test_equal_rand( Cont([0, 1, 2, 3], [0, 1, 0, 0]).compress(), Cont([0, 1, 2], [0, 1, 0]) ) st_equal_rand( Cont([0, 1, 2, 3, 4], [0, 0, 1, 0, 0]).compress(), Cont([1, 2, 3], [0, 1, 0]), ) qual_rand( Cont([0, 1, 2, 3, 4], [0.5, 0.25, 0, 0.25, 0.5]).compress(), Cont([0, 2, 4], [0.5, 0, 0.5]), ) d( Cont([0, 1, 2], [0.5, 0.5, 0.5]).compress(), Cont([0, 2], [0.5, 0.5]) ) Cont([0, 1, 2, 3, 4], [1, 0, 0, 0, 1]).compress(), Cont([0, 1, 3, 4], [1, 0, 0, 1]), ) _test_equal_rand( Cont(np.arange(14), [0, 0, 0, 1, 2, 2, 2, 1, 0, 0, 0, 1, 0, 0]).compress(), Cont([2, 4, 6, 8, 10, 11, 12], [0, 2, 2, 0, 0, 1, 0]), ) rv = Cont([0, 1], [1, 1]) assert rv.compress() is rv def test_ground(self): w = config.small_width rv = Cont([0, 1], [1, 1]) _test_equal_rand( rv.ground(), Cont([-w, 0, w, 1 - w, 1, 1 + w], [0, 0.5, 1, 1, 0.5, 0]) ) _test_equal_rand( rv.ground(direction="both"), Cont([-w, 0, w, 1 - w, 1, 1 + w], [0, 0.5, 1, 1, 0.5, 0]), ) _test_equal_rand( rv.ground(direction="left"), Cont([-w, 0, w, 1], [0, 0.5, 1, 1]) ) _test_equal_rand( rv.ground(direction="right"), Cont([0, 1 - w, 1, 1 + w], [1, 1, 0.5, 0]) ) _test_equal_rand(rv.ground(direction="none"), rv) w2 = 0.1 _test_equal_rand( rv.ground(w=w2, direction="both"), Cont([-w2, 0, w2, 1 - w2, 1, 1 + w2], [0, 0.5, 1, 1, 0.5, 0]), ) rv2 = Cont([0, 0.25 * w, 0.5, 1 - 0.1 * w, 1], [1, 1, 1, 1, 1]) rv2_grounded = rv2.ground(direction="both") x[1:-1], rv2.x) ed.y[[0, -1]], 0.0) [1:-1]), rv2.pdf(rv2.x[1:-1])) def test_ground_options(self): rv = Cont([0, 1], [1, 1]) with config.context({"small_width": 0.1}): w = config.small_width _test_equal_rand( rv.ground(), Cont([-w, 0, w, 1 - w, 1, 1 + w], [0, 0.5, 1, 1, 0.5, 0]) ) def test_ground_errors(self): rv = Cont([0, 1], [1, 1]) with pytest.raises(ValueError, match="one of"): rv.ground(direction="aaa") def test__coeffs_by_ind(self): rv 
= Cont([0, 1, 2], [0, 1, 0]) inter, slope = rv._coeffs_by_ind() assert_array_equal(inter, [0, 2]) assert_array_equal(slope, [1, -1]) def test__grid_by_ind(self): rv = Cont([0, 1, 2], [0, 1, 0]) x_out, y_out, p_out = rv._grid_by_ind() x_ref, y_ref = rv.x, rv.y assert_array_equal(x_out, x_ref) assert_array_equal(y_out, y_ref) def test_pdf_coeffs(self): rv = Cont([0, 1, 2], [0, 1, 0]) x = np.array([-1, 0, 0.5, 1, 1.5, 2, 2.5]) with pytest.raises(ValueError, match="one of"): rv.pdf_coeffs(x, side="a") _test_equal_seq( rv.pdf_coeffs(x), (np.array([0, 0, 0, 2, 2, 2, 0]), np.array([0, 1, 1, -1, -1, -1, 0])), ) _test_equal_seq( rv.pdf_coeffs(x, side="left"), (np.array([0, 0, 0, 0, 2, 2, 0]), np.array([0, 1, 1, 1, -1, -1, 0])), ) _test_equal_seq( rv.pdf_coeffs(np.array([-np.inf, np.nan, np.inf])), (np.array([0, np.nan, 0]), np.array([0, np.nan, 0])), ) def test_from_rv_basic(self): uniform = distrs.uniform norm = distrs.norm rv_unif = Cont.from_rv(uniform) rv_unif_test = Cont(x=[0, 1], y=[1, 1]) _test_equal_rand(rv_unif, rv_unif_test, decimal=DECIMAL) _test_from_rv_rand(cls=Cont, to_class="Cont") rv_right = Cont.from_rv(uniform, supp=(0.5, None)) rv_right_test = Cont([0.5, 1], [2, 2]) _test_equal_rand(rv_right, rv_right_test, decimal=DECIMAL) rv_left = Cont.from_rv(uniform, supp=(None, 0.5)) rv_left_test = Cont([0, 0.5], [2, 2]) _test_equal_rand(rv_left, rv_left_test, decimal=DECIMAL) rv_mid = Cont.from_rv(uniform, supp=(0.25, 0.75)) rv_mid_test = Cont([0.25, 0.75], [2, 2]) _test_equal_rand(rv_mid, rv_mid_test, decimal=DECIMAL) def test_from_rv_errors(self): class Tmp: pass tmp1 = Tmp() tmp1.ppf = lambda x: np.where((0 <= x) & (x <= 1), 1, 0) with pytest.raises(ValueError, match="cdf"): Cont.from_rv(tmp1) tmp2 = Tmp() tmp2.cdf = lambda x: np.where((0 <= x) & (x <= 1), 1, 0) with pytest.raises(ValueError, match="ppf"): Cont.from_rv(tmp2) def test_from_rv_options(self): norm = distrs.norm with config.context({"small_prob": 1e-6}): rv_norm = Cont.from_rv(norm) assert_array_almost_equal( rv_norm.support(), norm.ppf([1e-6, 1 - 1e-6]), decimal=DECIMAL ) with config.context({"small_prob": 1e-6}): rv_norm_right = Cont.from_rv(norm, supp=(-1, None)) assert_array_almost_equal( rv_norm_right.support(), [-1, norm.ppf(1 - 1e-6)], decimal=DECIMAL ) with config.context({"small_prob": 1e-6}): rv_norm_left = Cont.from_rv(norm, supp=(None, 1)) assert_array_almost_equal( rv_norm_left.support(), [norm.ppf(1e-6), 1], decimal=DECIMAL ) with config.context({"n_grid": 11}): rv_norm_small = Cont.from_rv(norm) assert len(rv_norm_small.x) <= 20 with config.context({"cdf_tolerance": 1e-4}): rv_norm_1 = Cont.from_rv(norm) with config.context({"cdf_tolerance": 1e-1}): rv_norm_2 = Cont.from_rv(norm) from_sample_basic(self): norm = distrs.norm() rng = np.random.default_rng(101) x = norm.rvs(100, random_state=rng) rv = Cont.from_sample(x) assert isinstance(rv, Cont) def test_from_sample_errors(self): with pytest.raises(TypeError, match="numpy array with float"): Cont.from_sample(["a"]) with pytest.raises(ValueError, match="1d"): Cont.from_sample([[1], [2]]) def test_from_sample_options(self): norm = distrs.norm() rng = np.random.default_rng(101) x = norm.rvs(100, random_state=rng) def uniform_estimator(x): x_min, x_max = x.min(), x.max() def res(x): return np.where((x >= x_min) & (x <= x_max), 1 / (x_max - x_min), 0) return res with config.context({"estimator_cont": uniform_estimator}): rv = Cont.from_sample(x) assert len(rv.y) == 2 assert np.allclose(rv.y, rv.y[0], atol=1e-13) sample=x, estimator_option="estimator_cont", ) 
"estimator_cont": lambda x: rv_norm}): rv = Cont.from_sample(np.asarray([0, 1, 2])) rv_ref = Cont.from_rv(rv_norm) _test_equal_rand(rv, rv_ref) with config.context({"density_mincoverage": 0.0}): rv = Cont.from_sample(x) with config.context({"n_grid": 11}): rv = Cont.from_sample(x) assert len(rv.x) <= 22 with config.context({"cdf_tolerance": 2.0}): rv = Cont.from_sample(x) Error: singular matrix`). # Case when sample width is zero but density is not zero density_centered_interval = make_circ_density([(-1, 1)]) with config.context({"estimator_cont": lambda x: density_centered_interval}): assert from_sample_cdf_max_error(zero_vec) <= 1e-4 # Case when both sample width and density are zero density_shifted_interval = make_circ_density([(10, 20)]) with config.context({"estimator_cont": lambda x: density_shifted_interval}): # Here currently the problem is that support is estimated way to # wide with very small (~1e-9) non-zero density outside of [10, # 20]. However, CDFs are still close. assert from_sample_cdf_max_error(zero_vec) <= 2e-4 def test_pdf(self): rv = Cont([0, 1, 3], [0.5, 0.5, 0]) # Regular checks x = np.array([-1, 0, 0.5, 1, 2, 3, 4]) assert_array_equal(rv.pdf(x), np.array([0, 0.5, 0.5, 0.5, 0.25, 0, 0])) # Coercion of not ndarray input _test_input_coercion(rv.pdf, x) # Input around edges x = np.array([0 - 1e-10, 0 + 1e-10, 3 - 1e-10, 3 + 1e-10]) assert_array_almost_equal( rv.pdf(x), np.array([0, 0.5, 0.25e-10, 0]), decimal=DECIMAL ) # Bad input x = np.array([-np.inf, np.nan, np.inf]) assert_array_equal(rv.pdf(x), np.array([0, np.nan, 0])) # Dirac-like random variable rv_dirac = Cont([10 - h, 10, 10 + h], [0, 1, 0]) x = np.array([10 - h, 10 - 0.5e-8, 10, 10 + 0.5e-8, 10 + h]) ## Accuracy is of order of 10 due to extreme magnitudes of values assert_array_almost_equal( rv_dirac.pdf(x), np.array([0, 0.5e8, 1e8, 0.5e8, 0]), decimal=-1 ) # Broadcasting x = np.array([[-1, 0.5], [2, 4]]) assert_array_equal(rv.pdf(x), np.array([[0.0, 0.5], [0.25, 0.0]])) # One value input _test_one_value_input(rv.pdf, 0.5) _test_one_value_input(rv.pdf, -1) _test_one_value_input(rv.pdf, np.nan) def test_logpdf(self): rv = Cont([0, 1, 3], [0.5, 0.5, 0]) _test_log_fun(rv.logpdf, rv.pdf, x_ref=[-1, 0.1, 3, np.inf, np.nan]) def test_pmf(self): rv = Cont([0, 1, 3], [0.5, 0.5, 0]) with pytest.raises(AttributeError, match=r"Use `pdf\(\)`"): rv.pmf(0) def test_logpmf(self): rv = Cont([0, 1, 3], [0.5, 0.5, 0]) with pytest.raises(AttributeError, match=r"Use `logpdf\(\)`"): rv.logpmf(0) def test_cdf(self): rv_1 = Cont([0, 1, 2], [0, 1, 0]) # Regular checks x = np.array([-1, 0, 0.5, 1, 1.5, 2, 3]) assert_array_equal(rv_1.cdf(x), np.array([0, 0, 0.125, 0.5, 0.875, 1, 1])) # Coercion of not ndarray input _test_input_coercion(rv_1.cdf, x) # Bad input x = np.array([-np.inf, np.nan, np.inf]) assert_array_equal(rv_1.cdf(x), np.array([0, np.nan, 1])) # Dirac-like random variable rv_dirac = Cont([10 - h, 10, 10 + h], [0, 1, 0]) x = np.array([10 - h, 10 - 0.5e-8, 10, 10 + 0.5e-8, 10 + h]) assert_array_almost_equal( rv_dirac.cdf(x), np.array([0, 0.125, 0.5, 0.875, 1]), decimal=DECIMAL ) # Broadcasting x = np.array([[-1, 0.5], [2, 4]]) assert_array_equal(rv_1.cdf(x), np.array([[0.0, 0.125], [1.0, 1.0]])) # One value input _test_one_value_input(rv_1.cdf, 0.5) _test_one_value_input(rv_1.cdf, -1) _test_one_value_input(rv_1.cdf, np.nan) def test_logcdf(self): rv = Cont([0, 1, 3], [0.5, 0.5, 0]) _test_log_fun(rv.logcdf, rv.cdf, x_ref=[-1, 0.1, 3, np.inf, np.nan]) def test_sf(self): rv = Cont([0, 1, 3], [0.5, 0.5, 0]) x_ref = [-1, 0.1, 
3, np.inf, np.nan] assert_array_equal(rv.sf(x_ref), 1 - rv.cdf(x_ref)) def test_logsf(self): rv = Cont([0, 1, 3], [0.5, 0.5, 0]) _test_log_fun(rv.logsf, rv.sf, x_ref=[-1, 0.1, 3, np.inf, np.nan]) def test_ppf(self): # `ppf()` method should be inverse to `cdf()` for every sensible input rv_1 = Cont([0, 1, 2], [0, 1, 0]) # Regular checks q = np.array([0, 0.125, 0.5, 0.875, 1]) assert_array_equal(rv_1.ppf(q), np.array([0, 0.5, 1, 1.5, 2])) # Coercion of not ndarray input _test_input_coercion(rv_1.ppf, q) # Bad input q = np.array([-np.inf, -h, np.nan, 1 + h, np.inf]) assert_array_equal( rv_1.ppf(q), np.array([np.nan, np.nan, np.nan, np.nan, np.nan]) ) # Dirac-like random variable rv_dirac = Cont([10 - h, 10, 10 + h], [0, 1, 0]) q = np.array([0, 0.125, 0.5, 0.875, 1]) assert_array_almost_equal( rv_dirac.ppf(q), np.array([10 - h, 10 - 0.5e-8, 10, 10 + 0.5e-8, 10 + h]), decimal=DECIMAL, ) # Broadcasting q = np.array([[0, 0.5], [0.0, 1.0]]) assert_array_equal(rv_1.ppf(q), np.array([[0.0, 1.0], [0.0, 2.0]])) # One value input _test_one_value_input(rv_1.ppf, 0.25) _test_one_value_input(rv_1.ppf, -1) _test_one_value_input(rv_1.ppf, np.nan) # Should return the smallest x-value in case of zero-density interval(s) rv_zero_density = Cont([0, 1, 2, 3, 4, 5, 6], [0, 0.5, 0, 0, 0, 0.5, 0]) assert rv_zero_density.ppf(0.5) == 2 def test_isf(self): rv = Cont([0, 1, 2], [0, 1, 0]) # Regular checks q_ref = np.array([0, 0.125, 0.5, 0.875, 1]) assert_array_equal(rv.sf(rv.isf(q_ref)), q_ref) def test_rvs(self): rv_1 = Cont([0, 1, 2], [0, 1, 0]) _test_rvs_method(rv_1) def test__cdf_spline(self): rv = Cont([0, 1, 2], [0, 1, 0]) x = [-10, 0, 0.5, 1, 1.5, 2, 10] assert_array_equal(rv._cdf_spline(x), rv.cdf(x)) def test_integrate_cdf(self): rv = Cont([0, 1, 2], [0, 1, 0]) assert np.allclose(rv.integrate_cdf(-10, 10), quad(rv.cdf, -10, 10)[0]) def test_convert(self): import randomvars._boolean as bool import randomvars._discrete as disc import randomvars._mixture as mixt rv = Cont([0, 1, 2], [0, 1, 0]) # By default and supplying `None` should return self assert rv.convert() is rv assert rv.convert(None) is rv # Converting to Bool should result into boolean with probability of # `False` being 0 (because probability of continuous RV being exactly # zero is 0). 
out_bool = rv.convert("Bool") assert isinstance(out_bool, bool.Bool) assert out_bool.prob_true == 1.0 # Converting to own class should return self out_cont = rv.convert("Cont") assert out_cont is rv # Converting to Disc should result into discrete RV with the same `x` # values as in input's xy-grid out_disc = rv.convert("Disc") assert isinstance(out_disc, disc.Disc) assert_array_equal(out_disc.x, rv.x) out_mixt = rv.convert("Mixt") assert isinstance(out_mixt, mixt.Mixt) assert out_mixt.cont is rv assert out_mixt.weight_cont == 1.0 with pytest.raises(ValueError, match="one of"): rv.convert("aaa") class TestFromRVAccuracy: @pytest.mark.slow @pytest.mark.parametrize( "distr_dict,thres", [ (DISTRIBUTIONS_COMMON, 1e-4), (DISTRIBUTIONS_INF_DENSITY, 1e-3), (DISTRIBUTIONS_HEAVY_TAILS, 5e-3), ], ) def test_cdf_maxerror(self, distr_dict, thres): test_passed = { name: TestFromRVAccuracy.from_rv_cdf_maxerror(distr) <= thres for name, distr in distr_dict.items() } assert all(test_passed.values()) def test_detected_support(self): rv_ref = Cont([0, 1, 2, 3, 4], [0, 0, 1, 0, 0]) rv_out = Cont.from_rv(declass(rv_ref)) _test_equal_rand(rv_out, rv_ref.compress(), decimal=4) @staticmethod def from_rv_cdf_maxerror(rv_base, n_inner_points=10, **kwargs): rv_test = Cont.from_rv(rv_base, **kwargs) x_grid = augment_grid(rv_test.x, n_inner_points) err = rv_base.cdf(x_grid) - rv_test.cdf(x_grid) return np.max(np.abs(err)) class TestFromSampleAccuracy: @pytest.mark.slow @pytest.mark.parametrize( "distr_dict,thres", [ (DISTRIBUTIONS_COMMON, 1e-4), (DISTRIBUTIONS_INF_DENSITY, 1.5e-4), (DISTRIBUTIONS_HEAVY_TAILS, 1e-4), ], ) def test_close_cdf(self, distr_dict, thres): rng = np.random.default_rng(101) test_passed = { name: TestFromSampleAccuracy.simulated_cdf_error(distr, rng) <= thres for name, distr in distr_dict.items() } assert all(test_passed.values()) @pytest.mark.slow def test_density_range(self): density_mincoverage = config.density_mincoverage estimator_cont = config.estimator_cont rng = np.random.default_rng(101) def generate_density_coverage(distr): x = distr.rvs(size=100, random_state=rng) density = estimator_cont(x) rv = Cont.from_sample(x) return quad(density, rv.x[0], rv.x[-1])[0] test_passed = { distr_name: generate_density_coverage(distr) >= density_mincoverage for distr_name, distr in DISTRIBUTIONS.items() } assert all(test_passed.values()) @staticmethod def simulated_cdf_error(distr, rng): x = distr.rvs(size=100, random_state=rng) with config.context({"estimator_cont": gaussian_kde}): rv = Cont.from_sample(x) density = config.estimator_cont(x) x_grid = augment_grid(rv.x, 10) cdf_grid = np.array( [density.integrate_box_1d(-np.inf, cur_x) for cur_x in x_grid] ) err = cdf_grid - rv.cdf(x_grid) return np.max(np.abs(err)) def test__extend_range(): def extra_estimator(x): x_min, x_max = x.min(), x.max() prob_height = 1 / (x_max - x_min + 1) def res(x): return np.where( ((x_min < x) & (x < x_max)) | ((x_max + 1 < x) & (x < x_max + 2)), prob_height, 0, ) return res norm = distrs.norm() rng = np.random.default_rng(101) x = norm.rvs(100, random_state=rng) with config.context({"estimator_cont": extra_estimator}): rv = Cont.from_sample(x) assert (rv.x[0] <= x.min()) and (rv.x[-1] >= x.max())
true
true
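The augment_grid() helper defined near the top of the test file above keeps each left knot, inserts n_inner_points evenly spaced interior points per interval, and appends the final knot once. A quick standalone check on an arbitrary grid:

import numpy as np


def augment_grid(x, n_inner_points):
    # copied from the test file above
    test_arr = [
        np.linspace(x[i], x[i + 1], n_inner_points + 1, endpoint=False)
        for i in np.arange(len(x) - 1)
    ]
    test_arr.append([x[-1]])
    return np.concatenate(test_arr)


print(augment_grid(np.array([0.0, 1.0, 3.0]), n_inner_points=1))
# expected values: 0.0, 0.5, 1.0, 2.0, 3.0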
f711b20c3b60383cb78e430d5bf5b6670492c5e8
267
py
Python
webapp/tests/funcplugins/plugin_bad_paramtype.py
TimWhalen/graphite-web
e150af45e01d01141a8767ec0597e218105b9914
[ "Apache-2.0" ]
1
2021-01-16T20:10:45.000Z
2021-01-16T20:10:45.000Z
webapp/tests/funcplugins/plugin_bad_paramtype.py
TimWhalen/graphite-web
e150af45e01d01141a8767ec0597e218105b9914
[ "Apache-2.0" ]
null
null
null
webapp/tests/funcplugins/plugin_bad_paramtype.py
TimWhalen/graphite-web
e150af45e01d01141a8767ec0597e218105b9914
[ "Apache-2.0" ]
null
null
null
from graphite.functions.params import Param, ParamTypes


def test(seriesList):
    """This is a test function"""
    return seriesList


test.group = 'Test'
test.params = [
    Param('seriesList', ParamTypes.bad, required=True),
]

SeriesFunctions = {
    'testFunc': test,
}
17.8
55
0.71161
from graphite.functions.params import Param, ParamTypes


def test(seriesList):
    return seriesList


test.group = 'Test'
test.params = [
    Param('seriesList', ParamTypes.bad, required=True),
]

SeriesFunctions = {
    'testFunc': test,
}
true
true
f711b32486269d4237e64b7122aaece024245cb3
6,011
py
Python
iris_validation/interface/svgwrite/validator2.py
FilomenoSanchez/iris-validation
a7bbb28dfe239527c32914229e69e007a519e0dd
[ "MIT" ]
null
null
null
iris_validation/interface/svgwrite/validator2.py
FilomenoSanchez/iris-validation
a7bbb28dfe239527c32914229e69e007a519e0dd
[ "MIT" ]
1
2021-11-18T11:58:59.000Z
2021-11-18T12:00:08.000Z
iris_validation/interface/svgwrite/validator2.py
glycojones/iris-validation
55163a2914cf572af9b52b705188b0b4ec720be2
[ "MIT" ]
null
null
null
#!/usr/bin/env python
#coding:utf-8
# Author: mozman --<mozman@gmx.at>
# Purpose: validator2 module - new validator module
# Created: 01.10.2010
# Copyright (C) 2010, Manfred Moitzi
# License: MIT License

from .data import full11
from .data import tiny12
from .data import pattern

validator_cache = {}


def cache_key(profile, debug):
    return str(profile) + str(debug)


def get_validator(profile, debug=True):
    """ Validator factory """
    try:
        return validator_cache[cache_key(profile, debug)]
    except KeyError:
        if profile == 'tiny':
            validator = Tiny12Validator(debug)
        elif profile in ('full', 'basic', 'none'):
            validator = Full11Validator(debug)
        else:
            raise ValueError("Unsupported profile: '%s'" % profile)
        validator_cache[cache_key(profile, debug)] = validator
        return validator


class Tiny12Validator(object):
    profilename = "Tiny 1.2"

    def __init__(self, debug=True):
        self.debug = debug
        self.attributes = tiny12.attributes
        self.elements = tiny12.elements
        self.typechecker = tiny12.TypeChecker()

    def check_all_svg_attribute_values(self, elementname, attributes):
        """ Check if attributes are valid for object 'elementname' and all
        svg attributes have valid types and values.

        Raises ValueError.
        """
        for attributename, value in attributes.items():
            self.check_svg_attribute_value(elementname, attributename, value)

    def check_svg_attribute_value(self, elementname, attributename, value):
        """ Check if 'attributename' is valid for object 'elementname' and
        'value' is a valid svg type and value.

        Raises ValueError.
        """
        self._check_valid_svg_attribute_name(elementname, attributename)
        self._check_svg_value(elementname, attributename, value)

    def _check_svg_value(self, elementname, attributename, value):
        """ Checks if 'value' is a valid svg-type for svg-attribute
        'attributename' at svg-element 'elementname'.

        Raises TypeError.
        """
        attribute = self.attributes[attributename]
        # check if 'value' match a valid datatype
        for typename in attribute.get_types(elementname):
            if self.typechecker.check(typename, value):
                return
        # check if 'value' is a valid constant
        valuestr = str(value)
        if not valuestr in attribute.get_const(elementname):
            raise TypeError("'%s' is not a valid value for attribute '%s' at svg-element <%s>." % (value, attributename, elementname))

    def _check_valid_svg_attribute_name(self, elementname, attributename):
        """ Check if 'attributename' is a valid svg-attribute for svg-element
        'elementname'.

        Raises ValueError.
        """
        if not self.is_valid_svg_attribute(elementname, attributename):
            raise ValueError("Invalid attribute '%s' for svg-element <%s>." % (attributename, elementname))

    def _get_element(self, elementname):
        try:
            return self.elements[elementname]
        except KeyError:
            raise KeyError("<%s> is not valid for selected profile: '%s'." % (elementname, self.profilename))

    def check_svg_type(self, value, typename='string'):
        """ Check if 'value' matches svg type 'typename'.

        Raises TypeError.
        """
        if self.typechecker.check(typename, value):
            return value
        else:
            raise TypeError("%s is not of type '%s'." % (value, typename))

    def is_valid_svg_type(self, value, typename):
        return self.typechecker.check(typename, value)

    def is_valid_elementname(self, elementname):
        """ True if 'elementname' is a valid svg-element name. """
        return elementname in self.elements

    def is_valid_svg_attribute(self, elementname, attributename):
        """ True if 'attributename' is a valid svg-attribute for svg-element
        'elementname'.
        """
        element = self._get_element(elementname)
        return attributename in element.valid_attributes

    def is_valid_children(self, elementname, childrenname):
        """ True if svg-element 'childrenname' is a valid children of
        svg-element 'elementname'.
        """
        element = self._get_element(elementname)
        return childrenname in element.valid_children

    def check_valid_children(self, elementname, childrenname):
        """ Checks if svg-element 'childrenname' is a valid children of
        svg-element 'elementname'.

        Raises ValueError.
        """
        if not self.is_valid_children(elementname, childrenname):
            raise ValueError("Invalid children '%s' for svg-element <%s>." % (childrenname, elementname))

    def get_coordinate(self, value):
        """ Split value in (number, unit) if value has an unit or (number, None).

        Raises ValueError.
        """
        if value is None:
            raise TypeError("Invalid type 'None'.")
        if isinstance(value, (int, float)):
            result = (value, None)
        else:
            result = pattern.coordinate.match(value.strip())
            if result:
                number, tmp, unit = result.groups()
                number = float(number)
            else:
                raise ValueError("'%s' is not a valid svg-coordinate." % value)
            result = (number, unit)
        if self.typechecker.is_number(result[0]):
            return result
        else:
            version = "SVG %s %s" % self.typechecker.get_version()
            raise ValueError("%s is not a valid number for: %s." % (value, version))

    get_length = get_coordinate


class Full11Validator(Tiny12Validator):
    profilename = "Full 1.1"

    def __init__(self, debug=True):
        self.debug = debug
        self.attributes = full11.attributes
        self.elements = full11.elements
        self.typechecker = full11.TypeChecker()
35.152047
134
0.635335
from .data import full11 from .data import tiny12 from .data import pattern validator_cache = {} def cache_key(profile, debug): return str(profile) + str(debug) def get_validator(profile, debug=True): try: return validator_cache[cache_key(profile, debug)] except KeyError: if profile == 'tiny': validator = Tiny12Validator(debug) elif profile in ('full', 'basic', 'none'): validator = Full11Validator(debug) else: raise ValueError("Unsupported profile: '%s'" % profile) validator_cache[cache_key(profile, debug)] = validator return validator class Tiny12Validator(object): profilename = "Tiny 1.2" def __init__(self, debug=True): self.debug = debug self.attributes = tiny12.attributes self.elements = tiny12.elements self.typechecker = tiny12.TypeChecker() def check_all_svg_attribute_values(self, elementname, attributes): for attributename, value in attributes.items(): self.check_svg_attribute_value(elementname, attributename, value) def check_svg_attribute_value(self, elementname, attributename, value): self._check_valid_svg_attribute_name(elementname, attributename) self._check_svg_value(elementname, attributename, value) def _check_svg_value(self, elementname, attributename, value): attribute = self.attributes[attributename] for typename in attribute.get_types(elementname): if self.typechecker.check(typename, value): return valuestr = str(value) if not valuestr in attribute.get_const(elementname): raise TypeError("'%s' is not a valid value for attribute '%s' at svg-element <%s>." % (value, attributename, elementname)) def _check_valid_svg_attribute_name(self, elementname, attributename): if not self.is_valid_svg_attribute(elementname, attributename): raise ValueError("Invalid attribute '%s' for svg-element <%s>." % (attributename, elementname)) def _get_element(self, elementname): try: return self.elements[elementname] except KeyError: raise KeyError("<%s> is not valid for selected profile: '%s'." % (elementname, self.profilename)) def check_svg_type(self, value, typename='string'): if self.typechecker.check(typename, value): return value else: raise TypeError("%s is not of type '%s'." % (value, typename)) def is_valid_svg_type(self, value, typename): return self.typechecker.check(typename, value) def is_valid_elementname(self, elementname): return elementname in self.elements def is_valid_svg_attribute(self, elementname, attributename): element = self._get_element(elementname) return attributename in element.valid_attributes def is_valid_children(self, elementname, childrenname): element = self._get_element(elementname) return childrenname in element.valid_children def check_valid_children(self, elementname, childrenname): if not self.is_valid_children(elementname, childrenname): raise ValueError("Invalid children '%s' for svg-element <%s>." % (childrenname, elementname)) def get_coordinate(self, value): if value is None: raise TypeError("Invalid type 'None'.") if isinstance(value, (int, float)): result = (value, None) else: result = pattern.coordinate.match(value.strip()) if result: number, tmp, unit = result.groups() number = float(number) else: raise ValueError("'%s' is not a valid svg-coordinate." % value) result = (number, unit) if self.typechecker.is_number(result[0]): return result else: version = "SVG %s %s" % self.typechecker.get_version() raise ValueError("%s is not a valid number for: %s." 
% (value, version)) get_length = get_coordinate class Full11Validator(Tiny12Validator): profilename = "Full 1.1" def __init__(self, debug=True): self.debug = debug self.attributes = full11.attributes self.elements = full11.elements self.typechecker = full11.TypeChecker()
true
true
f711b35f6d949074210d2633de5514a8461243be
2,123
py
Python
exatomic/interfaces/tests/test_cube.py
tjduigna/exatomic
3e27233084588bc6a58b63fc81aaf5a6b67a968d
[ "Apache-2.0" ]
null
null
null
exatomic/interfaces/tests/test_cube.py
tjduigna/exatomic
3e27233084588bc6a58b63fc81aaf5a6b67a968d
[ "Apache-2.0" ]
1
2017-05-25T21:05:40.000Z
2017-05-25T23:54:15.000Z
exatomic/interfaces/tests/test_cube.py
tjduigna/exatomic
3e27233084588bc6a58b63fc81aaf5a6b67a968d
[ "Apache-2.0" ]
1
2017-05-25T20:48:33.000Z
2017-05-25T20:48:33.000Z
# -*- coding: utf-8 -*- ## Copyright (c) 2015-2018, Exa Analytics Development Team ## Distributed under the terms of the Apache License 2.0 """ Tests for :mod:`~exatomic.interfaces.cube` ############################################# """ import numpy as np from unittest import TestCase from exatomic.base import resource, staticdir from exatomic.interfaces.cube import Cube, uni_from_cubes class TestCube(TestCase): """Tests cube reading and writing.""" def setUp(self): self.lg = Cube(resource('mol-carbon-dz-1.cube')) self.sm1 = Cube(resource('adf-lu-35.cube')) self.sm2 = Cube(resource('adf-lu-36.cube')) self.uni = uni_from_cubes(staticdir() + '/cube/', ext='*lu*cube') def test_parse_atom(self): self.lg.parse_atom() self.sm1.parse_atom() self.sm2.parse_atom() self.assertEquals(self.lg.atom.shape[0], 1) self.assertEquals(self.sm1.atom.shape[0], 1) self.assertEquals(self.sm2.atom.shape[0], 1) def test_parse_field(self): self.lg.parse_field() self.sm1.parse_field() self.sm2.parse_field() self.assertEquals(self.lg.field.shape[0], 1) self.assertEquals(self.sm1.field.shape[0], 1) self.assertEquals(self.sm2.field.shape[0], 1) self.assertEquals(self.lg.field.field_values[0].shape[0], 132651) self.assertEquals(self.sm1.field.field_values[0].shape[0], 4913) self.assertEquals(self.sm2.field.field_values[0].shape[0], 4913) def test_to_universe(self): lg = self.lg.to_universe() sm1 = self.sm1.to_universe() sm2 = self.sm2.to_universe() for uni in [lg, sm1, sm2]: for attr in ['atom', 'field']: self.assertTrue(hasattr(uni, attr)) def test_uni_from_cubes_rotate_and_write(self): self.assertEquals(self.uni.field.shape[0], 2) self.assertEquals(len(self.uni.field.field_values), 2) rot = self.uni.field.rotate(0, 1, np.pi / 4) self.assertEquals(rot.shape[0], 2) f = Cube.from_universe(self.uni, 1) self.assertEquals(len(f), 874)
37.245614
73
0.629298
import numpy as np from unittest import TestCase from exatomic.base import resource, staticdir from exatomic.interfaces.cube import Cube, uni_from_cubes class TestCube(TestCase): def setUp(self): self.lg = Cube(resource('mol-carbon-dz-1.cube')) self.sm1 = Cube(resource('adf-lu-35.cube')) self.sm2 = Cube(resource('adf-lu-36.cube')) self.uni = uni_from_cubes(staticdir() + '/cube/', ext='*lu*cube') def test_parse_atom(self): self.lg.parse_atom() self.sm1.parse_atom() self.sm2.parse_atom() self.assertEquals(self.lg.atom.shape[0], 1) self.assertEquals(self.sm1.atom.shape[0], 1) self.assertEquals(self.sm2.atom.shape[0], 1) def test_parse_field(self): self.lg.parse_field() self.sm1.parse_field() self.sm2.parse_field() self.assertEquals(self.lg.field.shape[0], 1) self.assertEquals(self.sm1.field.shape[0], 1) self.assertEquals(self.sm2.field.shape[0], 1) self.assertEquals(self.lg.field.field_values[0].shape[0], 132651) self.assertEquals(self.sm1.field.field_values[0].shape[0], 4913) self.assertEquals(self.sm2.field.field_values[0].shape[0], 4913) def test_to_universe(self): lg = self.lg.to_universe() sm1 = self.sm1.to_universe() sm2 = self.sm2.to_universe() for uni in [lg, sm1, sm2]: for attr in ['atom', 'field']: self.assertTrue(hasattr(uni, attr)) def test_uni_from_cubes_rotate_and_write(self): self.assertEquals(self.uni.field.shape[0], 2) self.assertEquals(len(self.uni.field.field_values), 2) rot = self.uni.field.rotate(0, 1, np.pi / 4) self.assertEquals(rot.shape[0], 2) f = Cube.from_universe(self.uni, 1) self.assertEquals(len(f), 874)
true
true
f711b36657fa38518e47732c9b1d3cb2046ba5c7
5,371
py
Python
lldb/test/API/functionalities/plugins/python_os_plugin/stepping_plugin_threads/TestOSPluginStepping.py
mkinsner/llvm
589d48844edb12cd357b3024248b93d64b6760bf
[ "Apache-2.0" ]
2,338
2018-06-19T17:34:51.000Z
2022-03-31T11:00:37.000Z
lldb/test/API/functionalities/plugins/python_os_plugin/stepping_plugin_threads/TestOSPluginStepping.py
mkinsner/llvm
589d48844edb12cd357b3024248b93d64b6760bf
[ "Apache-2.0" ]
3,740
2019-01-23T15:36:48.000Z
2022-03-31T22:01:13.000Z
lldb/test/API/functionalities/plugins/python_os_plugin/stepping_plugin_threads/TestOSPluginStepping.py
mkinsner/llvm
589d48844edb12cd357b3024248b93d64b6760bf
[ "Apache-2.0" ]
500
2019-01-23T07:49:22.000Z
2022-03-30T02:59:37.000Z
""" Test that stepping works even when the OS Plugin doesn't report all threads at every stop. """ from __future__ import print_function import os import lldb from lldbsuite.test.decorators import * from lldbsuite.test.lldbtest import * import lldbsuite.test.lldbutil as lldbutil class TestOSPluginStepping(TestBase): mydir = TestBase.compute_mydir(__file__) NO_DEBUG_INFO_TESTCASE = True @skipIfWindows @skipIf(oslist=["freebsd"], bugnumber="llvm.org/pr48352") def test_python_os_plugin(self): """Test that stepping works when the OS Plugin doesn't report all threads at every stop""" self.build() self.main_file = lldb.SBFileSpec('main.cpp') self.run_python_os_step_missing_thread(False) @skipIfWindows @skipIf(oslist=["freebsd"], bugnumber="llvm.org/pr48352") def test_python_os_plugin_prune(self): """Test that pruning the unreported PlanStacks works""" self.build() self.main_file = lldb.SBFileSpec('main.cpp') self.run_python_os_step_missing_thread(True) def get_os_thread(self): return self.process.GetThreadByID(0x111111111) def is_os_thread(self, thread): id = thread.GetID() return id == 0x111111111 def run_python_os_step_missing_thread(self, do_prune): """Test that the Python operating system plugin works correctly""" # Our OS plugin does NOT report all threads: result = self.dbg.HandleCommand("settings set process.experimental.os-plugin-reports-all-threads false") python_os_plugin_path = os.path.join(self.getSourceDir(), "operating_system.py") (target, self.process, thread, thread_bkpt) = lldbutil.run_to_source_breakpoint( self, "first stop in thread - do a step out", self.main_file) main_bkpt = target.BreakpointCreateBySourceRegex('Stop here and do not make a memory thread for thread_1', self.main_file) self.assertEqual(main_bkpt.GetNumLocations(), 1, "Main breakpoint has one location") # There should not be an os thread before we load the plugin: self.assertFalse(self.get_os_thread().IsValid(), "No OS thread before loading plugin") # Now load the python OS plug-in which should update the thread list and we should have # an OS plug-in thread overlaying thread_1 with id 0x111111111 command = "settings set target.process.python-os-plugin-path '%s'" % python_os_plugin_path self.dbg.HandleCommand(command) # Verify our OS plug-in threads showed up os_thread = self.get_os_thread() self.assertTrue( os_thread.IsValid(), "Make sure we added the thread 0x111111111 after we load the python OS plug-in") # Now we are going to step-out. This should get interrupted by main_bkpt. We've # set up the OS plugin so at this stop, we have lost the OS thread 0x111111111. 
# Make sure both of these are true: os_thread.StepOut() stopped_threads = lldbutil.get_threads_stopped_at_breakpoint(self.process, main_bkpt) self.assertEqual(len(stopped_threads), 1, "Stopped at main_bkpt") thread = self.process.GetThreadByID(0x111111111) self.assertFalse(thread.IsValid(), "No thread 0x111111111 on second stop.") # Make sure we still have the thread plans for this thread: # First, don't show unreported threads, that should fail: command = "thread plan list -t 0x111111111" result = lldb.SBCommandReturnObject() interp = self.dbg.GetCommandInterpreter() interp.HandleCommand(command, result) self.assertFalse(result.Succeeded(), "We found no plans for the unreported thread.") # Now do it again but with the -u flag: command = "thread plan list -u -t 0x111111111" result = lldb.SBCommandReturnObject() interp.HandleCommand(command, result) self.assertTrue(result.Succeeded(), "We found plans for the unreported thread.") if do_prune: # Prune the thread plan and continue, and we will run to exit. interp.HandleCommand("thread plan prune 0x111111111", result) self.assertTrue(result.Succeeded(), "Found the plan for 0x111111111 and pruned it") # List again, make sure it doesn't work: command = "thread plan list -u -t 0x111111111" interp.HandleCommand(command, result) self.assertFalse(result.Succeeded(), "We still found plans for the unreported thread.") self.process.Continue() self.assertEqual(self.process.GetState(), lldb.eStateExited, "We exited.") else: # Now we are going to continue, and when we hit the step-out breakpoint, we will # put the OS plugin thread back, lldb will recover its ThreadPlanStack, and # we will stop with a "step-out" reason. self.process.Continue() os_thread = self.get_os_thread() self.assertTrue(os_thread.IsValid(), "The OS thread is back after continue") self.assertIn("step out", os_thread.GetStopDescription(100), "Completed step out plan")
45.134454
114
0.65984
from __future__ import print_function import os import lldb from lldbsuite.test.decorators import * from lldbsuite.test.lldbtest import * import lldbsuite.test.lldbutil as lldbutil class TestOSPluginStepping(TestBase): mydir = TestBase.compute_mydir(__file__) NO_DEBUG_INFO_TESTCASE = True @skipIfWindows @skipIf(oslist=["freebsd"], bugnumber="llvm.org/pr48352") def test_python_os_plugin(self): self.build() self.main_file = lldb.SBFileSpec('main.cpp') self.run_python_os_step_missing_thread(False) @skipIfWindows @skipIf(oslist=["freebsd"], bugnumber="llvm.org/pr48352") def test_python_os_plugin_prune(self): self.build() self.main_file = lldb.SBFileSpec('main.cpp') self.run_python_os_step_missing_thread(True) def get_os_thread(self): return self.process.GetThreadByID(0x111111111) def is_os_thread(self, thread): id = thread.GetID() return id == 0x111111111 def run_python_os_step_missing_thread(self, do_prune): result = self.dbg.HandleCommand("settings set process.experimental.os-plugin-reports-all-threads false") python_os_plugin_path = os.path.join(self.getSourceDir(), "operating_system.py") (target, self.process, thread, thread_bkpt) = lldbutil.run_to_source_breakpoint( self, "first stop in thread - do a step out", self.main_file) main_bkpt = target.BreakpointCreateBySourceRegex('Stop here and do not make a memory thread for thread_1', self.main_file) self.assertEqual(main_bkpt.GetNumLocations(), 1, "Main breakpoint has one location") self.assertFalse(self.get_os_thread().IsValid(), "No OS thread before loading plugin") command = "settings set target.process.python-os-plugin-path '%s'" % python_os_plugin_path self.dbg.HandleCommand(command) os_thread = self.get_os_thread() self.assertTrue( os_thread.IsValid(), "Make sure we added the thread 0x111111111 after we load the python OS plug-in") # set up the OS plugin so at this stop, we have lost the OS thread 0x111111111. # Make sure both of these are true: os_thread.StepOut() stopped_threads = lldbutil.get_threads_stopped_at_breakpoint(self.process, main_bkpt) self.assertEqual(len(stopped_threads), 1, "Stopped at main_bkpt") thread = self.process.GetThreadByID(0x111111111) self.assertFalse(thread.IsValid(), "No thread 0x111111111 on second stop.") # Make sure we still have the thread plans for this thread: # First, don't show unreported threads, that should fail: command = "thread plan list -t 0x111111111" result = lldb.SBCommandReturnObject() interp = self.dbg.GetCommandInterpreter() interp.HandleCommand(command, result) self.assertFalse(result.Succeeded(), "We found no plans for the unreported thread.") command = "thread plan list -u -t 0x111111111" result = lldb.SBCommandReturnObject() interp.HandleCommand(command, result) self.assertTrue(result.Succeeded(), "We found plans for the unreported thread.") if do_prune: interp.HandleCommand("thread plan prune 0x111111111", result) self.assertTrue(result.Succeeded(), "Found the plan for 0x111111111 and pruned it") command = "thread plan list -u -t 0x111111111" interp.HandleCommand(command, result) self.assertFalse(result.Succeeded(), "We still found plans for the unreported thread.") self.process.Continue() self.assertEqual(self.process.GetState(), lldb.eStateExited, "We exited.") else: # Now we are going to continue, and when we hit the step-out breakpoint, we will # put the OS plugin thread back, lldb will recover its ThreadPlanStack, and # we will stop with a "step-out" reason. 
self.process.Continue() os_thread = self.get_os_thread() self.assertTrue(os_thread.IsValid(), "The OS thread is back after continue") self.assertIn("step out", os_thread.GetStopDescription(100), "Completed step out plan")
true
true
f711b3fdfa190d5ada52a8c9c636c1e426058e74
2,702
py
Python
get_asa_full_config.py
otronomo/netmiko_based
cef803a83de475bcc95403f5caaa73937095a92f
[ "Apache-2.0" ]
null
null
null
get_asa_full_config.py
otronomo/netmiko_based
cef803a83de475bcc95403f5caaa73937095a92f
[ "Apache-2.0" ]
null
null
null
get_asa_full_config.py
otronomo/netmiko_based
cef803a83de475bcc95403f5caaa73937095a92f
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/python3 # BY NOMO from netmiko import Netmiko from getpass import getpass from datetime import datetime from pprint import pprint import re import os import sys import socket # Vars config_dir = "/home/reponeg/logs/asa_configs" # Function for DNS resolution def hostnameLookup(hostname): try: socket.gethostbyname(hostname) return 1 # If lookup works except socket.error: return 0 # If lookup fails def getShowRun(connection_handle, context, dirname): output = connection_handle.send_command("changeto context " + context) sh_run = connection_handle.send_command("show run") hostname_simple = re.findall( r'(.+?)\.+', hostname_arg )[0] file_path = dirname + "/" + "sh_run_" + hostname_simple +"_"+ context + ".txt" with open(file_path, "w") as file_handle: file_handle.write(sh_run) return 1 # Check arguments for hostname and hostname dns resolution if len(sys.argv) < 4: print("\nMissing parameter. Please enter the hostname or IP address:") print("\nUsage:", sys.argv[0], "<hostname>\n\n") exit() elif len(sys.argv) > 4: print("Too many parameters. Use a single hostname.") exit() hostname_arg = sys.argv[1] username = sys.argv[2] password = sys.argv[3] dns_lookup_result = hostnameLookup(hostname_arg) if dns_lookup_result == 0: print("Hostname lookup for %s failed. Please check name and retry." %(hostname_arg) ) exit() # Device asa = { 'host': hostname_arg, 'username': username, 'password': password, 'device_type': 'cisco_asa' } auth_pending = True while auth_pending: try: conn1 = Netmiko(**asa) auth_pending = False except: print("Authentication failed. This is host " + hostname_arg) asa['username'] = input("\nEnter your Username FOR THIS HOST): ") asa['password'] = getpass() try: conn1 = Netmiko(**asa) pass except: print("Failed to authenticate on " + hostname_arg + "\nTry again.") pass # Move to context sys to grab the list of all contexts command = "changeto context sys" output = conn1.send_command(command) command = "show run | i context" output = conn1.send_command(command).splitlines() # Get the list context_list = [] for line in output: if line.startswith("context "): context_name = line.replace("context ", "") context_list.append(context_name) # Start hopping contexts and retrieving running configs for context in context_list: getShowRun(conn1, context, config_dir) print("Retrieved config for contexts:") print(context_list) print("\n")
25.252336
89
0.661362
from netmiko import Netmiko from getpass import getpass from datetime import datetime from pprint import pprint import re import os import sys import socket config_dir = "/home/reponeg/logs/asa_configs" def hostnameLookup(hostname): try: socket.gethostbyname(hostname) return 1 except socket.error: return 0 def getShowRun(connection_handle, context, dirname): output = connection_handle.send_command("changeto context " + context) sh_run = connection_handle.send_command("show run") hostname_simple = re.findall( r'(.+?)\.+', hostname_arg )[0] file_path = dirname + "/" + "sh_run_" + hostname_simple +"_"+ context + ".txt" with open(file_path, "w") as file_handle: file_handle.write(sh_run) return 1 if len(sys.argv) < 4: print("\nMissing parameter. Please enter the hostname or IP address:") print("\nUsage:", sys.argv[0], "<hostname>\n\n") exit() elif len(sys.argv) > 4: print("Too many parameters. Use a single hostname.") exit() hostname_arg = sys.argv[1] username = sys.argv[2] password = sys.argv[3] dns_lookup_result = hostnameLookup(hostname_arg) if dns_lookup_result == 0: print("Hostname lookup for %s failed. Please check name and retry." %(hostname_arg) ) exit() asa = { 'host': hostname_arg, 'username': username, 'password': password, 'device_type': 'cisco_asa' } auth_pending = True while auth_pending: try: conn1 = Netmiko(**asa) auth_pending = False except: print("Authentication failed. This is host " + hostname_arg) asa['username'] = input("\nEnter your Username FOR THIS HOST): ") asa['password'] = getpass() try: conn1 = Netmiko(**asa) pass except: print("Failed to authenticate on " + hostname_arg + "\nTry again.") pass command = "changeto context sys" output = conn1.send_command(command) command = "show run | i context" output = conn1.send_command(command).splitlines() context_list = [] for line in output: if line.startswith("context "): context_name = line.replace("context ", "") context_list.append(context_name) for context in context_list: getShowRun(conn1, context, config_dir) print("Retrieved config for contexts:") print(context_list) print("\n")
true
true
f711b5328a1be3b753056e88ac6fdbce2cf20554
6,448
py
Python
ros/src/twist_controller/dbw_node.py
tochalid/CarND-Capstone
57dc493180ee34f3842ccd0dc4fb3678368f12bb
[ "MIT" ]
null
null
null
ros/src/twist_controller/dbw_node.py
tochalid/CarND-Capstone
57dc493180ee34f3842ccd0dc4fb3678368f12bb
[ "MIT" ]
1
2018-06-09T20:59:50.000Z
2018-06-09T20:59:50.000Z
ros/src/twist_controller/dbw_node.py
tochalid/CarND-Capstone
57dc493180ee34f3842ccd0dc4fb3678368f12bb
[ "MIT" ]
4
2018-05-10T14:50:41.000Z
2018-06-02T23:50:11.000Z
#!/usr/bin/env python import rospy from dbw_mkz_msgs.msg import ThrottleCmd, SteeringCmd, BrakeCmd from geometry_msgs.msg import TwistStamped from std_msgs.msg import Bool from twist_controller import Controller ''' You can build this node only after you have built (or partially built) the `waypoint_updater` node. You will subscribe to `/twist_cmd` message which provides the proposed linear and angular velocities. You can subscribe to any other message that you find important or refer to the document for list of messages subscribed to by the reference implementation of this node. One thing to keep in mind while building this node and the `twist_controller` class is the status of `dbw_enabled`. While in the simulator, its enabled all the time, in the real car, that will not be the case. This may cause your PID controller to accumulate error because the car could temporarily be driven by a human instead of your controller. We have provided two launch files with this node. Vehicle specific values (like vehicle_mass, wheel_base) etc should not be altered in these files. We have also provided some reference implementations for PID controller and other utility classes. You are free to use them or build your own. Once you have the proposed throttle, brake, and steer values, publish it on the various publishers that we have created in the `__init__` function. ''' class DBWNode(object): def __init__(self): rospy.init_node('dbw_node') vehicle_mass = rospy.get_param('~vehicle_mass', 1736.35) fuel_capacity = rospy.get_param('~fuel_capacity', 13.5) brake_deadband = rospy.get_param('~brake_deadband', .1) decel_limit = rospy.get_param('~decel_limit', -5) accel_limit = rospy.get_param('~accel_limit', 1.) wheel_radius = rospy.get_param('~wheel_radius', 0.2413) wheel_base = rospy.get_param('~wheel_base', 2.8498) steer_ratio = rospy.get_param('~steer_ratio', 14.8) max_lat_accel = rospy.get_param('~max_lat_accel', 3.) max_steer_angle = rospy.get_param('~max_steer_angle', 8.) 
self.steer_pub = rospy.Publisher('/vehicle/steering_cmd', SteeringCmd, queue_size=1) self.throttle_pub = rospy.Publisher('/vehicle/throttle_cmd', ThrottleCmd, queue_size=1) self.brake_pub = rospy.Publisher('/vehicle/brake_cmd', BrakeCmd, queue_size=1) # TODO: Create `Controller` object # self.controller = Controller(<Arguments you wish to provide>) self.controller = Controller(vehicle_mass=vehicle_mass, fuel_capacity=fuel_capacity, brake_deadband=brake_deadband, decel_limit=decel_limit, accel_limit=accel_limit, wheel_radius=wheel_radius, wheel_base=wheel_base, steer_ratio=steer_ratio, max_lat_accel=max_lat_accel, max_steer_angle=max_steer_angle) # TODO: Subscribe to all the topics you need to rospy.Subscriber('/twist_cmd', TwistStamped, self.twist_cmd_cb) rospy.Subscriber('/current_velocity', TwistStamped, self.current_velocity_cb) rospy.Subscriber('/vehicle/dbw_enabled', Bool, self.dbw_enabled_cb, queue_size=1) self.current_vel = None self.curr_ang_vel = None self.dbw_enabled = None self.target_vel = None self.target_angular_vel = None self.throttle = self.steering = self.brake = 0 self.loop() def loop(self): rate = rospy.Rate(50) # 50Hz while not rospy.is_shutdown(): # TODO: Get predicted throttle, brake, and steering using `twist_controller` # You should only publish the control commands if dbw is enabled # throttle, brake, steering = self.controller.control(<proposed linear velocity>, # <proposed angular velocity>, # <current linear velocity>, # <dbw status>, # <any other argument you need>) # if <dbw is enabled>: # self.publish(throttle, brake, steer) # If autonomous system is enabled if self.dbw_enabled and (self.current_vel is not None) and (self.target_vel is not None) and ( self.target_angular_vel is not None): self.throttle, self.brake, self.steering = self.controller.control(self.current_vel, self.target_vel, self.target_angular_vel) self.publish(self.throttle, self.brake, self.steering) rate.sleep() def dbw_enabled_cb(self, msg): # Get the driving mode self.controller.reset() self.dbw_enabled = msg.data if self.dbw_enabled: rospy.logwarn('TwistController is online.') else: rospy.logwarn('TwistController is offline.') def twist_cmd_cb(self, msg): # Get the desired velocity self.target_vel = msg.twist.linear.x self.target_angular_vel = msg.twist.angular.z #rospy.logwarn("target_vel %f"%self.target_vel) def current_velocity_cb(self, msg): # Get current velocity self.current_vel = msg.twist.linear.x def publish(self, throttle, brake, steer): tcmd = ThrottleCmd() tcmd.enable = True tcmd.pedal_cmd_type = ThrottleCmd.CMD_PERCENT tcmd.pedal_cmd = throttle self.throttle_pub.publish(tcmd) scmd = SteeringCmd() scmd.enable = True scmd.steering_wheel_angle_cmd = steer self.steer_pub.publish(scmd) bcmd = BrakeCmd() bcmd.enable = True bcmd.pedal_cmd_type = BrakeCmd.CMD_TORQUE bcmd.pedal_cmd = brake self.brake_pub.publish(bcmd) if __name__ == '__main__': DBWNode()
45.090909
107
0.603133
import rospy from dbw_mkz_msgs.msg import ThrottleCmd, SteeringCmd, BrakeCmd from geometry_msgs.msg import TwistStamped from std_msgs.msg import Bool from twist_controller import Controller class DBWNode(object): def __init__(self): rospy.init_node('dbw_node') vehicle_mass = rospy.get_param('~vehicle_mass', 1736.35) fuel_capacity = rospy.get_param('~fuel_capacity', 13.5) brake_deadband = rospy.get_param('~brake_deadband', .1) decel_limit = rospy.get_param('~decel_limit', -5) accel_limit = rospy.get_param('~accel_limit', 1.) wheel_radius = rospy.get_param('~wheel_radius', 0.2413) wheel_base = rospy.get_param('~wheel_base', 2.8498) steer_ratio = rospy.get_param('~steer_ratio', 14.8) max_lat_accel = rospy.get_param('~max_lat_accel', 3.) max_steer_angle = rospy.get_param('~max_steer_angle', 8.) self.steer_pub = rospy.Publisher('/vehicle/steering_cmd', SteeringCmd, queue_size=1) self.throttle_pub = rospy.Publisher('/vehicle/throttle_cmd', ThrottleCmd, queue_size=1) self.brake_pub = rospy.Publisher('/vehicle/brake_cmd', BrakeCmd, queue_size=1) self.controller = Controller(vehicle_mass=vehicle_mass, fuel_capacity=fuel_capacity, brake_deadband=brake_deadband, decel_limit=decel_limit, accel_limit=accel_limit, wheel_radius=wheel_radius, wheel_base=wheel_base, steer_ratio=steer_ratio, max_lat_accel=max_lat_accel, max_steer_angle=max_steer_angle) rospy.Subscriber('/twist_cmd', TwistStamped, self.twist_cmd_cb) rospy.Subscriber('/current_velocity', TwistStamped, self.current_velocity_cb) rospy.Subscriber('/vehicle/dbw_enabled', Bool, self.dbw_enabled_cb, queue_size=1) self.current_vel = None self.curr_ang_vel = None self.dbw_enabled = None self.target_vel = None self.target_angular_vel = None self.throttle = self.steering = self.brake = 0 self.loop() def loop(self): rate = rospy.Rate(50) while not rospy.is_shutdown(): if self.dbw_enabled and (self.current_vel is not None) and (self.target_vel is not None) and ( self.target_angular_vel is not None): self.throttle, self.brake, self.steering = self.controller.control(self.current_vel, self.target_vel, self.target_angular_vel) self.publish(self.throttle, self.brake, self.steering) rate.sleep() def dbw_enabled_cb(self, msg): self.controller.reset() self.dbw_enabled = msg.data if self.dbw_enabled: rospy.logwarn('TwistController is online.') else: rospy.logwarn('TwistController is offline.') def twist_cmd_cb(self, msg): self.target_vel = msg.twist.linear.x self.target_angular_vel = msg.twist.angular.z def current_velocity_cb(self, msg): self.current_vel = msg.twist.linear.x def publish(self, throttle, brake, steer): tcmd = ThrottleCmd() tcmd.enable = True tcmd.pedal_cmd_type = ThrottleCmd.CMD_PERCENT tcmd.pedal_cmd = throttle self.throttle_pub.publish(tcmd) scmd = SteeringCmd() scmd.enable = True scmd.steering_wheel_angle_cmd = steer self.steer_pub.publish(scmd) bcmd = BrakeCmd() bcmd.enable = True bcmd.pedal_cmd_type = BrakeCmd.CMD_TORQUE bcmd.pedal_cmd = brake self.brake_pub.publish(bcmd) if __name__ == '__main__': DBWNode()
true
true
f711b54c08bfea89b343bd24d2a4027f25566049
1,803
py
Python
utils/model.py
alipsgh/deep-mix-nets
3c60897687046523d58a321ca0f7cd69dbcf78a1
[ "MIT" ]
null
null
null
utils/model.py
alipsgh/deep-mix-nets
3c60897687046523d58a321ca0f7cd69dbcf78a1
[ "MIT" ]
null
null
null
utils/model.py
alipsgh/deep-mix-nets
3c60897687046523d58a321ca0f7cd69dbcf78a1
[ "MIT" ]
null
null
null
import yaml from models.fasttext import FastText from models.attention_rnn import AttentionRNN from models.rcnn import RCNN from models.textcnn import TextCNN from models.textrnn import TextRNN from models.transformer import Transformer from utils.logger import get_logger def instantiate_model(model_name, vocab_size, embeddings): multi_layer_args = yaml.load(open('./configs/multi_layer.yml'), Loader=yaml.FullLoader) if model_name == "rcnn": model_args = yaml.load(open('./configs/rcnn.yml'), Loader=yaml.FullLoader) model = RCNN(vocab_size, embeddings, **{**model_args, **multi_layer_args}) elif model_name == "textcnn": model_args = yaml.load(open('./configs/textcnn.yml'), Loader=yaml.FullLoader) model = TextCNN(vocab_size, embeddings, **{**model_args, **multi_layer_args}) elif model_name == "textrnn": model_args = yaml.load(open('./configs/textrnn.yml'), Loader=yaml.FullLoader) model = TextRNN(vocab_size, embeddings, **{**model_args, **multi_layer_args}) elif model_name == "attention_rnn": model_args = yaml.load(open('./configs/attention_rnn.yml'), Loader=yaml.FullLoader) model = AttentionRNN(vocab_size, embeddings, **{**model_args, **multi_layer_args}) elif model_name == "transformer": model_args = yaml.load(open('./configs/transformer.yml'), Loader=yaml.FullLoader) model = Transformer(vocab_size, embeddings, **{**model_args, **multi_layer_args}) else: model_args = yaml.load(open('./configs/fasttext.yml'), Loader=yaml.FullLoader) model = FastText(vocab_size, embeddings, **{**model_args, **multi_layer_args}) logger = get_logger(__name__) logger.info("A model of {} is instantiated.".format(model.__class__.__name__)) return model
40.066667
91
0.710483
import yaml from models.fasttext import FastText from models.attention_rnn import AttentionRNN from models.rcnn import RCNN from models.textcnn import TextCNN from models.textrnn import TextRNN from models.transformer import Transformer from utils.logger import get_logger def instantiate_model(model_name, vocab_size, embeddings): multi_layer_args = yaml.load(open('./configs/multi_layer.yml'), Loader=yaml.FullLoader) if model_name == "rcnn": model_args = yaml.load(open('./configs/rcnn.yml'), Loader=yaml.FullLoader) model = RCNN(vocab_size, embeddings, **{**model_args, **multi_layer_args}) elif model_name == "textcnn": model_args = yaml.load(open('./configs/textcnn.yml'), Loader=yaml.FullLoader) model = TextCNN(vocab_size, embeddings, **{**model_args, **multi_layer_args}) elif model_name == "textrnn": model_args = yaml.load(open('./configs/textrnn.yml'), Loader=yaml.FullLoader) model = TextRNN(vocab_size, embeddings, **{**model_args, **multi_layer_args}) elif model_name == "attention_rnn": model_args = yaml.load(open('./configs/attention_rnn.yml'), Loader=yaml.FullLoader) model = AttentionRNN(vocab_size, embeddings, **{**model_args, **multi_layer_args}) elif model_name == "transformer": model_args = yaml.load(open('./configs/transformer.yml'), Loader=yaml.FullLoader) model = Transformer(vocab_size, embeddings, **{**model_args, **multi_layer_args}) else: model_args = yaml.load(open('./configs/fasttext.yml'), Loader=yaml.FullLoader) model = FastText(vocab_size, embeddings, **{**model_args, **multi_layer_args}) logger = get_logger(__name__) logger.info("A model of {} is instantiated.".format(model.__class__.__name__)) return model
true
true
f711b5c6cb4a750d5b26cab33dd01b22cd4a4722
4,606
py
Python
celery_progress/backend.py
darwin-homes/celery-progress
ec3c8ea0582f75b9181fab3ffc746ef982f00839
[ "MIT" ]
null
null
null
celery_progress/backend.py
darwin-homes/celery-progress
ec3c8ea0582f75b9181fab3ffc746ef982f00839
[ "MIT" ]
null
null
null
celery_progress/backend.py
darwin-homes/celery-progress
ec3c8ea0582f75b9181fab3ffc746ef982f00839
[ "MIT" ]
null
null
null
import datetime import logging from abc import ABCMeta, abstractmethod from decimal import Decimal from celery.result import EagerResult, allow_join_result from celery.backends.base import DisabledBackend logger = logging.getLogger(__name__) PROGRESS_STATE = 'PROGRESS' class AbstractProgressRecorder(object): __metaclass__ = ABCMeta @abstractmethod def set_progress(self, current, total, description=""): pass class ConsoleProgressRecorder(AbstractProgressRecorder): def set_progress(self, current, total, description=""): print('processed {} items of {}. {}'.format(current, total, description)) class ProgressRecorder(AbstractProgressRecorder): def __init__(self, task): self.task = task def set_progress(self, current, total, description=""): percent = 0 if total > 0: percent = (Decimal(current) / Decimal(total)) * Decimal(100) percent = float(round(percent, 2)) state = PROGRESS_STATE meta = { 'pending': False, 'current': current, 'total': total, 'percent': percent, 'description': description } self.task.update_state( state=state, meta=meta ) return state, meta class Progress(object): def __init__(self, result): """ result: an AsyncResult or an object that mimics it to a degree """ self.result = result def get_info(self): state = self.result.state response = {'state': state} if state in ['SUCCESS', 'FAILURE']: success = self.result.successful() with allow_join_result(): response.update({ 'complete': True, 'success': success, 'progress': _get_completed_progress(), 'result': self.result.get(self.result.id) if success else str(self.result.info), }) elif state in ['RETRY', 'REVOKED']: if state == 'RETRY': retry = self.result.info when = str(retry.when) if isinstance(retry.when, datetime.datetime) else str( datetime.datetime.now() + datetime.timedelta(seconds=retry.when)) result = {'when': when, 'message': retry.message or str(retry.exc)} else: result = 'Task ' + str(self.result.info) response.update({ 'complete': True, 'success': False, 'progress': _get_completed_progress(), 'result': result, }) elif state == 'IGNORED': response.update({ 'complete': True, 'success': None, 'progress': _get_completed_progress(), 'result': str(self.result.info) }) elif state == PROGRESS_STATE: response.update({ 'complete': False, 'success': None, 'progress': self.result.info, }) elif state in ['PENDING', 'STARTED']: response.update({ 'complete': False, 'success': None, 'progress': _get_unknown_progress(state), }) else: logger.error('Task %s has unknown state %s with metadata %s', self.result.id, state, self.result.info) response.update({ 'complete': True, 'success': False, 'progress': _get_unknown_progress(state), 'result': 'Unknown state {}'.format(state), }) return response class KnownResult(EagerResult): """Like EagerResult but supports non-ready states.""" def __init__(self, id, ret_value, state, traceback=None): """ ret_value: result, exception, or progress metadata """ # set backend to get state groups (like READY_STATES in ready()) self.backend = DisabledBackend super().__init__(id, ret_value, state, traceback) def ready(self): return super(EagerResult, self).ready() def __del__(self): # throws an exception if not overridden pass def _get_completed_progress(): return { 'pending': False, 'current': 100, 'total': 100, 'percent': 100, } def _get_unknown_progress(state): return { 'pending': state == 'PENDING', 'current': 0, 'total': 100, 'percent': 0, }
30.104575
114
0.546244
import datetime import logging from abc import ABCMeta, abstractmethod from decimal import Decimal from celery.result import EagerResult, allow_join_result from celery.backends.base import DisabledBackend logger = logging.getLogger(__name__) PROGRESS_STATE = 'PROGRESS' class AbstractProgressRecorder(object): __metaclass__ = ABCMeta @abstractmethod def set_progress(self, current, total, description=""): pass class ConsoleProgressRecorder(AbstractProgressRecorder): def set_progress(self, current, total, description=""): print('processed {} items of {}. {}'.format(current, total, description)) class ProgressRecorder(AbstractProgressRecorder): def __init__(self, task): self.task = task def set_progress(self, current, total, description=""): percent = 0 if total > 0: percent = (Decimal(current) / Decimal(total)) * Decimal(100) percent = float(round(percent, 2)) state = PROGRESS_STATE meta = { 'pending': False, 'current': current, 'total': total, 'percent': percent, 'description': description } self.task.update_state( state=state, meta=meta ) return state, meta class Progress(object): def __init__(self, result): self.result = result def get_info(self): state = self.result.state response = {'state': state} if state in ['SUCCESS', 'FAILURE']: success = self.result.successful() with allow_join_result(): response.update({ 'complete': True, 'success': success, 'progress': _get_completed_progress(), 'result': self.result.get(self.result.id) if success else str(self.result.info), }) elif state in ['RETRY', 'REVOKED']: if state == 'RETRY': retry = self.result.info when = str(retry.when) if isinstance(retry.when, datetime.datetime) else str( datetime.datetime.now() + datetime.timedelta(seconds=retry.when)) result = {'when': when, 'message': retry.message or str(retry.exc)} else: result = 'Task ' + str(self.result.info) response.update({ 'complete': True, 'success': False, 'progress': _get_completed_progress(), 'result': result, }) elif state == 'IGNORED': response.update({ 'complete': True, 'success': None, 'progress': _get_completed_progress(), 'result': str(self.result.info) }) elif state == PROGRESS_STATE: response.update({ 'complete': False, 'success': None, 'progress': self.result.info, }) elif state in ['PENDING', 'STARTED']: response.update({ 'complete': False, 'success': None, 'progress': _get_unknown_progress(state), }) else: logger.error('Task %s has unknown state %s with metadata %s', self.result.id, state, self.result.info) response.update({ 'complete': True, 'success': False, 'progress': _get_unknown_progress(state), 'result': 'Unknown state {}'.format(state), }) return response class KnownResult(EagerResult): def __init__(self, id, ret_value, state, traceback=None): self.backend = DisabledBackend super().__init__(id, ret_value, state, traceback) def ready(self): return super(EagerResult, self).ready() def __del__(self): pass def _get_completed_progress(): return { 'pending': False, 'current': 100, 'total': 100, 'percent': 100, } def _get_unknown_progress(state): return { 'pending': state == 'PENDING', 'current': 0, 'total': 100, 'percent': 0, }
true
true
f711b676acf632290d811f521077bc40029b180d
3,619
py
Python
shamrock/consensus/default_constants.py
zcomputerwiz/shamrock-blockchain
2e2d8a134f0147379812085543ac98f37ce28c2b
[ "Apache-2.0" ]
null
null
null
shamrock/consensus/default_constants.py
zcomputerwiz/shamrock-blockchain
2e2d8a134f0147379812085543ac98f37ce28c2b
[ "Apache-2.0" ]
null
null
null
shamrock/consensus/default_constants.py
zcomputerwiz/shamrock-blockchain
2e2d8a134f0147379812085543ac98f37ce28c2b
[ "Apache-2.0" ]
null
null
null
from shamrock.util.ints import uint64 from .constants import ConsensusConstants testnet_kwargs = { "SLOT_BLOCKS_TARGET": 32, "MIN_BLOCKS_PER_CHALLENGE_BLOCK": 16, # Must be less than half of SLOT_BLOCKS_TARGET "MAX_SUB_SLOT_BLOCKS": 128, # Must be less than half of SUB_EPOCH_BLOCKS "NUM_SPS_SUB_SLOT": 64, # Must be a power of 2 "SUB_SLOT_ITERS_STARTING": 2 ** 24, # DIFFICULTY_STARTING is the starting difficulty for the first epoch, which is then further # multiplied by another factor of DIFFICULTY_CONSTANT_FACTOR, to be used in the VDF iter calculation formula. "DIFFICULTY_CONSTANT_FACTOR": 2 ** 55, "DIFFICULTY_STARTING": 7, "DIFFICULTY_CHANGE_MAX_FACTOR": 3, # The next difficulty is truncated to range [prev / FACTOR, prev * FACTOR] # These 3 constants must be changed at the same time "SUB_EPOCH_BLOCKS": 384, # The number of blocks per sub-epoch, mainnet 3600 ~ 2 hours "EPOCH_BLOCKS": 768, # The number of blocks per epoch, mainnet 43200 ~ 1 days. Must be multiple of SUB_EPOCH_SB "SIGNIFICANT_BITS": 8, # The number of bits to look at in difficulty and min iters. The rest are zeroed "DISCRIMINANT_SIZE_BITS": 1024, # Max is 1024 (based on ClassGroupElement int size) "NUMBER_ZERO_BITS_PLOT_FILTER": 9, # H(plot signature of the challenge) must start with these many zeroes "MIN_PLOT_SIZE": 32, # 32 for mainnet "MAX_PLOT_SIZE": 50, "SUB_SLOT_TIME_TARGET": 64, # The target number of seconds per slot, mainnet 64 "NUM_SP_INTERVALS_EXTRA": 3, # The number of sp intervals to add to the signage point "MAX_FUTURE_TIME": 5 * 60, # The next block can have a timestamp of at most these many seconds in the future "NUMBER_OF_TIMESTAMPS": 11, # Than the average of the last NUMBER_OF_TIMESTAMPS blocks # Used as the initial cc rc challenges, as well as first block back pointers, and first SES back pointer # We override this value based on the chain being run (testnet0, testnet1, mainnet, etc) # Default used for tests is std_hash(b'') "GENESIS_CHALLENGE": bytes.fromhex("e7fe471110c27b12a5f17c8cc150da370396b8d704c6dd521bc7be99d4f358f6"), # Forks of shamrock should change this value to provide replay attack protection. This is set to mainnet genesis chall "AGG_SIG_ME_ADDITIONAL_DATA": bytes.fromhex("2a6c4b6a3c1e7e13dcf1c77b4553e4d04f1b916a440f61503deedc0899490529"), "GENESIS_PRE_FARM_POOL_PUZZLE_HASH": bytes.fromhex( "d23da14695a188ae5708dd152263c4db883eb27edeb936178d4d988b8f3ce5fc" ), "GENESIS_PRE_FARM_FARMER_PUZZLE_HASH": bytes.fromhex( "3d8765d3a597ec1d99663f6c9816d915b9f68613ac94009884c4addaefcce6af" ), "MAX_VDF_WITNESS_SIZE": 64, # Size of mempool = 50x the size of block "MEMPOOL_BLOCK_BUFFER": 50, # Max coin amount, fits into 64 bits "MAX_COIN_AMOUNT": uint64((1 << 64) - 1), # Max block cost in clvm cost units "MAX_BLOCK_COST_CLVM": 11000000000, # The cost per byte of generator program "COST_PER_BYTE": 12000, "WEIGHT_PROOF_THRESHOLD": 2, "BLOCKS_CACHE_SIZE": 43200 + (128 * 4), "WEIGHT_PROOF_RECENT_BLOCKS": 380, "MAX_BLOCK_COUNT_PER_REQUESTS": 32, # Allow up to 32 blocks per request #"INITIAL_FREEZE_END_TIMESTAMP": 1627318800, # Mon Jul 26 2021 17:00:00 GMT+0000 "NETWORK_TYPE": 0, "MAX_GENERATOR_SIZE": 1000000, "MAX_GENERATOR_REF_LIST_SIZE": 512, # Number of references allowed in the block generator ref list "POOL_SUB_SLOT_ITERS": 37600000000, # iters limit * NUM_SPS } DEFAULT_CONSTANTS = ConsensusConstants(**testnet_kwargs) # type: ignore
59.327869
122
0.741089
from shamrock.util.ints import uint64 from .constants import ConsensusConstants testnet_kwargs = { "SLOT_BLOCKS_TARGET": 32, "MIN_BLOCKS_PER_CHALLENGE_BLOCK": 16, "MAX_SUB_SLOT_BLOCKS": 128, "NUM_SPS_SUB_SLOT": 64, "SUB_SLOT_ITERS_STARTING": 2 ** 24, "DIFFICULTY_CONSTANT_FACTOR": 2 ** 55, "DIFFICULTY_STARTING": 7, "DIFFICULTY_CHANGE_MAX_FACTOR": 3, "SUB_EPOCH_BLOCKS": 384, "EPOCH_BLOCKS": 768, "SIGNIFICANT_BITS": 8, "DISCRIMINANT_SIZE_BITS": 1024, "NUMBER_ZERO_BITS_PLOT_FILTER": 9, "MIN_PLOT_SIZE": 32, "MAX_PLOT_SIZE": 50, "SUB_SLOT_TIME_TARGET": 64, "NUM_SP_INTERVALS_EXTRA": 3, "MAX_FUTURE_TIME": 5 * 60, "NUMBER_OF_TIMESTAMPS": 11, "GENESIS_CHALLENGE": bytes.fromhex("e7fe471110c27b12a5f17c8cc150da370396b8d704c6dd521bc7be99d4f358f6"), "AGG_SIG_ME_ADDITIONAL_DATA": bytes.fromhex("2a6c4b6a3c1e7e13dcf1c77b4553e4d04f1b916a440f61503deedc0899490529"), "GENESIS_PRE_FARM_POOL_PUZZLE_HASH": bytes.fromhex( "d23da14695a188ae5708dd152263c4db883eb27edeb936178d4d988b8f3ce5fc" ), "GENESIS_PRE_FARM_FARMER_PUZZLE_HASH": bytes.fromhex( "3d8765d3a597ec1d99663f6c9816d915b9f68613ac94009884c4addaefcce6af" ), "MAX_VDF_WITNESS_SIZE": 64, "MEMPOOL_BLOCK_BUFFER": 50, "MAX_COIN_AMOUNT": uint64((1 << 64) - 1), "MAX_BLOCK_COST_CLVM": 11000000000, "COST_PER_BYTE": 12000, "WEIGHT_PROOF_THRESHOLD": 2, "BLOCKS_CACHE_SIZE": 43200 + (128 * 4), "WEIGHT_PROOF_RECENT_BLOCKS": 380, "MAX_BLOCK_COUNT_PER_REQUESTS": 32, "NETWORK_TYPE": 0, "MAX_GENERATOR_SIZE": 1000000, "MAX_GENERATOR_REF_LIST_SIZE": 512, "POOL_SUB_SLOT_ITERS": 37600000000, } DEFAULT_CONSTANTS = ConsensusConstants(**testnet_kwargs)
true
true
f711b88b81563a484edd57faf54d35a5138be56c
560
py
Python
src/edu/edu_controller.py
feagi/feagi-core
d83c51480fcbe153fa14b2360b4d61f6ae4e2811
[ "Apache-2.0" ]
11
2020-02-18T16:03:10.000Z
2021-12-06T19:53:06.000Z
src/edu/edu_controller.py
feagi/feagi-core
d83c51480fcbe153fa14b2360b4d61f6ae4e2811
[ "Apache-2.0" ]
34
2019-12-17T04:59:42.000Z
2022-01-18T20:58:46.000Z
src/edu/edu_controller.py
feagi/feagi-core
d83c51480fcbe153fa14b2360b4d61f6ae4e2811
[ "Apache-2.0" ]
3
2019-12-16T06:09:56.000Z
2020-10-18T12:01:31.000Z
""" This module will run as an independent thread and acts as a wrapper to orchestrate the training, testing, etc. Supervised training is coordinated here """ from queue import Queue from threading import Thread from inf import runtime_data def initialize(): return class Controller: def __init__(self): # setup a new thread here return def trainer_mnist(self): return def trainer_fashion_mnist(self): return def tester_mnist(self): return def tester_fashion_mnist(self): return
18.064516
110
0.689286
from queue import Queue from threading import Thread from inf import runtime_data def initialize(): return class Controller: def __init__(self): return def trainer_mnist(self): return def trainer_fashion_mnist(self): return def tester_mnist(self): return def tester_fashion_mnist(self): return
true
true
f711b975bb4838da7a8ff361ad07935ea7c1c91a
1,363
py
Python
setup.py
ggbaro/covid-health-ita
267801c3de021078a1ca5d3b93b47515315f0300
[ "MIT" ]
3
2020-03-25T22:20:07.000Z
2020-03-29T10:01:24.000Z
setup.py
ggbaro/covid-health-ita
267801c3de021078a1ca5d3b93b47515315f0300
[ "MIT" ]
null
null
null
setup.py
ggbaro/covid-health-ita
267801c3de021078a1ca5d3b93b47515315f0300
[ "MIT" ]
null
null
null
import re from os import path from setuptools import find_namespace_packages, setup here = path.abspath(path.dirname(__file__)) with open(path.join(here, "src", "covid_health", "__init__.py")) as init: __version__ = re.findall('__version__ = "([\w\.\-\_]+)"', init.read())[0] with open(path.join(here, "README.md"), encoding="utf-8") as f: long_description = f.read() with open(path.join(here, "requirements.txt"), encoding="utf-8") as f: all_reqs = f.read().split("\n") with open(path.join(here, "requirements-dev.txt"), encoding="utf-8") as f: dev_reqs = f.read().split("\n") setup( name="covid-health", version=__version__, description="", long_description=long_description, long_description_content_type="text/markdown", author="Giacomo Barone, Buildnn", url="https://www.buildnn.com", license="Copyright © 2020 Giacomo Barone / Buildnn. MIT.", classifiers=[], package_dir={"": "src"}, packages=find_namespace_packages( where="src", include=["*"], exclude=["*.egg-info"] ), include_package_data=True, keywords="", install_requires=all_reqs, extras_require={"dev": dev_reqs}, # dependency_links=dependency_links, author_email="giacomo.barone@buildnn.com", entry_points=""" [console_scripts] covid-data=covid_health.cli:main """, )
29
77
0.663243
import re from os import path from setuptools import find_namespace_packages, setup here = path.abspath(path.dirname(__file__)) with open(path.join(here, "src", "covid_health", "__init__.py")) as init: __version__ = re.findall('__version__ = "([\w\.\-\_]+)"', init.read())[0] with open(path.join(here, "README.md"), encoding="utf-8") as f: long_description = f.read() with open(path.join(here, "requirements.txt"), encoding="utf-8") as f: all_reqs = f.read().split("\n") with open(path.join(here, "requirements-dev.txt"), encoding="utf-8") as f: dev_reqs = f.read().split("\n") setup( name="covid-health", version=__version__, description="", long_description=long_description, long_description_content_type="text/markdown", author="Giacomo Barone, Buildnn", url="https://www.buildnn.com", license="Copyright © 2020 Giacomo Barone / Buildnn. MIT.", classifiers=[], package_dir={"": "src"}, packages=find_namespace_packages( where="src", include=["*"], exclude=["*.egg-info"] ), include_package_data=True, keywords="", install_requires=all_reqs, extras_require={"dev": dev_reqs}, author_email="giacomo.barone@buildnn.com", entry_points=""" [console_scripts] covid-data=covid_health.cli:main """, )
true
true
f711ba8f3c0faa6064bbd30fb8baee70edbc3a0d
13,844
py
Python
lib-python/3/test/test_readline.py
hollmmax/zig
d80baa5a5fcbc82b3e2294b398edc20a98737a52
[ "MIT" ]
null
null
null
lib-python/3/test/test_readline.py
hollmmax/zig
d80baa5a5fcbc82b3e2294b398edc20a98737a52
[ "MIT" ]
1
2022-02-22T00:59:49.000Z
2022-02-22T00:59:49.000Z
lib-python/3/test/test_readline.py
hollmmax/zig
d80baa5a5fcbc82b3e2294b398edc20a98737a52
[ "MIT" ]
1
2022-03-30T11:42:37.000Z
2022-03-30T11:42:37.000Z
""" Very minimal unittests for parts of the readline module. """ from contextlib import ExitStack from errno import EIO import locale import os import selectors import subprocess import sys import tempfile import unittest from test.support import import_module, unlink, temp_dir, TESTFN, verbose from test.support.script_helper import assert_python_ok # Skip tests if there is no readline module readline = import_module('readline') if hasattr(readline, "_READLINE_LIBRARY_VERSION"): is_editline = ("EditLine wrapper" in readline._READLINE_LIBRARY_VERSION) else: is_editline = (readline.__doc__ and "libedit" in readline.__doc__) def setUpModule(): if verbose: # Python implementations other than CPython may not have # these private attributes if hasattr(readline, "_READLINE_VERSION"): print(f"readline version: {readline._READLINE_VERSION:#x}") print(f"readline runtime version: {readline._READLINE_RUNTIME_VERSION:#x}") if hasattr(readline, "_READLINE_LIBRARY_VERSION"): print(f"readline library version: {readline._READLINE_LIBRARY_VERSION!r}") print(f"use libedit emulation? {is_editline}") @unittest.skipUnless(hasattr(readline, "clear_history"), "The history update test cannot be run because the " "clear_history method is not available.") class TestHistoryManipulation (unittest.TestCase): """ These tests were added to check that the libedit emulation on OSX and the "real" readline have the same interface for history manipulation. That's why the tests cover only a small subset of the interface. """ def testHistoryUpdates(self): readline.clear_history() readline.add_history("first line") readline.add_history("second line") self.assertEqual(readline.get_history_item(0), None) self.assertEqual(readline.get_history_item(1), "first line") self.assertEqual(readline.get_history_item(2), "second line") readline.replace_history_item(0, "replaced line") self.assertEqual(readline.get_history_item(0), None) self.assertEqual(readline.get_history_item(1), "replaced line") self.assertEqual(readline.get_history_item(2), "second line") self.assertEqual(readline.get_current_history_length(), 2) readline.remove_history_item(0) self.assertEqual(readline.get_history_item(0), None) self.assertEqual(readline.get_history_item(1), "second line") self.assertEqual(readline.get_current_history_length(), 1) @unittest.skipUnless(hasattr(readline, "append_history_file"), "append_history not available") def test_write_read_append(self): hfile = tempfile.NamedTemporaryFile(delete=False) hfile.close() hfilename = hfile.name self.addCleanup(unlink, hfilename) # test write-clear-read == nop readline.clear_history() readline.add_history("first line") readline.add_history("second line") readline.write_history_file(hfilename) readline.clear_history() self.assertEqual(readline.get_current_history_length(), 0) readline.read_history_file(hfilename) self.assertEqual(readline.get_current_history_length(), 2) self.assertEqual(readline.get_history_item(1), "first line") self.assertEqual(readline.get_history_item(2), "second line") # test append readline.append_history_file(1, hfilename) readline.clear_history() readline.read_history_file(hfilename) self.assertEqual(readline.get_current_history_length(), 3) self.assertEqual(readline.get_history_item(1), "first line") self.assertEqual(readline.get_history_item(2), "second line") self.assertEqual(readline.get_history_item(3), "second line") # test 'no such file' behaviour os.unlink(hfilename) with self.assertRaises(FileNotFoundError): readline.append_history_file(1, hfilename) # write_history_file 
can create the target readline.write_history_file(hfilename) def test_nonascii_history(self): readline.clear_history() try: readline.add_history("entrée 1") except UnicodeEncodeError as err: self.skipTest("Locale cannot encode test data: " + format(err)) readline.add_history("entrée 2") readline.replace_history_item(1, "entrée 22") readline.write_history_file(TESTFN) self.addCleanup(os.remove, TESTFN) readline.clear_history() readline.read_history_file(TESTFN) if is_editline: # An add_history() call seems to be required for get_history_ # item() to register items from the file readline.add_history("dummy") self.assertEqual(readline.get_history_item(1), "entrée 1") self.assertEqual(readline.get_history_item(2), "entrée 22") class TestReadline(unittest.TestCase): @unittest.skipIf(getattr(readline, '_READLINE_VERSION', 0x0601) < 0x0601 and not is_editline, "not supported in this library version") def test_init(self): # Issue #19884: Ensure that the ANSI sequence "\033[1034h" is not # written into stdout when the readline module is imported and stdout # is redirected to a pipe. rc, stdout, stderr = assert_python_ok('-c', 'import readline', TERM='xterm-256color') self.assertEqual(stdout, b'') auto_history_script = """\ import readline readline.set_auto_history({}) input() print("History length:", readline.get_current_history_length()) """ def test_auto_history_enabled(self): output = run_pty(self.auto_history_script.format(True)) # bpo-44949: Sometimes, the newline character is not written at the # end, so don't expect it in the output. self.assertIn(b"History length: 1", output) def test_auto_history_disabled(self): output = run_pty(self.auto_history_script.format(False)) # bpo-44949: Sometimes, the newline character is not written at the # end, so don't expect it in the output. self.assertIn(b"History length: 0", output) @unittest.skipIf(not hasattr(readline, 'set_completion_display_matches_hook'), "function not reimplemented in pypy") def test_nonascii(self): loc = locale.setlocale(locale.LC_CTYPE, None) if loc in ('C', 'POSIX'): # bpo-29240: On FreeBSD, if the LC_CTYPE locale is C or POSIX, # writing and reading non-ASCII bytes into/from a TTY works, but # readline or ncurses ignores non-ASCII bytes on read. 
self.skipTest(f"the LC_CTYPE locale is {loc!r}") try: readline.add_history("\xEB\xEF") except UnicodeEncodeError as err: self.skipTest("Locale cannot encode test data: " + format(err)) script = r"""import readline is_editline = readline.__doc__ and "libedit" in readline.__doc__ inserted = "[\xEFnserted]" macro = "|t\xEB[after]" set_pre_input_hook = getattr(readline, "set_pre_input_hook", None) if is_editline or not set_pre_input_hook: # The insert_line() call via pre_input_hook() does nothing with Editline, # so include the extra text that would have been inserted here macro = inserted + macro if is_editline: readline.parse_and_bind(r'bind ^B ed-prev-char') readline.parse_and_bind(r'bind "\t" rl_complete') readline.parse_and_bind(r'bind -s ^A "{}"'.format(macro)) else: readline.parse_and_bind(r'Control-b: backward-char') readline.parse_and_bind(r'"\t": complete') readline.parse_and_bind(r'set disable-completion off') readline.parse_and_bind(r'set show-all-if-ambiguous off') readline.parse_and_bind(r'set show-all-if-unmodified off') readline.parse_and_bind(r'Control-a: "{}"'.format(macro)) def pre_input_hook(): readline.insert_text(inserted) readline.redisplay() if set_pre_input_hook: set_pre_input_hook(pre_input_hook) def completer(text, state): if text == "t\xEB": if state == 0: print("text", ascii(text)) print("line", ascii(readline.get_line_buffer())) print("indexes", readline.get_begidx(), readline.get_endidx()) return "t\xEBnt" if state == 1: return "t\xEBxt" if text == "t\xEBx" and state == 0: return "t\xEBxt" return None readline.set_completer(completer) def display(substitution, matches, longest_match_length): print("substitution", ascii(substitution)) print("matches", ascii(matches)) readline.set_completion_display_matches_hook(display) print("result", ascii(input())) print("history", ascii(readline.get_history_item(1))) """ input = b"\x01" # Ctrl-A, expands to "|t\xEB[after]" input += b"\x02" * len("[after]") # Move cursor back input += b"\t\t" # Display possible completions input += b"x\t" # Complete "t\xEBx" -> "t\xEBxt" input += b"\r" output = run_pty(script, input) self.assertIn(b"text 't\\xeb'\r\n", output) self.assertIn(b"line '[\\xefnserted]|t\\xeb[after]'\r\n", output) self.assertIn(b"indexes 11 13\r\n", output) if not is_editline and hasattr(readline, "set_pre_input_hook"): self.assertIn(b"substitution 't\\xeb'\r\n", output) self.assertIn(b"matches ['t\\xebnt', 't\\xebxt']\r\n", output) expected = br"'[\xefnserted]|t\xebxt[after]'" self.assertIn(b"result " + expected + b"\r\n", output) # bpo-45195: Sometimes, the newline character is not written at the # end, so don't expect it in the output. self.assertIn(b"history " + expected, output) # We have 2 reasons to skip this test: # - readline: history size was added in 6.0 # See https://cnswww.cns.cwru.edu/php/chet/readline/CHANGES # - editline: history size is broken on OS X 10.11.6. # Newer versions were not tested yet. 
@unittest.skipIf(getattr(readline, "_READLINE_VERSION", 0x601) < 0x600, "this readline version does not support history-size") @unittest.skipIf(is_editline, "editline history size configuration is broken") def test_history_size(self): history_size = 10 with temp_dir() as test_dir: inputrc = os.path.join(test_dir, "inputrc") with open(inputrc, "wb") as f: f.write(b"set history-size %d\n" % history_size) history_file = os.path.join(test_dir, "history") with open(history_file, "wb") as f: # history_size * 2 items crashes readline data = b"".join(b"item %d\n" % i for i in range(history_size * 2)) f.write(data) script = """ import os import readline history_file = os.environ["HISTORY_FILE"] readline.read_history_file(history_file) input() readline.write_history_file(history_file) """ env = dict(os.environ) env["INPUTRC"] = inputrc env["HISTORY_FILE"] = history_file run_pty(script, input=b"last input\r", env=env) with open(history_file, "rb") as f: lines = f.readlines() self.assertEqual(len(lines), history_size) self.assertEqual(lines[-1].strip(), b"last input") def run_pty(script, input=b"dummy input\r", env=None): pty = import_module('pty') output = bytearray() [master, slave] = pty.openpty() args = (sys.executable, '-c', script) proc = subprocess.Popen(args, stdin=slave, stdout=slave, stderr=slave, env=env) os.close(slave) with ExitStack() as cleanup: cleanup.enter_context(proc) def terminate(proc): try: proc.terminate() except ProcessLookupError: # Workaround for Open/Net BSD bug (Issue 16762) pass cleanup.callback(terminate, proc) cleanup.callback(os.close, master) # Avoid using DefaultSelector and PollSelector. Kqueue() does not # work with pseudo-terminals on OS X < 10.9 (Issue 20365) and Open # BSD (Issue 20667). Poll() does not work with OS X 10.6 or 10.4 # either (Issue 20472). Hopefully the file descriptor is low enough # to use with select(). sel = cleanup.enter_context(selectors.SelectSelector()) sel.register(master, selectors.EVENT_READ | selectors.EVENT_WRITE) os.set_blocking(master, False) while True: for [_, events] in sel.select(): if events & selectors.EVENT_READ: try: chunk = os.read(master, 0x10000) except OSError as err: # Linux raises EIO when slave is closed (Issue 5380) if err.errno != EIO: raise chunk = b"" if not chunk: return output output.extend(chunk) if events & selectors.EVENT_WRITE: try: input = input[os.write(master, input):] except OSError as err: # Apparently EIO means the slave was closed if err.errno != EIO: raise input = b"" # Stop writing if not input: sel.modify(master, selectors.EVENT_READ) if __name__ == "__main__": unittest.main()
40.244186
87
0.635654
from contextlib import ExitStack from errno import EIO import locale import os import selectors import subprocess import sys import tempfile import unittest from test.support import import_module, unlink, temp_dir, TESTFN, verbose from test.support.script_helper import assert_python_ok readline = import_module('readline') if hasattr(readline, "_READLINE_LIBRARY_VERSION"): is_editline = ("EditLine wrapper" in readline._READLINE_LIBRARY_VERSION) else: is_editline = (readline.__doc__ and "libedit" in readline.__doc__) def setUpModule(): if verbose: if hasattr(readline, "_READLINE_VERSION"): print(f"readline version: {readline._READLINE_VERSION:#x}") print(f"readline runtime version: {readline._READLINE_RUNTIME_VERSION:#x}") if hasattr(readline, "_READLINE_LIBRARY_VERSION"): print(f"readline library version: {readline._READLINE_LIBRARY_VERSION!r}") print(f"use libedit emulation? {is_editline}") @unittest.skipUnless(hasattr(readline, "clear_history"), "The history update test cannot be run because the " "clear_history method is not available.") class TestHistoryManipulation (unittest.TestCase): def testHistoryUpdates(self): readline.clear_history() readline.add_history("first line") readline.add_history("second line") self.assertEqual(readline.get_history_item(0), None) self.assertEqual(readline.get_history_item(1), "first line") self.assertEqual(readline.get_history_item(2), "second line") readline.replace_history_item(0, "replaced line") self.assertEqual(readline.get_history_item(0), None) self.assertEqual(readline.get_history_item(1), "replaced line") self.assertEqual(readline.get_history_item(2), "second line") self.assertEqual(readline.get_current_history_length(), 2) readline.remove_history_item(0) self.assertEqual(readline.get_history_item(0), None) self.assertEqual(readline.get_history_item(1), "second line") self.assertEqual(readline.get_current_history_length(), 1) @unittest.skipUnless(hasattr(readline, "append_history_file"), "append_history not available") def test_write_read_append(self): hfile = tempfile.NamedTemporaryFile(delete=False) hfile.close() hfilename = hfile.name self.addCleanup(unlink, hfilename) readline.clear_history() readline.add_history("first line") readline.add_history("second line") readline.write_history_file(hfilename) readline.clear_history() self.assertEqual(readline.get_current_history_length(), 0) readline.read_history_file(hfilename) self.assertEqual(readline.get_current_history_length(), 2) self.assertEqual(readline.get_history_item(1), "first line") self.assertEqual(readline.get_history_item(2), "second line") readline.append_history_file(1, hfilename) readline.clear_history() readline.read_history_file(hfilename) self.assertEqual(readline.get_current_history_length(), 3) self.assertEqual(readline.get_history_item(1), "first line") self.assertEqual(readline.get_history_item(2), "second line") self.assertEqual(readline.get_history_item(3), "second line") os.unlink(hfilename) with self.assertRaises(FileNotFoundError): readline.append_history_file(1, hfilename) readline.write_history_file(hfilename) def test_nonascii_history(self): readline.clear_history() try: readline.add_history("entrée 1") except UnicodeEncodeError as err: self.skipTest("Locale cannot encode test data: " + format(err)) readline.add_history("entrée 2") readline.replace_history_item(1, "entrée 22") readline.write_history_file(TESTFN) self.addCleanup(os.remove, TESTFN) readline.clear_history() readline.read_history_file(TESTFN) if is_editline: readline.add_history("dummy") 
self.assertEqual(readline.get_history_item(1), "entrée 1") self.assertEqual(readline.get_history_item(2), "entrée 22") class TestReadline(unittest.TestCase): @unittest.skipIf(getattr(readline, '_READLINE_VERSION', 0x0601) < 0x0601 and not is_editline, "not supported in this library version") def test_init(self): rc, stdout, stderr = assert_python_ok('-c', 'import readline', TERM='xterm-256color') self.assertEqual(stdout, b'') auto_history_script = """\ import readline readline.set_auto_history({}) input() print("History length:", readline.get_current_history_length()) """ def test_auto_history_enabled(self): output = run_pty(self.auto_history_script.format(True)) self.assertIn(b"History length: 1", output) def test_auto_history_disabled(self): output = run_pty(self.auto_history_script.format(False)) # bpo-44949: Sometimes, the newline character is not written at the # end, so don't expect it in the output. self.assertIn(b"History length: 0", output) @unittest.skipIf(not hasattr(readline, 'set_completion_display_matches_hook'), "function not reimplemented in pypy") def test_nonascii(self): loc = locale.setlocale(locale.LC_CTYPE, None) if loc in ('C', 'POSIX'): self.skipTest(f"the LC_CTYPE locale is {loc!r}") try: readline.add_history("\xEB\xEF") except UnicodeEncodeError as err: self.skipTest("Locale cannot encode test data: " + format(err)) script = r"""import readline is_editline = readline.__doc__ and "libedit" in readline.__doc__ inserted = "[\xEFnserted]" macro = "|t\xEB[after]" set_pre_input_hook = getattr(readline, "set_pre_input_hook", None) if is_editline or not set_pre_input_hook: # The insert_line() call via pre_input_hook() does nothing with Editline, # so include the extra text that would have been inserted here macro = inserted + macro if is_editline: readline.parse_and_bind(r'bind ^B ed-prev-char') readline.parse_and_bind(r'bind "\t" rl_complete') readline.parse_and_bind(r'bind -s ^A "{}"'.format(macro)) else: readline.parse_and_bind(r'Control-b: backward-char') readline.parse_and_bind(r'"\t": complete') readline.parse_and_bind(r'set disable-completion off') readline.parse_and_bind(r'set show-all-if-ambiguous off') readline.parse_and_bind(r'set show-all-if-unmodified off') readline.parse_and_bind(r'Control-a: "{}"'.format(macro)) def pre_input_hook(): readline.insert_text(inserted) readline.redisplay() if set_pre_input_hook: set_pre_input_hook(pre_input_hook) def completer(text, state): if text == "t\xEB": if state == 0: print("text", ascii(text)) print("line", ascii(readline.get_line_buffer())) print("indexes", readline.get_begidx(), readline.get_endidx()) return "t\xEBnt" if state == 1: return "t\xEBxt" if text == "t\xEBx" and state == 0: return "t\xEBxt" return None readline.set_completer(completer) def display(substitution, matches, longest_match_length): print("substitution", ascii(substitution)) print("matches", ascii(matches)) readline.set_completion_display_matches_hook(display) print("result", ascii(input())) print("history", ascii(readline.get_history_item(1))) """ input = b"\x01" input += b"\x02" * len("[after]") input += b"\t\t" input += b"x\t" input += b"\r" output = run_pty(script, input) self.assertIn(b"text 't\\xeb'\r\n", output) self.assertIn(b"line '[\\xefnserted]|t\\xeb[after]'\r\n", output) self.assertIn(b"indexes 11 13\r\n", output) if not is_editline and hasattr(readline, "set_pre_input_hook"): self.assertIn(b"substitution 't\\xeb'\r\n", output) self.assertIn(b"matches ['t\\xebnt', 't\\xebxt']\r\n", output) expected = br"'[\xefnserted]|t\xebxt[after]'" self.assertIn(b"result " +
expected + b"\r\n", output) self.assertIn(b"history " + expected, output) # We have 2 reasons to skip this test: # - readline: history size was added in 6.0 # See https://cnswww.cns.cwru.edu/php/chet/readline/CHANGES # - editline: history size is broken on OS X 10.11.6. # Newer versions were not tested yet. @unittest.skipIf(getattr(readline, "_READLINE_VERSION", 0x601) < 0x600, "this readline version does not support history-size") @unittest.skipIf(is_editline, "editline history size configuration is broken") def test_history_size(self): history_size = 10 with temp_dir() as test_dir: inputrc = os.path.join(test_dir, "inputrc") with open(inputrc, "wb") as f: f.write(b"set history-size %d\n" % history_size) history_file = os.path.join(test_dir, "history") with open(history_file, "wb") as f: # history_size * 2 items crashes readline data = b"".join(b"item %d\n" % i for i in range(history_size * 2)) f.write(data) script = """ import os import readline history_file = os.environ["HISTORY_FILE"] readline.read_history_file(history_file) input() readline.write_history_file(history_file) """ env = dict(os.environ) env["INPUTRC"] = inputrc env["HISTORY_FILE"] = history_file run_pty(script, input=b"last input\r", env=env) with open(history_file, "rb") as f: lines = f.readlines() self.assertEqual(len(lines), history_size) self.assertEqual(lines[-1].strip(), b"last input") def run_pty(script, input=b"dummy input\r", env=None): pty = import_module('pty') output = bytearray() [master, slave] = pty.openpty() args = (sys.executable, '-c', script) proc = subprocess.Popen(args, stdin=slave, stdout=slave, stderr=slave, env=env) os.close(slave) with ExitStack() as cleanup: cleanup.enter_context(proc) def terminate(proc): try: proc.terminate() except ProcessLookupError: # Workaround for Open/Net BSD bug (Issue 16762) pass cleanup.callback(terminate, proc) cleanup.callback(os.close, master) # Avoid using DefaultSelector and PollSelector. Kqueue() does not # work with pseudo-terminals on OS X < 10.9 (Issue 20365) and Open # BSD (Issue 20667). Poll() does not work with OS X 10.6 or 10.4 # either (Issue 20472). Hopefully the file descriptor is low enough # to use with select(). sel = cleanup.enter_context(selectors.SelectSelector()) sel.register(master, selectors.EVENT_READ | selectors.EVENT_WRITE) os.set_blocking(master, False) while True: for [_, events] in sel.select(): if events & selectors.EVENT_READ: try: chunk = os.read(master, 0x10000) except OSError as err: # Linux raises EIO when slave is closed (Issue 5380) if err.errno != EIO: raise chunk = b"" if not chunk: return output output.extend(chunk) if events & selectors.EVENT_WRITE: try: input = input[os.write(master, input):] except OSError as err: # Apparently EIO means the slave was closed if err.errno != EIO: raise input = b"" # Stop writing if not input: sel.modify(master, selectors.EVENT_READ) if __name__ == "__main__": unittest.main()
true
true
f711bb2f78fd2588291d2748fdc4a3155a112679
11,194
py
Python
unit_tests/utilities/test_zaza_utilities_openstack_upgrade.py
gnuoy/zaza-openstack-tests
0546e01b627d7e0a785ef801e88743480e94cbed
[ "ECL-2.0", "Apache-2.0" ]
null
null
null
unit_tests/utilities/test_zaza_utilities_openstack_upgrade.py
gnuoy/zaza-openstack-tests
0546e01b627d7e0a785ef801e88743480e94cbed
[ "ECL-2.0", "Apache-2.0" ]
null
null
null
unit_tests/utilities/test_zaza_utilities_openstack_upgrade.py
gnuoy/zaza-openstack-tests
0546e01b627d7e0a785ef801e88743480e94cbed
[ "ECL-2.0", "Apache-2.0" ]
null
null
null
# Copyright 2019 Canonical Ltd. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import copy import mock import unit_tests.utils as ut_utils import zaza.openstack.utilities.openstack_upgrade as openstack_upgrade class TestOpenStackUpgradeUtils(ut_utils.BaseTestCase): async def _arun_action_on_units(self, units, cmd, model_name=None, raise_on_failure=True): pass def setUp(self): super(TestOpenStackUpgradeUtils, self).setUp() self.patch_object( openstack_upgrade.zaza.model, "async_run_action_on_units") self.async_run_action_on_units.side_effect = self._arun_action_on_units self.patch_object( openstack_upgrade.zaza.model, "get_units") self.juju_status = mock.MagicMock() self.patch_object( openstack_upgrade.zaza.model, "get_status", return_value=self.juju_status) self.patch_object( openstack_upgrade.zaza.model, "set_application_config") self.patch_object( openstack_upgrade.zaza.model, "get_application_config") def _get_application_config(app, model_name=None): app_config = { 'ceph-mon': {'verbose': True, 'source': 'old-src'}, 'neutron-openvswitch': {'verbose': True}, 'ntp': {'verbose': True}, 'percona-cluster': {'verbose': True, 'source': 'old-src'}, 'cinder': { 'verbose': True, 'openstack-origin': 'old-src', 'action-managed-upgrade': False}, 'neutron-api': { 'verbose': True, 'openstack-origin': 'old-src', 'action-managed-upgrade': False}, 'nova-compute': { 'verbose': True, 'openstack-origin': 'old-src', 'action-managed-upgrade': False}, } return app_config[app] self.get_application_config.side_effect = _get_application_config self.juju_status.applications = { 'mydb': { # Filter as it is on UPGRADE_EXCLUDE_LIST 'charm': 'cs:percona-cluster'}, 'neutron-openvswitch': { # Filter as it is a subordinates 'charm': 'cs:neutron-openvswitch', 'subordinate-to': 'nova-compute'}, 'ntp': { # Filter as it has no source option 'charm': 'cs:ntp'}, 'nova-compute': { 'charm': 'cs:nova-compute', 'units': { 'nova-compute/0': { 'subordinates': { 'neutron-openvswitch/2': { 'charm': 'cs:neutron-openvswitch-22'}}}}}, 'cinder': { 'charm': 'cs:cinder-23', 'units': { 'cinder/1': { 'subordinates': { 'cinder-hacluster/0': { 'charm': 'cs:hacluster-42'}, 'cinder-ceph/3': { 'charm': 'cs:cinder-ceph-2'}}}}}} def test_pause_units(self): openstack_upgrade.pause_units(['cinder/1', 'glance/2']) self.async_run_action_on_units.assert_called_once_with( ['cinder/1', 'glance/2'], 'pause', model_name=None, raise_on_failure=True) def test_resume_units(self): openstack_upgrade.resume_units(['cinder/1', 'glance/2']) self.async_run_action_on_units.assert_called_once_with( ['cinder/1', 'glance/2'], 'resume', model_name=None, raise_on_failure=True) def test_action_unit_upgrade(self): openstack_upgrade.action_unit_upgrade(['cinder/1', 'glance/2']) self.async_run_action_on_units.assert_called_once_with( ['cinder/1', 'glance/2'], 'openstack-upgrade', model_name=None, raise_on_failure=True) def test_action_upgrade_group(self): self.patch_object(openstack_upgrade, "pause_units") self.patch_object(openstack_upgrade, "action_unit_upgrade") 
self.patch_object(openstack_upgrade, "resume_units") mock_nova_compute_0 = mock.MagicMock() mock_nova_compute_0.entity_id = 'nova-compute/0' mock_cinder_1 = mock.MagicMock() mock_cinder_1.entity_id = 'cinder/1' units = { 'nova-compute': [mock_nova_compute_0], 'cinder': [mock_cinder_1]} self.get_units.side_effect = lambda app, model_name: units[app] openstack_upgrade.action_upgrade_group(['nova-compute', 'cinder']) pause_calls = [ mock.call(['cinder-hacluster/0'], model_name=None), mock.call(['nova-compute/0', 'cinder/1'], model_name=None)] self.pause_units.assert_has_calls(pause_calls, any_order=False) action_unit_upgrade_calls = [ mock.call(['nova-compute/0', 'cinder/1'], model_name=None)] self.action_unit_upgrade.assert_has_calls( action_unit_upgrade_calls, any_order=False) resume_calls = [ mock.call(['nova-compute/0', 'cinder/1'], model_name=None), mock.call(['cinder-hacluster/0'], model_name=None)] self.resume_units.assert_has_calls(resume_calls, any_order=False) def test_set_upgrade_application_config(self): openstack_upgrade.set_upgrade_application_config( ['neutron-api', 'cinder'], 'new-src') set_app_calls = [ mock.call( 'neutron-api', { 'openstack-origin': 'new-src', 'action-managed-upgrade': 'True'}, model_name=None), mock.call( 'cinder', { 'openstack-origin': 'new-src', 'action-managed-upgrade': 'True'}, model_name=None)] self.set_application_config.assert_has_calls(set_app_calls) self.set_application_config.reset_mock() openstack_upgrade.set_upgrade_application_config( ['percona-cluster'], 'new-src', action_managed=False) self.set_application_config.assert_called_once_with( 'percona-cluster', {'source': 'new-src'}, model_name=None) def test__extract_charm_name_from_url(self): self.assertEqual( openstack_upgrade._extract_charm_name_from_url( 'local:bionic/heat-12'), 'heat') self.assertEqual( openstack_upgrade._extract_charm_name_from_url( 'cs:bionic/heat-12'), 'heat') self.assertEqual( openstack_upgrade._extract_charm_name_from_url('cs:heat'), 'heat') def test_get_upgrade_candidates(self): expect = copy.deepcopy(self.juju_status.applications) del expect['mydb'] # Filter as it is on UPGRADE_EXCLUDE_LIST del expect['ntp'] # Filter as it has no source option del expect['neutron-openvswitch'] # Filter as it is a subordinates self.assertEqual( openstack_upgrade.get_upgrade_candidates(), expect) def test_get_upgrade_groups(self): self.assertEqual( openstack_upgrade.get_upgrade_groups(), { 'Compute': ['nova-compute'], 'Control Plane': ['cinder'], 'Core Identity': [], 'Storage': [], 'sweep_up': []}) def test_is_action_upgradable(self): self.assertTrue( openstack_upgrade.is_action_upgradable('cinder')) self.assertFalse( openstack_upgrade.is_action_upgradable('percona-cluster')) def test_run_action_upgrade(self): self.patch_object(openstack_upgrade, "set_upgrade_application_config") self.patch_object(openstack_upgrade, "action_upgrade_group") openstack_upgrade.run_action_upgrade( ['cinder', 'neutron-api'], 'new-src') self.set_upgrade_application_config.assert_called_once_with( ['cinder', 'neutron-api'], 'new-src', model_name=None) self.action_upgrade_group.assert_called_once_with( ['cinder', 'neutron-api'], model_name=None) def test_run_all_in_one_upgrade(self): self.patch_object(openstack_upgrade, "set_upgrade_application_config") self.patch_object( openstack_upgrade.zaza.model, 'block_until_all_units_idle') openstack_upgrade.run_all_in_one_upgrade( ['percona-cluster'], 'new-src') self.set_upgrade_application_config.assert_called_once_with( ['percona-cluster'], 'new-src', 
action_managed=False, model_name=None) self.block_until_all_units_idle.assert_called_once_with() def test_run_upgrade(self): self.patch_object(openstack_upgrade, "run_all_in_one_upgrade") self.patch_object(openstack_upgrade, "run_action_upgrade") openstack_upgrade.run_upgrade( ['cinder', 'neutron-api', 'ceph-mon'], 'new-src') self.run_all_in_one_upgrade.assert_called_once_with( ['ceph-mon'], 'new-src', model_name=None) self.run_action_upgrade.assert_called_once_with( ['cinder', 'neutron-api'], 'new-src', model_name=None) def test_run_upgrade_tests(self): self.patch_object(openstack_upgrade, "run_upgrade") self.patch_object(openstack_upgrade, "get_upgrade_groups") self.get_upgrade_groups.return_value = { 'Compute': ['nova-compute'], 'Control Plane': ['cinder', 'neutron-api'], 'Core Identity': ['keystone'], 'Storage': ['ceph-mon'], 'sweep_up': ['designate']} openstack_upgrade.run_upgrade_tests('new-src', model_name=None) run_upgrade_calls = [ mock.call(['keystone'], 'new-src', model_name=None), mock.call(['ceph-mon'], 'new-src', model_name=None), mock.call(['cinder', 'neutron-api'], 'new-src', model_name=None), mock.call(['nova-compute'], 'new-src', model_name=None), mock.call(['designate'], 'new-src', model_name=None)] self.run_upgrade.assert_has_calls(run_upgrade_calls, any_order=False)
40.854015
79
0.58317
import copy import mock import unit_tests.utils as ut_utils import zaza.openstack.utilities.openstack_upgrade as openstack_upgrade class TestOpenStackUpgradeUtils(ut_utils.BaseTestCase): async def _arun_action_on_units(self, units, cmd, model_name=None, raise_on_failure=True): pass def setUp(self): super(TestOpenStackUpgradeUtils, self).setUp() self.patch_object( openstack_upgrade.zaza.model, "async_run_action_on_units") self.async_run_action_on_units.side_effect = self._arun_action_on_units self.patch_object( openstack_upgrade.zaza.model, "get_units") self.juju_status = mock.MagicMock() self.patch_object( openstack_upgrade.zaza.model, "get_status", return_value=self.juju_status) self.patch_object( openstack_upgrade.zaza.model, "set_application_config") self.patch_object( openstack_upgrade.zaza.model, "get_application_config") def _get_application_config(app, model_name=None): app_config = { 'ceph-mon': {'verbose': True, 'source': 'old-src'}, 'neutron-openvswitch': {'verbose': True}, 'ntp': {'verbose': True}, 'percona-cluster': {'verbose': True, 'source': 'old-src'}, 'cinder': { 'verbose': True, 'openstack-origin': 'old-src', 'action-managed-upgrade': False}, 'neutron-api': { 'verbose': True, 'openstack-origin': 'old-src', 'action-managed-upgrade': False}, 'nova-compute': { 'verbose': True, 'openstack-origin': 'old-src', 'action-managed-upgrade': False}, } return app_config[app] self.get_application_config.side_effect = _get_application_config self.juju_status.applications = { 'mydb': { 'charm': 'cs:percona-cluster'}, 'neutron-openvswitch': { 'charm': 'cs:neutron-openvswitch', 'subordinate-to': 'nova-compute'}, 'ntp': { 'charm': 'cs:ntp'}, 'nova-compute': { 'charm': 'cs:nova-compute', 'units': { 'nova-compute/0': { 'subordinates': { 'neutron-openvswitch/2': { 'charm': 'cs:neutron-openvswitch-22'}}}}}, 'cinder': { 'charm': 'cs:cinder-23', 'units': { 'cinder/1': { 'subordinates': { 'cinder-hacluster/0': { 'charm': 'cs:hacluster-42'}, 'cinder-ceph/3': { 'charm': 'cs:cinder-ceph-2'}}}}}} def test_pause_units(self): openstack_upgrade.pause_units(['cinder/1', 'glance/2']) self.async_run_action_on_units.assert_called_once_with( ['cinder/1', 'glance/2'], 'pause', model_name=None, raise_on_failure=True) def test_resume_units(self): openstack_upgrade.resume_units(['cinder/1', 'glance/2']) self.async_run_action_on_units.assert_called_once_with( ['cinder/1', 'glance/2'], 'resume', model_name=None, raise_on_failure=True) def test_action_unit_upgrade(self): openstack_upgrade.action_unit_upgrade(['cinder/1', 'glance/2']) self.async_run_action_on_units.assert_called_once_with( ['cinder/1', 'glance/2'], 'openstack-upgrade', model_name=None, raise_on_failure=True) def test_action_upgrade_group(self): self.patch_object(openstack_upgrade, "pause_units") self.patch_object(openstack_upgrade, "action_unit_upgrade") self.patch_object(openstack_upgrade, "resume_units") mock_nova_compute_0 = mock.MagicMock() mock_nova_compute_0.entity_id = 'nova-compute/0' mock_cinder_1 = mock.MagicMock() mock_cinder_1.entity_id = 'cinder/1' units = { 'nova-compute': [mock_nova_compute_0], 'cinder': [mock_cinder_1]} self.get_units.side_effect = lambda app, model_name: units[app] openstack_upgrade.action_upgrade_group(['nova-compute', 'cinder']) pause_calls = [ mock.call(['cinder-hacluster/0'], model_name=None), mock.call(['nova-compute/0', 'cinder/1'], model_name=None)] self.pause_units.assert_has_calls(pause_calls, any_order=False) action_unit_upgrade_calls = [ mock.call(['nova-compute/0', 'cinder/1'], model_name=None)] 
self.action_unit_upgrade.assert_has_calls( action_unit_upgrade_calls, any_order=False) resume_calls = [ mock.call(['nova-compute/0', 'cinder/1'], model_name=None), mock.call(['cinder-hacluster/0'], model_name=None)] self.resume_units.assert_has_calls(resume_calls, any_order=False) def test_set_upgrade_application_config(self): openstack_upgrade.set_upgrade_application_config( ['neutron-api', 'cinder'], 'new-src') set_app_calls = [ mock.call( 'neutron-api', { 'openstack-origin': 'new-src', 'action-managed-upgrade': 'True'}, model_name=None), mock.call( 'cinder', { 'openstack-origin': 'new-src', 'action-managed-upgrade': 'True'}, model_name=None)] self.set_application_config.assert_has_calls(set_app_calls) self.set_application_config.reset_mock() openstack_upgrade.set_upgrade_application_config( ['percona-cluster'], 'new-src', action_managed=False) self.set_application_config.assert_called_once_with( 'percona-cluster', {'source': 'new-src'}, model_name=None) def test__extract_charm_name_from_url(self): self.assertEqual( openstack_upgrade._extract_charm_name_from_url( 'local:bionic/heat-12'), 'heat') self.assertEqual( openstack_upgrade._extract_charm_name_from_url( 'cs:bionic/heat-12'), 'heat') self.assertEqual( openstack_upgrade._extract_charm_name_from_url('cs:heat'), 'heat') def test_get_upgrade_candidates(self): expect = copy.deepcopy(self.juju_status.applications) del expect['mydb'] del expect['ntp'] del expect['neutron-openvswitch'] self.assertEqual( openstack_upgrade.get_upgrade_candidates(), expect) def test_get_upgrade_groups(self): self.assertEqual( openstack_upgrade.get_upgrade_groups(), { 'Compute': ['nova-compute'], 'Control Plane': ['cinder'], 'Core Identity': [], 'Storage': [], 'sweep_up': []}) def test_is_action_upgradable(self): self.assertTrue( openstack_upgrade.is_action_upgradable('cinder')) self.assertFalse( openstack_upgrade.is_action_upgradable('percona-cluster')) def test_run_action_upgrade(self): self.patch_object(openstack_upgrade, "set_upgrade_application_config") self.patch_object(openstack_upgrade, "action_upgrade_group") openstack_upgrade.run_action_upgrade( ['cinder', 'neutron-api'], 'new-src') self.set_upgrade_application_config.assert_called_once_with( ['cinder', 'neutron-api'], 'new-src', model_name=None) self.action_upgrade_group.assert_called_once_with( ['cinder', 'neutron-api'], model_name=None) def test_run_all_in_one_upgrade(self): self.patch_object(openstack_upgrade, "set_upgrade_application_config") self.patch_object( openstack_upgrade.zaza.model, 'block_until_all_units_idle') openstack_upgrade.run_all_in_one_upgrade( ['percona-cluster'], 'new-src') self.set_upgrade_application_config.assert_called_once_with( ['percona-cluster'], 'new-src', action_managed=False, model_name=None) self.block_until_all_units_idle.assert_called_once_with() def test_run_upgrade(self): self.patch_object(openstack_upgrade, "run_all_in_one_upgrade") self.patch_object(openstack_upgrade, "run_action_upgrade") openstack_upgrade.run_upgrade( ['cinder', 'neutron-api', 'ceph-mon'], 'new-src') self.run_all_in_one_upgrade.assert_called_once_with( ['ceph-mon'], 'new-src', model_name=None) self.run_action_upgrade.assert_called_once_with( ['cinder', 'neutron-api'], 'new-src', model_name=None) def test_run_upgrade_tests(self): self.patch_object(openstack_upgrade, "run_upgrade") self.patch_object(openstack_upgrade, "get_upgrade_groups") self.get_upgrade_groups.return_value = { 'Compute': ['nova-compute'], 'Control Plane': ['cinder', 'neutron-api'], 'Core Identity': ['keystone'], 'Storage': 
['ceph-mon'], 'sweep_up': ['designate']} openstack_upgrade.run_upgrade_tests('new-src', model_name=None) run_upgrade_calls = [ mock.call(['keystone'], 'new-src', model_name=None), mock.call(['ceph-mon'], 'new-src', model_name=None), mock.call(['cinder', 'neutron-api'], 'new-src', model_name=None), mock.call(['nova-compute'], 'new-src', model_name=None), mock.call(['designate'], 'new-src', model_name=None)] self.run_upgrade.assert_has_calls(run_upgrade_calls, any_order=False)
true
true
f711bb312eb8c2c5b8dee0e29602c0929c8089ea
592
py
Python
Ragnvald/apps/login_reg/forms.py
DezzaPR0XY/Ragnvald
90cdeb6cf9a73b599a05b6e37aab8e151fb221aa
[ "MIT" ]
null
null
null
Ragnvald/apps/login_reg/forms.py
DezzaPR0XY/Ragnvald
90cdeb6cf9a73b599a05b6e37aab8e151fb221aa
[ "MIT" ]
null
null
null
Ragnvald/apps/login_reg/forms.py
DezzaPR0XY/Ragnvald
90cdeb6cf9a73b599a05b6e37aab8e151fb221aa
[ "MIT" ]
null
null
null
from django import forms from django.contrib.auth.forms import UserCreationForm from django.contrib.auth.models import User # from .models import User class SignUpForm(UserCreationForm): username = forms.CharField(max_length=30, required=True) first_name = forms.CharField(max_length=30, required=True) last_name = forms.CharField(max_length=30, required=True) email = forms.EmailField(max_length=254, help_text='Required. Inform a valid email address.') class Meta: model = User fields = ('first_name', 'last_name', 'email', 'password1', 'password2',)
39.466667
98
0.739865
from django import forms from django.contrib.auth.forms import UserCreationForm from django.contrib.auth.models import User class SignUpForm(UserCreationForm): username = forms.CharField(max_length=30, required=True) first_name = forms.CharField(max_length=30, required=True) last_name = forms.CharField(max_length=30, required=True) email = forms.EmailField(max_length=254, help_text='Required. Inform a valid email address.') class Meta: model = User fields = ('first_name', 'last_name', 'email', 'password1', 'password2',)
true
true
f711bb41ebfd119a5f6b27e75ab19ba25b39268f
1,357
py
Python
env/lib/python3.6/site-packages/defusedxml/__init__.py
IERT-Prayagraj/iert_django_webapp
c0ad52fda672de52f2f18e543f076888d1ead1b3
[ "MIT" ]
null
null
null
env/lib/python3.6/site-packages/defusedxml/__init__.py
IERT-Prayagraj/iert_django_webapp
c0ad52fda672de52f2f18e543f076888d1ead1b3
[ "MIT" ]
null
null
null
env/lib/python3.6/site-packages/defusedxml/__init__.py
IERT-Prayagraj/iert_django_webapp
c0ad52fda672de52f2f18e543f076888d1ead1b3
[ "MIT" ]
null
null
null
# defusedxml # # Copyright (c) 2013 by Christian Heimes <christian@python.org> # Licensed to PSF under a Contributor Agreement. # See https://www.python.org/psf/license for licensing details. """Defuse XML bomb denial of service vulnerabilities """ from __future__ import print_function, absolute_import from .common import ( DefusedXmlException, DTDForbidden, EntitiesForbidden, ExternalReferenceForbidden, NotSupportedError, _apply_defusing, ) def defuse_stdlib(): """Monkey patch and defuse all stdlib packages :warning: The monkey patch is an EXPERIMETNAL feature. """ defused = {} from . import cElementTree from . import ElementTree from . import minidom from . import pulldom from . import sax from . import expatbuilder from . import expatreader from . import xmlrpc xmlrpc.monkey_patch() defused[xmlrpc] = None for defused_mod in [ cElementTree, ElementTree, minidom, pulldom, sax, expatbuilder, expatreader, ]: stdlib_mod = _apply_defusing(defused_mod) defused[defused_mod] = stdlib_mod return defused __version__ = "0.6.0rc1" __all__ = [ "DefusedXmlException", "DTDForbidden", "EntitiesForbidden", "ExternalReferenceForbidden", "NotSupportedError", ]
21.539683
63
0.677966
from __future__ import print_function, absolute_import from .common import ( DefusedXmlException, DTDForbidden, EntitiesForbidden, ExternalReferenceForbidden, NotSupportedError, _apply_defusing, ) def defuse_stdlib(): defused = {} from . import cElementTree from . import ElementTree from . import minidom from . import pulldom from . import sax from . import expatbuilder from . import expatreader from . import xmlrpc xmlrpc.monkey_patch() defused[xmlrpc] = None for defused_mod in [ cElementTree, ElementTree, minidom, pulldom, sax, expatbuilder, expatreader, ]: stdlib_mod = _apply_defusing(defused_mod) defused[defused_mod] = stdlib_mod return defused __version__ = "0.6.0rc1" __all__ = [ "DefusedXmlException", "DTDForbidden", "EntitiesForbidden", "ExternalReferenceForbidden", "NotSupportedError", ]
true
true
f711bb613473ca1c7982405273c36b1db340b656
1,989
py
Python
brocoli/exceptions.py
mesocentre-mcia/brocoli
95ba415791d9fafe9cef7124562bec5e2b6f2672
[ "BSD-3-Clause" ]
4
2018-06-28T09:50:40.000Z
2021-11-29T09:35:30.000Z
brocoli/exceptions.py
mesocentre-mcia/brocoli
95ba415791d9fafe9cef7124562bec5e2b6f2672
[ "BSD-3-Clause" ]
11
2018-01-19T10:11:39.000Z
2021-12-14T11:57:26.000Z
brocoli/exceptions.py
mesocentre-mcia/brocoli
95ba415791d9fafe9cef7124562bec5e2b6f2672
[ "BSD-3-Clause" ]
2
2017-12-17T21:59:46.000Z
2019-09-23T11:38:18.000Z
import os import errno import traceback from six.moves import tkinter_messagebox as messagebox from six import print_ class BrocoliError(Exception): def __init__(self, exception): self.exception = exception def __str__(self): return type(self.exception).__name__ + ': ' + str(self.exception) class ConnectionError(BrocoliError): pass class NetworkError(BrocoliError): pass class FileNotFoundError(BrocoliError): pass class CatalogLogicError(BrocoliError): pass class ChecksumError(Exception): pass def ioerror(no): return IOError(no, os.strerror(no)) def handle_catalog_exceptions(method): """ Method decorator that presents Brocoli exceptions to the user with messages """ def method_wrapper(self, *args, **kwargs): try: return method(self, *args, **kwargs) except ConnectionError as e: messagebox.showerror('Catalog Connection Error', ('Connection failed: ' + '{}').format(str(e))) except FileNotFoundError as e: messagebox.showerror('File Not Found', ('Catalog file was not found: ' + '{}').format(str(e))) except CatalogLogicError as e: messagebox.showerror('Catalog Logic Error', ('Catalog logic error occurred: ' + '{}').format(str(e))) except ChecksumError as e: messagebox.showerror('Checksum Error', ('Checksum error occurred: ' + '{}').format(str(e))) except Exception as e: messagebox.showerror('Unknown Error', ('Some unknown exception occurred: ' + '{}').format(str(e))) print_(traceback.format_exc()) return method_wrapper
28.826087
79
0.554047
import os import errno import traceback from six.moves import tkinter_messagebox as messagebox from six import print_ class BrocoliError(Exception): def __init__(self, exception): self.exception = exception def __str__(self): return type(self.exception).__name__ + ': ' + str(self.exception) class ConnectionError(BrocoliError): pass class NetworkError(BrocoliError): pass class FileNotFoundError(BrocoliError): pass class CatalogLogicError(BrocoliError): pass class ChecksumError(Exception): pass def ioerror(no): return IOError(no, os.strerror(no)) def handle_catalog_exceptions(method): def method_wrapper(self, *args, **kwargs): try: return method(self, *args, **kwargs) except ConnectionError as e: messagebox.showerror('Catalog Connection Error', ('Connection failed: ' + '{}').format(str(e))) except FileNotFoundError as e: messagebox.showerror('File Not Found', ('Catalog file was not found: ' + '{}').format(str(e))) except CatalogLogicError as e: messagebox.showerror('Catalog Logic Error', ('Catalog logic error occurred: ' + '{}').format(str(e))) except ChecksumError as e: messagebox.showerror('Checksum Error', ('Checksum error occurred: ' + '{}').format(str(e))) except Exception as e: messagebox.showerror('Unknown Error', ('Some unknown exception occurred: ' + '{}').format(str(e))) print_(traceback.format_exc()) return method_wrapper
true
true
f711bbbc339573d1744df69fd2b79a94a7b3f1b9
2,615
py
Python
gateway/builders/authorization_builder.py
TarlanPayments/gw-python-client
a0dd5292c877ab06bf549693a1bfc9fb06ef9d19
[ "MIT" ]
null
null
null
gateway/builders/authorization_builder.py
TarlanPayments/gw-python-client
a0dd5292c877ab06bf549693a1bfc9fb06ef9d19
[ "MIT" ]
null
null
null
gateway/builders/authorization_builder.py
TarlanPayments/gw-python-client
a0dd5292c877ab06bf549693a1bfc9fb06ef9d19
[ "MIT" ]
null
null
null
# The MIT License # # Copyright (c) 2017 Tarlan Payments. # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. class AuthorizationBuilder(object): def __init__(self, __client_auth_data_set, __client_mandatory_fields): from gateway.data_sets.request_parameters import ( RequestParameters, RequestParametersTypes ) self.__data_sets = RequestParameters self.__data_types = RequestParametersTypes self.__auth_mandatory_fields = __client_mandatory_fields self.__auth_data_set = __client_auth_data_set def add_account_guid(self, guid=None): """ Tarlan Payments Merchant Account GUID. Args: guid (str): Tarlan Payments Merchant Account GUID. """ self.__auth_mandatory_fields[self.__data_sets.AUTH_DATA_ACCOUNT_GUID] = self.__data_types.AUTH_DATA_ACCOUNT_GUID self.__auth_data_set[self.__data_sets.AUTH_DATA_ACCOUNT_GUID] = guid def add_secret_key(self, value=None): """ Tarlan Payments Merchant Password Args: value (str): Tarlan Payments Merchant Password """ self.__auth_mandatory_fields[self.__data_sets.AUTH_DATA_SECRET_KEY] = self.__data_types.AUTH_DATA_SECRET_KEY self.__auth_data_set[self.__data_sets.AUTH_DATA_SECRET_KEY] = value def add_session_id(self, id_value=None): """ Tarlan Payments Gateway Session ID Args: id_value (str): Tarlan Payments Gateway Session ID """ self.__auth_data_set[self.__data_sets.AUTH_DATA_SECRET_KEY] = id_value
41.507937
120
0.728489
class AuthorizationBuilder(object): def __init__(self, __client_auth_data_set, __client_mandatory_fields): from gateway.data_sets.request_parameters import ( RequestParameters, RequestParametersTypes ) self.__data_sets = RequestParameters self.__data_types = RequestParametersTypes self.__auth_mandatory_fields = __client_mandatory_fields self.__auth_data_set = __client_auth_data_set def add_account_guid(self, guid=None): self.__auth_mandatory_fields[self.__data_sets.AUTH_DATA_ACCOUNT_GUID] = self.__data_types.AUTH_DATA_ACCOUNT_GUID self.__auth_data_set[self.__data_sets.AUTH_DATA_ACCOUNT_GUID] = guid def add_secret_key(self, value=None): self.__auth_mandatory_fields[self.__data_sets.AUTH_DATA_SECRET_KEY] = self.__data_types.AUTH_DATA_SECRET_KEY self.__auth_data_set[self.__data_sets.AUTH_DATA_SECRET_KEY] = value def add_session_id(self, id_value=None): self.__auth_data_set[self.__data_sets.AUTH_DATA_SECRET_KEY] = id_value
true
true
f711be4592dcc19aa285868a4d79787b82aa9f97
155
py
Python
1.py
maweefeng/baidubaikespider
19ce975996af3a421c1e981ca83f2c269532dbb9
[ "MIT" ]
1
2019-05-29T09:59:49.000Z
2019-05-29T09:59:49.000Z
1.py
maweefeng/baidubaikespider
19ce975996af3a421c1e981ca83f2c269532dbb9
[ "MIT" ]
null
null
null
1.py
maweefeng/baidubaikespider
19ce975996af3a421c1e981ca83f2c269532dbb9
[ "MIT" ]
null
null
null
from baidu_spider import spider_main rooturl = 'https://baike.baidu.com/item/Python/407313' obj_spider = spider_main.SpiderMain() obj_spider.craw(rooturl)
31
54
0.812903
from baidu_spider import spider_main rooturl = 'https://baike.baidu.com/item/Python/407313' obj_spider = spider_main.SpiderMain() obj_spider.craw(rooturl)
true
true
f711be502f622c5ef3826565718977a9fa0dbc19
4,877
py
Python
utest/running/test_userlibrary.py
Kompakti/robotframework
3ac75d5212f544018ef1cc99a8b68c222715df5f
[ "ECL-2.0", "Apache-2.0" ]
2
2017-08-10T16:14:15.000Z
2021-11-28T10:47:46.000Z
utest/running/test_userlibrary.py
Kompakti/robotframework
3ac75d5212f544018ef1cc99a8b68c222715df5f
[ "ECL-2.0", "Apache-2.0" ]
null
null
null
utest/running/test_userlibrary.py
Kompakti/robotframework
3ac75d5212f544018ef1cc99a8b68c222715df5f
[ "ECL-2.0", "Apache-2.0" ]
null
null
null
import unittest import os from robot.running import userkeyword from robot.running.model import ResourceFile, UserKeyword from robot.running.userkeyword import UserLibrary from robot.errors import DataError from robot.utils.asserts import (assert_equal, assert_none, assert_raises_with_msg, assert_true) class UserHandlerStub: def __init__(self, kwdata, library): self.name = kwdata.name self.libname = library if kwdata.name == 'FAIL': raise Exception('Expected failure') def create(self, name): return self class EmbeddedArgsHandlerStub: def __init__(self, kwdata, library, embedded): self.name = kwdata.name if kwdata.name != 'Embedded ${arg}': raise TypeError def matches(self, name): return name == self.name class TestUserLibrary(unittest.TestCase): def setUp(self): self._orig_user_handler = userkeyword.UserKeywordHandler self._orig_embedded_handler = userkeyword.EmbeddedArgumentsHandler userkeyword.UserKeywordHandler = UserHandlerStub userkeyword.EmbeddedArgumentsHandler = EmbeddedArgsHandlerStub def tearDown(self): userkeyword.UserKeywordHandler = self._orig_user_handler userkeyword.EmbeddedArgumentsHandler = self._orig_embedded_handler def test_name_from_resource(self): for source, exp in [('resources.html', 'resources'), (os.path.join('..','res','My Res.HTM'), 'My Res'), (os.path.abspath('my_res.xhtml'), 'my_res')]: lib = self._get_userlibrary(source=source) assert_equal(lib.name, exp) def test_name_from_test_case_file(self): assert_none(self._get_userlibrary().name) def test_creating_keyword(self): lib = self._get_userlibrary('kw 1', 'kw 2') assert_equal(len(lib.handlers), 2) assert_true('kw 1' in lib.handlers) assert_true('kw 2' in lib.handlers) def test_creating_keyword_when_kw_name_has_embedded_arg(self): lib = self._get_userlibrary('Embedded ${arg}') self._lib_has_embedded_arg_keyword(lib) def test_creating_keywords_when_normal_and_embedded_arg_kws(self): lib = self._get_userlibrary('kw1', 'Embedded ${arg}', 'kw2') assert_equal(len(lib.handlers), 3) assert_true('kw1' in lib.handlers) assert_true('kw 2' in lib.handlers) self._lib_has_embedded_arg_keyword(lib) def test_creating_duplicate_embedded_arg_keyword_in_resource_file(self): lib = self._get_userlibrary('Embedded ${arg}', 'kw', 'Embedded ${arg}') assert_equal(len(lib.handlers), 3) assert_true(not hasattr(lib.handlers['kw'], 'error')) self._lib_has_embedded_arg_keyword(lib, count=2) def test_creating_duplicate_keyword_in_resource_file(self): lib = self._get_userlibrary('kw', 'kw', 'kw 2') assert_equal(len(lib.handlers), 2) assert_true('kw' in lib.handlers) assert_true('kw 2' in lib.handlers) assert_equal(lib.handlers['kw'].error, "Keyword with same name defined multiple times.") def test_creating_duplicate_keyword_in_test_case_file(self): lib = self._get_userlibrary('MYKW', 'my kw') assert_equal(len(lib.handlers), 1) assert_true('mykw' in lib.handlers) assert_equal(lib.handlers['mykw'].error, "Keyword with same name defined multiple times.") def test_handlers_contains(self): lib = self._get_userlibrary('kw') assert_true('kw' in lib.handlers) assert_true('nonex' not in lib.handlers) def test_handlers_getitem_with_non_existing_keyword(self): lib = self._get_userlibrary('kw') assert_raises_with_msg( DataError, "Test case file contains no keywords matching name 'non existing'.", lib.handlers.__getitem__, 'non existing') def test_handlers_getitem_with_existing_keyword(self): lib = self._get_userlibrary('kw') handler = lib.handlers['kw'] assert_true(isinstance(handler, UserHandlerStub)) def _get_userlibrary(self, *keywords, **conf): 
resource = ResourceFile(**conf) resource.keywords = [UserKeyword(name) for name in keywords] resource_type = UserLibrary.TEST_CASE_FILE_TYPE \ if 'source' not in conf else UserLibrary.RESOURCE_FILE_TYPE return UserLibrary(resource, resource_type) def _lib_has_embedded_arg_keyword(self, lib, count=1): assert_true('Embedded ${arg}' in lib.handlers) embedded = lib.handlers._embedded assert_equal(len(embedded), count) for template in embedded: assert_equal(template.name, 'Embedded ${arg}') if __name__ == '__main__': unittest.main()
37.806202
80
0.671109
import unittest import os from robot.running import userkeyword from robot.running.model import ResourceFile, UserKeyword from robot.running.userkeyword import UserLibrary from robot.errors import DataError from robot.utils.asserts import (assert_equal, assert_none, assert_raises_with_msg, assert_true) class UserHandlerStub: def __init__(self, kwdata, library): self.name = kwdata.name self.libname = library if kwdata.name == 'FAIL': raise Exception('Expected failure') def create(self, name): return self class EmbeddedArgsHandlerStub: def __init__(self, kwdata, library, embedded): self.name = kwdata.name if kwdata.name != 'Embedded ${arg}': raise TypeError def matches(self, name): return name == self.name class TestUserLibrary(unittest.TestCase): def setUp(self): self._orig_user_handler = userkeyword.UserKeywordHandler self._orig_embedded_handler = userkeyword.EmbeddedArgumentsHandler userkeyword.UserKeywordHandler = UserHandlerStub userkeyword.EmbeddedArgumentsHandler = EmbeddedArgsHandlerStub def tearDown(self): userkeyword.UserKeywordHandler = self._orig_user_handler userkeyword.EmbeddedArgumentsHandler = self._orig_embedded_handler def test_name_from_resource(self): for source, exp in [('resources.html', 'resources'), (os.path.join('..','res','My Res.HTM'), 'My Res'), (os.path.abspath('my_res.xhtml'), 'my_res')]: lib = self._get_userlibrary(source=source) assert_equal(lib.name, exp) def test_name_from_test_case_file(self): assert_none(self._get_userlibrary().name) def test_creating_keyword(self): lib = self._get_userlibrary('kw 1', 'kw 2') assert_equal(len(lib.handlers), 2) assert_true('kw 1' in lib.handlers) assert_true('kw 2' in lib.handlers) def test_creating_keyword_when_kw_name_has_embedded_arg(self): lib = self._get_userlibrary('Embedded ${arg}') self._lib_has_embedded_arg_keyword(lib) def test_creating_keywords_when_normal_and_embedded_arg_kws(self): lib = self._get_userlibrary('kw1', 'Embedded ${arg}', 'kw2') assert_equal(len(lib.handlers), 3) assert_true('kw1' in lib.handlers) assert_true('kw 2' in lib.handlers) self._lib_has_embedded_arg_keyword(lib) def test_creating_duplicate_embedded_arg_keyword_in_resource_file(self): lib = self._get_userlibrary('Embedded ${arg}', 'kw', 'Embedded ${arg}') assert_equal(len(lib.handlers), 3) assert_true(not hasattr(lib.handlers['kw'], 'error')) self._lib_has_embedded_arg_keyword(lib, count=2) def test_creating_duplicate_keyword_in_resource_file(self): lib = self._get_userlibrary('kw', 'kw', 'kw 2') assert_equal(len(lib.handlers), 2) assert_true('kw' in lib.handlers) assert_true('kw 2' in lib.handlers) assert_equal(lib.handlers['kw'].error, "Keyword with same name defined multiple times.") def test_creating_duplicate_keyword_in_test_case_file(self): lib = self._get_userlibrary('MYKW', 'my kw') assert_equal(len(lib.handlers), 1) assert_true('mykw' in lib.handlers) assert_equal(lib.handlers['mykw'].error, "Keyword with same name defined multiple times.") def test_handlers_contains(self): lib = self._get_userlibrary('kw') assert_true('kw' in lib.handlers) assert_true('nonex' not in lib.handlers) def test_handlers_getitem_with_non_existing_keyword(self): lib = self._get_userlibrary('kw') assert_raises_with_msg( DataError, "Test case file contains no keywords matching name 'non existing'.", lib.handlers.__getitem__, 'non existing') def test_handlers_getitem_with_existing_keyword(self): lib = self._get_userlibrary('kw') handler = lib.handlers['kw'] assert_true(isinstance(handler, UserHandlerStub)) def _get_userlibrary(self, *keywords, **conf): 
resource = ResourceFile(**conf) resource.keywords = [UserKeyword(name) for name in keywords] resource_type = UserLibrary.TEST_CASE_FILE_TYPE \ if 'source' not in conf else UserLibrary.RESOURCE_FILE_TYPE return UserLibrary(resource, resource_type) def _lib_has_embedded_arg_keyword(self, lib, count=1): assert_true('Embedded ${arg}' in lib.handlers) embedded = lib.handlers._embedded assert_equal(len(embedded), count) for template in embedded: assert_equal(template.name, 'Embedded ${arg}') if __name__ == '__main__': unittest.main()
true
true
f711c019c9da28d53aacd9a2c643deca39634632
1,410
py
Python
python-socketio.py
mcity/Mcity-octane-examples
10c59f78eeed2a93bad797ca8a990e6972a5b32a
[ "MIT" ]
2
2020-09-03T20:02:59.000Z
2021-08-24T01:18:51.000Z
python-socketio.py
mcity/Mcity-octane-examples
10c59f78eeed2a93bad797ca8a990e6972a5b32a
[ "MIT" ]
5
2020-03-03T05:53:35.000Z
2022-01-13T03:25:09.000Z
python-socketio.py
mcity/Mcity-octane-examples
10c59f78eeed2a93bad797ca8a990e6972a5b32a
[ "MIT" ]
1
2021-10-06T03:23:27.000Z
2021-10-06T03:23:27.000Z
""" python-socketio.py Sample Mcity OCTANE python socketio script """ import os from dotenv import load_dotenv import socketio #Load environment variables load_dotenv() api_key = os.environ.get('MCITY_OCTANE_KEY', None) server = os.environ.get('MCITY_OCTANE_SERVER', 'http://localhost:5000') namespace = "/octane" #If no API Key provided, exit. if not api_key: print ("No API KEY SPECIFIED. EXITING") exit() #Create an SocketIO Python client. sio = socketio.Client() # Async client is available also: sio = socketio.AsyncClient() def send_auth(): """ Emit an authentication event. """ sio.emit('auth', {'x-api-key': api_key}, namespace=namespace) #Define event callbacks @sio.on('connect', namespace=namespace) def on_connect(): """ Handle connection event and send authentication key """ send_auth() @sio.on('join', namespace=namespace) def on_join(data): """ Event fired when user joins a channel """ print('Join received with ', data) @sio.on('channels', namespace=namespace) def on_channels(data): """ Event fired when a user requests current channel information. """ print('Channel information', data) @sio.on('disconnect', namespace=namespace) def on_disconnect(): """ Event fired on disconnect. """ print('disconnected from server') #Make connection. sio.connect(server, namespaces=[namespace]) sio.wait()
23.114754
71
0.695745
import os from dotenv import load_dotenv import socketio load_dotenv() api_key = os.environ.get('MCITY_OCTANE_KEY', None) server = os.environ.get('MCITY_OCTANE_SERVER', 'http://localhost:5000') namespace = "/octane" if not api_key: print ("No API KEY SPECIFIED. EXITING") exit() sio = socketio.Client() def send_auth(): sio.emit('auth', {'x-api-key': api_key}, namespace=namespace) @sio.on('connect', namespace=namespace) def on_connect(): send_auth() @sio.on('join', namespace=namespace) def on_join(data): print('Join received with ', data) @sio.on('channels', namespace=namespace) def on_channels(data): print('Channel information', data) @sio.on('disconnect', namespace=namespace) def on_disconnect(): print('disconnected from server') sio.connect(server, namespaces=[namespace]) sio.wait()
true
true
f711c06dce76d53b8737288c8de318e6f90ce585
388
py
Python
configs/_base_/det_datasets/icdar2015.py
hongxuenong/mmocr
e8e3a059f8f2e4fca96af37751c33563fc48e2ba
[ "Apache-2.0" ]
2,261
2021-04-08T03:45:41.000Z
2022-03-31T23:37:46.000Z
configs/_base_/det_datasets/icdar2015.py
hongxuenong/mmocr
e8e3a059f8f2e4fca96af37751c33563fc48e2ba
[ "Apache-2.0" ]
789
2021-04-08T05:40:13.000Z
2022-03-31T09:42:39.000Z
configs/_base_/det_datasets/icdar2015.py
hongxuenong/mmocr
e8e3a059f8f2e4fca96af37751c33563fc48e2ba
[ "Apache-2.0" ]
432
2021-04-08T03:56:16.000Z
2022-03-30T18:44:43.000Z
dataset_type = 'IcdarDataset' data_root = 'data/icdar2015' train = dict( type=dataset_type, ann_file=f'{data_root}/instances_training.json', img_prefix=f'{data_root}/imgs', pipeline=None) test = dict( type=dataset_type, ann_file=f'{data_root}/instances_test.json', img_prefix=f'{data_root}/imgs', pipeline=None) train_list = [train] test_list = [test]
20.421053
52
0.698454
dataset_type = 'IcdarDataset' data_root = 'data/icdar2015' train = dict( type=dataset_type, ann_file=f'{data_root}/instances_training.json', img_prefix=f'{data_root}/imgs', pipeline=None) test = dict( type=dataset_type, ann_file=f'{data_root}/instances_test.json', img_prefix=f'{data_root}/imgs', pipeline=None) train_list = [train] test_list = [test]
true
true
f711c13c610a43897310b7806fbf5d47f0a3d3e0
4,518
py
Python
TD3/artest.py
ChenShawn/Grad-Paper-Experiments
00fe1142dae4077b197e99253cc5a4ab759db2ff
[ "MIT" ]
1
2020-04-19T14:28:27.000Z
2020-04-19T14:28:27.000Z
TD3/artest.py
ChenShawn/Grad-Paper-Experiments
00fe1142dae4077b197e99253cc5a4ab759db2ff
[ "MIT" ]
null
null
null
TD3/artest.py
ChenShawn/Grad-Paper-Experiments
00fe1142dae4077b197e99253cc5a4ab759db2ff
[ "MIT" ]
1
2020-04-19T14:28:26.000Z
2020-04-19T14:28:26.000Z
import gym import pybullet_envs from PIL import Image import argparse import numpy as np import torch import copy import os from sklearn.preprocessing import normalize as Normalize from models import TD3, TD3_adv2 def parse_arguments(): parser = argparse.ArgumentParser("TESTING") parser.add_argument('-p', "--policy", type=str, default='td3', help="td3/adv") parser.add_argument('-e', "--env", type=str, default="LunarLanderContinuous-v2", help="env name") parser.add_argument('-n', "--n-episodes", type=int, default=10, help="number of episodes") parser.add_argument("--mode", type=str, default='nr', help="nr (default) / pr") parser.add_argument("--train-seed", type=int, default=1, help="random seed for training") parser.add_argument("--test-seed", type=int, default=1, help="random seed for testing") parser.add_argument("--nr-delta", type=float, default=0.0, help="delta for NR-MDP") parser.add_argument("--pr-prob", type=float, default=0.0, help="prob of PR-MDP") parser.add_argument("--render", action="store_true", default=False) return parser.parse_args() def get_policy(arglist, kwargs, max_action): # Initialize policy if arglist.policy == "td3": # Target policy smoothing is scaled wrt the action scale kwargs["policy_noise"] = 0.0 kwargs["noise_clip"] = 0.0 kwargs["policy_freq"] = 2 policy = TD3.TD3(**kwargs) elif arglist.policy == "OurDDPG": policy = OurDDPG.DDPG(**kwargs) elif arglist.policy == "DDPG": policy = DDPG.DDPG(**kwargs) elif arglist.policy == 'adv': kwargs['alpha'] = 0.01 kwargs['adv_epsilon'] = 0.01 kwargs['logdir'] = f'./tensorboard/{arglist.policy}_{arglist.env}_{arglist.train_seed}/' policy = TD3_adv2.TD3(**kwargs) else: raise NotImplementedError return policy def test(arglist): env_name = arglist.env random_seed = arglist.test_seed n_episodes = arglist.n_episodes lr = 0.002 max_timesteps = 3000 render = arglist.render filename = "{}_{}_{}".format(arglist.policy, env_name, arglist.train_seed) directory = "./train/{}".format(env_name) env = gym.make(env_name) state_dim = env.observation_space.shape[0] action_dim = env.action_space.shape[0] max_action = float(env.action_space.high[0]) # Set random seed env.seed(random_seed) torch.manual_seed(random_seed) np.random.seed(random_seed) kwargs = { "state_dim": state_dim, "action_dim": action_dim, "max_action": max_action, "discount": 0.99, "tau": 0.005, "policy_noise": 0.001, "noise_clip": 1.0, "policy_freq": 2 } policy = get_policy(arglist, kwargs, max_action) policy.load(os.path.join(directory, filename)) total_reward_list = [] for ep in range(1, n_episodes+1): ep_reward = 0.0 state = env.reset() for t in range(max_timesteps): action = policy.select_action(state) if arglist.mode == 'nr': # use truncated gaussian noise for both nr-mdp and pr-mdp settings noise = np.random.normal(0.0, max_action, size=action.shape) noise = np.clip(noise, -max_action, max_action) adv_action = (1.0 - arglist.nr_delta) * action + arglist.nr_delta * noise elif arglist.mode == 'pr': adv_action = action if np.random.rand() < arglist.pr_prob: adv_action = np.random.normal(0.0, action_dim, size=action.shape) adv_action = np.clip(adv_action, -max_action, max_action) else: raise NotImplementedError('invalid mode') state, reward, done, _ = env.step(adv_action) ep_reward += reward if render: env.render() if done: break print('Episode: {}\tReward: {}'.format(ep, int(ep_reward))) total_reward_list.append(ep_reward) ep_reward = 0.0 env.close() return total_reward_list if __name__ == '__main__': args = parse_arguments() reward_list = test(args) reward_array = 
np.array(reward_list, dtype=np.float32) reward_mean = reward_array.mean() reward_half_std = reward_array.std() / 2.0 loginfo = 'policy={} env={} load_seed={} random_seed={} mode={} pr-prob={} nr-delta={} result={}±{}' print(loginfo.format(args.policy, args.env, args.train_seed, args.test_seed, args.mode, args.pr_prob, args.nr_delta, reward_mean, reward_half_std))
34.48855
151
0.64409
import gym import pybullet_envs from PIL import Image import argparse import numpy as np import torch import copy import os from sklearn.preprocessing import normalize as Normalize from models import TD3, TD3_adv2 def parse_arguments(): parser = argparse.ArgumentParser("TESTING") parser.add_argument('-p', "--policy", type=str, default='td3', help="td3/adv") parser.add_argument('-e', "--env", type=str, default="LunarLanderContinuous-v2", help="env name") parser.add_argument('-n', "--n-episodes", type=int, default=10, help="number of episodes") parser.add_argument("--mode", type=str, default='nr', help="nr (default) / pr") parser.add_argument("--train-seed", type=int, default=1, help="random seed for training") parser.add_argument("--test-seed", type=int, default=1, help="random seed for testing") parser.add_argument("--nr-delta", type=float, default=0.0, help="delta for NR-MDP") parser.add_argument("--pr-prob", type=float, default=0.0, help="prob of PR-MDP") parser.add_argument("--render", action="store_true", default=False) return parser.parse_args() def get_policy(arglist, kwargs, max_action): if arglist.policy == "td3": kwargs["policy_noise"] = 0.0 kwargs["noise_clip"] = 0.0 kwargs["policy_freq"] = 2 policy = TD3.TD3(**kwargs) elif arglist.policy == "OurDDPG": policy = OurDDPG.DDPG(**kwargs) elif arglist.policy == "DDPG": policy = DDPG.DDPG(**kwargs) elif arglist.policy == 'adv': kwargs['alpha'] = 0.01 kwargs['adv_epsilon'] = 0.01 kwargs['logdir'] = f'./tensorboard/{arglist.policy}_{arglist.env}_{arglist.train_seed}/' policy = TD3_adv2.TD3(**kwargs) else: raise NotImplementedError return policy def test(arglist): env_name = arglist.env random_seed = arglist.test_seed n_episodes = arglist.n_episodes lr = 0.002 max_timesteps = 3000 render = arglist.render filename = "{}_{}_{}".format(arglist.policy, env_name, arglist.train_seed) directory = "./train/{}".format(env_name) env = gym.make(env_name) state_dim = env.observation_space.shape[0] action_dim = env.action_space.shape[0] max_action = float(env.action_space.high[0]) env.seed(random_seed) torch.manual_seed(random_seed) np.random.seed(random_seed) kwargs = { "state_dim": state_dim, "action_dim": action_dim, "max_action": max_action, "discount": 0.99, "tau": 0.005, "policy_noise": 0.001, "noise_clip": 1.0, "policy_freq": 2 } policy = get_policy(arglist, kwargs, max_action) policy.load(os.path.join(directory, filename)) total_reward_list = [] for ep in range(1, n_episodes+1): ep_reward = 0.0 state = env.reset() for t in range(max_timesteps): action = policy.select_action(state) if arglist.mode == 'nr': noise = np.random.normal(0.0, max_action, size=action.shape) noise = np.clip(noise, -max_action, max_action) adv_action = (1.0 - arglist.nr_delta) * action + arglist.nr_delta * noise elif arglist.mode == 'pr': adv_action = action if np.random.rand() < arglist.pr_prob: adv_action = np.random.normal(0.0, action_dim, size=action.shape) adv_action = np.clip(adv_action, -max_action, max_action) else: raise NotImplementedError('invalid mode') state, reward, done, _ = env.step(adv_action) ep_reward += reward if render: env.render() if done: break print('Episode: {}\tReward: {}'.format(ep, int(ep_reward))) total_reward_list.append(ep_reward) ep_reward = 0.0 env.close() return total_reward_list if __name__ == '__main__': args = parse_arguments() reward_list = test(args) reward_array = np.array(reward_list, dtype=np.float32) reward_mean = reward_array.mean() reward_half_std = reward_array.std() / 2.0 loginfo = 'policy={} env={} load_seed={} 
random_seed={} mode={} pr-prob={} nr-delta={} result={}±{}' print(loginfo.format(args.policy, args.env, args.train_seed, args.test_seed, args.mode, args.pr_prob, args.nr_delta, reward_mean, reward_half_std))
true
true
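For readers skimming the testing script above, the core of its robustness evaluation is the action-perturbation step inside the episode loop. The standalone sketch below restates that step under the same two modes; the function name perturb_action and its default parameters are illustrative, and the PR branch here reuses the action-scale noise rather than the script's action_dim-scaled draw.

import numpy as np

def perturb_action(action, max_action, mode="nr", nr_delta=0.1, pr_prob=0.1, rng=None):
    # Adversarial perturbation sketch: NR-MDP mixes the policy action with
    # truncated Gaussian noise; PR-MDP replaces it outright with probability pr_prob.
    rng = rng or np.random.default_rng(0)
    noise = np.clip(rng.normal(0.0, max_action, size=action.shape), -max_action, max_action)
    if mode == "nr":
        return (1.0 - nr_delta) * action + nr_delta * noise
    if mode == "pr":
        return noise if rng.random() < pr_prob else action
    raise NotImplementedError("invalid mode")

if __name__ == "__main__":
    a = np.array([0.5, -0.2])
    print(perturb_action(a, max_action=1.0, mode="nr", nr_delta=0.2))
    print(perturb_action(a, max_action=1.0, mode="pr", pr_prob=0.5))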
f711c1e97d8fbef4f6cfb982410a393202354df7
4,422
bzl
Python
bazel/p4c_deps.bzl
maemre/p4c
535a49c6e0811517c5c0dbdd15f763581050cbeb
[ "Apache-2.0" ]
null
null
null
bazel/p4c_deps.bzl
maemre/p4c
535a49c6e0811517c5c0dbdd15f763581050cbeb
[ "Apache-2.0" ]
null
null
null
bazel/p4c_deps.bzl
maemre/p4c
535a49c6e0811517c5c0dbdd15f763581050cbeb
[ "Apache-2.0" ]
null
null
null
"""Load dependencies needed to compile p4c as a 3rd-party consumer.""" load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository") load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") def p4c_deps(): """Loads dependencies need to compile p4c.""" # Third party projects can define the target # @com_github_p4lang_p4c_extension:ir_extensions with a `filegroup` # containing their custom .def files. if not native.existing_rule("com_github_p4lang_p4c_extension"): # By default, no IR extensions. native.new_local_repository( name = "com_github_p4lang_p4c_extension", path = ".", build_file_content = """ filegroup( name = "ir_extensions", srcs = [], visibility = ["//visibility:public"], ) """, ) if not native.existing_rule("com_github_nelhage_rules_boost"): git_repository( name = "com_github_nelhage_rules_boost", # Newest commit on main branch as of May 3, 2021. commit = "2598b37ce68226fab465c0f0e10988af872b6dc9", remote = "https://github.com/nelhage/rules_boost", shallow_since = "1611019749 -0800", ) if not native.existing_rule("com_github_p4lang_p4runtime"): # Cannot currently use local_repository due to Bazel limitation, # see https://github.com/bazelbuild/bazel/issues/11573. # # native.local_repository( # name = "com_github_p4lang_p4runtime", # path = "@com_github_p4lang_p4c//:control-plane/p4runtime/proto", # ) # # We use git_repository as a workaround; the version used here should # ideally be kept in sync with the submodule control-plane/p4runtime. git_repository( name = "com_github_p4lang_p4runtime", remote = "https://github.com/p4lang/p4runtime", # Newest commit on main branch as of Jan 22, 2021. commit = "0d40261b67283999bf0f03bd6b40b5374c7aebd0", shallow_since = "1611340571 -0800", # strip_prefix is broken; we use patch_cmds as a workaround, # see https://github.com/bazelbuild/bazel/issues/10062. # strip_prefix = "proto", patch_cmds = ["mv proto/* ."], ) if not native.existing_rule("com_google_googletest"): # Cannot currently use local_repository due to Bazel limitation, # see https://github.com/bazelbuild/bazel/issues/11573. # # local_repository( # name = "com_google_googletest", # path = "@com_github_p4lang_p4c//:test/frameworks/gtest", # ) # # We use http_archive as a workaround; the version used here should # ideally be kept in sync with the submodule test/frameworks/gtest. 
http_archive( name = "com_google_googletest", urls = ["https://github.com/google/googletest/archive/release-1.10.0.tar.gz"], strip_prefix = "googletest-release-1.10.0", sha256 = "9dc9157a9a1551ec7a7e43daea9a694a0bb5fb8bec81235d8a1e6ef64c716dcb", ) if not native.existing_rule("com_google_protobuf"): http_archive( name = "com_google_protobuf", url = "https://github.com/protocolbuffers/protobuf/releases/download/v3.13.0/protobuf-all-3.13.0.tar.gz", strip_prefix = "protobuf-3.13.0", sha256 = "465fd9367992a9b9c4fba34a549773735da200903678b81b25f367982e8df376", ) # Dependencies used by the tc backend if not native.existing_rule("com_google_absl"): http_archive( name = "com_google_absl", # The most recent commit as of 2021-09-02 urls = ["https://github.com/abseil/abseil-cpp/archive/4bb9e39c88854dbf466688177257d11810719853.zip"], strip_prefix = "abseil-cpp-4bb9e39c88854dbf466688177257d11810719853", sha256 = "4cad653c8d6a2c0a551bae3114e2208bf80b0e7d54a4f094f3f5e967c1dab45b", ) if not native.existing_rule("com_github_jbeder_yaml_cpp"): http_archive( name = "com_github_jbeder_yaml_cpp", urls = ["https://github.com/jbeder/yaml-cpp/archive/refs/tags/yaml-cpp-0.7.0.zip"], strip_prefix = "yaml-cpp-yaml-cpp-0.7.0", sha256 = "4d5e664a7fb2d7445fc548cc8c0e1aa7b1a496540eb382d137e2cc263e6d3ef5", )
46.0625
117
0.644957
load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository") load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") def p4c_deps(): if not native.existing_rule("com_github_p4lang_p4c_extension"): native.new_local_repository( name = "com_github_p4lang_p4c_extension", path = ".", build_file_content = """ filegroup( name = "ir_extensions", srcs = [], visibility = ["//visibility:public"], ) """, ) if not native.existing_rule("com_github_nelhage_rules_boost"): git_repository( name = "com_github_nelhage_rules_boost", commit = "2598b37ce68226fab465c0f0e10988af872b6dc9", remote = "https://github.com/nelhage/rules_boost", shallow_since = "1611019749 -0800", ) if not native.existing_rule("com_github_p4lang_p4runtime"): git_repository( name = "com_github_p4lang_p4runtime", remote = "https://github.com/p4lang/p4runtime", commit = "0d40261b67283999bf0f03bd6b40b5374c7aebd0", shallow_since = "1611340571 -0800", patch_cmds = ["mv proto/* ."], ) if not native.existing_rule("com_google_googletest"): http_archive( name = "com_google_googletest", urls = ["https://github.com/google/googletest/archive/release-1.10.0.tar.gz"], strip_prefix = "googletest-release-1.10.0", sha256 = "9dc9157a9a1551ec7a7e43daea9a694a0bb5fb8bec81235d8a1e6ef64c716dcb", ) if not native.existing_rule("com_google_protobuf"): http_archive( name = "com_google_protobuf", url = "https://github.com/protocolbuffers/protobuf/releases/download/v3.13.0/protobuf-all-3.13.0.tar.gz", strip_prefix = "protobuf-3.13.0", sha256 = "465fd9367992a9b9c4fba34a549773735da200903678b81b25f367982e8df376", ) if not native.existing_rule("com_google_absl"): http_archive( name = "com_google_absl", urls = ["https://github.com/abseil/abseil-cpp/archive/4bb9e39c88854dbf466688177257d11810719853.zip"], strip_prefix = "abseil-cpp-4bb9e39c88854dbf466688177257d11810719853", sha256 = "4cad653c8d6a2c0a551bae3114e2208bf80b0e7d54a4f094f3f5e967c1dab45b", ) if not native.existing_rule("com_github_jbeder_yaml_cpp"): http_archive( name = "com_github_jbeder_yaml_cpp", urls = ["https://github.com/jbeder/yaml-cpp/archive/refs/tags/yaml-cpp-0.7.0.zip"], strip_prefix = "yaml-cpp-yaml-cpp-0.7.0", sha256 = "4d5e664a7fb2d7445fc548cc8c0e1aa7b1a496540eb382d137e2cc263e6d3ef5", )
true
true
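The p4c_deps() macro above follows one pattern throughout: declare a repository only when the consuming workspace has not already defined it, so downstream projects can pin their own versions. Since the code sketches in this document are kept in Python, the toy snippet below only models that override-friendly registration pattern with made-up names; it is not Starlark and not part of the Bazel API.

_registered = {}

def register_if_missing(name, **attrs):
    # Analogue of `if not native.existing_rule(name):` in the macro above:
    # the first definition wins, so a parent workspace can override the default.
    if name not in _registered:
        _registered[name] = attrs
    return _registered[name]

if __name__ == "__main__":
    register_if_missing("com_google_googletest", strip_prefix="googletest-release-1.10.0")
    register_if_missing("com_google_googletest", strip_prefix="ignored-override")
    print(_registered["com_google_googletest"])  # keeps the first registration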
f711c29e3d1f0fb14fafb46f7dcad65b099eb3d5
4,558
py
Python
_notebooks/canada_voc.py
cyanide1x/covid19-dashboard
7da01c2477c0691caf869d7401587dc85cacef29
[ "Apache-2.0" ]
1,740
2020-03-19T17:26:24.000Z
2022-03-30T08:04:55.000Z
_notebooks/canada_voc.py
cyanide1x/covid19-dashboard
7da01c2477c0691caf869d7401587dc85cacef29
[ "Apache-2.0" ]
901
2020-03-19T19:16:58.000Z
2022-03-31T00:23:58.000Z
_notebooks/canada_voc.py
cyanide1x/covid19-dashboard
7da01c2477c0691caf869d7401587dc85cacef29
[ "Apache-2.0" ]
516
2020-03-19T16:28:36.000Z
2022-03-28T15:22:19.000Z
import pandas as pd import plotly.express as px url = 'https://health-infobase.canada.ca/src/data/covidLive/covid19-epiSummary-voc.csv' prov_dict = { "AB" : "Alberta", "BC" : "British Columbia", "CA" : "Canada", "MB" : "Manitoba", "NB" : "New Brunswick", "NL" : "Newfoundland and Labrador", "NS" : "Nova Scotia", "NT" : "Northwest Territories", "NU" : "Nunavut", "ON" : "Ontario", "PE" : "Prince Edward Island", "QC" : "Quebec", "SK" : "Saskatchewan", "YK" : "Yukon", "YT" : "Yukon" } colours = ["#012169", "#E03C31", "green", "lightgray"] def get_province(prov): try: return prov_dict[prov] except: return prov def get_area(prov): if prov == 'YK': return 'YT' else: return prov df = pd.read_csv(url).fillna(0) dfclean = df[ (df["report_date"] > "2021") & (df["report_date"] < "2023") & (df["b117"] >= 0) & (df["b1351"] >= 0) & (df["p1"] >= 0) ] dfclean["Province"] = dfclean.apply(lambda r: get_province(r["prov"]), axis=1) dfclean["Area"] = dfclean.apply(lambda r: get_area(r["prov"]), axis=1) dfAlpha = dfclean.copy() dfAlpha["Variant"] = "B.1.1.7 (Alpha)" dfAlpha["Count"] = dfAlpha["b117"] dfBeta = dfclean.copy() dfBeta["Variant"] = "B.1.351 (Beta)" dfBeta["Count"] = dfBeta["b1351"] dfGamma = dfclean.copy() dfGamma["Variant"] = "P.1 (Gamma)" dfGamma["Count"] = dfGamma["p1"] dfvoc = dfAlpha.append(dfBeta).append(dfGamma) dfvocmax = dfvoc.groupby(["Province", "Variant"]).max().reset_index() \ [["Province", "Variant", "Count"]] \ .rename(columns={"Count" : "MaxVocCount"}) dfvoc = pd.merge(dfvoc, dfvocmax, how="left", left_on=["Province", "Variant"], right_on=["Province", "Variant"]) dfvoc = dfvoc.sort_values(by=["Variant", "MaxVocCount", "Province", "report_date"], ascending=[True, False, True, True]) dfvoc["New"] = dfvoc.groupby(["Province", "Variant"])["Count"].diff() dfprov = dfvoc[dfvoc["Province"] != "Canada"] figlineprov = px.line(dfprov, x="report_date", y="Count", color="Variant", facet_col="Province", facet_col_wrap=1, labels={"report_date" : "Reported date", "Count" : "Cumulative cases", "Province" : "Province/Territory"}, title="Cumulative cases with a variant of concern<br>by reported date by province/territory by variant", height=5000, template="plotly_white", color_discrete_sequence=colours, facet_row_spacing=0.025 ) figbarprovd = px.bar(dfprov, x="report_date", y="New", color="Variant", facet_col="Province", facet_col_wrap=1, labels={"report_date" : "Reported date", "New" : "New cases", "Province" : "Province/Territory", "Variant" : "Variant of concern"}, hover_name="Variant", title="New cases with a variant of concern by reported date<br>by province/territory", height=5000, template="plotly_white", color_discrete_sequence=colours, facet_row_spacing=0.025 ) dfcan = dfvoc[dfvoc["Province"] == "Canada"] figlinecan_c = px.line(dfcan, x="report_date", y="Count", color="Variant", labels={"report_date" : "Reported date", "Count" : "Cumulative cases"}, title="Cumulative cases in Canada with a variant of concern<br>by reported date by variant", template="plotly_white", color_discrete_sequence=colours ) figbarcan_d = px.bar(dfcan, x="report_date", y="New", color="Variant", labels={"report_date" : "Reported date", "New" : "New cases", "Variant" : "Variant of concern"}, hover_name="Variant", title="New cases in Canada with a variant of concern by reported date", template="plotly_white", color_discrete_sequence=colours ) # Accessibility date_name = "Date" def join(df, area, variant): dfarea = dfclean[dfclean["Area"] == area][["report_date", variant]].rename(columns={"report_date" : date_name, variant 
: area}) return pd.merge(df, dfarea, how="left", left_on=[date_name], right_on=[date_name]) def create_table(variant): date_max = dfclean.max()["report_date"] df_max = dfclean[(dfclean["Area"]!="CA") & (dfclean["report_date"] == date_max)][["Area", variant]].sort_values(by=[variant, "Area"], ascending=[False, True]) areas = df_max["Area"].tolist() df_variant = pd.DataFrame() df_variant[date_name] = dfclean[dfclean["Area"]=="CA"]["report_date"] for area in areas: df_variant = join(df_variant, area, variant) df_variant = join(df_variant, "CA", variant) return df_variant.set_index(date_name).sort_values(by=[date_name], ascending=[False]).round().astype(int) df_Alpha = create_table("b117") df_Beta = create_table("b1351") df_Gamma = create_table("p1")
35.889764
159
0.66301
import pandas as pd import plotly.express as px url = 'https://health-infobase.canada.ca/src/data/covidLive/covid19-epiSummary-voc.csv' prov_dict = { "AB" : "Alberta", "BC" : "British Columbia", "CA" : "Canada", "MB" : "Manitoba", "NB" : "New Brunswick", "NL" : "Newfoundland and Labrador", "NS" : "Nova Scotia", "NT" : "Northwest Territories", "NU" : "Nunavut", "ON" : "Ontario", "PE" : "Prince Edward Island", "QC" : "Quebec", "SK" : "Saskatchewan", "YK" : "Yukon", "YT" : "Yukon" } colours = ["#012169", "#E03C31", "green", "lightgray"] def get_province(prov): try: return prov_dict[prov] except: return prov def get_area(prov): if prov == 'YK': return 'YT' else: return prov df = pd.read_csv(url).fillna(0) dfclean = df[ (df["report_date"] > "2021") & (df["report_date"] < "2023") & (df["b117"] >= 0) & (df["b1351"] >= 0) & (df["p1"] >= 0) ] dfclean["Province"] = dfclean.apply(lambda r: get_province(r["prov"]), axis=1) dfclean["Area"] = dfclean.apply(lambda r: get_area(r["prov"]), axis=1) dfAlpha = dfclean.copy() dfAlpha["Variant"] = "B.1.1.7 (Alpha)" dfAlpha["Count"] = dfAlpha["b117"] dfBeta = dfclean.copy() dfBeta["Variant"] = "B.1.351 (Beta)" dfBeta["Count"] = dfBeta["b1351"] dfGamma = dfclean.copy() dfGamma["Variant"] = "P.1 (Gamma)" dfGamma["Count"] = dfGamma["p1"] dfvoc = dfAlpha.append(dfBeta).append(dfGamma) dfvocmax = dfvoc.groupby(["Province", "Variant"]).max().reset_index() \ [["Province", "Variant", "Count"]] \ .rename(columns={"Count" : "MaxVocCount"}) dfvoc = pd.merge(dfvoc, dfvocmax, how="left", left_on=["Province", "Variant"], right_on=["Province", "Variant"]) dfvoc = dfvoc.sort_values(by=["Variant", "MaxVocCount", "Province", "report_date"], ascending=[True, False, True, True]) dfvoc["New"] = dfvoc.groupby(["Province", "Variant"])["Count"].diff() dfprov = dfvoc[dfvoc["Province"] != "Canada"] figlineprov = px.line(dfprov, x="report_date", y="Count", color="Variant", facet_col="Province", facet_col_wrap=1, labels={"report_date" : "Reported date", "Count" : "Cumulative cases", "Province" : "Province/Territory"}, title="Cumulative cases with a variant of concern<br>by reported date by province/territory by variant", height=5000, template="plotly_white", color_discrete_sequence=colours, facet_row_spacing=0.025 ) figbarprovd = px.bar(dfprov, x="report_date", y="New", color="Variant", facet_col="Province", facet_col_wrap=1, labels={"report_date" : "Reported date", "New" : "New cases", "Province" : "Province/Territory", "Variant" : "Variant of concern"}, hover_name="Variant", title="New cases with a variant of concern by reported date<br>by province/territory", height=5000, template="plotly_white", color_discrete_sequence=colours, facet_row_spacing=0.025 ) dfcan = dfvoc[dfvoc["Province"] == "Canada"] figlinecan_c = px.line(dfcan, x="report_date", y="Count", color="Variant", labels={"report_date" : "Reported date", "Count" : "Cumulative cases"}, title="Cumulative cases in Canada with a variant of concern<br>by reported date by variant", template="plotly_white", color_discrete_sequence=colours ) figbarcan_d = px.bar(dfcan, x="report_date", y="New", color="Variant", labels={"report_date" : "Reported date", "New" : "New cases", "Variant" : "Variant of concern"}, hover_name="Variant", title="New cases in Canada with a variant of concern by reported date", template="plotly_white", color_discrete_sequence=colours ) date_name = "Date" def join(df, area, variant): dfarea = dfclean[dfclean["Area"] == area][["report_date", variant]].rename(columns={"report_date" : date_name, variant : area}) return 
pd.merge(df, dfarea, how="left", left_on=[date_name], right_on=[date_name]) def create_table(variant): date_max = dfclean.max()["report_date"] df_max = dfclean[(dfclean["Area"]!="CA") & (dfclean["report_date"] == date_max)][["Area", variant]].sort_values(by=[variant, "Area"], ascending=[False, True]) areas = df_max["Area"].tolist() df_variant = pd.DataFrame() df_variant[date_name] = dfclean[dfclean["Area"]=="CA"]["report_date"] for area in areas: df_variant = join(df_variant, area, variant) df_variant = join(df_variant, "CA", variant) return df_variant.set_index(date_name).sort_values(by=[date_name], ascending=[False]).round().astype(int) df_Alpha = create_table("b117") df_Beta = create_table("b1351") df_Gamma = create_table("p1")
true
true
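The notebook's central data step is converting cumulative variant counts into daily new cases with a per-group diff(); note also that DataFrame.append used above is deprecated in recent pandas, with pd.concat as the usual replacement. The toy frame below only borrows the column names from the script and is otherwise made up.

import pandas as pd

df = pd.DataFrame({
    "Province": ["Ontario"] * 3 + ["Quebec"] * 3,
    "Variant": ["B.1.1.7 (Alpha)"] * 6,
    "report_date": ["2021-03-01", "2021-03-02", "2021-03-03"] * 2,
    "Count": [10, 15, 22, 4, 4, 9],
})
df = df.sort_values(["Province", "Variant", "report_date"])
# diff() within each group turns cumulative totals into day-over-day new cases.
df["New"] = df.groupby(["Province", "Variant"])["Count"].diff()
print(df)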
f711c384e39bc5150a7c51615c37260e97b037af
2,091
py
Python
August/Week4/Random Point in Non-overlapping Rectangles.py
vinaykumar7686/Leetcode-August_Challenge
fe1928d8b10a63d7aa561118a70eeaec2f3a2f36
[ "MIT" ]
1
2020-08-02T13:41:38.000Z
2020-08-02T13:41:38.000Z
August/Week4/Random Point in Non-overlapping Rectangles.py
vinaykumar7686/Leetcode-August_Challenge
fe1928d8b10a63d7aa561118a70eeaec2f3a2f36
[ "MIT" ]
null
null
null
August/Week4/Random Point in Non-overlapping Rectangles.py
vinaykumar7686/Leetcode-August_Challenge
fe1928d8b10a63d7aa561118a70eeaec2f3a2f36
[ "MIT" ]
null
null
null
# Random Point in Non-overlapping Rectangles ''' Given a list of non-overlapping axis-aligned rectangles rects, write a function pick which randomly and uniformily picks an integer point in the space covered by the rectangles. Note: An integer point is a point that has integer coordinates. A point on the perimeter of a rectangle is included in the space covered by the rectangles. ith rectangle = rects[i] = [x1,y1,x2,y2], where [x1, y1] are the integer coordinates of the bottom-left corner, and [x2, y2] are the integer coordinates of the top-right corner. length and width of each rectangle does not exceed 2000. 1 <= rects.length <= 100 pick return a point as an array of integer coordinates [p_x, p_y] pick is called at most 10000 times. Example 1: Input: ["Solution","pick","pick","pick"] [[[[1,1,5,5]]],[],[],[]] Output: [null,[4,1],[4,1],[3,3]] Example 2: Input: ["Solution","pick","pick","pick","pick","pick"] [[[[-2,-2,-1,-1],[1,0,3,0]]],[],[],[],[],[]] Output: [null,[-1,-2],[2,0],[-2,-1],[3,0],[-2,-2]] Explanation of Input Syntax: The input is two lists: the subroutines called and their arguments. Solution's constructor has one argument, the array of rectangles rects. pick has no arguments. Arguments are always wrapped with a list, even if there aren't any. ''' import random class Solution: def __init__(self, rects: List[List[int]]): self.rects = rects self.weights = [] s = 0 for x1, y1, x2, y2 in rects: w = (x2-x1+1)*(y2-y1+1) self.weights.append(w) s+=w self.weights = [x/s for x in self.weights] print(self.weights) def pick(self) -> List[int]: rectangle = random.choices(population = self.rects, weights = self.weights, k=1)[0] x1, y1, x2, y2 = rectangle return [random.randint(x1,x2), random.randint(y1,y2)] # Your Solution object will be instantiated and called as such: # obj = Solution(rects) # param_1 = obj.pick()
30.75
177
0.624582
import random class Solution: def __init__(self, rects: List[List[int]]): self.rects = rects self.weights = [] s = 0 for x1, y1, x2, y2 in rects: w = (x2-x1+1)*(y2-y1+1) self.weights.append(w) s+=w self.weights = [x/s for x in self.weights] print(self.weights) def pick(self) -> List[int]: rectangle = random.choices(population = self.rects, weights = self.weights, k=1)[0] x1, y1, x2, y2 = rectangle return [random.randint(x1,x2), random.randint(y1,y2)]
true
true
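The solution above weights each rectangle by its count of integer points, (x2-x1+1)*(y2-y1+1), so perimeter points are included; it then samples a point uniformly inside the chosen rectangle. The snippet below re-derives the same weighting with cumulative sums and bisect instead of random.choices; the names rects, cum and pick are illustrative.

import random
import bisect

rects = [[-2, -2, -1, -1], [1, 0, 3, 0]]
weights = [(x2 - x1 + 1) * (y2 - y1 + 1) for x1, y1, x2, y2 in rects]  # [4, 3] lattice points
cum = [sum(weights[:i + 1]) for i in range(len(weights))]              # [4, 7]

def pick(rng=random.Random(42)):
    # Choose a rectangle proportionally to its lattice-point count,
    # then a uniform integer point inside it (perimeter included).
    k = bisect.bisect_left(cum, rng.randint(1, cum[-1]))
    x1, y1, x2, y2 = rects[k]
    return [rng.randint(x1, x2), rng.randint(y1, y2)]

if __name__ == "__main__":
    print([pick() for _ in range(5)])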
f711c3edb41f016ee66e987b06daca0b3a530302
10,868
py
Python
assignment1/cs231n/classifiers/neural_net.py
yufei1900/cs231n-homework
b7f5a03d5a2b650603074a7c43f203b465b74333
[ "MIT" ]
null
null
null
assignment1/cs231n/classifiers/neural_net.py
yufei1900/cs231n-homework
b7f5a03d5a2b650603074a7c43f203b465b74333
[ "MIT" ]
null
null
null
assignment1/cs231n/classifiers/neural_net.py
yufei1900/cs231n-homework
b7f5a03d5a2b650603074a7c43f203b465b74333
[ "MIT" ]
null
null
null
from __future__ import print_function import numpy as np import matplotlib.pyplot as plt class TwoLayerNet(object): """ A two-layer fully-connected neural network. The net has an input dimension of N, a hidden layer dimension of H, and performs classification over C classes. We train the network with a softmax loss function and L2 regularization on the weight matrices. The network uses a ReLU nonlinearity after the first fully connected layer. In other words, the network has the following architecture: input - fully connected layer - ReLU - fully connected layer - softmax The outputs of the second fully-connected layer are the scores for each class. """ def __init__(self, input_size, hidden_size, output_size, std=1e-4): """ Initialize the model. Weights are initialized to small random values and biases are initialized to zero. Weights and biases are stored in the variable self.params, which is a dictionary with the following keys: W1: First layer weights; has shape (D, H) b1: First layer biases; has shape (H,) W2: Second layer weights; has shape (H, C) b2: Second layer biases; has shape (C,) Inputs: - input_size: The dimension D of the input data. - hidden_size: The number of neurons H in the hidden layer. - output_size: The number of classes C. """ self.params = {} self.params['W1'] = std * np.random.randn(input_size, hidden_size) self.params['b1'] = np.zeros(hidden_size) self.params['W2'] = std * np.random.randn(hidden_size, output_size) self.params['b2'] = np.zeros(output_size) def loss(self, X, y=None, reg=0.0): """ Compute the loss and gradients for a two layer fully connected neural network. Inputs: - X: Input data of shape (N, D). Each X[i] is a training sample. - y: Vector of training labels. y[i] is the label for X[i], and each y[i] is an integer in the range 0 <= y[i] < C. This parameter is optional; if it is not passed then we only return scores, and if it is passed then we instead return the loss and gradients. - reg: Regularization strength. Returns: If y is None, return a matrix scores of shape (N, C) where scores[i, c] is the score for class c on input X[i]. If y is not None, instead return a tuple of: - loss: Loss (data loss and regularization loss) for this batch of training samples. - grads: Dictionary mapping parameter names to gradients of those parameters with respect to the loss function; has the same keys as self.params. """ # Unpack variables from the params dictionary W1, b1 = self.params['W1'], self.params['b1'] W2, b2 = self.params['W2'], self.params['b2'] N, D = X.shape # Compute the forward pass scores = None ############################################################################# # TODO: Perform the forward pass, computing the class scores for the input. # # Store the result in the scores variable, which should be an array of # # shape (N, C). # ############################################################################# out1 = np.maximum(0, X.dot(W1) + b1) # relu, (N, H) scores = out1.dot(W2) + b2 # (N, C) ############################################################################# # END OF YOUR CODE # ############################################################################# # If the targets are not given then jump out, we're done if y is None: return scores # Compute the loss loss = None ############################################################################# # TODO: Finish the forward pass, and compute the loss. This should include # # both the data loss and L2 regularization for W1 and W2. 
Store the result # # in the variable loss, which should be a scalar. Use the Softmax # # classifier loss. # ############################################################################# correct_class_score = scores[np.arange(N), y].reshape(N, 1) exp_sum = np.sum(np.exp(scores), axis=1).reshape(N, 1) loss = np.sum(np.log(exp_sum) - correct_class_score) loss /= N loss += 0.5 * reg * np.sum(W1 * W1)+ 0.5 * reg * np.sum(W2 * W2) ############################################################################# # END OF YOUR CODE # ############################################################################# # Backward pass: compute gradients grads = {} ############################################################################# # TODO: Compute the backward pass, computing the derivatives of the weights # # and biases. Store the results in the grads dictionary. For example, # # grads['W1'] should store the gradient on W1, and be a matrix of same size # ############################################################################# margin = np.exp(scores) / exp_sum margin[np.arange(N), y] += -1 margin /= N #(N, C) dW2 = out1.T.dot(margin) #(H ,C) dW2 += reg * W2 grads['W2'] = dW2 grads['b2'] = np.sum(margin, axis = 0) margin1 = margin.dot(W2.T) #(N, H) margin1[out1 <= 0] = 0 dW1 = X.T.dot(margin1) #(D, H) dW1 += reg * W1 grads['W1'] = dW1 grads['b1'] = np.sum(margin1, axis = 0) ############################################################################# # END OF YOUR CODE # ############################################################################# return loss, grads def train(self, X, y, X_val, y_val, learning_rate=1e-3, learning_rate_decay=0.95, reg=5e-6, num_iters=100, batch_size=200, verbose=False): """ Train this neural network using stochastic gradient descent. Inputs: - X: A numpy array of shape (N, D) giving training data. - y: A numpy array f shape (N,) giving training labels; y[i] = c means that X[i] has label c, where 0 <= c < C. - X_val: A numpy array of shape (N_val, D) giving validation data. - y_val: A numpy array of shape (N_val,) giving validation labels. - learning_rate: Scalar giving learning rate for optimization. - learning_rate_decay: Scalar giving factor used to decay the learning rate after each epoch. - reg: Scalar giving regularization strength. - num_iters: Number of steps to take when optimizing. - batch_size: Number of training examples to use per step. - verbose: boolean; if true print progress during optimization. """ num_train = X.shape[0] iterations_per_epoch = max(num_train / batch_size, 1) # Use SGD to optimize the parameters in self.model loss_history = [] train_acc_history = [] val_acc_history = [] for it in range(num_iters): X_batch = None y_batch = None ######################################################################### # TODO: Create a random minibatch of training data and labels, storing # # them in X_batch and y_batch respectively. 
# ######################################################################### mask = np.random.choice(num_train, batch_size, replace=True) X_batch = X[mask] y_batch = y[mask] ######################################################################### # END OF YOUR CODE # ######################################################################### # Compute loss and gradients using the current minibatch loss, grads = self.loss(X_batch, y=y_batch, reg=reg) loss_history.append(loss) ######################################################################### # TODO: Use the gradients in the grads dictionary to update the # # parameters of the network (stored in the dictionary self.params) # # using stochastic gradient descent. You'll need to use the gradients # # stored in the grads dictionary defined above. # ######################################################################### self.params['W1'] -= learning_rate * grads['W1'] self.params['W2'] -= learning_rate * grads['W2'] self.params['b1'] -= learning_rate * grads['b1'] self.params['b2'] -= learning_rate * grads['b2'] ######################################################################### # END OF YOUR CODE # ######################################################################### if verbose and it % 100 == 0: print('iteration %d / %d: loss %f' % (it, num_iters, loss)) # Every epoch, check train and val accuracy and decay learning rate. if it % iterations_per_epoch == 0: # Check accuracy train_acc = (self.predict(X_batch) == y_batch).mean() val_acc = (self.predict(X_val) == y_val).mean() train_acc_history.append(train_acc) val_acc_history.append(val_acc) # Decay learning rate learning_rate *= learning_rate_decay return { 'loss_history': loss_history, 'train_acc_history': train_acc_history, 'val_acc_history': val_acc_history, } def predict(self, X): """ Use the trained weights of this two-layer network to predict labels for data points. For each data point we predict scores for each of the C classes, and assign each data point to the class with the highest score. Inputs: - X: A numpy array of shape (N, D) giving N D-dimensional data points to classify. Returns: - y_pred: A numpy array of shape (N,) giving predicted labels for each of the elements of X. For all i, y_pred[i] = c means that X[i] is predicted to have class c, where 0 <= c < C. """ y_pred = None ########################################################################### # TODO: Implement this function; it should be VERY simple! # ########################################################################### out1 = np.maximum(0, X.dot(self.params['W1']) + self.params['b1']) # relu, (N, H) y_pred = np.argmax(out1.dot(self.params['W2']) + self.params['b2'],axis = 1) # (N, C) ########################################################################### # END OF YOUR CODE # ########################################################################### return y_pred
44.72428
90
0.502392
from __future__ import print_function import numpy as np import matplotlib.pyplot as plt class TwoLayerNet(object): def __init__(self, input_size, hidden_size, output_size, std=1e-4): self.params = {} self.params['W1'] = std * np.random.randn(input_size, hidden_size) self.params['b1'] = np.zeros(hidden_size) self.params['W2'] = std * np.random.randn(hidden_size, output_size) self.params['b2'] = np.zeros(output_size) def loss(self, X, y=None, reg=0.0): W1, b1 = self.params['W1'], self.params['b1'] W2, b2 = self.params['W2'], self.params['b2'] N, D = X.shape scores = None
true
true
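As a shape sanity check for the two-layer network above (affine - ReLU - affine - softmax), the standalone snippet below runs a forward pass and a numerically stable softmax loss on random data; it is a sketch for orientation, not the assignment's reference implementation.

import numpy as np

rng = np.random.default_rng(0)
N, D, H, C = 5, 4, 10, 3
W1, b1 = 1e-2 * rng.standard_normal((D, H)), np.zeros(H)
W2, b2 = 1e-2 * rng.standard_normal((H, C)), np.zeros(C)
X = rng.standard_normal((N, D))
y = rng.integers(0, C, size=N)

hidden = np.maximum(0, X @ W1 + b1)   # ReLU after the first affine layer, shape (N, H)
scores = hidden @ W2 + b2             # class scores, shape (N, C)
# Numerically stable softmax + mean cross-entropy over the labels y.
shifted = scores - scores.max(axis=1, keepdims=True)
log_probs = shifted - np.log(np.exp(shifted).sum(axis=1, keepdims=True))
loss = -log_probs[np.arange(N), y].mean()
print(scores.shape, float(loss))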
f711c50b29718841fd8d02a9599430af17487b17
197
py
Python
mainconfig/wsgi.py
rodrigofuentealbacartes/podcasts
a64190a92b346d9432143a659bb3e69bbdb62ea9
[ "MIT" ]
null
null
null
mainconfig/wsgi.py
rodrigofuentealbacartes/podcasts
a64190a92b346d9432143a659bb3e69bbdb62ea9
[ "MIT" ]
null
null
null
mainconfig/wsgi.py
rodrigofuentealbacartes/podcasts
a64190a92b346d9432143a659bb3e69bbdb62ea9
[ "MIT" ]
null
null
null
# coding: utf-8 import os from django.core.wsgi import get_wsgi_application os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'mainconfig.settings') application = get_wsgi_application()
19.7
71
0.771574
import os from django.core.wsgi import get_wsgi_application os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'mainconfig.settings') application = get_wsgi_application()
true
true
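The WSGI module above only exposes the application callable; how it is served is up to the deployment. As a purely local illustration of consuming a WSGI callable, the snippet below uses the standard library's wsgiref server with a stand-in app of the same signature; it is not the project's deployment configuration.

from wsgiref.simple_server import make_server

def application(environ, start_response):
    # Stand-in app with the same WSGI signature that Django's get_wsgi_application() returns.
    start_response("200 OK", [("Content-Type", "text/plain")])
    return [b"hello"]

if __name__ == "__main__":
    with make_server("127.0.0.1", 8000, application) as httpd:
        print("Serving on http://127.0.0.1:8000 (Ctrl+C to stop)")
        httpd.serve_forever()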
f711c675108eb978960175f4072850d6e419edbf
568
py
Python
model/group.py
winsok/pythonlearning
b74e31a2c3e830d0563ca90c5ba32d59eaf4e74d
[ "Apache-2.0" ]
null
null
null
model/group.py
winsok/pythonlearning
b74e31a2c3e830d0563ca90c5ba32d59eaf4e74d
[ "Apache-2.0" ]
null
null
null
model/group.py
winsok/pythonlearning
b74e31a2c3e830d0563ca90c5ba32d59eaf4e74d
[ "Apache-2.0" ]
null
null
null
from sys import maxsize class Group: def __init__(self, name=None, header=None, footer=None, id=None): self.name = name self.header = header self.footer = footer self.id = id def __repr__(self): return "%s:%s:%s:%s" % (self.id, self.name, self.header,self.footer) def __eq__(self, other): return (self.id is None or other.id is None or self.id == other.id) and self.name == other.name def id_or_max(self): if self.id: return int(self.id) else: return maxsize
25.818182
103
0.584507
from sys import maxsize class Group: def __init__(self, name=None, header=None, footer=None, id=None): self.name = name self.header = header self.footer = footer self.id = id def __repr__(self): return "%s:%s:%s:%s" % (self.id, self.name, self.header,self.footer) def __eq__(self, other): return (self.id is None or other.id is None or self.id == other.id) and self.name == other.name def id_or_max(self): if self.id: return int(self.id) else: return maxsize
true
true
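The Group model above is built for list comparisons in tests: __eq__ treats a missing id as a wildcard and id_or_max gives id-less groups a maximal sort key. The snippet below repeats the class so it runs standalone and shows both behaviours; the sample data is invented and the snippet is not taken from the repository's tests.

from sys import maxsize

class Group:
    def __init__(self, name=None, header=None, footer=None, id=None):
        self.name, self.header, self.footer, self.id = name, header, footer, id
    def __eq__(self, other):
        # A missing id on either side acts as a wildcard; names must still match.
        return (self.id is None or other.id is None or self.id == other.id) and self.name == other.name
    def id_or_max(self):
        return int(self.id) if self.id else maxsize

old = [Group(name="b", id="2"), Group(name="a", id="1")]
new = old + [Group(name="c")]                      # created in the UI, no id yet
print(sorted(new, key=Group.id_or_max)[-1].name)   # "c" sorts last
print(Group(name="a") == Group(name="a", id="1"))  # True: missing id is a wildcard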
f711c959f7372ed5b06b0f6103bde288b12876ab
351
py
Python
#001/main.py
DSAghicha/project-euler
74de01ab4bbd8b337b2a9c79118f3e8487e7766c
[ "MIT" ]
null
null
null
#001/main.py
DSAghicha/project-euler
74de01ab4bbd8b337b2a9c79118f3e8487e7766c
[ "MIT" ]
null
null
null
#001/main.py
DSAghicha/project-euler
74de01ab4bbd8b337b2a9c79118f3e8487e7766c
[ "MIT" ]
null
null
null
""" @param: n -> int : Upper Limit of the range """ def multiples(n: int) -> int: num: list = [] for i in range(1, n): if (i % 3 == 0) or (i % 5 == 0): num.append(i) return sum(num) if __name__ == '__main__': t: int = int(input()) for _x in range(t): n: int = int(input()) print(multiples(n))
20.647059
43
0.48433
def multiples(n: int) -> int: num: list = [] for i in range(1, n): if (i % 3 == 0) or (i % 5 == 0): num.append(i) return sum(num) if __name__ == '__main__': t: int = int(input()) for _x in range(t): n: int = int(input()) print(multiples(n))
true
true
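The loop in multiples() can be cross-checked with a closed form: the multiples of k below n sum to k*m*(m+1)/2 with m = (n-1)//k, and inclusion-exclusion subtracts the multiples of 15 that would otherwise be counted twice. The helper names below are illustrative.

def sum_multiples_below(n: int, k: int) -> int:
    # Sum of k, 2k, ..., mk for the largest m with mk < n.
    m = (n - 1) // k
    return k * m * (m + 1) // 2

def multiples_closed_form(n: int) -> int:
    return (sum_multiples_below(n, 3)
            + sum_multiples_below(n, 5)
            - sum_multiples_below(n, 15))

if __name__ == "__main__":
    print(multiples_closed_form(10))    # 23: the multiples 3, 5, 6, 9
    print(multiples_closed_form(1000))  # 233168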
f711c9c48b8465a09bdde89596887d208a331294
347
py
Python
7/7_3.py
kopsh/python_cookbook
298c092cd20404a0755e2170776c44a04e8648ad
[ "CNRI-Python" ]
null
null
null
7/7_3.py
kopsh/python_cookbook
298c092cd20404a0755e2170776c44a04e8648ad
[ "CNRI-Python" ]
null
null
null
7/7_3.py
kopsh/python_cookbook
298c092cd20404a0755e2170776c44a04e8648ad
[ "CNRI-Python" ]
null
null
null
class Solution: r""" Function annotations >>> def add(x: int, y: int) -> int: ... return x + y >>> add.__annotations__ {'x': <class 'int'>, 'y': <class 'int'>, 'return': <class 'int'>} """ def __init__(self): pass def solve(self): pass if __name__ == '__main__': import doctest doctest.testmod()
18.263158
69
0.498559
class Solution: def __init__(self): pass def solve(self): pass if __name__ == '__main__': import doctest doctest.testmod()
true
true
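As a companion to the annotation note above: __annotations__ exposes the raw annotation mapping, and typing.get_type_hints() resolves string annotations as well. The add function is the docstring's own example, repeated here so the snippet runs standalone.

from typing import get_type_hints

def add(x: int, y: int) -> int:
    return x + y

print(add.__annotations__)  # {'x': <class 'int'>, 'y': <class 'int'>, 'return': <class 'int'>}
print(get_type_hints(add))  # same mapping, with any string annotations resolved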
f711ca2aa2c70076688df3fd190dd200762c7261
36,146
py
Python
source/SpreadsheettoEAD/func/archdesc.py
gwiedeman/eadmachine
f6c0c0f92fc20ab6dcf4962fda827b7adb4749d4
[ "Unlicense" ]
5
2016-01-25T15:27:12.000Z
2021-08-17T22:31:48.000Z
source/SpreadsheettoEAD/func/archdesc.py
gwiedeman/eadmachine
f6c0c0f92fc20ab6dcf4962fda827b7adb4749d4
[ "Unlicense" ]
null
null
null
source/SpreadsheettoEAD/func/archdesc.py
gwiedeman/eadmachine
f6c0c0f92fc20ab6dcf4962fda827b7adb4749d4
[ "Unlicense" ]
null
null
null
# module for the <archdesc/> or collection-level description import xml.etree.cElementTree as ET from archdescsimple import archdescsimple from access_use_restrict import access_use_restrict import globals import wx def archdesc(arch_root, CSheet, version, input_data): from wx.lib.pubsub import pub #update GUI progress bar if "ask_gui" in globals.new_elements: wx.CallAfter(pub.sendMessage, "update", msg="Writing <archdesc>...") #collection-level did if "ask_gui" in globals.new_elements: wx.CallAfter(pub.sendMessage, "update", msg="Writing collection-level <did>...") from collection_did import collection_did cdid_root = arch_root.find('did') collection_did(cdid_root, CSheet, version) if "ask_gui" in globals.new_elements: wx.CallAfter(pub.sendMessage, "update", msg="Writing <archdesc> elements...") #Access Restrictions Section if "add_accessrestrict" in globals.new_elements or "add-all" in globals.add_all: add = True else: add = False access_use_restrict(arch_root, CSheet.find('Access'), "accessrestrict", "Access", add) #Accruals Section if "add_accruals" in globals.new_elements or "add-all" in globals.add_all: add = True else: add = False archdescsimple(arch_root, "accruals", CSheet.find('Accruals'), CSheet.find('Accruals/Accrual'), add) #Acquisitions Information Section if CSheet.find('AcquisitionInfo/Acquis/Event') is None: pass else: if CSheet.find('AcquisitionInfo/Acquis/Event').text: if arch_root.find('acqinfo') is None: if "add_acq" in globals.new_elements or "add-all" in globals.add_all: acq_element = ET.Element('acqinfo') arch_root.insert(1, acq_element) count = 0 for acquis in CSheet.find('AcquisitionInfo'): if acquis.find('Event').text and acquis.find('Date').text: count = count + 1 if count > 1: chronlist_element = ET.Element('chronlist') arch_root.find('acqinfo').append(chronlist_element) for acquis in CSheet.find('AcquisitionInfo'): if acquis.find('Event').text: chronitem_element = ET.Element('chronitem') chronlist_element.append(chronitem_element) event_element = ET.Element('event') chronitem_element.append(event_element) event_element.text = acquis.find('Event').text if version == "ead3": if acquis.find('Date').text: from date import basic_date chronitem_element.append(basic_date(acquis.find('Date').text, acquis.find('DateNormal').text, 'inclusive')) else: date_element = ET.Element('date') if acquis.find('Date').text: chronitem_element.append(date_element) date_element.text = acquis.find('Date').text if acquis.find('DateNormal').text: date_element.set('normal', acquis.find('DateNormal').text) else: date_element.set('normal', acquis.find('Date').text) else: for acquis in CSheet.find('AcquisitionInfo'): if acquis.find('Event').text: p_element = ET.Element('p') arch_root.find('acqinfo').append(p_element) p_element.text = acquis.find('Event').text date_element = ET.Element('date') if acquis.find('Date').text: p_element.append(date_element) date_element.text = acquis.find('Date').text if acquis.find('DateNormal').text: date_element.set('normal', acquis.find('DateNormal').text) else: date_element.set('normal', acquis.find('Date').text) else: old_acquis = arch_root.find('acqinfo').attrib old_head = arch_root.find('acqinfo/head') arch_root.find('acqinfo').clear() if old_acquis is None: pass else: arch_root.find('acqinfo').attrib = old_acquis if old_head is None: pass else: arch_root.find('acqinfo').append(old_head) count = 0 for acquis in CSheet.find('AcquisitionInfo'): if acquis.find('Event').text and acquis.find('Date').text: count = count + 1 if count > 1: chronlist_element = 
ET.Element('chronlist') arch_root.find('acqinfo').append(chronlist_element) for acquis in CSheet.find('AcquisitionInfo'): if acquis.find('Event').text: chronitem_element = ET.Element('chronitem') chronlist_element.append(chronitem_element) event_element = ET.Element('event') chronitem_element.append(event_element) event_element.text = acquis.find('Event').text if version == "ead3": if acquis.find('Date').text: from date import basic_date chronitem_element.append(basic_date(acquis.find('Date').text, acquis.find('DateNormal').text, 'inclusive')) else: date_element = ET.Element('date') if acquis.find('Date').text: chronitem_element.append(date_element) date_element.text = acquis.find('Date').text if acquis.find('DateNormal').text: date_element.set('normal', acquis.find('DateNormal').text) else: date_element.set('normal', acquis.find('Date').text) else: for acquis in CSheet.find('AcquisitionInfo'): if acquis.find('Event').text: p_element = ET.Element('p') arch_root.find('acqinfo').append(p_element) p_element.text = acquis.find('Event').text date_element = ET.Element('date') if acquis.find('Date').text: p_element.append(date_element) date_element.text = acquis.find('Date').text if acquis.find('DateNormal').text: date_element.set('normal', acquis.find('DateNormal').text) else: date_element.set('normal', acquis.find('Date').text) else: for empty_acquis in arch_root: if empty_acquis.tag == "acqinfo": arch_root.remove(empty_acquis) # Alternate Forms Available Section <altformavail> if "add_altforms" in globals.new_elements or "add-all" in globals.add_all: add = True else: add = False archdescsimple(arch_root, "altformavail", CSheet.find('AlternateForms'), CSheet.find('AlternateForms/Alternative'), add) # Appraisal Section <appraisal> if "add_appraisal" in globals.new_elements or "add-all" in globals.add_all: add = True else: add = False archdescsimple(arch_root, "appraisal", CSheet.find('AppraisalInfo'), CSheet.find('AppraisalInfo/Appraisal'), add) # Arrangement Section <arrangement> if "add_arrange" in globals.new_elements or "add-all" in globals.add_all: add = True else: add = False if arch_root.find('arrangement/list') is None: arrange_list = False else: arrange_list = True archdescsimple(arch_root, "arrangement", CSheet.find('CollectionArrangement'), CSheet.find('CollectionArrangement/Arrangement'), add) if arrange_list == True: if CSheet.find('CollectionMap/Component/ComponentName').text: if CSheet.find('CollectionMap/Component/ComponentName').text.lower() == "no series" or CSheet.find('CollectionMap/Component/ComponentName').text.lower() == "noseries": pass else: list_element = ET.Element('list') if arch_root.find('arrangement') is None: arrangement_element = ET.Element('arrangement') arr_index = arch_root.getchildren().index(arch_root.find('dsc')) - 1 arch_root.insert(arr_index, arrangement_element) arrangement_element.append(list_element) else: arch_root.find('arrangement').append(list_element) list_element.set('type', 'simple') for cmpnt in CSheet.find('CollectionMap'): if cmpnt.find('ComponentName').text: item_element = ET.Element('item') list_element.append(item_element) if cmpnt.find('ComponentLevel').text == "1": emph_element = ET.Element('emph') item_element.append(emph_element) emph_element.set('render', 'bold') if cmpnt.find('ComponentNumber').text: emph_element.text = "Series " + cmpnt.find('ComponentNumber').text + " - " + cmpnt.find('ComponentName').text else: emph_element.text = "Series" + " - " + cmpnt.find('ComponentName').text cmpnt_num = cmpnt.find('ComponentNumber').text 
for ComponentSheet in input_data: if ComponentSheet.find('SeriesNumber') is None: pass elif ComponentSheet.find('SeriesNumber').text == cmpnt_num: cmpnt_info = ComponentSheet if cmpnt_info.find('SeriesDate').text: emph_element.tail = ", " + cmpnt_info.find('SeriesDate').text else: if cmpnt.find('ComponentNumber').text: item_element.text = "Subseries " + cmpnt.find('ComponentNumber').text + ": " + cmpnt.find('ComponentName').text else: item_element.text = "Subseries" + ": " + cmpnt.find('ComponentName').text cmpnt_num = cmpnt.find('ComponentNumber').text for ComponentSheet in input_data: if ComponentSheet.find('SeriesNumber') is None: pass elif ComponentSheet.find('SeriesNumber').text == cmpnt_num: cmpnt_info = ComponentSheet if cmpnt_info.find('SeriesDate').text: emph_element.tail = ", " + cmpnt_info.find('SeriesDate').text # Bibliography Section <bibliography> if CSheet.find('PublicationBibliography/Publication/Title').text or CSheet.find('ManuscriptBibliography/Manuscript/UnitTitle').text: if arch_root.find('bibliography') is None: if "add_biblio" in globals.new_elements or "add-all" in globals.add_all: biblio_element = ET.Element('bibliography') biblio_index = arch_root.getchildren().index(arch_root.find('dsc')) arch_root.insert(biblio_index, biblio_element) if CSheet.find('BibliographyNote').text: p_element = ET.Element('p') biblio_element.append(p_element) p_element.text = CSheet.find('BibliographyNote').text for pub in CSheet.find('PublicationBibliography'): if pub.find('Author').text or pub.find('Title').text or pub.find('Citation').text: bibref_element = ET.Element('bibref') biblio_element.append(bibref_element) if pub.find('Author').text: bibref_element.text = pub.find('Author').text + ", " if pub.find('Title').text: title_element = ET.Element('title') bibref_element.append(title_element) title_element.text = pub.find('Title').text if pub.find('Citation').text: title_element.tail = " " + pub.find('Citation').text + ", " if pub.find('Date').text: date_element = ET.Element('date') if version == "ead3": bibref_element.append(date_element) date_element.text = pub.find('Date').text else: if pub.find('Title').text: title_element.append(date_element) date_element.text = pub.find('Date').text else: bibref_element.append(date_element) date_element.text = pub.find('Date').text if pub.find('NormalDate').text: date_element.set("normal", pub.find('NormalDate').text) if pub.find('Reference').text: ref_element = ET.Element('ref') bibref_element.append(ref_element) ref_element.text = pub.find('Reference').text if pub.find('ReferenceLink').text: ref_element.set('href', pub.find('ReferenceLink').text) for man in CSheet.find('ManuscriptBibliography'): if man.find('Collection').text or man.find('UnitID').text or man.find('UnitID').text: archref_element = ET.Element('archref') arch_root.find('bibliography').append(archref_element) if man.find('Collection').text: archref_element.text = man.find('Collection').text + ", " if man.find('UnitTitle').text: title_element = ET.Element('title') archref_element.append(title_element) title_element.text = man.find('UnitTitle').text if man.find('UnitID').text: title_element.tail = " " + man.find('UnitID').text + ", " if man.find('Date').text: date_element = ET.Element('date') if version == "ead3": archref_element.append(date_element) date_element.text = man.find('Date').text else: if man.find('UnitTitle').text: title_element.append(date_element) date_element.text = man.find('Date').text else: archref_element.append(date_element) date_element.text = 
man.find('Date').text if man.find('NormalDate').text: date_element.set("normal", man.find('NormalDate').text) if man.find('Reference').text: ref_element = ET.Element('ref') archref_element.append(ref_element) ref_element.text = man.find('Reference').text if man.find('ReferenceLink').text: ref_element.set('href', man.find('ReferenceLink').text) else: old_biblio = arch_root.find('bibliography').attrib old_head = arch_root.find('bibliography/head') arch_root.find('bibliography').clear() if old_biblio is None: pass else: arch_root.find('bibliography').attrib = old_biblio if old_head is None: pass else: arch_root.find('bibliography').append(old_head) if CSheet.find('BibliographyNote').text: p_element = ET.Element('p') arch_root.find('bibliography').append(p_element) p_element.text = CSheet.find('BibliographyNote').text for pub in CSheet.find('PublicationBibliography'): if pub.find('Author').text or pub.find('Title').text or pub.find('Citation').text: bibref_element = ET.Element('bibref') arch_root.find('bibliography').append(bibref_element) if pub.find('Author').text: bibref_element.text = pub.find('Author').text + ", " if pub.find('Title').text: title_element = ET.Element('title') bibref_element.append(title_element) title_element.text = pub.find('Title').text if pub.find('Citation').text: title_element.tail = " " + pub.find('Citation').text + ", " if pub.find('Date').text: date_element = ET.Element('date') if version == "ead3": bibref_element.append(date_element) date_element.text = pub.find('Date').text else: if pub.find('Title').text: title_element.append(date_element) date_element.text = pub.find('Date').text else: bibref_element.append(date_element) date_element.text = pub.find('Date').text if pub.find('NormalDate').text: date_element.set("normal", pub.find('NormalDate').text) if pub.find('Reference').text: ref_element = ET.Element('ref') bibref_element.append(ref_element) ref_element.text = pub.find('Reference').text if pub.find('ReferenceLink').text: ref_element.set('href', pub.find('ReferenceLink').text) for man in CSheet.find('ManuscriptBibliography'): if man.find('Collection').text or man.find('UnitID').text or man.find('UnitID').text: archref_element = ET.Element('archref') arch_root.find('bibliography').append(archref_element) if man.find('Collection').text: archref_element.text = man.find('Collection').text + ", " if man.find('UnitTitle').text: title_element = ET.Element('title') archref_element.append(title_element) title_element.text = man.find('UnitTitle').text if man.find('UnitID').text: title_element.tail = " " + man.find('UnitID').text + ", " if man.find('Date').text: date_element = ET.Element('date') if version == "ead3": archref_element.append(date_element) date_element.text = man.find('Date').text else: if man.find('UnitTitle').text: title_element.append(date_element) date_element.text = man.find('Date').text else: archref_element.append(date_element) date_element.text = man.find('Date').text if man.find('NormalDate').text: date_element.set("normal", man.find('NormalDate').text) if man.find('Reference').text: ref_element = ET.Element('ref') archref_element.append(ref_element) ref_element.text = man.find('Reference').text if man.find('ReferenceLink').text: ref_element.set('href', man.find('ReferenceLink').text) else: old_biblio_list = arch_root.findall('bibliography') for old_biblio in old_biblio_list: arch_root.remove(old_biblio) # Biographical or Administrative History Section<bioghist> if CSheet.find('HistoricalNote/p') is None: pass else: if 
CSheet.find('HistoricalNote/p').text: if arch_root.find('bioghist') is None: if "add_bio" in globals.new_elements or "add-all" in globals.add_all: bio_element = ET.Element('bioghist') bio_index = arch_root.getchildren().index(arch_root.find('dsc')) arch_root.insert(bio_index, bio_element) if CSheet.find('HistoricalNoteTitle').text: head_element = ET.Element('head') arch_root.find('bioghist').append(head_element) head_element.text = CSheet.find('HistoricalNoteTitle').text for para in CSheet.find('HistoricalNote'): p_element = ET.Element('p') bio_element.append(p_element) p_element.text = para.text else: arch_root.find('bioghist').clear() if CSheet.find('HistoricalNoteTitle').text: head_element = ET.Element('head') arch_root.find('bioghist').append(head_element) head_element.text = CSheet.find('HistoricalNoteTitle').text for para in CSheet.find('HistoricalNote'): p_element = ET.Element('p') arch_root.find('bioghist').append(p_element) p_element.text = para.text else: old_hist_list = arch_root.findall('bioghist') for old_hist in old_hist_list: arch_root.remove(old_hist) # Controlled Access Headings <controlaccess> old_access = arch_root.find('controlaccess') if CSheet.find('ControlledAccess/AccessPoint/Part') is None or CSheet.find('ControlledAccess/AccessPoint/ElementName') is None: pass else: if CSheet.find('ControlledAccess/AccessPoint/Part').text and CSheet.find('ControlledAccess/AccessPoint/ElementName').text: if arch_root.find('controlaccess') is None: if "add_controlaccess" in globals.new_elements or "add-all" in globals.add_all: access_element = ET.Element('controlaccess') access_index = arch_root.getchildren().index(arch_root.find('dsc')) arch_root.insert(access_index, access_element) for access in CSheet.find('ControlledAccess'): if access.find('UnitID').text: pass else: if access.find('Part').text and access.find('ElementName').text: new_element = ET.Element(access.find('ElementName').text) access_element.append(new_element) if version == "ead2002": new_element.text = access.find('Part').text else: part_element = ET.Element('part') new_element.append(part_element) part_element.text = access.find('Part').text if access.find('MARCEncoding').text: new_element.set('encodinganalog', access.find('MARCEncoding').text) if access.find('Identifier').text: if version == "ead3": new_element.set('identifier', access.find('Identifier').text) else: new_element.set('id', access.find('Identifier').text) if access.find('Relator').text: if version == "ead3": new_element.set('relator', access.find('Relator').text) else: new_element.set('role', access.find('Relator').text) if access.find('Normal').text: new_element.set('normal', access.find('Normal').text) if access.find('Source').text: new_element.set('source', access.find('Source').text) else: if access.find('Part').text or access.find('ElementName').text: from messages import error error("All Access Headings must have both an Element Name and a Heading, headings without these fields will not be encoded.", False) else: old_access = arch_root.find('controlaccess').attrib old_access_list = arch_root.findall('controlaccess') for old_access_ele in old_access_list: arch_root.remove(old_access_ele) access_element = ET.Element('controlaccess') access_index = arch_root.getchildren().index(arch_root.find('dsc')) arch_root.insert(access_index, access_element) if old_access is None: pass else: access_element.attrib = old_access for access in CSheet.find('ControlledAccess'): if access.find('UnitID').text: pass else: if access.find('Part').text and 
access.find('ElementName').text: new_element = ET.Element(access.find('ElementName').text) access_element.append(new_element) if version == "ead2002": new_element.text = access.find('Part').text else: part_element = ET.Element('part') new_element.append(part_element) part_element.text = access.find('Part').text if access.find('MARCEncoding').text: new_element.set('encodinganalog', access.find('MARCEncoding').text) if access.find('Identifier').text: if version == "ead3": new_element.set('identifier', access.find('Identifier').text) else: new_element.set('id', access.find('Identifier').text) if access.find('Relator').text: if version == "ead3": new_element.set('relator', access.find('Relator').text) else: new_element.set('role', access.find('Relator').text) if access.find('Normal').text: new_element.set('normal', access.find('Normal').text) if access.find('Source').text: new_element.set('source', access.find('Source').text) else: if access.find('Part').text or access.find('ElementName').text: from messages import error error("All Access Headings must have both an Element Name and a Heading, headings without these fields will not be encoded.", False) else: old_ca_list = arch_root.findall('controlaccess') for old_ca in old_ca_list: arch_root.remove(old_ca) # Custodial History section <custodhist> if "add_custhistory" in globals.new_elements or "add-all" in globals.add_all: add = True else: add = False archdescsimple(arch_root, "custodhist", CSheet.find('CustodialHistory'), CSheet.find('CustodialHistory/Event'), add) # Legal Status <legalstatus> if "add_legalstatus" in globals.new_elements or "add-all" in globals.add_all: add = True else: add = False archdescsimple(arch_root, "legalstatus", CSheet.find('LegalStatus'), CSheet.find('LegalStatus/Status'), add) # Location of Originals when collection contains photocopies, etc. 
<originalsloc> if "add_originalsloc" in globals.new_elements or "add-all" in globals.add_all: add = True else: add = False archdescsimple(arch_root, "originalsloc", CSheet.find('LocationOriginals'), CSheet.find('LocationOriginals/Location'), add) # Other Finding Aids <otherfindaid> if "add_otherfa" in globals.new_elements or "add-all" in globals.add_all: add = True else: add = False archdescsimple(arch_root, "otherfindaid", CSheet.find('OtherFindingAids'), CSheet.find('OtherFindingAids/Other'), add) # Physical or technical details or requirements <phystech> if "add_phystech" in globals.new_elements or "add-all" in globals.add_all: add = True else: add = False archdescsimple(arch_root, "phystech", CSheet.find('PhysicalTechnical'), CSheet.find('PhysicalTechnical/Details'), add) # Preferred Citation <prefercite> if "add_prefcite" in globals.new_elements or "add-all" in globals.add_all: add = True else: add = False archdescsimple(arch_root, "prefercite", CSheet.find('PreferredCitation'), CSheet.find('PreferredCitation/Example'), add) # Processing Information <processinfo> if "add_processinfo" in globals.new_elements or "add-all" in globals.add_all: add = True else: add = False archdescsimple(arch_root, "processinfo", CSheet.find('ProcessingInformation'), CSheet.find('ProcessingInformation/Details'), add) # Related Material <relatedmaterial> if CSheet.find('RelatedPublications/Publication/Title') is None or CSheet.find('RelatedManuscripts/Manuscript/UnitTitle') is None: pass else: if CSheet.find('RelatedPublications/Publication/Title').text or CSheet.find('RelatedManuscripts/Manuscript/UnitTitle').text: if arch_root.find('relatedmaterial') is None: if "add_related" in globals.new_elements or "add-all" in globals.add_all: related_element = ET.Element('relatedmaterial') related_index = arch_root.getchildren().index(arch_root.find('dsc')) arch_root.insert(related_index, related_element) if CSheet.find('RelatedMaterialNotes') is None: pass else: for note in CSheet.find('RelatedMaterialNotes'): if note.text: p_element = ET.Element('p') related_element.append(p_element) p_element.text = note.text for related in CSheet.find('RelatedPublications'): if related.find('UnitID') is None: pass else: if related.find('UnitID').text: pass else: if related.find('Author').text or related.find('Title').text or related.find('Citation').text: bibref_element = ET.Element('bibref') related_element.append(bibref_element) if related.find('Author').text: bibref_element.text = related.find('Author').text + ", " if related.find('Title').text: title_element = ET.Element('title') bibref_element.append(title_element) title_element.text = related.find('Title').text if related.find('Citation').text: title_element.tail = " " + related.find('Citation').text + ", " if related.find('Date').text: date_element = ET.Element('date') bibref_element.append(date_element) date_element.text = related.find('Date').text if related.find('NormalDate').text: date_element.set("normal", related.find('NormalDate').text) if related.find('Reference').text: ref_element = ET.Element('ref') bibref_element.append(ref_element) ref_element.text = related.find('Reference').text if related.find('ReferenceLink').text: ref_element.set('href', related.find('ReferenceLink').text) for relatedman in CSheet.find('RelatedManuscripts'): if related.find('UnitID') is None: pass else: if related.find('UnitID').text: pass else: if relatedman.find('Collection').text or relatedman.find('UnitTitle').text or relatedman.find('MaterialID').text: archref_element = 
ET.Element('archref') arch_root.find('relatedmaterial').append(archref_element) if relatedman.find('Collection').text: archref_element.text = relatedman.find('Collection').text + ", " if relatedman.find('UnitTitle').text: title_element = ET.Element('title') archref_element.append(title_element) title_element.text = relatedman.find('UnitTitle').text if relatedman.find('MaterialID').text: title_element.tail = " " + relatedman.find('MaterialID').text + ", " if relatedman.find('Date').text: date_element = ET.Element('date') archref_element.append(date_element) date_element.text = relatedman.find('Date').text if relatedman.find('NormalDate').text: date_element.set("normal", relatedman.find('NormalDate').text) if relatedman.find('Reference').text: ref_element = ET.Element('ref') archref_element.append(ref_element) ref_element.text = relatedman.find('Reference').text if relatedman.find('ReferenceLink').text: ref_element.set('href', relatedman.find('ReferenceLink').text) else: old_related = arch_root.find('relatedmaterial').attrib old_head = arch_root.find('relatedmaterial/head') arch_root.find('relatedmaterial').clear() if old_related is None: pass else: arch_root.find('relatedmaterial').attrib = old_related if old_head is None: pass else: arch_root.find('relatedmaterial').append(old_head) for note in CSheet.find('RelatedMaterialNotes'): if note.text: p_element = ET.Element('p') arch_root.find('relatedmaterial').append(p_element) p_element.text = note.text for related in CSheet.find('RelatedPublications'): if related.find('UnitID') is None: pass else: if related.find('UnitID').text: pass else: if related.find('Author').text or related.find('Title').text or related.find('Citation').text: bibref_element = ET.Element('bibref') arch_root.find('relatedmaterial').append(bibref_element) if related.find('Author').text: bibref_element.text = related.find('Author').text + ", " if related.find('Title').text: title_element = ET.Element('title') bibref_element.append(title_element) title_element.text = related.find('Title').text if related.find('Citation').text: title_element.tail = " " + related.find('Citation').text + ", " if related.find('Date').text: date_element = ET.Element('date') bibref_element.append(date_element) date_element.text = related.find('Date').text if related.find('NormalDate').text: date_element.set("normal", related.find('NormalDate').text) if related.find('Reference').text: ref_element = ET.Element('ref') bibref_element.append(ref_element) ref_element.text = related.find('Reference').text if related.find('ReferenceLink').text: ref_element.set('href', related.find('ReferenceLink').text) for relatedman in CSheet.find('RelatedManuscripts'): if related.find('UnitID') is None: pass else: if related.find('UnitID').text: pass else: if relatedman.find('Collection').text or relatedman.find('UnitTitle').text or relatedman.find('MaterialID').text: archref_element = ET.Element('archref') arch_root.find('relatedmaterial').append(archref_element) if relatedman.find('Collection').text: archref_element.text = relatedman.find('Collection').text + ", " if relatedman.find('UnitTitle').text: title_element = ET.Element('title') archref_element.append(title_element) title_element.text = relatedman.find('UnitTitle').text if relatedman.find('MaterialID').text: title_element.tail = " " + relatedman.find('MaterialID').text + ", " if relatedman.find('Date').text: date_element = ET.Element('date') archref_element.append(date_element) date_element.text = relatedman.find('Date').text if 
relatedman.find('NormalDate').text: date_element.set("normal", relatedman.find('NormalDate').text) if relatedman.find('Reference').text: ref_element = ET.Element('ref') archref_element.append(ref_element) ref_element.text = relatedman.find('Reference').text if relatedman.find('ReferenceLink').text: ref_element.set('href', relatedman.find('ReferenceLink').text) else: old_related_list = arch_root.findall('relatedmaterial') for old_related in old_related_list: arch_root.remove(old_related) #relations from relations import relations if version == "ead3": relations(arch_root, CSheet.find('Relations')) # Scope and Content Note <scopecontent> if "add_scope" in globals.new_elements or "add-all" in globals.add_all: add = True else: add = False archdescsimple(arch_root, "scopecontent", CSheet.find('ScopeContent'), CSheet.find('ScopeContent/p'), add) # Separated Materials <separatedmaterial> if "add_sepmat" in globals.new_elements or "add-all" in globals.add_all: add = True else: add = False archdescsimple(arch_root, "separatedmaterial", CSheet.find('SeparatedMaterial'), CSheet.find('SeparatedMaterial/Material'), add) # Use Restrictions <userestrict> if "add_userestrict" in globals.new_elements or "add-all" in globals.add_all: add is True else: add is False access_use_restrict(arch_root, CSheet.find('UseRestrictions'), "userestrict", "Use", add) #dsc from dsc import dsc dsc(arch_root.find('dsc'), input_data, version) ################################################################################################################## #archdesc elements matched to lower levels: ################################################################################################################## from wx.lib.pubsub import pub if "ask_gui" in globals.new_elements: wx.CallAfter(pub.sendMessage, "update", msg="Writing <archdesc> elements to lower levels...") #Access and Use Restricitons matched to lower levels: from access_use_restrict import access_use_lower if CSheet.find('CollectionID').text and CSheet.find('IDModel/CollectionSeparator').text: collectionID = CSheet.find('CollectionID').text + CSheet.find('IDModel/CollectionSeparator').text else: if CSheet.find('CollectionID').text: collectionID = CSheet.find('CollectionID').text else: collectionID = "" series_separator = CSheet.find('IDModel/SeriesSeparator').text access_use_lower(arch_root, CSheet.find('Access'), "accessrestrict", collectionID, series_separator) access_use_lower(arch_root, CSheet.find('UseRestrictions'), "userestrict", collectionID, series_separator) #Acquisitions matched to lower levels: from archdesc_lower import acquisitions_lower acquisitions_lower(arch_root, CSheet.find('AcquisitionInfo'), version, "acqinfo", collectionID, series_separator) #Controlled Access Headings matched to lower levels: from archdesc_lower import controlaccess_lower controlaccess_lower(arch_root, CSheet.find('ControlledAccess'), version, "controlaccess", collectionID, series_separator) #Related Material matched to lower levels: from archdesc_lower import relatedmaterial_lower relatedmaterial_lower(arch_root, CSheet.find('RelatedPublications'), CSheet.find('RelatedManuscripts'), version, "relatedmaterial", collectionID, series_separator) #Relations matched to lower levels: if version == "ead3": from relations import relations_lower relations_lower(arch_root, CSheet.find('Relations'), version, "relations", collectionID, series_separator) #Simple archdesc elements matched to lower levels: from archdescsimple import archdescsimple_lower if 
CSheet.find('CollectionID').text and CSheet.find('IDModel/CollectionSeparator').text: collectionID = CSheet.find('CollectionID').text + CSheet.find('IDModel/CollectionSeparator').text else: if CSheet.find('CollectionID').text: collectionID = CSheet.find('CollectionID').text else: collectionID = "" series_separator = CSheet.find('IDModel/SeriesSeparator').text archdescsimple_lower(arch_root, CSheet.find('Accruals'), "accruals", collectionID, series_separator) archdescsimple_lower(arch_root, CSheet.find('AlternateForms'), "altformavail", collectionID, series_separator) archdescsimple_lower(arch_root, CSheet.find('AppraisalInfo'), "appraisal", collectionID, series_separator) archdescsimple_lower(arch_root, CSheet.find('LowerLevelHist'), "bioghist", collectionID, series_separator) archdescsimple_lower(arch_root, CSheet.find('CollectionArrangement'), "arrangement", collectionID, series_separator) archdescsimple_lower(arch_root, CSheet.find('CustodialHistory'), "custodhist", collectionID, series_separator) archdescsimple_lower(arch_root, CSheet.find('LegalStatus'), "legalstatus", collectionID, series_separator) archdescsimple_lower(arch_root, CSheet.find('LocationOriginals'), "originalsloc", collectionID, series_separator) archdescsimple_lower(arch_root, CSheet.find('OtherFindingAids'), "otherfindaid", collectionID, series_separator) archdescsimple_lower(arch_root, CSheet.find('PhysicalTechnical'), "phystech", collectionID, series_separator) archdescsimple_lower(arch_root, CSheet.find('ProcessingInformation'), "processinfo", collectionID, series_separator) archdescsimple_lower(arch_root, CSheet.find('LowerLevelScope'), "scopecontent", collectionID, series_separator) archdescsimple_lower(arch_root, CSheet.find('SeparatedMaterial'), "separatedmaterial", collectionID, series_separator)
44.679852
170
0.681791
import xml.etree.cElementTree as ET from archdescsimple import archdescsimple from access_use_restrict import access_use_restrict import globals import wx def archdesc(arch_root, CSheet, version, input_data): from wx.lib.pubsub import pub if "ask_gui" in globals.new_elements: wx.CallAfter(pub.sendMessage, "update", msg="Writing <archdesc>...") if "ask_gui" in globals.new_elements: wx.CallAfter(pub.sendMessage, "update", msg="Writing collection-level <did>...") from collection_did import collection_did cdid_root = arch_root.find('did') collection_did(cdid_root, CSheet, version) if "ask_gui" in globals.new_elements: wx.CallAfter(pub.sendMessage, "update", msg="Writing <archdesc> elements...") if "add_accessrestrict" in globals.new_elements or "add-all" in globals.add_all: add = True else: add = False access_use_restrict(arch_root, CSheet.find('Access'), "accessrestrict", "Access", add) if "add_accruals" in globals.new_elements or "add-all" in globals.add_all: add = True else: add = False archdescsimple(arch_root, "accruals", CSheet.find('Accruals'), CSheet.find('Accruals/Accrual'), add) if CSheet.find('AcquisitionInfo/Acquis/Event') is None: pass else: if CSheet.find('AcquisitionInfo/Acquis/Event').text: if arch_root.find('acqinfo') is None: if "add_acq" in globals.new_elements or "add-all" in globals.add_all: acq_element = ET.Element('acqinfo') arch_root.insert(1, acq_element) count = 0 for acquis in CSheet.find('AcquisitionInfo'): if acquis.find('Event').text and acquis.find('Date').text: count = count + 1 if count > 1: chronlist_element = ET.Element('chronlist') arch_root.find('acqinfo').append(chronlist_element) for acquis in CSheet.find('AcquisitionInfo'): if acquis.find('Event').text: chronitem_element = ET.Element('chronitem') chronlist_element.append(chronitem_element) event_element = ET.Element('event') chronitem_element.append(event_element) event_element.text = acquis.find('Event').text if version == "ead3": if acquis.find('Date').text: from date import basic_date chronitem_element.append(basic_date(acquis.find('Date').text, acquis.find('DateNormal').text, 'inclusive')) else: date_element = ET.Element('date') if acquis.find('Date').text: chronitem_element.append(date_element) date_element.text = acquis.find('Date').text if acquis.find('DateNormal').text: date_element.set('normal', acquis.find('DateNormal').text) else: date_element.set('normal', acquis.find('Date').text) else: for acquis in CSheet.find('AcquisitionInfo'): if acquis.find('Event').text: p_element = ET.Element('p') arch_root.find('acqinfo').append(p_element) p_element.text = acquis.find('Event').text date_element = ET.Element('date') if acquis.find('Date').text: p_element.append(date_element) date_element.text = acquis.find('Date').text if acquis.find('DateNormal').text: date_element.set('normal', acquis.find('DateNormal').text) else: date_element.set('normal', acquis.find('Date').text) else: old_acquis = arch_root.find('acqinfo').attrib old_head = arch_root.find('acqinfo/head') arch_root.find('acqinfo').clear() if old_acquis is None: pass else: arch_root.find('acqinfo').attrib = old_acquis if old_head is None: pass else: arch_root.find('acqinfo').append(old_head) count = 0 for acquis in CSheet.find('AcquisitionInfo'): if acquis.find('Event').text and acquis.find('Date').text: count = count + 1 if count > 1: chronlist_element = ET.Element('chronlist') arch_root.find('acqinfo').append(chronlist_element) for acquis in CSheet.find('AcquisitionInfo'): if acquis.find('Event').text: chronitem_element = 
ET.Element('chronitem') chronlist_element.append(chronitem_element) event_element = ET.Element('event') chronitem_element.append(event_element) event_element.text = acquis.find('Event').text if version == "ead3": if acquis.find('Date').text: from date import basic_date chronitem_element.append(basic_date(acquis.find('Date').text, acquis.find('DateNormal').text, 'inclusive')) else: date_element = ET.Element('date') if acquis.find('Date').text: chronitem_element.append(date_element) date_element.text = acquis.find('Date').text if acquis.find('DateNormal').text: date_element.set('normal', acquis.find('DateNormal').text) else: date_element.set('normal', acquis.find('Date').text) else: for acquis in CSheet.find('AcquisitionInfo'): if acquis.find('Event').text: p_element = ET.Element('p') arch_root.find('acqinfo').append(p_element) p_element.text = acquis.find('Event').text date_element = ET.Element('date') if acquis.find('Date').text: p_element.append(date_element) date_element.text = acquis.find('Date').text if acquis.find('DateNormal').text: date_element.set('normal', acquis.find('DateNormal').text) else: date_element.set('normal', acquis.find('Date').text) else: for empty_acquis in arch_root: if empty_acquis.tag == "acqinfo": arch_root.remove(empty_acquis) if "add_altforms" in globals.new_elements or "add-all" in globals.add_all: add = True else: add = False archdescsimple(arch_root, "altformavail", CSheet.find('AlternateForms'), CSheet.find('AlternateForms/Alternative'), add) if "add_appraisal" in globals.new_elements or "add-all" in globals.add_all: add = True else: add = False archdescsimple(arch_root, "appraisal", CSheet.find('AppraisalInfo'), CSheet.find('AppraisalInfo/Appraisal'), add) if "add_arrange" in globals.new_elements or "add-all" in globals.add_all: add = True else: add = False if arch_root.find('arrangement/list') is None: arrange_list = False else: arrange_list = True archdescsimple(arch_root, "arrangement", CSheet.find('CollectionArrangement'), CSheet.find('CollectionArrangement/Arrangement'), add) if arrange_list == True: if CSheet.find('CollectionMap/Component/ComponentName').text: if CSheet.find('CollectionMap/Component/ComponentName').text.lower() == "no series" or CSheet.find('CollectionMap/Component/ComponentName').text.lower() == "noseries": pass else: list_element = ET.Element('list') if arch_root.find('arrangement') is None: arrangement_element = ET.Element('arrangement') arr_index = arch_root.getchildren().index(arch_root.find('dsc')) - 1 arch_root.insert(arr_index, arrangement_element) arrangement_element.append(list_element) else: arch_root.find('arrangement').append(list_element) list_element.set('type', 'simple') for cmpnt in CSheet.find('CollectionMap'): if cmpnt.find('ComponentName').text: item_element = ET.Element('item') list_element.append(item_element) if cmpnt.find('ComponentLevel').text == "1": emph_element = ET.Element('emph') item_element.append(emph_element) emph_element.set('render', 'bold') if cmpnt.find('ComponentNumber').text: emph_element.text = "Series " + cmpnt.find('ComponentNumber').text + " - " + cmpnt.find('ComponentName').text else: emph_element.text = "Series" + " - " + cmpnt.find('ComponentName').text cmpnt_num = cmpnt.find('ComponentNumber').text for ComponentSheet in input_data: if ComponentSheet.find('SeriesNumber') is None: pass elif ComponentSheet.find('SeriesNumber').text == cmpnt_num: cmpnt_info = ComponentSheet if cmpnt_info.find('SeriesDate').text: emph_element.tail = ", " + cmpnt_info.find('SeriesDate').text else: if 
cmpnt.find('ComponentNumber').text: item_element.text = "Subseries " + cmpnt.find('ComponentNumber').text + ": " + cmpnt.find('ComponentName').text else: item_element.text = "Subseries" + ": " + cmpnt.find('ComponentName').text cmpnt_num = cmpnt.find('ComponentNumber').text for ComponentSheet in input_data: if ComponentSheet.find('SeriesNumber') is None: pass elif ComponentSheet.find('SeriesNumber').text == cmpnt_num: cmpnt_info = ComponentSheet if cmpnt_info.find('SeriesDate').text: emph_element.tail = ", " + cmpnt_info.find('SeriesDate').text if CSheet.find('PublicationBibliography/Publication/Title').text or CSheet.find('ManuscriptBibliography/Manuscript/UnitTitle').text: if arch_root.find('bibliography') is None: if "add_biblio" in globals.new_elements or "add-all" in globals.add_all: biblio_element = ET.Element('bibliography') biblio_index = arch_root.getchildren().index(arch_root.find('dsc')) arch_root.insert(biblio_index, biblio_element) if CSheet.find('BibliographyNote').text: p_element = ET.Element('p') biblio_element.append(p_element) p_element.text = CSheet.find('BibliographyNote').text for pub in CSheet.find('PublicationBibliography'): if pub.find('Author').text or pub.find('Title').text or pub.find('Citation').text: bibref_element = ET.Element('bibref') biblio_element.append(bibref_element) if pub.find('Author').text: bibref_element.text = pub.find('Author').text + ", " if pub.find('Title').text: title_element = ET.Element('title') bibref_element.append(title_element) title_element.text = pub.find('Title').text if pub.find('Citation').text: title_element.tail = " " + pub.find('Citation').text + ", " if pub.find('Date').text: date_element = ET.Element('date') if version == "ead3": bibref_element.append(date_element) date_element.text = pub.find('Date').text else: if pub.find('Title').text: title_element.append(date_element) date_element.text = pub.find('Date').text else: bibref_element.append(date_element) date_element.text = pub.find('Date').text if pub.find('NormalDate').text: date_element.set("normal", pub.find('NormalDate').text) if pub.find('Reference').text: ref_element = ET.Element('ref') bibref_element.append(ref_element) ref_element.text = pub.find('Reference').text if pub.find('ReferenceLink').text: ref_element.set('href', pub.find('ReferenceLink').text) for man in CSheet.find('ManuscriptBibliography'): if man.find('Collection').text or man.find('UnitID').text or man.find('UnitID').text: archref_element = ET.Element('archref') arch_root.find('bibliography').append(archref_element) if man.find('Collection').text: archref_element.text = man.find('Collection').text + ", " if man.find('UnitTitle').text: title_element = ET.Element('title') archref_element.append(title_element) title_element.text = man.find('UnitTitle').text if man.find('UnitID').text: title_element.tail = " " + man.find('UnitID').text + ", " if man.find('Date').text: date_element = ET.Element('date') if version == "ead3": archref_element.append(date_element) date_element.text = man.find('Date').text else: if man.find('UnitTitle').text: title_element.append(date_element) date_element.text = man.find('Date').text else: archref_element.append(date_element) date_element.text = man.find('Date').text if man.find('NormalDate').text: date_element.set("normal", man.find('NormalDate').text) if man.find('Reference').text: ref_element = ET.Element('ref') archref_element.append(ref_element) ref_element.text = man.find('Reference').text if man.find('ReferenceLink').text: ref_element.set('href', 
man.find('ReferenceLink').text) else: old_biblio = arch_root.find('bibliography').attrib old_head = arch_root.find('bibliography/head') arch_root.find('bibliography').clear() if old_biblio is None: pass else: arch_root.find('bibliography').attrib = old_biblio if old_head is None: pass else: arch_root.find('bibliography').append(old_head) if CSheet.find('BibliographyNote').text: p_element = ET.Element('p') arch_root.find('bibliography').append(p_element) p_element.text = CSheet.find('BibliographyNote').text for pub in CSheet.find('PublicationBibliography'): if pub.find('Author').text or pub.find('Title').text or pub.find('Citation').text: bibref_element = ET.Element('bibref') arch_root.find('bibliography').append(bibref_element) if pub.find('Author').text: bibref_element.text = pub.find('Author').text + ", " if pub.find('Title').text: title_element = ET.Element('title') bibref_element.append(title_element) title_element.text = pub.find('Title').text if pub.find('Citation').text: title_element.tail = " " + pub.find('Citation').text + ", " if pub.find('Date').text: date_element = ET.Element('date') if version == "ead3": bibref_element.append(date_element) date_element.text = pub.find('Date').text else: if pub.find('Title').text: title_element.append(date_element) date_element.text = pub.find('Date').text else: bibref_element.append(date_element) date_element.text = pub.find('Date').text if pub.find('NormalDate').text: date_element.set("normal", pub.find('NormalDate').text) if pub.find('Reference').text: ref_element = ET.Element('ref') bibref_element.append(ref_element) ref_element.text = pub.find('Reference').text if pub.find('ReferenceLink').text: ref_element.set('href', pub.find('ReferenceLink').text) for man in CSheet.find('ManuscriptBibliography'): if man.find('Collection').text or man.find('UnitID').text or man.find('UnitID').text: archref_element = ET.Element('archref') arch_root.find('bibliography').append(archref_element) if man.find('Collection').text: archref_element.text = man.find('Collection').text + ", " if man.find('UnitTitle').text: title_element = ET.Element('title') archref_element.append(title_element) title_element.text = man.find('UnitTitle').text if man.find('UnitID').text: title_element.tail = " " + man.find('UnitID').text + ", " if man.find('Date').text: date_element = ET.Element('date') if version == "ead3": archref_element.append(date_element) date_element.text = man.find('Date').text else: if man.find('UnitTitle').text: title_element.append(date_element) date_element.text = man.find('Date').text else: archref_element.append(date_element) date_element.text = man.find('Date').text if man.find('NormalDate').text: date_element.set("normal", man.find('NormalDate').text) if man.find('Reference').text: ref_element = ET.Element('ref') archref_element.append(ref_element) ref_element.text = man.find('Reference').text if man.find('ReferenceLink').text: ref_element.set('href', man.find('ReferenceLink').text) else: old_biblio_list = arch_root.findall('bibliography') for old_biblio in old_biblio_list: arch_root.remove(old_biblio) if CSheet.find('HistoricalNote/p') is None: pass else: if CSheet.find('HistoricalNote/p').text: if arch_root.find('bioghist') is None: if "add_bio" in globals.new_elements or "add-all" in globals.add_all: bio_element = ET.Element('bioghist') bio_index = arch_root.getchildren().index(arch_root.find('dsc')) arch_root.insert(bio_index, bio_element) if CSheet.find('HistoricalNoteTitle').text: head_element = ET.Element('head') 
arch_root.find('bioghist').append(head_element) head_element.text = CSheet.find('HistoricalNoteTitle').text for para in CSheet.find('HistoricalNote'): p_element = ET.Element('p') bio_element.append(p_element) p_element.text = para.text else: arch_root.find('bioghist').clear() if CSheet.find('HistoricalNoteTitle').text: head_element = ET.Element('head') arch_root.find('bioghist').append(head_element) head_element.text = CSheet.find('HistoricalNoteTitle').text for para in CSheet.find('HistoricalNote'): p_element = ET.Element('p') arch_root.find('bioghist').append(p_element) p_element.text = para.text else: old_hist_list = arch_root.findall('bioghist') for old_hist in old_hist_list: arch_root.remove(old_hist) old_access = arch_root.find('controlaccess') if CSheet.find('ControlledAccess/AccessPoint/Part') is None or CSheet.find('ControlledAccess/AccessPoint/ElementName') is None: pass else: if CSheet.find('ControlledAccess/AccessPoint/Part').text and CSheet.find('ControlledAccess/AccessPoint/ElementName').text: if arch_root.find('controlaccess') is None: if "add_controlaccess" in globals.new_elements or "add-all" in globals.add_all: access_element = ET.Element('controlaccess') access_index = arch_root.getchildren().index(arch_root.find('dsc')) arch_root.insert(access_index, access_element) for access in CSheet.find('ControlledAccess'): if access.find('UnitID').text: pass else: if access.find('Part').text and access.find('ElementName').text: new_element = ET.Element(access.find('ElementName').text) access_element.append(new_element) if version == "ead2002": new_element.text = access.find('Part').text else: part_element = ET.Element('part') new_element.append(part_element) part_element.text = access.find('Part').text if access.find('MARCEncoding').text: new_element.set('encodinganalog', access.find('MARCEncoding').text) if access.find('Identifier').text: if version == "ead3": new_element.set('identifier', access.find('Identifier').text) else: new_element.set('id', access.find('Identifier').text) if access.find('Relator').text: if version == "ead3": new_element.set('relator', access.find('Relator').text) else: new_element.set('role', access.find('Relator').text) if access.find('Normal').text: new_element.set('normal', access.find('Normal').text) if access.find('Source').text: new_element.set('source', access.find('Source').text) else: if access.find('Part').text or access.find('ElementName').text: from messages import error error("All Access Headings must have both an Element Name and a Heading, headings without these fields will not be encoded.", False) else: old_access = arch_root.find('controlaccess').attrib old_access_list = arch_root.findall('controlaccess') for old_access_ele in old_access_list: arch_root.remove(old_access_ele) access_element = ET.Element('controlaccess') access_index = arch_root.getchildren().index(arch_root.find('dsc')) arch_root.insert(access_index, access_element) if old_access is None: pass else: access_element.attrib = old_access for access in CSheet.find('ControlledAccess'): if access.find('UnitID').text: pass else: if access.find('Part').text and access.find('ElementName').text: new_element = ET.Element(access.find('ElementName').text) access_element.append(new_element) if version == "ead2002": new_element.text = access.find('Part').text else: part_element = ET.Element('part') new_element.append(part_element) part_element.text = access.find('Part').text if access.find('MARCEncoding').text: new_element.set('encodinganalog', access.find('MARCEncoding').text) if 
access.find('Identifier').text: if version == "ead3": new_element.set('identifier', access.find('Identifier').text) else: new_element.set('id', access.find('Identifier').text) if access.find('Relator').text: if version == "ead3": new_element.set('relator', access.find('Relator').text) else: new_element.set('role', access.find('Relator').text) if access.find('Normal').text: new_element.set('normal', access.find('Normal').text) if access.find('Source').text: new_element.set('source', access.find('Source').text) else: if access.find('Part').text or access.find('ElementName').text: from messages import error error("All Access Headings must have both an Element Name and a Heading, headings without these fields will not be encoded.", False) else: old_ca_list = arch_root.findall('controlaccess') for old_ca in old_ca_list: arch_root.remove(old_ca) if "add_custhistory" in globals.new_elements or "add-all" in globals.add_all: add = True else: add = False archdescsimple(arch_root, "custodhist", CSheet.find('CustodialHistory'), CSheet.find('CustodialHistory/Event'), add) if "add_legalstatus" in globals.new_elements or "add-all" in globals.add_all: add = True else: add = False archdescsimple(arch_root, "legalstatus", CSheet.find('LegalStatus'), CSheet.find('LegalStatus/Status'), add) if "add_originalsloc" in globals.new_elements or "add-all" in globals.add_all: add = True else: add = False archdescsimple(arch_root, "originalsloc", CSheet.find('LocationOriginals'), CSheet.find('LocationOriginals/Location'), add) if "add_otherfa" in globals.new_elements or "add-all" in globals.add_all: add = True else: add = False archdescsimple(arch_root, "otherfindaid", CSheet.find('OtherFindingAids'), CSheet.find('OtherFindingAids/Other'), add) if "add_phystech" in globals.new_elements or "add-all" in globals.add_all: add = True else: add = False archdescsimple(arch_root, "phystech", CSheet.find('PhysicalTechnical'), CSheet.find('PhysicalTechnical/Details'), add) if "add_prefcite" in globals.new_elements or "add-all" in globals.add_all: add = True else: add = False archdescsimple(arch_root, "prefercite", CSheet.find('PreferredCitation'), CSheet.find('PreferredCitation/Example'), add) if "add_processinfo" in globals.new_elements or "add-all" in globals.add_all: add = True else: add = False archdescsimple(arch_root, "processinfo", CSheet.find('ProcessingInformation'), CSheet.find('ProcessingInformation/Details'), add) if CSheet.find('RelatedPublications/Publication/Title') is None or CSheet.find('RelatedManuscripts/Manuscript/UnitTitle') is None: pass else: if CSheet.find('RelatedPublications/Publication/Title').text or CSheet.find('RelatedManuscripts/Manuscript/UnitTitle').text: if arch_root.find('relatedmaterial') is None: if "add_related" in globals.new_elements or "add-all" in globals.add_all: related_element = ET.Element('relatedmaterial') related_index = arch_root.getchildren().index(arch_root.find('dsc')) arch_root.insert(related_index, related_element) if CSheet.find('RelatedMaterialNotes') is None: pass else: for note in CSheet.find('RelatedMaterialNotes'): if note.text: p_element = ET.Element('p') related_element.append(p_element) p_element.text = note.text for related in CSheet.find('RelatedPublications'): if related.find('UnitID') is None: pass else: if related.find('UnitID').text: pass else: if related.find('Author').text or related.find('Title').text or related.find('Citation').text: bibref_element = ET.Element('bibref') related_element.append(bibref_element) if related.find('Author').text: 
bibref_element.text = related.find('Author').text + ", " if related.find('Title').text: title_element = ET.Element('title') bibref_element.append(title_element) title_element.text = related.find('Title').text if related.find('Citation').text: title_element.tail = " " + related.find('Citation').text + ", " if related.find('Date').text: date_element = ET.Element('date') bibref_element.append(date_element) date_element.text = related.find('Date').text if related.find('NormalDate').text: date_element.set("normal", related.find('NormalDate').text) if related.find('Reference').text: ref_element = ET.Element('ref') bibref_element.append(ref_element) ref_element.text = related.find('Reference').text if related.find('ReferenceLink').text: ref_element.set('href', related.find('ReferenceLink').text) for relatedman in CSheet.find('RelatedManuscripts'): if related.find('UnitID') is None: pass else: if related.find('UnitID').text: pass else: if relatedman.find('Collection').text or relatedman.find('UnitTitle').text or relatedman.find('MaterialID').text: archref_element = ET.Element('archref') arch_root.find('relatedmaterial').append(archref_element) if relatedman.find('Collection').text: archref_element.text = relatedman.find('Collection').text + ", " if relatedman.find('UnitTitle').text: title_element = ET.Element('title') archref_element.append(title_element) title_element.text = relatedman.find('UnitTitle').text if relatedman.find('MaterialID').text: title_element.tail = " " + relatedman.find('MaterialID').text + ", " if relatedman.find('Date').text: date_element = ET.Element('date') archref_element.append(date_element) date_element.text = relatedman.find('Date').text if relatedman.find('NormalDate').text: date_element.set("normal", relatedman.find('NormalDate').text) if relatedman.find('Reference').text: ref_element = ET.Element('ref') archref_element.append(ref_element) ref_element.text = relatedman.find('Reference').text if relatedman.find('ReferenceLink').text: ref_element.set('href', relatedman.find('ReferenceLink').text) else: old_related = arch_root.find('relatedmaterial').attrib old_head = arch_root.find('relatedmaterial/head') arch_root.find('relatedmaterial').clear() if old_related is None: pass else: arch_root.find('relatedmaterial').attrib = old_related if old_head is None: pass else: arch_root.find('relatedmaterial').append(old_head) for note in CSheet.find('RelatedMaterialNotes'): if note.text: p_element = ET.Element('p') arch_root.find('relatedmaterial').append(p_element) p_element.text = note.text for related in CSheet.find('RelatedPublications'): if related.find('UnitID') is None: pass else: if related.find('UnitID').text: pass else: if related.find('Author').text or related.find('Title').text or related.find('Citation').text: bibref_element = ET.Element('bibref') arch_root.find('relatedmaterial').append(bibref_element) if related.find('Author').text: bibref_element.text = related.find('Author').text + ", " if related.find('Title').text: title_element = ET.Element('title') bibref_element.append(title_element) title_element.text = related.find('Title').text if related.find('Citation').text: title_element.tail = " " + related.find('Citation').text + ", " if related.find('Date').text: date_element = ET.Element('date') bibref_element.append(date_element) date_element.text = related.find('Date').text if related.find('NormalDate').text: date_element.set("normal", related.find('NormalDate').text) if related.find('Reference').text: ref_element = ET.Element('ref') 
bibref_element.append(ref_element) ref_element.text = related.find('Reference').text if related.find('ReferenceLink').text: ref_element.set('href', related.find('ReferenceLink').text) for relatedman in CSheet.find('RelatedManuscripts'): if related.find('UnitID') is None: pass else: if related.find('UnitID').text: pass else: if relatedman.find('Collection').text or relatedman.find('UnitTitle').text or relatedman.find('MaterialID').text: archref_element = ET.Element('archref') arch_root.find('relatedmaterial').append(archref_element) if relatedman.find('Collection').text: archref_element.text = relatedman.find('Collection').text + ", " if relatedman.find('UnitTitle').text: title_element = ET.Element('title') archref_element.append(title_element) title_element.text = relatedman.find('UnitTitle').text if relatedman.find('MaterialID').text: title_element.tail = " " + relatedman.find('MaterialID').text + ", " if relatedman.find('Date').text: date_element = ET.Element('date') archref_element.append(date_element) date_element.text = relatedman.find('Date').text if relatedman.find('NormalDate').text: date_element.set("normal", relatedman.find('NormalDate').text) if relatedman.find('Reference').text: ref_element = ET.Element('ref') archref_element.append(ref_element) ref_element.text = relatedman.find('Reference').text if relatedman.find('ReferenceLink').text: ref_element.set('href', relatedman.find('ReferenceLink').text) else: old_related_list = arch_root.findall('relatedmaterial') for old_related in old_related_list: arch_root.remove(old_related) from relations import relations if version == "ead3": relations(arch_root, CSheet.find('Relations')) if "add_scope" in globals.new_elements or "add-all" in globals.add_all: add = True else: add = False archdescsimple(arch_root, "scopecontent", CSheet.find('ScopeContent'), CSheet.find('ScopeContent/p'), add) if "add_sepmat" in globals.new_elements or "add-all" in globals.add_all: add = True else: add = False archdescsimple(arch_root, "separatedmaterial", CSheet.find('SeparatedMaterial'), CSheet.find('SeparatedMaterial/Material'), add) if "add_userestrict" in globals.new_elements or "add-all" in globals.add_all: add is True else: add is False access_use_restrict(arch_root, CSheet.find('UseRestrictions'), "userestrict", "Use", add) from dsc import dsc dsc(arch_root.find('dsc'), input_data, version)
true
true
f711caf5313ea4bac3e7b67357976b45c1301dc6
450
py
Python
stream/migrations/0017_stream_active.py
maddevsio/yourcast-web
c3e897f28dc16f71e4f625564270c0d3b72fa53f
[ "MIT" ]
8
2017-05-12T10:08:03.000Z
2020-12-22T00:01:39.000Z
stream/migrations/0017_stream_active.py
maddevsio/yourcast-web
c3e897f28dc16f71e4f625564270c0d3b72fa53f
[ "MIT" ]
null
null
null
stream/migrations/0017_stream_active.py
maddevsio/yourcast-web
c3e897f28dc16f71e4f625564270c0d3b72fa53f
[ "MIT" ]
6
2017-05-12T13:35:43.000Z
2021-08-09T13:43:31.000Z
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-10-17 05:50
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('stream', '0016_auto_20161012_0838'),
    ]

    operations = [
        migrations.AddField(
            model_name='stream',
            name='active',
            field=models.BooleanField(default=False),
        ),
    ]
21.428571
53
0.615556
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('stream', '0016_auto_20161012_0838'),
    ]

    operations = [
        migrations.AddField(
            model_name='stream',
            name='active',
            field=models.BooleanField(default=False),
        ),
    ]
true
true
f711cdf5b7894bb3864580ebce3e6bec3eee4261
3,358
py
Python
breakout_env/wrappers/wrappers.py
MarcoFavorito/breakout-env
b41f9ed1da693874d7d34f83e7200fd51a59c97e
[ "MIT" ]
null
null
null
breakout_env/wrappers/wrappers.py
MarcoFavorito/breakout-env
b41f9ed1da693874d7d34f83e7200fd51a59c97e
[ "MIT" ]
null
null
null
breakout_env/wrappers/wrappers.py
MarcoFavorito/breakout-env
b41f9ed1da693874d7d34f83e7200fd51a59c97e
[ "MIT" ]
null
null
null
import copy

from gym import Wrapper
from pythogic.base.Symbol import Symbol
from pythogic.base.Alphabet import Alphabet
from pythogic.base.Formula import AtomicFormula, PathExpressionEventually, PathExpressionSequence, And, Not, \
    LogicalTrue, PathExpressionStar
from pythogic.base.utils import _to_pythomata_dfa
from pythogic.ldlf_empty_traces.LDLf_EmptyTraces import LDLf_EmptyTraces
import numpy as np
from pythomata.base.Simulator import Simulator
from pythomata.base.utils import Sink


class BreakoutRABUWrapper(Wrapper):
    """Env wrapper for bottom-up rows deletion"""

    def __init__(self, env):
        super().__init__(env)
        self.row_symbols = [Symbol(r) for r in ["r0", "r1", "r2"]]
        self.dfa = self._build_automata()
        self.goal_reward = 1000
        self.transition_reward = 100
        self.simulator = Simulator(self.dfa)
        self.last_status = None

    def reset(self):
        self.env.reset()
        self.simulator.reset()

    def step(self, action):
        obs, reward, done, _ = self.env.step(action)
        if done:  # when we lose a life
            return obs, reward, done, _

        # overwrite old reward
        # reward = 0
        f = self.state2propositional_formula()
        old_state = self.simulator.cur_state
        self.simulator.make_transition(f)
        new_state = self.simulator.cur_state
        if new_state==Sink():
            done = True
            reward = -1000
        elif new_state in self.dfa.accepting_states:
            reward = 1000
        elif old_state!=new_state:
            reward = self.transition_reward

        return obs, reward, done or self.env.unwrapped.state.terminal, _

    def state2propositional_formula(self):
        e = self.unwrapped
        matrix = e.state.bricks.bricks_status_matrix
        row_status = np.all(matrix==0.0, axis=1)
        result = set()
        for rs, sym in zip(row_status, reversed(self.row_symbols)):
            if rs:
                result.add(sym)
        return frozenset(result)

    def _build_automata(self):
        rows = self.row_symbols
        atoms = [AtomicFormula(r) for r in rows]
        alphabet = Alphabet(set(rows))
        ldlf = LDLf_EmptyTraces(alphabet)
        f = PathExpressionEventually(
            PathExpressionSequence.chain([
                PathExpressionStar(And.chain([Not(atoms[0]), Not(atoms[1]), Not(atoms[2])])),
                PathExpressionStar(And.chain([atoms[0], Not(atoms[1]), Not(atoms[2])])),  # Not(atoms[3]), Not(atoms[4]), Not(atoms[5])]),
                PathExpressionStar(And.chain([atoms[0], atoms[1], Not(atoms[2])])),  # Not(atoms[3]), Not(atoms[4]), Not(atoms[5])]),
                # And.chain([atoms[0], atoms[1], atoms[2]]),  # Not(atoms[3]), Not(atoms[4]), Not(atoms[5])]),
                # And.chain([atoms[0], atoms[1], atoms[2], atoms[3], Not(atoms[4]), Not(atoms[5])]),
                # And.chain([atoms[0], atoms[1], atoms[2], atoms[3], atoms[4], Not(atoms[5])]),
                # And.chain([atoms[0], atoms[1], atoms[2], atoms[3], atoms[4], atoms[5] ])
            ]),
            And.chain([atoms[0], atoms[1], atoms[2]])
        )
        nfa = ldlf.to_nfa(f)
        dfa = _to_pythomata_dfa(nfa)
        return dfa
34.979167
120
0.590232
import copy

from gym import Wrapper
from pythogic.base.Symbol import Symbol
from pythogic.base.Alphabet import Alphabet
from pythogic.base.Formula import AtomicFormula, PathExpressionEventually, PathExpressionSequence, And, Not, \
    LogicalTrue, PathExpressionStar
from pythogic.base.utils import _to_pythomata_dfa
from pythogic.ldlf_empty_traces.LDLf_EmptyTraces import LDLf_EmptyTraces
import numpy as np
from pythomata.base.Simulator import Simulator
from pythomata.base.utils import Sink


class BreakoutRABUWrapper(Wrapper):

    def __init__(self, env):
        super().__init__(env)
        self.row_symbols = [Symbol(r) for r in ["r0", "r1", "r2"]]
        self.dfa = self._build_automata()
        self.goal_reward = 1000
        self.transition_reward = 100
        self.simulator = Simulator(self.dfa)
        self.last_status = None

    def reset(self):
        self.env.reset()
        self.simulator.reset()

    def step(self, action):
        obs, reward, done, _ = self.env.step(action)
        if done:
            return obs, reward, done, _

        f = self.state2propositional_formula()
        old_state = self.simulator.cur_state
        self.simulator.make_transition(f)
        new_state = self.simulator.cur_state
        if new_state==Sink():
            done = True
            reward = -1000
        elif new_state in self.dfa.accepting_states:
            reward = 1000
        elif old_state!=new_state:
            reward = self.transition_reward

        return obs, reward, done or self.env.unwrapped.state.terminal, _

    def state2propositional_formula(self):
        e = self.unwrapped
        matrix = e.state.bricks.bricks_status_matrix
        row_status = np.all(matrix==0.0, axis=1)
        result = set()
        for rs, sym in zip(row_status, reversed(self.row_symbols)):
            if rs:
                result.add(sym)
        return frozenset(result)

    def _build_automata(self):
        rows = self.row_symbols
        atoms = [AtomicFormula(r) for r in rows]
        alphabet = Alphabet(set(rows))
        ldlf = LDLf_EmptyTraces(alphabet)
        f = PathExpressionEventually(
            PathExpressionSequence.chain([
                PathExpressionStar(And.chain([Not(atoms[0]), Not(atoms[1]), Not(atoms[2])])),
                PathExpressionStar(And.chain([atoms[0], Not(atoms[1]), Not(atoms[2])])),
                PathExpressionStar(And.chain([atoms[0], atoms[1], Not(atoms[2])])),
            ]),
            And.chain([atoms[0], atoms[1], atoms[2]])
        )
        nfa = ldlf.to_nfa(f)
        dfa = _to_pythomata_dfa(nfa)
        return dfa
true
true
f711cf7097563239c0e13f07d13e566ab1a1133f
186
py
Python
krankit/polls/admin.py
ruankranz/blog
d83adc9035bb71f839e8e1c74a036f99be7f9d18
[ "MIT" ]
null
null
null
krankit/polls/admin.py
ruankranz/blog
d83adc9035bb71f839e8e1c74a036f99be7f9d18
[ "MIT" ]
1
2021-05-11T12:43:52.000Z
2021-05-11T12:43:52.000Z
krankit/polls/admin.py
ruankranz/blog
d83adc9035bb71f839e8e1c74a036f99be7f9d18
[ "MIT" ]
null
null
null
from django.contrib import admin

from krankit.polls.models import Question, Choice, ChoiceVote

admin.site.register(Question)
admin.site.register(Choice)
admin.site.register(ChoiceVote)
26.571429
61
0.833333
from django.contrib import admin

from krankit.polls.models import Question, Choice, ChoiceVote

admin.site.register(Question)
admin.site.register(Choice)
admin.site.register(ChoiceVote)
true
true
f711d14a97d591dacc7c04a236c7bfbe2942d778
6,292
py
Python
experiments/plots_journal_CARS.py
salomonw/mixed-traffic-amod-route-rebalance
7f1edeb195a7bfab835e596ad84deead2957943e
[ "MIT" ]
1
2022-03-07T16:15:56.000Z
2022-03-07T16:15:56.000Z
experiments/plots_journal_CARS.py
salomonw/mixed-traffic-amod-route-rebalance
7f1edeb195a7bfab835e596ad84deead2957943e
[ "MIT" ]
null
null
null
experiments/plots_journal_CARS.py
salomonw/mixed-traffic-amod-route-rebalance
7f1edeb195a7bfab835e596ad84deead2957943e
[ "MIT" ]
null
null
null
import src.tnet as tnet import matplotlib.pyplot as plt import matplotlib as mpl import pandas as pd import math plt.style.use(['science','ieee', 'high-vis']) def txt2list(fname): return [line for line in open(fname)] def read_result(fname): df = pd.read_csv(fname) results = df.T.values.tolist() return results def read_parameters(fname): dic = {} for line in open(fname, 'r').readlines(): p,v = line.split() dic[p] = v return dic def plot_topology(netname): netFile, gFile, fcoeffs, tstamp, dir_out = tnet.get_network_parameters(net_name=netname, experiment_name=netname + 'topo_plot') tNet = tnet.tNet(netFile=netFile, gFile=gFile, fcoeffs=fcoeffs) tNet.read_node_coordinates('data/pos/'+netname+'.txt') fig, ax = tnet.plot_network(tNet.G, width=0.3) return fig, ax def plot_convergance(fname_sys, fname_usr): return 1 def plot_costPenRate(fname, ax, parameters, k): j, cavsCost, noncavsCost, totCost, cavsFlow, nonCavsFlow, pedestrianFlow, rebalancingFlow, bikeFlow, subwayFlow = read_result(fname) if k == 'A': for i in range(len(cavsCost)): cavsCost[i] = max(noncavsCost[i], cavsCost[i]) totCost[i] = max(noncavsCost[i], totCost[i]) j = [round(.1 * i, 1) for i in range(11)] lstyle = ['-', '--', ':'] i = 0 alg = 'CARS'+parameters['n:'] ax.plot(j, noncavsCost, label='Private', linestyle=lstyle[i], linewidth=2, marker='x') ax.plot(j, cavsCost, label='AMoDs', linestyle=lstyle[i], linewidth=2, marker="^") ax.plot(j, totCost, label='Total', linestyle=lstyle[i], linewidth=2, marker='o') ax.legend() ax.set_xlabel('Penetration Rate') ax.set_ylabel('Avg. Travel Time (min)') ax.set_xlim((0, 1)) ax.legend(framealpha=0.8, fontsize='small', frameon=True, facecolor='w', fancybox='False') #ax.legend.get_frame().set_linewidth(0.2) return ax def plot_flowPenRate(fname, ax, parameters): n, cavsCost, noncavsCost, totCost, cavsFlow, nonCavsFlow, pedestrianFlow, rebalancingFlow, bikeFlow, subwayFlow = read_result(fname) width = 0.9 x_name = [round(.1 * i, 1) for i in range(11)] x = list(range(len(x_name))) p1 = ax.bar(x, nonCavsFlow, width, label='Private') p2 = ax.bar(x, cavsFlow, width, bottom=nonCavsFlow, label='AMoD') p3 = ax.bar(x, rebalancingFlow, width, bottom=[cavsFlow[i] + nonCavsFlow[i] for i in range(len(cavsFlow))], label='Rebalancing') if sum(subwayFlow)>10: p6 = ax.bar(x, subwayFlow, width, bottom=[cavsFlow[i] + nonCavsFlow[i] + rebalancingFlow[i] + pedestrianFlow[i] + bikeFlow[i] for i in range(len(cavsFlow))], label='Subway') if sum(pedestrianFlow)>10: p4 = ax.bar(x, pedestrianFlow, width, bottom=[cavsFlow[i] + nonCavsFlow[i] + rebalancingFlow[i] for i in range(len(cavsFlow))], label='Pedestrian') if sum(bikeFlow)>10: p5 = ax.bar(x, bikeFlow, width, bottom=[cavsFlow[i] + nonCavsFlow[i] + rebalancingFlow[i] + pedestrianFlow[i] for i in range(len(cavsFlow))], label='Biking') ax.set_ylabel('Miles per mode of transport') ax.set_xlabel('Penetration rate') ax.set_xticks(x) ax.set_xticklabels(x_name) ax.legend(framealpha=0.8, fontsize='small', frameon=True, loc=3, facecolor='w', fancybox='False') #ax.legend.get_frame().set_linewidth(0.2) return ax ''' dire = '2021-01-08_11:51:44_penRate_NYC_1.5ASB_Reb_True' fname = 'results/' + dire + '/results.csv' parameters = read_parameters('results/' + dire + '/parameters.txt' ) #print(read_result(fname)) fig, ax = plt.subplots(1 ,figsize=(2.5,2)) plot_costPenRate(fname, ax, parameters) plt.savefig('a.pdf') fig, ax = plt.subplots(1 ,figsize=(3.6,2)) plot_flowPenRate(fname, ax, parameters) plt.savefig('b.pdf') ''' # comparison def plot_comparison(fnames, out): fig, ax = 
plt.subplots(ncols=2, nrows=len(fnames), # width_ratios=[1,2], gridspec_kw={'width_ratios':[1,2]}, figsize=(3.6*1.7, 1.7*len(fnames)), #sharex=True, sharey=False) j = 0 for f in fnames: fname = 'results/' + f + '/results.csv' parameters = read_parameters('results/' + f + '/parameters.txt' ) if out =='1c': plot_costPenRate(fname, ax[j,0], parameters, 'A') else: plot_costPenRate(fname, ax[j,0], parameters, 'B') plot_flowPenRate(fname, ax[j,1], parameters) j +=1 #plt.legend(frameon=True, fancybox=False) plt.tight_layout() plt.savefig(out+'.pdf') #plt.show() one = '2021-01-08_11/50/19_penRate_NYC_1.0A_Reb_True'.replace('/', ':') two = '2021-01-08_11/50/08_penRate_NYC_1.5A_Reb_True'.replace('/', ':') three = '2021-01-08_11/51/44_penRate_NYC_2.0A_Reb_True'.replace('/', ':') four = '2021-01-08_11/51/44_penRate_NYC_4.0A_Reb_True'.replace('/', ':') fnames = [one, two, three, four] plot_comparison(fnames,'1c') one = '2021-01-08_11/50/19_penRate_NYC_1.0AS_Reb_True'.replace('/', ':') two = '2021-01-08_11/50/08_penRate_NYC_1.5AS_Reb_True'.replace('/', ':') three = '2021-01-08_11/51/44_penRate_NYC_2.0AS_Reb_True'.replace('/', ':') four = '2021-01-08_11/51/43_penRate_NYC_4.0AS_Reb_True'.replace('/', ':') fnames = [one, two, three, four] plot_comparison(fnames,'1_5c') one = '2021-01-08_11/50/08_penRate_NYC_1.0ASP_Reb_True'.replace('/', ':') two = '2021-01-08_11/51/48_penRate_NYC_1.5ASP_Reb_True'.replace('/', ':') three = '2021-01-08_11/51/44_penRate_NYC_2.0ASP_Reb_True'.replace('/', ':') four = '2021-01-08_11/52/40_penRate_NYC_4.0ASP_Reb_True'.replace('/', ':') fnames = [one, two, three, four] plot_comparison(fnames,'2c') one = '2021-01-08_11/50/08_penRate_NYC_1.0ASPB_Reb_True'.replace('/', ':') two = '2021-01-08_11/51/44_penRate_NYC_1.5ASPB_Reb_True'.replace('/', ':') three = '2021-01-12_00:58:41_penRate_NYC_2.0ASPB_Reb_True'.replace('/', ':') four = '2021-01-14_02:00:28_penRate_NYC_4.0ASPB_Reb_True'.replace('/', ':') fnames = [one, two, three, four] plot_comparison(fnames,'4c') one = '2021-01-08_11/51/44_penRate_NYC_2.0A_Reb_True'.replace('/', ':') two = '2021-01-08_11/51/44_penRate_NYC_2.0AS_Reb_True'.replace('/', ':') three = '2021-01-08_11/51/44_penRate_NYC_2.0ASP_Reb_True'.replace('/', ':') four = '2021-01-12_00:58:41_penRate_NYC_2.0ASPB_Reb_True'.replace('/', ':') fnames = [one, two, three, four] plot_comparison(fnames,'4c')
34.571429
133
0.672282
import src.tnet as tnet import matplotlib.pyplot as plt import matplotlib as mpl import pandas as pd import math plt.style.use(['science','ieee', 'high-vis']) def txt2list(fname): return [line for line in open(fname)] def read_result(fname): df = pd.read_csv(fname) results = df.T.values.tolist() return results def read_parameters(fname): dic = {} for line in open(fname, 'r').readlines(): p,v = line.split() dic[p] = v return dic def plot_topology(netname): netFile, gFile, fcoeffs, tstamp, dir_out = tnet.get_network_parameters(net_name=netname, experiment_name=netname + 'topo_plot') tNet = tnet.tNet(netFile=netFile, gFile=gFile, fcoeffs=fcoeffs) tNet.read_node_coordinates('data/pos/'+netname+'.txt') fig, ax = tnet.plot_network(tNet.G, width=0.3) return fig, ax def plot_convergance(fname_sys, fname_usr): return 1 def plot_costPenRate(fname, ax, parameters, k): j, cavsCost, noncavsCost, totCost, cavsFlow, nonCavsFlow, pedestrianFlow, rebalancingFlow, bikeFlow, subwayFlow = read_result(fname) if k == 'A': for i in range(len(cavsCost)): cavsCost[i] = max(noncavsCost[i], cavsCost[i]) totCost[i] = max(noncavsCost[i], totCost[i]) j = [round(.1 * i, 1) for i in range(11)] lstyle = ['-', '--', ':'] i = 0 alg = 'CARS'+parameters['n:'] ax.plot(j, noncavsCost, label='Private', linestyle=lstyle[i], linewidth=2, marker='x') ax.plot(j, cavsCost, label='AMoDs', linestyle=lstyle[i], linewidth=2, marker="^") ax.plot(j, totCost, label='Total', linestyle=lstyle[i], linewidth=2, marker='o') ax.legend() ax.set_xlabel('Penetration Rate') ax.set_ylabel('Avg. Travel Time (min)') ax.set_xlim((0, 1)) ax.legend(framealpha=0.8, fontsize='small', frameon=True, facecolor='w', fancybox='False') return ax def plot_flowPenRate(fname, ax, parameters): n, cavsCost, noncavsCost, totCost, cavsFlow, nonCavsFlow, pedestrianFlow, rebalancingFlow, bikeFlow, subwayFlow = read_result(fname) width = 0.9 x_name = [round(.1 * i, 1) for i in range(11)] x = list(range(len(x_name))) p1 = ax.bar(x, nonCavsFlow, width, label='Private') p2 = ax.bar(x, cavsFlow, width, bottom=nonCavsFlow, label='AMoD') p3 = ax.bar(x, rebalancingFlow, width, bottom=[cavsFlow[i] + nonCavsFlow[i] for i in range(len(cavsFlow))], label='Rebalancing') if sum(subwayFlow)>10: p6 = ax.bar(x, subwayFlow, width, bottom=[cavsFlow[i] + nonCavsFlow[i] + rebalancingFlow[i] + pedestrianFlow[i] + bikeFlow[i] for i in range(len(cavsFlow))], label='Subway') if sum(pedestrianFlow)>10: p4 = ax.bar(x, pedestrianFlow, width, bottom=[cavsFlow[i] + nonCavsFlow[i] + rebalancingFlow[i] for i in range(len(cavsFlow))], label='Pedestrian') if sum(bikeFlow)>10: p5 = ax.bar(x, bikeFlow, width, bottom=[cavsFlow[i] + nonCavsFlow[i] + rebalancingFlow[i] + pedestrianFlow[i] for i in range(len(cavsFlow))], label='Biking') ax.set_ylabel('Miles per mode of transport') ax.set_xlabel('Penetration rate') ax.set_xticks(x) ax.set_xticklabels(x_name) ax.legend(framealpha=0.8, fontsize='small', frameon=True, loc=3, facecolor='w', fancybox='False') return ax def plot_comparison(fnames, out): fig, ax = plt.subplots(ncols=2, nrows=len(fnames), gridspec_kw={'width_ratios':[1,2]}, figsize=(3.6*1.7, 1.7*len(fnames)), sharey=False) j = 0 for f in fnames: fname = 'results/' + f + '/results.csv' parameters = read_parameters('results/' + f + '/parameters.txt' ) if out =='1c': plot_costPenRate(fname, ax[j,0], parameters, 'A') else: plot_costPenRate(fname, ax[j,0], parameters, 'B') plot_flowPenRate(fname, ax[j,1], parameters) j +=1 plt.tight_layout() plt.savefig(out+'.pdf') one = 
'2021-01-08_11/50/19_penRate_NYC_1.0A_Reb_True'.replace('/', ':') two = '2021-01-08_11/50/08_penRate_NYC_1.5A_Reb_True'.replace('/', ':') three = '2021-01-08_11/51/44_penRate_NYC_2.0A_Reb_True'.replace('/', ':') four = '2021-01-08_11/51/44_penRate_NYC_4.0A_Reb_True'.replace('/', ':') fnames = [one, two, three, four] plot_comparison(fnames,'1c') one = '2021-01-08_11/50/19_penRate_NYC_1.0AS_Reb_True'.replace('/', ':') two = '2021-01-08_11/50/08_penRate_NYC_1.5AS_Reb_True'.replace('/', ':') three = '2021-01-08_11/51/44_penRate_NYC_2.0AS_Reb_True'.replace('/', ':') four = '2021-01-08_11/51/43_penRate_NYC_4.0AS_Reb_True'.replace('/', ':') fnames = [one, two, three, four] plot_comparison(fnames,'1_5c') one = '2021-01-08_11/50/08_penRate_NYC_1.0ASP_Reb_True'.replace('/', ':') two = '2021-01-08_11/51/48_penRate_NYC_1.5ASP_Reb_True'.replace('/', ':') three = '2021-01-08_11/51/44_penRate_NYC_2.0ASP_Reb_True'.replace('/', ':') four = '2021-01-08_11/52/40_penRate_NYC_4.0ASP_Reb_True'.replace('/', ':') fnames = [one, two, three, four] plot_comparison(fnames,'2c') one = '2021-01-08_11/50/08_penRate_NYC_1.0ASPB_Reb_True'.replace('/', ':') two = '2021-01-08_11/51/44_penRate_NYC_1.5ASPB_Reb_True'.replace('/', ':') three = '2021-01-12_00:58:41_penRate_NYC_2.0ASPB_Reb_True'.replace('/', ':') four = '2021-01-14_02:00:28_penRate_NYC_4.0ASPB_Reb_True'.replace('/', ':') fnames = [one, two, three, four] plot_comparison(fnames,'4c') one = '2021-01-08_11/51/44_penRate_NYC_2.0A_Reb_True'.replace('/', ':') two = '2021-01-08_11/51/44_penRate_NYC_2.0AS_Reb_True'.replace('/', ':') three = '2021-01-08_11/51/44_penRate_NYC_2.0ASP_Reb_True'.replace('/', ':') four = '2021-01-12_00:58:41_penRate_NYC_2.0ASPB_Reb_True'.replace('/', ':') fnames = [one, two, three, four] plot_comparison(fnames,'4c')
true
true
f711d31b88f8761fbd168fbb814c22a17184f4a0
335
py
Python
atop/options/tree.py
MouseAndKeyboard/All_Things_Options_Notebooks
7f7bc3f5697b3ccc7aef3311f9d81ba4d5b05435
[ "MIT" ]
3
2020-01-14T03:11:14.000Z
2020-09-09T12:51:33.000Z
atop/options/tree.py
MouseAndKeyboard/All_Things_Options_Notebooks
7f7bc3f5697b3ccc7aef3311f9d81ba4d5b05435
[ "MIT" ]
8
2020-01-07T15:36:43.000Z
2020-01-19T19:31:56.000Z
atop/options/tree.py
tmnewt/atop
9c86bb6471fae73ae51c4b5af759767aea98d8fc
[ "MIT" ]
1
2019-12-23T15:46:25.000Z
2019-12-23T15:46:25.000Z
class BinTree:
    def __init__(self):
        '''Container for structuring and handling all nodes used in an
        option-like asset. Creates a generic binomial option tree by
        planting an option.
        '''


class Binodes:
    def __init__(self, u_node, d_node):
        pass
19.705882
90
0.58806
class BinTree:
    def __init__(self):
        pass


class Binodes:
    def __init__(self, u_node, d_node):
        pass
true
true
f711d44cc69e55298f730104b6bde8fa0b31d923
3,236
py
Python
profiles_project/settings.py
Nizamtechs/proflies-rest-api
7136f41aca7fe4818b66a1485a134f5e9cac3ac1
[ "MIT" ]
null
null
null
profiles_project/settings.py
Nizamtechs/proflies-rest-api
7136f41aca7fe4818b66a1485a134f5e9cac3ac1
[ "MIT" ]
8
2019-12-05T00:04:14.000Z
2022-02-10T09:44:00.000Z
profiles_project/settings.py
Nizamtechs/proflies-rest-api
7136f41aca7fe4818b66a1485a134f5e9cac3ac1
[ "MIT" ]
null
null
null
"""
Django settings for profiles_project project.

Generated by 'django-admin startproject' using Django 2.2.

For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/

For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""

import os

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'w&u3p_ohh-&rs3i-)qwcgf55d6td29($1zch4(tudt$97foaj*'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = []

# Application definition

INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'rest_framework',
    'rest_framework.authtoken',
    'profiles_api',
]

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'profiles_project.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'profiles_project.wsgi.application'

# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}

# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'UTC'

USE_I18N = True

USE_L10N = True

USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/

STATIC_URL = '/static/'

AUTH_USER_MODEL = 'profiles_api.UserProfile'
25.68254
91
0.700556
import os BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) SECRET_KEY = 'w&u3p_ohh-&rs3i-)qwcgf55d6td29($1zch4(tudt$97foaj*' DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'rest_framework', 'rest_framework.authtoken', 'profiles_api', ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'profiles_project.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'profiles_project.wsgi.application' # Database # https://docs.djangoproject.com/en/2.2/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } # Password validation # https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/2.2/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/2.2/howto/static-files/ STATIC_URL = '/static/' AUTH_USER_MODEL = 'profiles_api.UserProfile'
true
true
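The settings record above swaps Django's default user model via AUTH_USER_MODEL = 'profiles_api.UserProfile'. The UserProfile model itself is not part of this record, so the sketch below is only a hedged illustration of how downstream code typically references a swappable user model; ProfileFeedItem and create_demo_user are hypothetical names, and the manager signature assumed in create_demo_user depends on how the custom model is actually defined.

# Illustrative sketch, assuming a project where AUTH_USER_MODEL points at a
# custom user class; the model and helper below are not taken from the record.
from django.conf import settings
from django.contrib.auth import get_user_model
from django.db import models


class ProfileFeedItem(models.Model):
    """Hypothetical model that stays valid even if AUTH_USER_MODEL changes."""
    user_profile = models.ForeignKey(
        settings.AUTH_USER_MODEL,   # resolves to 'profiles_api.UserProfile' here
        on_delete=models.CASCADE,
    )
    status_text = models.CharField(max_length=255)
    created_on = models.DateTimeField(auto_now_add=True)


def create_demo_user(email, password):
    # get_user_model() returns whatever class AUTH_USER_MODEL names; the
    # keyword arguments accepted by create_user depend on that class's manager.
    User = get_user_model()
    return User.objects.create_user(email=email, password=password)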
f711d49761cde4671579c6d63e52db091e581213
3,376
py
Python
utils/Paralell_Experience_Generator.py
FedePeralta/ASVs_Deep_Reinforcement_Learning_with_CNNs
23b9b181499a4b06f2ca2951c002359c1959e727
[ "MIT" ]
4
2021-03-22T12:42:55.000Z
2021-12-13T03:03:52.000Z
utils/Paralell_Experience_Generator.py
FedePeralta/ASVs_Deep_Reinforcement_Learning_with_CNNs
23b9b181499a4b06f2ca2951c002359c1959e727
[ "MIT" ]
null
null
null
utils/Paralell_Experience_Generator.py
FedePeralta/ASVs_Deep_Reinforcement_Learning_with_CNNs
23b9b181499a4b06f2ca2951c002359c1959e727
[ "MIT" ]
1
2021-03-22T12:48:21.000Z
2021-03-22T12:48:21.000Z
import random
import torch
import sys
from contextlib import closing
from torch.multiprocessing import Pool
from random import randint

from exploration_strategies.OUNoise import OrnsteinUhlenbeckActionNoise


class Parallel_Experience_Generator(object):
    """Plays n episodes in parallel using a fixed agent."""

    def __init__(self, environment, policy, seed, hyperparameters, action_size, use_GPU=False,
                 action_choice_output_columns=None):
        self.use_GPU = use_GPU
        self.environment = environment
        self.policy = policy
        self.action_choice_output_columns = action_choice_output_columns
        self.hyperparameters = hyperparameters
        self.noise = OrnsteinUhlenbeckActionNoise(mu=[0 for _ in range(self.environment.action_shape[1])],
                                                  sigma=0.15, theta=.01, dt=1e-2, seed=seed)

    def play_n_episodes(self, n):
        """Plays n episodes in parallel using the fixed policy and returns the data"""
        with closing(Pool(processes=n)) as pool:
            results = pool.map(self, range(n))
            pool.terminate()
        states_for_all_episodes = [episode[0] for episode in results]
        actions_for_all_episodes = [episode[1] for episode in results]
        rewards_for_all_episodes = [episode[2] for episode in results]
        return states_for_all_episodes, actions_for_all_episodes, rewards_for_all_episodes

    def play_1_episode(self, epsilon_exploration):
        """Plays 1 episode using the fixed policy and returns the data"""
        state = self.reset_game()
        done = False
        episode_states = []
        episode_actions = []
        episode_rewards = []
        while not done:
            action = self.pick_action(self.policy, state)
            next_state, reward, done, _ = self.environment.step(action)
            episode_states.append(state)
            episode_actions.append(action)
            episode_rewards.append(reward)
            state = next_state
        return episode_states, episode_actions, episode_rewards

    def reset_game(self):
        """Resets the game environment so it is ready to play a new episode"""
        seed = randint(0, sys.maxsize)
        torch.manual_seed(seed)  # Need to do this otherwise each worker generates same experience
        state = self.environment.reset()
        return state

    def pick_action(self, policy, state):
        state = torch.from_numpy(state).float().unsqueeze(0)
        actor_output = policy(state)
        if self.action_choice_output_columns is not None:
            actor_output = actor_output[:, self.action_choice_output_columns]
        action_distribution = self.create_distributions(policy, self.environment.action_size)
        action = action_distribution.sample().cpu()
        action += torch.Tensor(self.noise())
        return action.detach().numpy()

    @staticmethod
    def create_distributions(policy_output, number_of_actions):
        means = policy_output[:, :number_of_actions].squeeze(0)
        stds = policy_output[:, number_of_actions:].squeeze(0)
        action_distribution = torch.distributions.normal.Normal(means.squeeze(0), torch.abs(stds))
        return action_distribution
39.717647
130
0.653732
import random import torch import sys from contextlib import closing from torch.multiprocessing import Pool from random import randint from exploration_strategies.OUNoise import OrnsteinUhlenbeckActionNoise class Parallel_Experience_Generator(object): def __init__(self, environment, policy, seed, hyperparameters, action_size, use_GPU=False, action_choice_output_columns=None): self.use_GPU = use_GPU self.environment = environment self.policy = policy self.action_choice_output_columns = action_choice_output_columns self.hyperparameters = hyperparameters self.noise = OrnsteinUhlenbeckActionNoise(mu=[0 for _ in range(self.environment.action_shape[1])], sigma=0.15, theta=.01, dt=1e-2, seed=seed) def play_n_episodes(self, n): with closing(Pool(processes=n)) as pool: results = pool.map(self, range(n)) pool.terminate() states_for_all_episodes = [episode[0] for episode in results] actions_for_all_episodes = [episode[1] for episode in results] rewards_for_all_episodes = [episode[2] for episode in results] return states_for_all_episodes, actions_for_all_episodes, rewards_for_all_episodes def play_1_episode(self, epsilon_exploration): state = self.reset_game() done = False episode_states = [] episode_actions = [] episode_rewards = [] while not done: action = self.pick_action(self.policy, state) next_state, reward, done, _ = self.environment.step(action) episode_states.append(state) episode_actions.append(action) episode_rewards.append(reward) state = next_state return episode_states, episode_actions, episode_rewards def reset_game(self): seed = randint(0, sys.maxsize) torch.manual_seed(seed) state = self.environment.reset() return state def pick_action(self, policy, state): state = torch.from_numpy(state).float().unsqueeze(0) actor_output = policy(state) if self.action_choice_output_columns is not None: actor_output = actor_output[:, self.action_choice_output_columns] action_distribution = self.create_distributions(policy, self.environment.action_size) action = action_distribution.sample().cpu() action += torch.Tensor(self.noise()) return action.detach().numpy() @staticmethod def create_distributions(policy_output, number_of_actions): means = policy_output[:, :number_of_actions].squeeze(0) stds = policy_output[:, number_of_actions:].squeeze(0) action_distribution = torch.distributions.normal.Normal(means.squeeze(0), torch.abs(stds)) return action_distribution
true
true
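The Parallel_Experience_Generator record above hinges on one pattern: the generator object is itself the callable handed to pool.map(self, range(n)), and each worker re-seeds before producing experience so the processes do not all generate identical episodes. The stand-alone toy below illustrates only that pattern; ToyEpisodeRunner, its fake environment step, and its fake policy are assumptions for the sketch and are not taken from the original class.

# Self-contained sketch of the "picklable callable mapped over a Pool" pattern.
import random
from contextlib import closing
from multiprocessing import Pool


class ToyEpisodeRunner:
    def __init__(self, episode_length=5, seed_base=1234):
        self.episode_length = episode_length
        self.seed_base = seed_base

    def __call__(self, worker_index):
        # Re-seed per worker so each process generates different experience.
        rng = random.Random(self.seed_base + worker_index)
        states, actions, rewards = [], [], []
        state = 0.0
        for _ in range(self.episode_length):
            action = rng.choice([-1.0, 1.0])   # stand-in for policy(state)
            reward = -abs(state + action)      # stand-in for environment.step(action)
            states.append(state)
            actions.append(action)
            rewards.append(reward)
            state = state + action
        return states, actions, rewards


if __name__ == "__main__":
    runner = ToyEpisodeRunner()
    with closing(Pool(processes=3)) as pool:
        results = pool.map(runner, range(3))   # mirrors pool.map(self, range(n))
        pool.terminate()
    print([sum(rewards) for _, _, rewards in results])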
f711d576cf9e1b71718444316027ae1001e6df66
111,408
py
Python
zerver/tests/test_bugdown.py
networksneaker/zulip
fae365f8c7e2d6ee041024a22b6ba5a64cbffe4e
[ "Apache-2.0" ]
null
null
null
zerver/tests/test_bugdown.py
networksneaker/zulip
fae365f8c7e2d6ee041024a22b6ba5a64cbffe4e
[ "Apache-2.0" ]
null
null
null
zerver/tests/test_bugdown.py
networksneaker/zulip
fae365f8c7e2d6ee041024a22b6ba5a64cbffe4e
[ "Apache-2.0" ]
null
null
null
import copy import os import re from typing import Any, Dict, List, Optional, Set, Tuple from unittest import mock import ujson from django.conf import settings from django.test import TestCase, override_settings from zerver.lib import bugdown, mdiff from zerver.lib.actions import ( do_add_alert_words, do_remove_realm_emoji, do_set_realm_property, do_set_user_display_setting, ) from zerver.lib.alert_words import get_alert_word_automaton from zerver.lib.create_user import create_user from zerver.lib.emoji import get_emoji_url from zerver.lib.exceptions import BugdownRenderingException from zerver.lib.mention import possible_mentions, possible_user_group_mentions from zerver.lib.message import render_markdown from zerver.lib.request import JsonableError from zerver.lib.test_classes import ZulipTestCase from zerver.lib.test_runner import slow from zerver.lib.tex import render_tex from zerver.lib.user_groups import create_user_group from zerver.models import ( MAX_MESSAGE_LENGTH, Message, Realm, RealmEmoji, RealmFilter, Stream, UserGroup, UserMessage, UserProfile, flush_per_request_caches, flush_realm_filter, get_client, get_realm, get_stream, realm_filters_for_realm, realm_in_local_realm_filters_cache, ) class FencedBlockPreprocessorTest(TestCase): def test_simple_quoting(self) -> None: processor = bugdown.fenced_code.FencedBlockPreprocessor(None) markdown = [ '~~~ quote', 'hi', 'bye', '', '', ] expected = [ '', '> hi', '> bye', '', '', '', ] lines = processor.run(markdown) self.assertEqual(lines, expected) def test_serial_quoting(self) -> None: processor = bugdown.fenced_code.FencedBlockPreprocessor(None) markdown = [ '~~~ quote', 'hi', '~~~', '', '~~~ quote', 'bye', '', '', ] expected = [ '', '> hi', '', '', '', '> bye', '', '', '', ] lines = processor.run(markdown) self.assertEqual(lines, expected) def test_serial_code(self) -> None: processor = bugdown.fenced_code.FencedBlockPreprocessor(None) # Simulate code formatting. processor.format_code = lambda lang, code: lang + ':' + code # type: ignore[assignment] # mypy doesn't allow monkey-patching functions processor.placeholder = lambda s: '**' + s.strip('\n') + '**' # type: ignore[assignment] # https://github.com/python/mypy/issues/708 markdown = [ '``` .py', 'hello()', '```', '', '```vb.net', 'goodbye()', '```', '', '```c#', 'weirdchar()', '```', '', '```', 'no-highlight()', '```', '', ] expected = [ '', '**py:hello()**', '', '', '', '**vb.net:goodbye()**', '', '', '', '**c#:weirdchar()**', '', '', '', '**:no-highlight()**', '', '', ] lines = processor.run(markdown) self.assertEqual(lines, expected) def test_nested_code(self) -> None: processor = bugdown.fenced_code.FencedBlockPreprocessor(None) # Simulate code formatting. processor.format_code = lambda lang, code: lang + ':' + code # type: ignore[assignment] # mypy doesn't allow monkey-patching functions processor.placeholder = lambda s: '**' + s.strip('\n') + '**' # type: ignore[assignment] # https://github.com/python/mypy/issues/708 markdown = [ '~~~ quote', 'hi', '``` .py', 'hello()', '```', '', '', ] expected = [ '', '> hi', '', '> **py:hello()**', '', '', '', ] lines = processor.run(markdown) self.assertEqual(lines, expected) def bugdown_convert(content: str) -> str: return bugdown.convert( content=content, message_realm=get_realm('zulip'), ) class BugdownMiscTest(ZulipTestCase): def test_diffs_work_as_expected(self) -> None: str1 = "<p>The quick brown fox jumps over the lazy dog. Animal stories are fun, yeah</p>" str2 = "<p>The fast fox jumps over the lazy dogs and cats. 
Animal stories are fun</p>" expected_diff = "\u001b[34m-\u001b[0m <p>The \u001b[33mquick brown\u001b[0m fox jumps over the lazy dog. Animal stories are fun\u001b[31m, yeah\u001b[0m</p>\n\u001b[34m+\u001b[0m <p>The \u001b[33mfast\u001b[0m fox jumps over the lazy dog\u001b[32ms and cats\u001b[0m. Animal stories are fun</p>\n" self.assertEqual(mdiff.diff_strings(str1, str2), expected_diff) def test_get_possible_mentions_info(self) -> None: realm = get_realm('zulip') def make_user(email: str, full_name: str) -> UserProfile: return create_user( email=email, password='whatever', realm=realm, full_name=full_name, short_name='whatever', ) fred1 = make_user('fred1@example.com', 'Fred Flintstone') fred1.is_active = False fred1.save() fred2 = make_user('fred2@example.com', 'Fred Flintstone') fred3 = make_user('fred3@example.com', 'Fred Flintstone') fred3.is_active = False fred3.save() fred4 = make_user('fred4@example.com', 'Fred Flintstone') lst = bugdown.get_possible_mentions_info(realm.id, {'Fred Flintstone', 'cordelia LEAR', 'Not A User'}) set_of_names = set(map(lambda x: x['full_name'].lower(), lst)) self.assertEqual(set_of_names, {'fred flintstone', 'cordelia lear'}) by_id = { row['id']: row for row in lst } self.assertEqual(by_id.get(fred2.id), dict( email=fred2.email, full_name='Fred Flintstone', id=fred2.id, )) self.assertEqual(by_id.get(fred4.id), dict( email=fred4.email, full_name='Fred Flintstone', id=fred4.id, )) def test_mention_data(self) -> None: realm = get_realm('zulip') hamlet = self.example_user('hamlet') cordelia = self.example_user('cordelia') content = '@**King Hamlet** @**Cordelia lear**' mention_data = bugdown.MentionData(realm.id, content) self.assertEqual(mention_data.get_user_ids(), {hamlet.id, cordelia.id}) self.assertEqual(mention_data.get_user_by_id(hamlet.id), dict( email=hamlet.email, full_name=hamlet.full_name, id=hamlet.id, )) user = mention_data.get_user_by_name('king hamLET') assert(user is not None) self.assertEqual(user['email'], hamlet.email) self.assertFalse(mention_data.message_has_wildcards()) content = '@**King Hamlet** @**Cordelia lear** @**all**' mention_data = bugdown.MentionData(realm.id, content) self.assertTrue(mention_data.message_has_wildcards()) def test_invalid_katex_path(self) -> None: with self.settings(DEPLOY_ROOT="/nonexistent"): with mock.patch('logging.error') as mock_logger: render_tex("random text") mock_logger.assert_called_with("Cannot find KaTeX for latex rendering!") class BugdownListPreprocessorTest(ZulipTestCase): # We test that the preprocessor inserts blank lines at correct places. # We use <> to indicate that we need to insert a blank line here. 
def split_message(self, msg: str) -> Tuple[List[str], List[str]]: original = msg.replace('<>', '').split('\n') expected = re.split(r'\n|<>', msg) return original, expected def test_basic_list(self) -> None: preprocessor = bugdown.BugdownListPreprocessor() original, expected = self.split_message('List without a gap\n<>* One\n* Two') self.assertEqual(preprocessor.run(original), expected) def test_list_after_quotes(self) -> None: preprocessor = bugdown.BugdownListPreprocessor() original, expected = self.split_message('```quote\nSomething\n```\n\nList without a gap\n<>* One\n* Two') self.assertEqual(preprocessor.run(original), expected) def test_list_in_code(self) -> None: preprocessor = bugdown.BugdownListPreprocessor() original, expected = self.split_message('```\nList without a gap\n* One\n* Two\n```') self.assertEqual(preprocessor.run(original), expected) def test_complex_nesting_with_different_fences(self) -> None: preprocessor = bugdown.BugdownListPreprocessor() msg = """```quote In quote. We should convert a list here:<> * one * two ~~~ This is a nested code fence, do not make changes here: * one * two ````quote Quote in code fence. Should not convert: * one * two ```` ~~~ Back in the quote. We should convert:<> * one * two ``` Outside. Should convert:<> * one * two """ original, expected = self.split_message(msg) self.assertEqual(preprocessor.run(original), expected) def test_complex_nesting_with_same_fence(self) -> None: preprocessor = bugdown.BugdownListPreprocessor() msg = """```quote In quote. We should convert a list here:<> * one * two ```python This is a nested code fence, do not make changes here: * one * two ```quote Quote in code fence. Should not convert: * one * two ``` ``` Back in the quote. We should convert:<> * one * two ``` Outside. Should convert:<> * one * two """ original, expected = self.split_message(msg) self.assertEqual(preprocessor.run(original), expected) class BugdownTest(ZulipTestCase): def setUp(self) -> None: super().setUp() bugdown.clear_state_for_testing() def assertEqual(self, first: Any, second: Any, msg: str = "") -> None: if isinstance(first, str) and isinstance(second, str): if first != second: raise AssertionError("Actual and expected outputs do not match; showing diff.\n" + mdiff.diff_strings(first, second) + msg) else: super().assertEqual(first, second) def load_bugdown_tests(self) -> Tuple[Dict[str, Any], List[List[str]]]: test_fixtures = {} with open(os.path.join(os.path.dirname(__file__), 'fixtures/markdown_test_cases.json')) as f: data = ujson.load(f) for test in data['regular_tests']: test_fixtures[test['name']] = test return test_fixtures, data['linkify_tests'] def test_bugdown_no_ignores(self) -> None: # We do not want any ignored tests to be committed and merged. format_tests, linkify_tests = self.load_bugdown_tests() for name, test in format_tests.items(): message = f'Test "{name}" shouldn\'t be ignored.' 
is_ignored = test.get('ignore', False) self.assertFalse(is_ignored, message) @slow("Aggregate of runs dozens of individual markdown tests") def test_bugdown_fixtures(self) -> None: format_tests, linkify_tests = self.load_bugdown_tests() valid_keys = {"name", "input", "expected_output", "backend_only_rendering", "marked_expected_output", "text_content", "translate_emoticons", "ignore"} for name, test in format_tests.items(): with self.subTest(markdown_test_case=name): # Check that there aren't any unexpected keys as those are often typos self.assertEqual(len(set(test.keys()) - valid_keys), 0) # Ignore tests if specified if test.get('ignore', False): continue # nocoverage if test.get('translate_emoticons', False): # Create a userprofile and send message with it. user_profile = self.example_user('othello') do_set_user_display_setting(user_profile, 'translate_emoticons', True) msg = Message(sender=user_profile, sending_client=get_client("test")) converted = render_markdown(msg, test['input']) else: converted = bugdown_convert(test['input']) self.assertEqual(converted, test['expected_output']) def replaced(payload: str, url: str, phrase: str='') -> str: if url[:4] == 'http': href = url elif '@' in url: href = 'mailto:' + url else: href = 'http://' + url return payload % (f"<a href=\"{href}\">{url}</a>",) print("Running Bugdown Linkify tests") with mock.patch('zerver.lib.url_preview.preview.link_embed_data_from_cache', return_value=None): for inline_url, reference, url in linkify_tests: try: match = replaced(reference, url, phrase=inline_url) except TypeError: match = reference converted = bugdown_convert(inline_url) self.assertEqual(match, converted) def test_inline_file(self) -> None: msg = 'Check out this file file:///Volumes/myserver/Users/Shared/pi.py' converted = bugdown_convert(msg) self.assertEqual(converted, '<p>Check out this file <a href="file:///Volumes/myserver/Users/Shared/pi.py">file:///Volumes/myserver/Users/Shared/pi.py</a></p>') bugdown.clear_state_for_testing() with self.settings(ENABLE_FILE_LINKS=False): realm = Realm.objects.create(string_id='file_links_test') bugdown.maybe_update_markdown_engines(realm.id, False) converted = bugdown.convert(msg, message_realm=realm) self.assertEqual(converted, '<p>Check out this file file:///Volumes/myserver/Users/Shared/pi.py</p>') def test_inline_bitcoin(self) -> None: msg = 'To bitcoin:1A1zP1eP5QGefi2DMPTfTL5SLmv7DivfNa or not to bitcoin' converted = bugdown_convert(msg) self.assertEqual(converted, '<p>To <a href="bitcoin:1A1zP1eP5QGefi2DMPTfTL5SLmv7DivfNa">bitcoin:1A1zP1eP5QGefi2DMPTfTL5SLmv7DivfNa</a> or not to bitcoin</p>') def test_inline_youtube(self) -> None: msg = 'Check out the debate: http://www.youtube.com/watch?v=hx1mjT73xYE' converted = bugdown_convert(msg) self.assertEqual(converted, '<p>Check out the debate: <a href="http://www.youtube.com/watch?v=hx1mjT73xYE">http://www.youtube.com/watch?v=hx1mjT73xYE</a></p>\n<div class="youtube-video message_inline_image"><a data-id="hx1mjT73xYE" href="http://www.youtube.com/watch?v=hx1mjT73xYE"><img src="https://i.ytimg.com/vi/hx1mjT73xYE/default.jpg"></a></div>') msg = 'http://www.youtube.com/watch?v=hx1mjT73xYE' converted = bugdown_convert(msg) self.assertEqual(converted, '<p><a href="http://www.youtube.com/watch?v=hx1mjT73xYE">http://www.youtube.com/watch?v=hx1mjT73xYE</a></p>\n<div class="youtube-video message_inline_image"><a data-id="hx1mjT73xYE" href="http://www.youtube.com/watch?v=hx1mjT73xYE"><img src="https://i.ytimg.com/vi/hx1mjT73xYE/default.jpg"></a></div>') msg = 
'https://youtu.be/hx1mjT73xYE' converted = bugdown_convert(msg) self.assertEqual(converted, '<p><a href="https://youtu.be/hx1mjT73xYE">https://youtu.be/hx1mjT73xYE</a></p>\n<div class="youtube-video message_inline_image"><a data-id="hx1mjT73xYE" href="https://youtu.be/hx1mjT73xYE"><img src="https://i.ytimg.com/vi/hx1mjT73xYE/default.jpg"></a></div>') msg = 'https://www.youtube.com/playlist?list=PL8dPuuaLjXtNlUrzyH5r6jN9ulIgZBpdo' not_converted = bugdown_convert(msg) self.assertEqual(not_converted, '<p><a href="https://www.youtube.com/playlist?list=PL8dPuuaLjXtNlUrzyH5r6jN9ulIgZBpdo">https://www.youtube.com/playlist?list=PL8dPuuaLjXtNlUrzyH5r6jN9ulIgZBpdo</a></p>') msg = 'https://www.youtube.com/playlist?v=O5nskjZ_GoI&list=PL8dPuuaLjXtNlUrzyH5r6jN9ulIgZBpdo' converted = bugdown_convert(msg) self.assertEqual(converted, '<p><a href="https://www.youtube.com/playlist?v=O5nskjZ_GoI&amp;list=PL8dPuuaLjXtNlUrzyH5r6jN9ulIgZBpdo">https://www.youtube.com/playlist?v=O5nskjZ_GoI&amp;list=PL8dPuuaLjXtNlUrzyH5r6jN9ulIgZBpdo</a></p>\n<div class="youtube-video message_inline_image"><a data-id="O5nskjZ_GoI" href="https://www.youtube.com/playlist?v=O5nskjZ_GoI&amp;list=PL8dPuuaLjXtNlUrzyH5r6jN9ulIgZBpdo"><img src="https://i.ytimg.com/vi/O5nskjZ_GoI/default.jpg"></a></div>') msg = 'http://www.youtube.com/watch_videos?video_ids=nOJgD4fcZhI,i96UO8-GFvw' converted = bugdown_convert(msg) self.assertEqual(converted, '<p><a href="http://www.youtube.com/watch_videos?video_ids=nOJgD4fcZhI,i96UO8-GFvw">http://www.youtube.com/watch_videos?video_ids=nOJgD4fcZhI,i96UO8-GFvw</a></p>\n<div class="youtube-video message_inline_image"><a data-id="nOJgD4fcZhI" href="http://www.youtube.com/watch_videos?video_ids=nOJgD4fcZhI,i96UO8-GFvw"><img src="https://i.ytimg.com/vi/nOJgD4fcZhI/default.jpg"></a></div>') @override_settings(INLINE_URL_EMBED_PREVIEW=False) def test_inline_vimeo(self) -> None: msg = 'Check out the debate: https://vimeo.com/246979354' converted = bugdown_convert(msg) self.assertEqual(converted, '<p>Check out the debate: <a href="https://vimeo.com/246979354">https://vimeo.com/246979354</a></p>') msg = 'https://vimeo.com/246979354' converted = bugdown_convert(msg) self.assertEqual(converted, '<p><a href="https://vimeo.com/246979354">https://vimeo.com/246979354</a></p>') @override_settings(INLINE_IMAGE_PREVIEW=True) def test_inline_image_thumbnail_url(self) -> None: realm = get_realm("zephyr") msg = '[foobar](/user_uploads/{realm_id}/50/w2G6ok9kr8AMCQCTNAUOFMln/IMG_0677.JPG)' msg = msg.format(realm_id=realm.id) thumbnail_img = '<img data-src-fullsize="/thumbnail?url=user_uploads%2F{realm_id}%2F50%2Fw2G6ok9kr8AMCQCTNAUOFMln%2FIMG_0677.JPG&amp;size=full" src="/thumbnail?url=user_uploads%2F{realm_id}%2F50%2Fw2G6ok9kr8AMCQCTNAUOFMln%2FIMG_0677.JPG&amp;size=thumbnail"><' thumbnail_img = thumbnail_img.format(realm_id=realm.id) converted = bugdown_convert(msg) self.assertIn(thumbnail_img, converted) msg = 'https://www.google.com/images/srpr/logo4w.png' thumbnail_img = '<img data-src-fullsize="/thumbnail?url=https%3A%2F%2Fwww.google.com%2Fimages%2Fsrpr%2Flogo4w.png&amp;size=full" src="/thumbnail?url=https%3A%2F%2Fwww.google.com%2Fimages%2Fsrpr%2Flogo4w.png&amp;size=thumbnail">' converted = bugdown_convert(msg) self.assertIn(thumbnail_img, converted) msg = 'www.google.com/images/srpr/logo4w.png' thumbnail_img = '<img data-src-fullsize="/thumbnail?url=http%3A%2F%2Fwww.google.com%2Fimages%2Fsrpr%2Flogo4w.png&amp;size=full" src="/thumbnail?url=http%3A%2F%2Fwww.google.com%2Fimages%2Fsrpr%2Flogo4w.png&amp;size=thumbnail">' 
converted = bugdown_convert(msg) self.assertIn(thumbnail_img, converted) msg = 'https://www.google.com/images/srpr/logo4w.png' thumbnail_img = '<div class="message_inline_image"><a href="https://www.google.com/images/srpr/logo4w.png"><img src="https://www.google.com/images/srpr/logo4w.png"></a></div>' with self.settings(THUMBNAIL_IMAGES=False): converted = bugdown_convert(msg) self.assertIn(thumbnail_img, converted) # Any url which is not an external link and doesn't start with # /user_uploads/ is not thumbnailed msg = '[foobar](/static/images/cute/turtle.png)' thumbnail_img = '<div class="message_inline_image"><a href="/static/images/cute/turtle.png" title="foobar"><img src="/static/images/cute/turtle.png"></a></div>' converted = bugdown_convert(msg) self.assertIn(thumbnail_img, converted) msg = '[foobar](/user_avatars/{realm_id}/emoji/images/50.png)' msg = msg.format(realm_id=realm.id) thumbnail_img = '<div class="message_inline_image"><a href="/user_avatars/{realm_id}/emoji/images/50.png" title="foobar"><img src="/user_avatars/{realm_id}/emoji/images/50.png"></a></div>' thumbnail_img = thumbnail_img.format(realm_id=realm.id) converted = bugdown_convert(msg) self.assertIn(thumbnail_img, converted) @override_settings(INLINE_IMAGE_PREVIEW=True) def test_inline_image_preview(self) -> None: with_preview = '<div class="message_inline_image"><a href="http://cdn.wallpapersafari.com/13/6/16eVjx.jpeg"><img data-src-fullsize="/thumbnail?url=http%3A%2F%2Fcdn.wallpapersafari.com%2F13%2F6%2F16eVjx.jpeg&amp;size=full" src="/thumbnail?url=http%3A%2F%2Fcdn.wallpapersafari.com%2F13%2F6%2F16eVjx.jpeg&amp;size=thumbnail"></a></div>' without_preview = '<p><a href="http://cdn.wallpapersafari.com/13/6/16eVjx.jpeg">http://cdn.wallpapersafari.com/13/6/16eVjx.jpeg</a></p>' content = 'http://cdn.wallpapersafari.com/13/6/16eVjx.jpeg' sender_user_profile = self.example_user('othello') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) converted = render_markdown(msg, content) self.assertEqual(converted, with_preview) realm = msg.get_realm() setattr(realm, 'inline_image_preview', False) realm.save() sender_user_profile = self.example_user('othello') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) converted = render_markdown(msg, content) self.assertEqual(converted, without_preview) @override_settings(INLINE_IMAGE_PREVIEW=True) def test_inline_image_quoted_blocks(self) -> None: content = 'http://cdn.wallpapersafari.com/13/6/16eVjx.jpeg' expected = '<div class="message_inline_image"><a href="http://cdn.wallpapersafari.com/13/6/16eVjx.jpeg"><img data-src-fullsize="/thumbnail?url=http%3A%2F%2Fcdn.wallpapersafari.com%2F13%2F6%2F16eVjx.jpeg&amp;size=full" src="/thumbnail?url=http%3A%2F%2Fcdn.wallpapersafari.com%2F13%2F6%2F16eVjx.jpeg&amp;size=thumbnail"></a></div>' sender_user_profile = self.example_user('othello') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) converted = render_markdown(msg, content) self.assertEqual(converted, expected) content = '>http://cdn.wallpapersafari.com/13/6/16eVjx.jpeg\n\nAwesome!' 
expected = '<blockquote>\n<p><a href="http://cdn.wallpapersafari.com/13/6/16eVjx.jpeg">http://cdn.wallpapersafari.com/13/6/16eVjx.jpeg</a></p>\n</blockquote>\n<p>Awesome!</p>' sender_user_profile = self.example_user('othello') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) converted = render_markdown(msg, content) self.assertEqual(converted, expected) content = '>* http://cdn.wallpapersafari.com/13/6/16eVjx.jpeg\n\nAwesome!' expected = '<blockquote>\n<ul>\n<li><a href="http://cdn.wallpapersafari.com/13/6/16eVjx.jpeg">http://cdn.wallpapersafari.com/13/6/16eVjx.jpeg</a></li>\n</ul>\n</blockquote>\n<p>Awesome!</p>' sender_user_profile = self.example_user('othello') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) converted = render_markdown(msg, content) self.assertEqual(converted, expected) @override_settings(INLINE_IMAGE_PREVIEW=True) def test_inline_image_preview_order(self) -> None: realm = get_realm("zulip") content = 'http://imaging.nikon.com/lineup/dslr/df/img/sample/img_01.jpg\nhttp://imaging.nikon.com/lineup/dslr/df/img/sample/img_02.jpg\nhttp://imaging.nikon.com/lineup/dslr/df/img/sample/img_03.jpg' expected = '<p><a href="http://imaging.nikon.com/lineup/dslr/df/img/sample/img_01.jpg">http://imaging.nikon.com/lineup/dslr/df/img/sample/img_01.jpg</a><br>\n<a href="http://imaging.nikon.com/lineup/dslr/df/img/sample/img_02.jpg">http://imaging.nikon.com/lineup/dslr/df/img/sample/img_02.jpg</a><br>\n<a href="http://imaging.nikon.com/lineup/dslr/df/img/sample/img_03.jpg">http://imaging.nikon.com/lineup/dslr/df/img/sample/img_03.jpg</a></p>\n<div class="message_inline_image"><a href="http://imaging.nikon.com/lineup/dslr/df/img/sample/img_01.jpg"><img data-src-fullsize="/thumbnail?url=http%3A%2F%2Fimaging.nikon.com%2Flineup%2Fdslr%2Fdf%2Fimg%2Fsample%2Fimg_01.jpg&amp;size=full" src="/thumbnail?url=http%3A%2F%2Fimaging.nikon.com%2Flineup%2Fdslr%2Fdf%2Fimg%2Fsample%2Fimg_01.jpg&amp;size=thumbnail"></a></div><div class="message_inline_image"><a href="http://imaging.nikon.com/lineup/dslr/df/img/sample/img_02.jpg"><img data-src-fullsize="/thumbnail?url=http%3A%2F%2Fimaging.nikon.com%2Flineup%2Fdslr%2Fdf%2Fimg%2Fsample%2Fimg_02.jpg&amp;size=full" src="/thumbnail?url=http%3A%2F%2Fimaging.nikon.com%2Flineup%2Fdslr%2Fdf%2Fimg%2Fsample%2Fimg_02.jpg&amp;size=thumbnail"></a></div><div class="message_inline_image"><a href="http://imaging.nikon.com/lineup/dslr/df/img/sample/img_03.jpg"><img data-src-fullsize="/thumbnail?url=http%3A%2F%2Fimaging.nikon.com%2Flineup%2Fdslr%2Fdf%2Fimg%2Fsample%2Fimg_03.jpg&amp;size=full" src="/thumbnail?url=http%3A%2F%2Fimaging.nikon.com%2Flineup%2Fdslr%2Fdf%2Fimg%2Fsample%2Fimg_03.jpg&amp;size=thumbnail"></a></div>' sender_user_profile = self.example_user('othello') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) converted = render_markdown(msg, content) self.assertEqual(converted, expected) content = 'http://imaging.nikon.com/lineup/dslr/df/img/sample/img_01.jpg\n\n>http://imaging.nikon.com/lineup/dslr/df/img/sample/img_02.jpg\n\n* http://imaging.nikon.com/lineup/dslr/df/img/sample/img_03.jpg\n* https://www.google.com/images/srpr/logo4w.png' expected = '<div class="message_inline_image"><a href="http://imaging.nikon.com/lineup/dslr/df/img/sample/img_01.jpg"><img data-src-fullsize="/thumbnail?url=http%3A%2F%2Fimaging.nikon.com%2Flineup%2Fdslr%2Fdf%2Fimg%2Fsample%2Fimg_01.jpg&amp;size=full" 
src="/thumbnail?url=http%3A%2F%2Fimaging.nikon.com%2Flineup%2Fdslr%2Fdf%2Fimg%2Fsample%2Fimg_01.jpg&amp;size=thumbnail"></a></div><blockquote>\n<p><a href="http://imaging.nikon.com/lineup/dslr/df/img/sample/img_02.jpg">http://imaging.nikon.com/lineup/dslr/df/img/sample/img_02.jpg</a></p>\n</blockquote>\n<ul>\n<li><div class="message_inline_image"><a href="http://imaging.nikon.com/lineup/dslr/df/img/sample/img_03.jpg"><img data-src-fullsize="/thumbnail?url=http%3A%2F%2Fimaging.nikon.com%2Flineup%2Fdslr%2Fdf%2Fimg%2Fsample%2Fimg_03.jpg&amp;size=full" src="/thumbnail?url=http%3A%2F%2Fimaging.nikon.com%2Flineup%2Fdslr%2Fdf%2Fimg%2Fsample%2Fimg_03.jpg&amp;size=thumbnail"></a></div></li>\n<li><div class="message_inline_image"><a href="https://www.google.com/images/srpr/logo4w.png"><img data-src-fullsize="/thumbnail?url=https%3A%2F%2Fwww.google.com%2Fimages%2Fsrpr%2Flogo4w.png&amp;size=full" src="/thumbnail?url=https%3A%2F%2Fwww.google.com%2Fimages%2Fsrpr%2Flogo4w.png&amp;size=thumbnail"></a></div></li>\n</ul>' sender_user_profile = self.example_user('othello') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) converted = render_markdown(msg, content) self.assertEqual(converted, expected) content = 'Test 1\n[21136101110_1dde1c1a7e_o.jpg](/user_uploads/{realm_id}/6d/F1PX6u16JA2P-nK45PyxHIYZ/21136101110_1dde1c1a7e_o.jpg) \n\nNext Image\n[IMG_20161116_023910.jpg](/user_uploads/{realm_id}/69/sh7L06e7uH7NaX6d5WFfVYQp/IMG_20161116_023910.jpg) \n\nAnother Screenshot\n[Screenshot-from-2016-06-01-16-22-42.png](/user_uploads/{realm_id}/70/_aZmIEWaN1iUaxwkDjkO7bpj/Screenshot-from-2016-06-01-16-22-42.png)' content = content.format(realm_id=realm.id) expected = '<p>Test 1<br>\n<a href="/user_uploads/{realm_id}/6d/F1PX6u16JA2P-nK45PyxHIYZ/21136101110_1dde1c1a7e_o.jpg">21136101110_1dde1c1a7e_o.jpg</a> </p>\n<div class="message_inline_image"><a href="/user_uploads/{realm_id}/6d/F1PX6u16JA2P-nK45PyxHIYZ/21136101110_1dde1c1a7e_o.jpg" title="21136101110_1dde1c1a7e_o.jpg"><img data-src-fullsize="/thumbnail?url=user_uploads%2F{realm_id}%2F6d%2FF1PX6u16JA2P-nK45PyxHIYZ%2F21136101110_1dde1c1a7e_o.jpg&amp;size=full" src="/thumbnail?url=user_uploads%2F{realm_id}%2F6d%2FF1PX6u16JA2P-nK45PyxHIYZ%2F21136101110_1dde1c1a7e_o.jpg&amp;size=thumbnail"></a></div><p>Next Image<br>\n<a href="/user_uploads/{realm_id}/69/sh7L06e7uH7NaX6d5WFfVYQp/IMG_20161116_023910.jpg">IMG_20161116_023910.jpg</a> </p>\n<div class="message_inline_image"><a href="/user_uploads/{realm_id}/69/sh7L06e7uH7NaX6d5WFfVYQp/IMG_20161116_023910.jpg" title="IMG_20161116_023910.jpg"><img data-src-fullsize="/thumbnail?url=user_uploads%2F{realm_id}%2F69%2Fsh7L06e7uH7NaX6d5WFfVYQp%2FIMG_20161116_023910.jpg&amp;size=full" src="/thumbnail?url=user_uploads%2F{realm_id}%2F69%2Fsh7L06e7uH7NaX6d5WFfVYQp%2FIMG_20161116_023910.jpg&amp;size=thumbnail"></a></div><p>Another Screenshot<br>\n<a href="/user_uploads/{realm_id}/70/_aZmIEWaN1iUaxwkDjkO7bpj/Screenshot-from-2016-06-01-16-22-42.png">Screenshot-from-2016-06-01-16-22-42.png</a></p>\n<div class="message_inline_image"><a href="/user_uploads/{realm_id}/70/_aZmIEWaN1iUaxwkDjkO7bpj/Screenshot-from-2016-06-01-16-22-42.png" title="Screenshot-from-2016-06-01-16-22-42.png"><img data-src-fullsize="/thumbnail?url=user_uploads%2F{realm_id}%2F70%2F_aZmIEWaN1iUaxwkDjkO7bpj%2FScreenshot-from-2016-06-01-16-22-42.png&amp;size=full" src="/thumbnail?url=user_uploads%2F{realm_id}%2F70%2F_aZmIEWaN1iUaxwkDjkO7bpj%2FScreenshot-from-2016-06-01-16-22-42.png&amp;size=thumbnail"></a></div>' expected = 
expected.format(realm_id=realm.id) msg = Message(sender=sender_user_profile, sending_client=get_client("test")) converted = render_markdown(msg, content) self.assertEqual(converted, expected) @override_settings(INLINE_IMAGE_PREVIEW=True) def test_corrected_image_source(self) -> None: # testing only wikipedia because linx.li urls can be expected to expire content = 'https://en.wikipedia.org/wiki/File:Wright_of_Derby,_The_Orrery.jpg' expected = '<div class="message_inline_image"><a href="https://en.wikipedia.org/wiki/Special:FilePath/File:Wright_of_Derby,_The_Orrery.jpg"><img data-src-fullsize="/thumbnail?url=https%3A%2F%2Fen.wikipedia.org%2Fwiki%2FSpecial%3AFilePath%2FFile%3AWright_of_Derby%2C_The_Orrery.jpg&amp;size=full" src="/thumbnail?url=https%3A%2F%2Fen.wikipedia.org%2Fwiki%2FSpecial%3AFilePath%2FFile%3AWright_of_Derby%2C_The_Orrery.jpg&amp;size=thumbnail"></a></div>' sender_user_profile = self.example_user('othello') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) converted = render_markdown(msg, content) self.assertEqual(converted, expected) @override_settings(INLINE_IMAGE_PREVIEW=False) def test_image_preview_enabled(self) -> None: ret = bugdown.image_preview_enabled() self.assertEqual(ret, False) settings.INLINE_IMAGE_PREVIEW = True sender_user_profile = self.example_user('othello') message = Message(sender=sender_user_profile, sending_client=get_client("test")) realm = message.get_realm() ret = bugdown.image_preview_enabled() self.assertEqual(ret, True) ret = bugdown.image_preview_enabled(no_previews=True) self.assertEqual(ret, False) ret = bugdown.image_preview_enabled(message, realm) self.assertEqual(ret, True) ret = bugdown.image_preview_enabled(message) self.assertEqual(ret, True) ret = bugdown.image_preview_enabled(message, realm, no_previews=True) self.assertEqual(ret, False) ret = bugdown.image_preview_enabled(message, no_previews=True) self.assertEqual(ret, False) @override_settings(INLINE_URL_EMBED_PREVIEW=False) def test_url_embed_preview_enabled(self) -> None: sender_user_profile = self.example_user('othello') message = copy.deepcopy(Message(sender=sender_user_profile, sending_client=get_client("test"))) realm = message.get_realm() realm.inline_url_embed_preview = True # off by default realm.save(update_fields=['inline_url_embed_preview']) ret = bugdown.url_embed_preview_enabled() self.assertEqual(ret, False) settings.INLINE_URL_EMBED_PREVIEW = True ret = bugdown.url_embed_preview_enabled() self.assertEqual(ret, True) ret = bugdown.image_preview_enabled(no_previews=True) self.assertEqual(ret, False) ret = bugdown.url_embed_preview_enabled(message, realm) self.assertEqual(ret, True) ret = bugdown.url_embed_preview_enabled(message) self.assertEqual(ret, True) ret = bugdown.url_embed_preview_enabled(message, no_previews=True) self.assertEqual(ret, False) def test_inline_dropbox(self) -> None: msg = 'Look at how hilarious our old office was: https://www.dropbox.com/s/ymdijjcg67hv2ta/IMG_0923.JPG' image_info = {'image': 'https://photos-4.dropbox.com/t/2/AABIre1oReJgPYuc_53iv0IHq1vUzRaDg2rrCfTpiWMccQ/12/129/jpeg/1024x1024/2/_/0/4/IMG_0923.JPG/CIEBIAEgAiAHKAIoBw/ymdijjcg67hv2ta/AABz2uuED1ox3vpWWvMpBxu6a/IMG_0923.JPG', 'desc': 'Shared with Dropbox', 'title': 'IMG_0923.JPG'} with mock.patch('zerver.lib.bugdown.fetch_open_graph_image', return_value=image_info): converted = bugdown_convert(msg) self.assertEqual(converted, '<p>Look at how hilarious our old office was: <a 
href="https://www.dropbox.com/s/ymdijjcg67hv2ta/IMG_0923.JPG">https://www.dropbox.com/s/ymdijjcg67hv2ta/IMG_0923.JPG</a></p>\n<div class="message_inline_image"><a href="https://www.dropbox.com/s/ymdijjcg67hv2ta/IMG_0923.JPG" title="IMG_0923.JPG"><img src="https://www.dropbox.com/s/ymdijjcg67hv2ta/IMG_0923.JPG?dl=1"></a></div>') msg = 'Look at my hilarious drawing folder: https://www.dropbox.com/sh/cm39k9e04z7fhim/AAAII5NK-9daee3FcF41anEua?dl=' image_info = {'image': 'https://cf.dropboxstatic.com/static/images/icons128/folder_dropbox.png', 'desc': 'Shared with Dropbox', 'title': 'Saves'} with mock.patch('zerver.lib.bugdown.fetch_open_graph_image', return_value=image_info): converted = bugdown_convert(msg) self.assertEqual(converted, '<p>Look at my hilarious drawing folder: <a href="https://www.dropbox.com/sh/cm39k9e04z7fhim/AAAII5NK-9daee3FcF41anEua?dl=">https://www.dropbox.com/sh/cm39k9e04z7fhim/AAAII5NK-9daee3FcF41anEua?dl=</a></p>\n<div class="message_inline_ref"><a href="https://www.dropbox.com/sh/cm39k9e04z7fhim/AAAII5NK-9daee3FcF41anEua?dl=" title="Saves"><img src="https://cf.dropboxstatic.com/static/images/icons128/folder_dropbox.png"></a><div><div class="message_inline_image_title">Saves</div><desc class="message_inline_image_desc"></desc></div></div>') def test_inline_dropbox_preview(self) -> None: # Test photo album previews msg = 'https://www.dropbox.com/sc/tditp9nitko60n5/03rEiZldy5' image_info = {'image': 'https://photos-6.dropbox.com/t/2/AAAlawaeD61TyNewO5vVi-DGf2ZeuayfyHFdNTNzpGq-QA/12/271544745/jpeg/1024x1024/2/_/0/5/baby-piglet.jpg/CKnjvYEBIAIgBygCKAc/tditp9nitko60n5/AADX03VAIrQlTl28CtujDcMla/0', 'desc': 'Shared with Dropbox', 'title': '1 photo'} with mock.patch('zerver.lib.bugdown.fetch_open_graph_image', return_value=image_info): converted = bugdown_convert(msg) self.assertEqual(converted, '<p><a href="https://www.dropbox.com/sc/tditp9nitko60n5/03rEiZldy5">https://www.dropbox.com/sc/tditp9nitko60n5/03rEiZldy5</a></p>\n<div class="message_inline_image"><a href="https://www.dropbox.com/sc/tditp9nitko60n5/03rEiZldy5" title="1 photo"><img src="https://photos-6.dropbox.com/t/2/AAAlawaeD61TyNewO5vVi-DGf2ZeuayfyHFdNTNzpGq-QA/12/271544745/jpeg/1024x1024/2/_/0/5/baby-piglet.jpg/CKnjvYEBIAIgBygCKAc/tditp9nitko60n5/AADX03VAIrQlTl28CtujDcMla/0"></a></div>') def test_inline_dropbox_negative(self) -> None: # Make sure we're not overzealous in our conversion: msg = 'Look at the new dropbox logo: https://www.dropbox.com/static/images/home_logo.png' with mock.patch('zerver.lib.bugdown.fetch_open_graph_image', return_value=None): converted = bugdown_convert(msg) self.assertEqual(converted, '<p>Look at the new dropbox logo: <a href="https://www.dropbox.com/static/images/home_logo.png">https://www.dropbox.com/static/images/home_logo.png</a></p>\n<div class="message_inline_image"><a href="https://www.dropbox.com/static/images/home_logo.png"><img data-src-fullsize="/thumbnail?url=https%3A%2F%2Fwww.dropbox.com%2Fstatic%2Fimages%2Fhome_logo.png&amp;size=full" src="/thumbnail?url=https%3A%2F%2Fwww.dropbox.com%2Fstatic%2Fimages%2Fhome_logo.png&amp;size=thumbnail"></a></div>') def test_inline_dropbox_bad(self) -> None: # Don't fail on bad dropbox links msg = "https://zulip-test.dropbox.com/photos/cl/ROmr9K1XYtmpneM" with mock.patch('zerver.lib.bugdown.fetch_open_graph_image', return_value=None): converted = bugdown_convert(msg) self.assertEqual(converted, '<p><a href="https://zulip-test.dropbox.com/photos/cl/ROmr9K1XYtmpneM">https://zulip-test.dropbox.com/photos/cl/ROmr9K1XYtmpneM</a></p>') def 
test_inline_github_preview(self) -> None: # Test photo album previews msg = 'Test: https://github.com/zulip/zulip/blob/master/static/images/logo/zulip-icon-128x128.png' converted = bugdown_convert(msg) self.assertEqual(converted, '<p>Test: <a href="https://github.com/zulip/zulip/blob/master/static/images/logo/zulip-icon-128x128.png">https://github.com/zulip/zulip/blob/master/static/images/logo/zulip-icon-128x128.png</a></p>\n<div class="message_inline_image"><a href="https://github.com/zulip/zulip/blob/master/static/images/logo/zulip-icon-128x128.png"><img data-src-fullsize="/thumbnail?url=https%3A%2F%2Fraw.githubusercontent.com%2Fzulip%2Fzulip%2Fmaster%2Fstatic%2Fimages%2Flogo%2Fzulip-icon-128x128.png&amp;size=full" src="/thumbnail?url=https%3A%2F%2Fraw.githubusercontent.com%2Fzulip%2Fzulip%2Fmaster%2Fstatic%2Fimages%2Flogo%2Fzulip-icon-128x128.png&amp;size=thumbnail"></a></div>') msg = 'Test: https://developer.github.com/assets/images/hero-circuit-bg.png' converted = bugdown_convert(msg) self.assertEqual(converted, '<p>Test: <a href="https://developer.github.com/assets/images/hero-circuit-bg.png">https://developer.github.com/assets/images/hero-circuit-bg.png</a></p>\n<div class="message_inline_image"><a href="https://developer.github.com/assets/images/hero-circuit-bg.png"><img data-src-fullsize="/thumbnail?url=https%3A%2F%2Fdeveloper.github.com%2Fassets%2Fimages%2Fhero-circuit-bg.png&amp;size=full" src="/thumbnail?url=https%3A%2F%2Fdeveloper.github.com%2Fassets%2Fimages%2Fhero-circuit-bg.png&amp;size=thumbnail"></a></div>') def test_twitter_id_extraction(self) -> None: self.assertEqual(bugdown.get_tweet_id('http://twitter.com/#!/VizzQuotes/status/409030735191097344'), '409030735191097344') self.assertEqual(bugdown.get_tweet_id('http://twitter.com/VizzQuotes/status/409030735191097344'), '409030735191097344') self.assertEqual(bugdown.get_tweet_id('http://twitter.com/VizzQuotes/statuses/409030735191097344'), '409030735191097344') self.assertEqual(bugdown.get_tweet_id('https://twitter.com/wdaher/status/1017581858'), '1017581858') self.assertEqual(bugdown.get_tweet_id('https://twitter.com/wdaher/status/1017581858/'), '1017581858') self.assertEqual(bugdown.get_tweet_id('https://twitter.com/windyoona/status/410766290349879296/photo/1'), '410766290349879296') self.assertEqual(bugdown.get_tweet_id('https://twitter.com/windyoona/status/410766290349879296/'), '410766290349879296') def test_inline_interesting_links(self) -> None: def make_link(url: str) -> str: return f'<a href="{url}">{url}</a>' normal_tweet_html = ('<a href="https://twitter.com/Twitter"' '>@Twitter</a> ' 'meets @seepicturely at #tcdisrupt cc.' 
'<a href="https://twitter.com/boscomonkey"' '>@boscomonkey</a> ' '<a href="https://twitter.com/episod"' '>@episod</a> ' '<a href="http://t.co/6J2EgYM"' '>http://instagr.am/p/MuW67/</a>') mention_in_link_tweet_html = """<a href="http://t.co/@foo">http://foo.com</a>""" media_tweet_html = ('<a href="http://t.co/xo7pAhK6n3">' 'http://twitter.com/NEVNBoston/status/421654515616849920/photo/1</a>') emoji_in_tweet_html = """Zulip is <span aria-label=\"100\" class="emoji emoji-1f4af" role=\"img\" title="100">:100:</span>% open-source!""" def make_inline_twitter_preview(url: str, tweet_html: str, image_html: str='') -> str: ## As of right now, all previews are mocked to be the exact same tweet return ('<div class="inline-preview-twitter">' '<div class="twitter-tweet">' f'<a href="{url}">' '<img class="twitter-avatar"' ' src="https://external-content.zulipcdn.net/external_content/1f7cd2436976d410eab8189ebceda87ae0b34ead/687474703a2f2f7062732e7477696d672e63' '6f6d2f70726f66696c655f696d616765732f313338303931323137332f53637265656e5f73686f745f323031312d30362d30335f61745f372e33352e33' '365f504d5f6e6f726d616c2e706e67">' '</a>' f'<p>{tweet_html}</p>' '<span>- Eoin McMillan (@imeoin)</span>' f'{image_html}' '</div>' '</div>') msg = 'http://www.twitter.com' converted = bugdown_convert(msg) self.assertEqual(converted, '<p>{}</p>'.format(make_link('http://www.twitter.com'))) msg = 'http://www.twitter.com/wdaher/' converted = bugdown_convert(msg) self.assertEqual(converted, '<p>{}</p>'.format(make_link('http://www.twitter.com/wdaher/'))) msg = 'http://www.twitter.com/wdaher/status/3' converted = bugdown_convert(msg) self.assertEqual(converted, '<p>{}</p>'.format(make_link('http://www.twitter.com/wdaher/status/3'))) # id too long msg = 'http://www.twitter.com/wdaher/status/2879779692873154569' converted = bugdown_convert(msg) self.assertEqual(converted, '<p>{}</p>'.format(make_link('http://www.twitter.com/wdaher/status/2879779692873154569'))) # id too large (i.e. 
tweet doesn't exist) msg = 'http://www.twitter.com/wdaher/status/999999999999999999' converted = bugdown_convert(msg) self.assertEqual(converted, '<p>{}</p>'.format(make_link('http://www.twitter.com/wdaher/status/999999999999999999'))) msg = 'http://www.twitter.com/wdaher/status/287977969287315456' converted = bugdown_convert(msg) self.assertEqual(converted, '<p>{}</p>\n{}'.format( make_link('http://www.twitter.com/wdaher/status/287977969287315456'), make_inline_twitter_preview('http://www.twitter.com/wdaher/status/287977969287315456', normal_tweet_html))) msg = 'https://www.twitter.com/wdaher/status/287977969287315456' converted = bugdown_convert(msg) self.assertEqual(converted, '<p>{}</p>\n{}'.format( make_link('https://www.twitter.com/wdaher/status/287977969287315456'), make_inline_twitter_preview('https://www.twitter.com/wdaher/status/287977969287315456', normal_tweet_html))) msg = 'http://twitter.com/wdaher/status/287977969287315456' converted = bugdown_convert(msg) self.assertEqual(converted, '<p>{}</p>\n{}'.format( make_link('http://twitter.com/wdaher/status/287977969287315456'), make_inline_twitter_preview('http://twitter.com/wdaher/status/287977969287315456', normal_tweet_html))) # Repeated links will only be converted once msg = ('http://twitter.com/wdaher/status/287977969287315456 ' 'http://twitter.com/wdaher/status/287977969287315457 ' 'http://twitter.com/wdaher/status/287977969287315457 ' 'http://twitter.com/wdaher/status/287977969287315457') converted = bugdown_convert(msg) self.assertEqual(converted, '<p>{} {} {} {}</p>\n{}{}'.format( make_link('http://twitter.com/wdaher/status/287977969287315456'), make_link('http://twitter.com/wdaher/status/287977969287315457'), make_link('http://twitter.com/wdaher/status/287977969287315457'), make_link('http://twitter.com/wdaher/status/287977969287315457'), make_inline_twitter_preview('http://twitter.com/wdaher/status/287977969287315456', normal_tweet_html), make_inline_twitter_preview('http://twitter.com/wdaher/status/287977969287315457', normal_tweet_html))) # A max of 3 will be converted msg = ('http://twitter.com/wdaher/status/287977969287315456 ' 'http://twitter.com/wdaher/status/287977969287315457 ' 'https://twitter.com/wdaher/status/287977969287315456 ' 'http://twitter.com/wdaher/status/287977969287315460') converted = bugdown_convert(msg) self.assertEqual(converted, '<p>{} {} {} {}</p>\n{}{}{}'.format( make_link('http://twitter.com/wdaher/status/287977969287315456'), make_link('http://twitter.com/wdaher/status/287977969287315457'), make_link('https://twitter.com/wdaher/status/287977969287315456'), make_link('http://twitter.com/wdaher/status/287977969287315460'), make_inline_twitter_preview('http://twitter.com/wdaher/status/287977969287315456', normal_tweet_html), make_inline_twitter_preview('http://twitter.com/wdaher/status/287977969287315457', normal_tweet_html), make_inline_twitter_preview('https://twitter.com/wdaher/status/287977969287315456', normal_tweet_html))) # Tweet has a mention in a URL, only the URL is linked msg = 'http://twitter.com/wdaher/status/287977969287315458' converted = bugdown_convert(msg) self.assertEqual(converted, '<p>{}</p>\n{}'.format( make_link('http://twitter.com/wdaher/status/287977969287315458'), make_inline_twitter_preview('http://twitter.com/wdaher/status/287977969287315458', mention_in_link_tweet_html))) # Tweet with an image msg = 'http://twitter.com/wdaher/status/287977969287315459' converted = bugdown_convert(msg) self.assertEqual(converted, '<p>{}</p>\n{}'.format( 
make_link('http://twitter.com/wdaher/status/287977969287315459'), make_inline_twitter_preview('http://twitter.com/wdaher/status/287977969287315459', media_tweet_html, ('<div class="twitter-image">' '<a href="http://t.co/xo7pAhK6n3">' '<img src="https://pbs.twimg.com/media/BdoEjD4IEAIq86Z.jpg:small">' '</a>' '</div>')))) msg = 'http://twitter.com/wdaher/status/287977969287315460' converted = bugdown_convert(msg) self.assertEqual(converted, '<p>{}</p>\n{}'.format( make_link('http://twitter.com/wdaher/status/287977969287315460'), make_inline_twitter_preview('http://twitter.com/wdaher/status/287977969287315460', emoji_in_tweet_html))) def test_fetch_tweet_data_settings_validation(self) -> None: with self.settings(TEST_SUITE=False, TWITTER_CONSUMER_KEY=None): self.assertIs(None, bugdown.fetch_tweet_data('287977969287315459')) def test_content_has_emoji(self) -> None: self.assertFalse(bugdown.content_has_emoji_syntax('boring')) self.assertFalse(bugdown.content_has_emoji_syntax('hello: world')) self.assertFalse(bugdown.content_has_emoji_syntax(':foobar')) self.assertFalse(bugdown.content_has_emoji_syntax('::: hello :::')) self.assertTrue(bugdown.content_has_emoji_syntax('foo :whatever:')) self.assertTrue(bugdown.content_has_emoji_syntax('\n:whatever:')) self.assertTrue(bugdown.content_has_emoji_syntax(':smile: ::::::')) def test_realm_emoji(self) -> None: def emoji_img(name: str, file_name: str, realm_id: int) -> str: return '<img alt="{}" class="emoji" src="{}" title="{}">'.format( name, get_emoji_url(file_name, realm_id), name[1:-1].replace("_", " ")) realm = get_realm('zulip') # Needs to mock an actual message because that's how bugdown obtains the realm msg = Message(sender=self.example_user('hamlet')) converted = bugdown.convert(":green_tick:", message_realm=realm, message=msg) realm_emoji = RealmEmoji.objects.filter(realm=realm, name='green_tick', deactivated=False).get() self.assertEqual(converted, '<p>{}</p>'.format(emoji_img(':green_tick:', realm_emoji.file_name, realm.id))) # Deactivate realm emoji. do_remove_realm_emoji(realm, 'green_tick') converted = bugdown.convert(":green_tick:", message_realm=realm, message=msg) self.assertEqual(converted, '<p>:green_tick:</p>') def test_deactivated_realm_emoji(self) -> None: # Deactivate realm emoji. 
realm = get_realm('zulip') do_remove_realm_emoji(realm, 'green_tick') msg = Message(sender=self.example_user('hamlet')) converted = bugdown.convert(":green_tick:", message_realm=realm, message=msg) self.assertEqual(converted, '<p>:green_tick:</p>') def test_unicode_emoji(self) -> None: msg = '\u2615' # ☕ converted = bugdown_convert(msg) self.assertEqual(converted, '<p><span aria-label=\"coffee\" class="emoji emoji-2615" role=\"img\" title="coffee">:coffee:</span></p>') msg = '\u2615\u2615' # ☕☕ converted = bugdown_convert(msg) self.assertEqual(converted, '<p><span aria-label=\"coffee\" class="emoji emoji-2615" role=\"img\" title="coffee">:coffee:</span><span aria-label=\"coffee\" class="emoji emoji-2615" role=\"img\" title="coffee">:coffee:</span></p>') def test_no_translate_emoticons_if_off(self) -> None: user_profile = self.example_user('othello') do_set_user_display_setting(user_profile, 'translate_emoticons', False) msg = Message(sender=user_profile, sending_client=get_client("test")) content = ':)' expected = '<p>:)</p>' converted = render_markdown(msg, content) self.assertEqual(converted, expected) def test_same_markup(self) -> None: msg = '\u2615' # ☕ unicode_converted = bugdown_convert(msg) msg = ':coffee:' # ☕☕ converted = bugdown_convert(msg) self.assertEqual(converted, unicode_converted) def test_links_in_topic_name(self) -> None: realm = get_realm('zulip') msg = Message(sender=self.example_user('othello')) msg.set_topic_name("https://google.com/hello-world") converted_topic = bugdown.topic_links(realm.id, msg.topic_name()) self.assertEqual(converted_topic, ['https://google.com/hello-world']) msg.set_topic_name("http://google.com/hello-world") converted_topic = bugdown.topic_links(realm.id, msg.topic_name()) self.assertEqual(converted_topic, ['http://google.com/hello-world']) msg.set_topic_name("Without scheme google.com/hello-world") converted_topic = bugdown.topic_links(realm.id, msg.topic_name()) self.assertEqual(converted_topic, ['https://google.com/hello-world']) msg.set_topic_name("Without scheme random.words/hello-world") converted_topic = bugdown.topic_links(realm.id, msg.topic_name()) self.assertEqual(converted_topic, []) msg.set_topic_name("Try out http://ftp.debian.org, https://google.com/ and https://google.in/.") converted_topic = bugdown.topic_links(realm.id, msg.topic_name()) self.assertEqual(converted_topic, ['http://ftp.debian.org', 'https://google.com/', 'https://google.in/']) def test_realm_patterns(self) -> None: realm = get_realm('zulip') url_format_string = r"https://trac.example.com/ticket/%(id)s" realm_filter = RealmFilter(realm=realm, pattern=r"#(?P<id>[0-9]{2,8})", url_format_string=url_format_string) realm_filter.save() self.assertEqual( realm_filter.__str__(), '<RealmFilter(zulip): #(?P<id>[0-9]{2,8})' ' https://trac.example.com/ticket/%(id)s>') msg = Message(sender=self.example_user('othello')) msg.set_topic_name("#444") flush_per_request_caches() content = "We should fix #224 and #115, but not issue#124 or #1124z or [trac #15](https://trac.example.com/ticket/16) today." 
converted = bugdown.convert(content, message_realm=realm, message=msg) converted_topic = bugdown.topic_links(realm.id, msg.topic_name()) self.assertEqual(converted, '<p>We should fix <a href="https://trac.example.com/ticket/224">#224</a> and <a href="https://trac.example.com/ticket/115">#115</a>, but not issue#124 or #1124z or <a href="https://trac.example.com/ticket/16">trac #15</a> today.</p>') self.assertEqual(converted_topic, ['https://trac.example.com/ticket/444']) msg.set_topic_name("#444 https://google.com") converted_topic = bugdown.topic_links(realm.id, msg.topic_name()) self.assertEqual(converted_topic, ['https://trac.example.com/ticket/444', 'https://google.com']) RealmFilter(realm=realm, pattern=r'#(?P<id>[a-zA-Z]+-[0-9]+)', url_format_string=r'https://trac.example.com/ticket/%(id)s').save() msg = Message(sender=self.example_user('hamlet')) content = '#ZUL-123 was fixed and code was deployed to production, also #zul-321 was deployed to staging' converted = bugdown.convert(content, message_realm=realm, message=msg) self.assertEqual(converted, '<p><a href="https://trac.example.com/ticket/ZUL-123">#ZUL-123</a> was fixed and code was deployed to production, also <a href="https://trac.example.com/ticket/zul-321">#zul-321</a> was deployed to staging</p>') def assert_conversion(content: str, convert: bool=True) -> None: converted = bugdown.convert(content, message_realm=realm, message=msg) converted_topic = bugdown.topic_links(realm.id, content) if convert: self.assertTrue('trac.example.com' in converted) self.assertEqual(len(converted_topic), 1) self.assertTrue('trac.example.com' in converted_topic[0]) else: self.assertTrue('trac.example.com' not in converted) self.assertEqual(len(converted_topic), 0) assert_conversion('Hello #123 World') assert_conversion('Hello #123World', False) assert_conversion('Hello#123 World', False) assert_conversion('Hello#123World', False) # Ideally, these should be converted, but bugdown doesn't # handle word boundary detection in languages that don't use # whitespace for that correctly yet. 
assert_conversion('チケットは#123です', False) assert_conversion('チケットは #123です', False) assert_conversion('チケットは#123 です', False) assert_conversion('チケットは #123 です') assert_conversion('(#123)') assert_conversion('#123>') assert_conversion('"#123"') assert_conversion('#123@') assert_conversion(')#123(', False) assert_conversion('##123', False) # test nested realm patterns should avoid double matching RealmFilter(realm=realm, pattern=r'hello#(?P<id>[0-9]+)', url_format_string=r'https://trac.example.com/hello/%(id)s').save() converted_topic = bugdown.topic_links(realm.id, 'hello#123 #234') self.assertEqual(converted_topic, ['https://trac.example.com/ticket/234', 'https://trac.example.com/hello/123']) def test_maybe_update_markdown_engines(self) -> None: realm = get_realm('zulip') url_format_string = r"https://trac.example.com/ticket/%(id)s" realm_filter = RealmFilter(realm=realm, pattern=r"#(?P<id>[0-9]{2,8})", url_format_string=url_format_string) realm_filter.save() bugdown.realm_filter_data = {} bugdown.maybe_update_markdown_engines(None, False) all_filters = bugdown.realm_filter_data zulip_filters = all_filters[realm.id] self.assertEqual(len(zulip_filters), 1) self.assertEqual(zulip_filters[0], ('#(?P<id>[0-9]{2,8})', 'https://trac.example.com/ticket/%(id)s', realm_filter.id)) def test_flush_realm_filter(self) -> None: realm = get_realm('zulip') def flush() -> None: ''' flush_realm_filter is a post-save hook, so calling it directly for testing is kind of awkward ''' class Instance: realm_id: Optional[int] = None instance = Instance() instance.realm_id = realm.id flush_realm_filter(sender=None, instance=instance) def save_new_realm_filter() -> None: realm_filter = RealmFilter(realm=realm, pattern=r"whatever", url_format_string='whatever') realm_filter.save() # start fresh for our realm flush() self.assertFalse(realm_in_local_realm_filters_cache(realm.id)) # call this just for side effects of populating the cache realm_filters_for_realm(realm.id) self.assertTrue(realm_in_local_realm_filters_cache(realm.id)) # Saving a new RealmFilter should have the side effect of # flushing the cache. 
save_new_realm_filter() self.assertFalse(realm_in_local_realm_filters_cache(realm.id)) # and flush it one more time, to make sure we don't get a KeyError flush() self.assertFalse(realm_in_local_realm_filters_cache(realm.id)) def test_realm_patterns_negative(self) -> None: realm = get_realm('zulip') RealmFilter(realm=realm, pattern=r"#(?P<id>[0-9]{2,8})", url_format_string=r"https://trac.example.com/ticket/%(id)s").save() boring_msg = Message(sender=self.example_user('othello')) boring_msg.set_topic_name("no match here") converted_boring_topic = bugdown.topic_links(realm.id, boring_msg.topic_name()) self.assertEqual(converted_boring_topic, []) def test_is_status_message(self) -> None: user_profile = self.example_user('othello') msg = Message(sender=user_profile, sending_client=get_client("test")) content = '/me makes a list\n* one\n* two' rendered_content = render_markdown(msg, content) self.assertEqual( rendered_content, '<p>/me makes a list</p>\n<ul>\n<li>one</li>\n<li>two</li>\n</ul>', ) self.assertTrue(Message.is_status_message(content, rendered_content)) content = '/me takes a walk' rendered_content = render_markdown(msg, content) self.assertEqual( rendered_content, '<p>/me takes a walk</p>', ) self.assertTrue(Message.is_status_message(content, rendered_content)) content = '/me writes a second line\nline' rendered_content = render_markdown(msg, content) self.assertEqual( rendered_content, '<p>/me writes a second line<br>\nline</p>', ) self.assertTrue(Message.is_status_message(content, rendered_content)) def test_alert_words(self) -> None: user_profile = self.example_user('othello') do_add_alert_words(user_profile, ["ALERTWORD", "scaryword"]) msg = Message(sender=user_profile, sending_client=get_client("test")) realm_alert_words_automaton = get_alert_word_automaton(user_profile.realm) def render(msg: Message, content: str) -> str: return render_markdown(msg, content, realm_alert_words_automaton=realm_alert_words_automaton) content = "We have an ALERTWORD day today!" self.assertEqual(render(msg, content), "<p>We have an ALERTWORD day today!</p>") self.assertEqual(msg.user_ids_with_alert_words, {user_profile.id}) msg = Message(sender=user_profile, sending_client=get_client("test")) content = "We have a NOTHINGWORD day today!" 
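        # Descriptive note (added): NOTHINGWORD is not one of the user's alert words
        # (ALERTWORD, scaryword), so no user ids should be flagged for this message.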
self.assertEqual(render(msg, content), "<p>We have a NOTHINGWORD day today!</p>") self.assertEqual(msg.user_ids_with_alert_words, set()) def test_alert_words_returns_user_ids_with_alert_words(self) -> None: alert_words_for_users: Dict[str, List[str]] = { 'hamlet': ['how'], 'cordelia': ['this possible'], 'iago': ['hello'], 'prospero': ['hello'], 'othello': ['how are you'], 'aaron': ['hey'], } user_profiles: Dict[str, UserProfile] = {} for (username, alert_words) in alert_words_for_users.items(): user_profile = self.example_user(username) user_profiles.update({username: user_profile}) do_add_alert_words(user_profile, alert_words) sender_user_profile = self.example_user('polonius') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) realm_alert_words_automaton = get_alert_word_automaton(sender_user_profile.realm) def render(msg: Message, content: str) -> str: return render_markdown(msg, content, realm_alert_words_automaton=realm_alert_words_automaton) content = "hello how is this possible how are you doing today" render(msg, content) expected_user_ids: Set[int] = { user_profiles['hamlet'].id, user_profiles['cordelia'].id, user_profiles['iago'].id, user_profiles['prospero'].id, user_profiles['othello'].id, } # All users except aaron have their alert word appear in the message content self.assertEqual(msg.user_ids_with_alert_words, expected_user_ids) def test_alert_words_returns_user_ids_with_alert_words_1(self) -> None: alert_words_for_users: Dict[str, List[str]] = { 'hamlet': ['provisioning', 'Prod deployment'], 'cordelia': ['test', 'Prod'], 'iago': ['prod'], 'prospero': ['deployment'], 'othello': ['last'], } user_profiles: Dict[str, UserProfile] = {} for (username, alert_words) in alert_words_for_users.items(): user_profile = self.example_user(username) user_profiles.update({username: user_profile}) do_add_alert_words(user_profile, alert_words) sender_user_profile = self.example_user('polonius') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) realm_alert_words_automaton = get_alert_word_automaton(sender_user_profile.realm) def render(msg: Message, content: str) -> str: return render_markdown(msg, content, realm_alert_words_automaton=realm_alert_words_automaton) content = """Hello, everyone. 
Prod deployment has been completed And this is a new line to test out how markdown convert this into something line ending splitted array and this is a new line last""" render(msg, content) expected_user_ids: Set[int] = { user_profiles['hamlet'].id, user_profiles['cordelia'].id, user_profiles['iago'].id, user_profiles['prospero'].id, user_profiles['othello'].id, } # All users have their alert word appear in the message content self.assertEqual(msg.user_ids_with_alert_words, expected_user_ids) def test_alert_words_returns_user_ids_with_alert_words_in_french(self) -> None: alert_words_for_users: Dict[str, List[str]] = { 'hamlet': ['réglementaire', 'une politique', 'une merveille'], 'cordelia': ['énormément', 'Prod'], 'iago': ['prod'], 'prospero': ['deployment'], 'othello': ['last'], } user_profiles: Dict[str, UserProfile] = {} for (username, alert_words) in alert_words_for_users.items(): user_profile = self.example_user(username) user_profiles.update({username: user_profile}) do_add_alert_words(user_profile, alert_words) sender_user_profile = self.example_user('polonius') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) realm_alert_words_automaton = get_alert_word_automaton(sender_user_profile.realm) def render(msg: Message, content: str) -> str: return render_markdown(msg, content, realm_alert_words_automaton=realm_alert_words_automaton) content = """This is to test out alert words work in languages with accented characters too bonjour est (énormément) ce a quoi ressemble le français et j'espère qu'il n'y n' réglementaire a pas de mots d'alerte dans ce texte français """ render(msg, content) expected_user_ids: Set[int] = {user_profiles['hamlet'].id, user_profiles['cordelia'].id} # Only hamlet and cordelia have their alert-words appear in the message content self.assertEqual(msg.user_ids_with_alert_words, expected_user_ids) def test_alert_words_returns_empty_user_ids_with_alert_words(self) -> None: alert_words_for_users: Dict[str, List[str]] = { 'hamlet': [], 'cordelia': [], 'iago': [], 'prospero': [], 'othello': [], 'aaron': [], } user_profiles: Dict[str, UserProfile] = {} for (username, alert_words) in alert_words_for_users.items(): user_profile = self.example_user(username) user_profiles.update({username: user_profile}) do_add_alert_words(user_profile, alert_words) sender_user_profile = self.example_user('polonius') msg = Message(sender=user_profile, sending_client=get_client("test")) realm_alert_words_automaton = get_alert_word_automaton(sender_user_profile.realm) def render(msg: Message, content: str) -> str: return render_markdown(msg, content, realm_alert_words_automaton=realm_alert_words_automaton) content = """hello how is this possible how are you doing today This is to test that the no user_ids who have alrert wourldword is participating in sending of the message """ render(msg, content) expected_user_ids: Set[int] = set() # None of the users have their alert-words appear in the message content self.assertEqual(msg.user_ids_with_alert_words, expected_user_ids) def get_mock_alert_words(self, num_words: int, word_length: int) -> List[str]: alert_words = ['x' * word_length] * num_words # type List[str] return alert_words def test_alert_words_with_empty_alert_words(self) -> None: alert_words_for_users: Dict[str, List[str]] = { 'hamlet': [], 'cordelia': [], 'iago': [], 'othello': [], } user_profiles: Dict[str, UserProfile] = {} for (username, alert_words) in alert_words_for_users.items(): user_profile = self.example_user(username) 
user_profiles.update({username: user_profile}) do_add_alert_words(user_profile, alert_words) sender_user_profile = self.example_user('polonius') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) realm_alert_words_automaton = get_alert_word_automaton(sender_user_profile.realm) def render(msg: Message, content: str) -> str: return render_markdown(msg, content, realm_alert_words_automaton=realm_alert_words_automaton) content = """This is to test a empty alert words i.e. no user has any alert-words set""" render(msg, content) expected_user_ids: Set[int] = set() self.assertEqual(msg.user_ids_with_alert_words, expected_user_ids) def test_alert_words_retuns_user_ids_with_alert_words_with_huge_alert_words(self) -> None: alert_words_for_users: Dict[str, List[str]] = { 'hamlet': ['issue124'], 'cordelia': self.get_mock_alert_words(500, 10), 'iago': self.get_mock_alert_words(500, 10), 'othello': self.get_mock_alert_words(500, 10), } user_profiles: Dict[str, UserProfile] = {} for (username, alert_words) in alert_words_for_users.items(): user_profile = self.example_user(username) user_profiles.update({username: user_profile}) do_add_alert_words(user_profile, alert_words) sender_user_profile = self.example_user('polonius') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) realm_alert_words_automaton = get_alert_word_automaton(sender_user_profile.realm) def render(msg: Message, content: str) -> str: return render_markdown(msg, content, realm_alert_words_automaton=realm_alert_words_automaton) content = """The code above will print 10 random values of numbers between 1 and 100. The second line, for x in range(10), determines how many values will be printed (when you use range(x), the number that you use in place of x will be the amount of values that you'll have printed. if you want 20 values, use range(20). use range(5) if you only want 5 values returned, etc.). I was talking abou the issue124 on github. Then the third line: print random.randint(1,101) will automatically select a random integer between 1 and 100 for you. 
The process is fairly simple """ render(msg, content) expected_user_ids: Set[int] = {user_profiles['hamlet'].id} # Only hamlet has alert-word 'issue124' present in the message content self.assertEqual(msg.user_ids_with_alert_words, expected_user_ids) def test_default_code_block_language(self) -> None: realm = get_realm('zulip') self.assertEqual(realm.default_code_block_language, None) text = "```{}\nconsole.log('Hello World');\n```\n" # Render without default language msg_with_js = bugdown_convert(text.format('js')) msg_with_python = bugdown_convert(text.format('python')) msg_without_language = bugdown_convert(text.format('')) msg_with_quote = bugdown_convert(text.format('quote')) msg_with_math = bugdown_convert(text.format('math')) # Render with default=javascript do_set_realm_property(realm, 'default_code_block_language', 'javascript') msg_without_language_default_js = bugdown_convert(text.format('')) msg_with_python_default_js = bugdown_convert(text.format('python')) # Render with default=python do_set_realm_property(realm, 'default_code_block_language', 'python') msg_without_language_default_py = bugdown_convert(text.format('')) msg_with_none_default_py = bugdown_convert(text.format('none')) # Render with default=quote do_set_realm_property(realm, 'default_code_block_language', 'quote') msg_without_language_default_quote = bugdown_convert(text.format('')) # Render with default=math do_set_realm_property(realm, 'default_code_block_language', 'math') msg_without_language_default_math = bugdown_convert(text.format('')) # Render without default language do_set_realm_property(realm, 'default_code_block_language', None) msg_without_language_final = bugdown_convert(text.format('')) self.assertTrue(msg_with_js == msg_without_language_default_js) self.assertTrue(msg_with_python == msg_with_python_default_js == msg_without_language_default_py) self.assertTrue(msg_with_quote == msg_without_language_default_quote) self.assertTrue(msg_with_math == msg_without_language_default_math) self.assertTrue(msg_without_language == msg_with_none_default_py == msg_without_language_final) # Test checking inside nested quotes nested_text = "````quote\n\n{}\n\n{}````".format(text.format('js'), text.format('')) do_set_realm_property(realm, 'default_code_block_language', 'javascript') rendered = bugdown_convert(nested_text) with_language, without_language = re.findall(r'<pre>(.*?)$', rendered, re.MULTILINE) self.assertTrue(with_language == without_language) do_set_realm_property(realm, 'default_code_block_language', None) rendered = bugdown_convert(nested_text) with_language, without_language = re.findall(r'<pre>(.*?)$', rendered, re.MULTILINE) self.assertFalse(with_language == without_language) def test_mention_wildcard(self) -> None: user_profile = self.example_user('othello') msg = Message(sender=user_profile, sending_client=get_client("test")) content = "@**all** test" self.assertEqual(render_markdown(msg, content), '<p><span class="user-mention" data-user-id="*">' '@all' '</span> test</p>') self.assertTrue(msg.mentions_wildcard) def test_mention_everyone(self) -> None: user_profile = self.example_user('othello') msg = Message(sender=user_profile, sending_client=get_client("test")) content = "@**everyone** test" self.assertEqual(render_markdown(msg, content), '<p><span class="user-mention" data-user-id="*">' '@everyone' '</span> test</p>') self.assertTrue(msg.mentions_wildcard) def test_mention_stream(self) -> None: user_profile = self.example_user('othello') msg = Message(sender=user_profile, 
sending_client=get_client("test")) content = "@**stream** test" self.assertEqual(render_markdown(msg, content), '<p><span class="user-mention" data-user-id="*">' '@stream' '</span> test</p>') self.assertTrue(msg.mentions_wildcard) def test_mention_at_wildcard(self) -> None: user_profile = self.example_user('othello') msg = Message(sender=user_profile, sending_client=get_client("test")) content = "@all test" self.assertEqual(render_markdown(msg, content), '<p>@all test</p>') self.assertFalse(msg.mentions_wildcard) self.assertEqual(msg.mentions_user_ids, set()) def test_mention_at_everyone(self) -> None: user_profile = self.example_user('othello') msg = Message(sender=user_profile, sending_client=get_client("test")) content = "@everyone test" self.assertEqual(render_markdown(msg, content), '<p>@everyone test</p>') self.assertFalse(msg.mentions_wildcard) self.assertEqual(msg.mentions_user_ids, set()) def test_mention_word_starting_with_at_wildcard(self) -> None: user_profile = self.example_user('othello') msg = Message(sender=user_profile, sending_client=get_client("test")) content = "test @alleycat.com test" self.assertEqual(render_markdown(msg, content), '<p>test @alleycat.com test</p>') self.assertFalse(msg.mentions_wildcard) self.assertEqual(msg.mentions_user_ids, set()) def test_mention_at_normal_user(self) -> None: user_profile = self.example_user('othello') msg = Message(sender=user_profile, sending_client=get_client("test")) content = "@aaron test" self.assertEqual(render_markdown(msg, content), '<p>@aaron test</p>') self.assertFalse(msg.mentions_wildcard) self.assertEqual(msg.mentions_user_ids, set()) def test_mention_single(self) -> None: sender_user_profile = self.example_user('othello') user_profile = self.example_user('hamlet') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) user_id = user_profile.id content = "@**King Hamlet**" self.assertEqual(render_markdown(msg, content), '<p><span class="user-mention" ' f'data-user-id="{user_id}">' '@King Hamlet</span></p>') self.assertEqual(msg.mentions_user_ids, {user_profile.id}) def test_mention_silent(self) -> None: sender_user_profile = self.example_user('othello') user_profile = self.example_user('hamlet') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) user_id = user_profile.id content = "@_**King Hamlet**" self.assertEqual(render_markdown(msg, content), '<p><span class="user-mention silent" ' f'data-user-id="{user_id}">' 'King Hamlet</span></p>') self.assertEqual(msg.mentions_user_ids, set()) def test_possible_mentions(self) -> None: def assert_mentions(content: str, names: Set[str], has_wildcards: bool=False) -> None: self.assertEqual(possible_mentions(content), (names, has_wildcards)) assert_mentions('', set()) assert_mentions('boring', set()) assert_mentions('@**all**', set(), True) assert_mentions('smush@**steve**smush', set()) assert_mentions( 'Hello @**King Hamlet** and @**Cordelia Lear**\n@**Foo van Barson|1234** @**all**', {'King Hamlet', 'Cordelia Lear', 'Foo van Barson|1234'}, True, ) def test_mention_multiple(self) -> None: sender_user_profile = self.example_user('othello') hamlet = self.example_user('hamlet') cordelia = self.example_user('cordelia') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) content = "@**King Hamlet** and @**Cordelia Lear**, check this out" self.assertEqual(render_markdown(msg, content), '<p>' '<span class="user-mention" ' f'data-user-id="{hamlet.id}">@King Hamlet</span> and ' '<span class="user-mention" ' 
f'data-user-id="{cordelia.id}">@Cordelia Lear</span>, ' 'check this out</p>') self.assertEqual(msg.mentions_user_ids, {hamlet.id, cordelia.id}) def test_mention_in_quotes(self) -> None: othello = self.example_user('othello') hamlet = self.example_user('hamlet') cordelia = self.example_user('cordelia') msg = Message(sender=othello, sending_client=get_client("test")) content = "> @**King Hamlet** and @**Othello, the Moor of Venice**\n\n @**King Hamlet** and @**Cordelia Lear**" self.assertEqual(render_markdown(msg, content), '<blockquote>\n<p>' f'<span class="user-mention silent" data-user-id="{hamlet.id}">King Hamlet</span>' ' and ' f'<span class="user-mention silent" data-user-id="{othello.id}">Othello, the Moor of Venice</span>' '</p>\n</blockquote>\n' '<p>' f'<span class="user-mention" data-user-id="{hamlet.id}">@King Hamlet</span>' ' and ' f'<span class="user-mention" data-user-id="{cordelia.id}">@Cordelia Lear</span>' '</p>') self.assertEqual(msg.mentions_user_ids, {hamlet.id, cordelia.id}) # Both fenced quote and > quote should be identical for both silent and regular syntax. expected = ('<blockquote>\n<p>' f'<span class="user-mention silent" data-user-id="{hamlet.id}">King Hamlet</span>' '</p>\n</blockquote>') content = "```quote\n@**King Hamlet**\n```" self.assertEqual(render_markdown(msg, content), expected) self.assertEqual(msg.mentions_user_ids, set()) content = "> @**King Hamlet**" self.assertEqual(render_markdown(msg, content), expected) self.assertEqual(msg.mentions_user_ids, set()) content = "```quote\n@_**King Hamlet**\n```" self.assertEqual(render_markdown(msg, content), expected) self.assertEqual(msg.mentions_user_ids, set()) content = "> @_**King Hamlet**" self.assertEqual(render_markdown(msg, content), expected) self.assertEqual(msg.mentions_user_ids, set()) def test_mention_duplicate_full_name(self) -> None: realm = get_realm('zulip') def make_user(email: str, full_name: str) -> UserProfile: return create_user( email=email, password='whatever', realm=realm, full_name=full_name, short_name='whatever', ) sender_user_profile = self.example_user('othello') twin1 = make_user('twin1@example.com', 'Mark Twin') twin2 = make_user('twin2@example.com', 'Mark Twin') cordelia = self.example_user('cordelia') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) content = f"@**Mark Twin|{twin1.id}**, @**Mark Twin|{twin2.id}** and @**Cordelia Lear**, hi." self.assertEqual(render_markdown(msg, content), '<p>' '<span class="user-mention" ' f'data-user-id="{twin1.id}">@Mark Twin</span>, ' '<span class="user-mention" ' f'data-user-id="{twin2.id}">@Mark Twin</span> and ' '<span class="user-mention" ' f'data-user-id="{cordelia.id}">@Cordelia Lear</span>, ' 'hi.</p>') self.assertEqual(msg.mentions_user_ids, {twin1.id, twin2.id, cordelia.id}) def test_mention_invalid(self) -> None: sender_user_profile = self.example_user('othello') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) content = "Hey @**Nonexistent User**" self.assertEqual(render_markdown(msg, content), '<p>Hey @<strong>Nonexistent User</strong></p>') self.assertEqual(msg.mentions_user_ids, set()) def test_user_mention_atomic_string(self) -> None: sender_user_profile = self.example_user('othello') realm = get_realm('zulip') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) # Create a linkifier. 
url_format_string = r"https://trac.example.com/ticket/%(id)s" realm_filter = RealmFilter(realm=realm, pattern=r"#(?P<id>[0-9]{2,8})", url_format_string=url_format_string) realm_filter.save() self.assertEqual( realm_filter.__str__(), '<RealmFilter(zulip): #(?P<id>[0-9]{2,8})' ' https://trac.example.com/ticket/%(id)s>') # Create a user that potentially interferes with the pattern. test_user = create_user(email='atomic@example.com', password='whatever', realm=realm, full_name='Atomic #123', short_name='whatever') content = "@**Atomic #123**" self.assertEqual(render_markdown(msg, content), '<p><span class="user-mention" ' f'data-user-id="{test_user.id}">' '@Atomic #123</span></p>') self.assertEqual(msg.mentions_user_ids, {test_user.id}) content = "@_**Atomic #123**" self.assertEqual(render_markdown(msg, content), '<p><span class="user-mention silent" ' f'data-user-id="{test_user.id}">' 'Atomic #123</span></p>') self.assertEqual(msg.mentions_user_ids, set()) def create_user_group_for_test(self, user_group_name: str) -> UserGroup: othello = self.example_user('othello') return create_user_group(user_group_name, [othello], get_realm('zulip')) def test_user_group_mention_single(self) -> None: sender_user_profile = self.example_user('othello') user_profile = self.example_user('hamlet') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) user_id = user_profile.id user_group = self.create_user_group_for_test('support') content = "@**King Hamlet** @*support*" self.assertEqual(render_markdown(msg, content), '<p><span class="user-mention" ' f'data-user-id="{user_id}">' '@King Hamlet</span> ' '<span class="user-group-mention" ' f'data-user-group-id="{user_group.id}">' '@support</span></p>') self.assertEqual(msg.mentions_user_ids, {user_profile.id}) self.assertEqual(msg.mentions_user_group_ids, {user_group.id}) def test_user_group_mention_atomic_string(self) -> None: sender_user_profile = self.example_user('othello') realm = get_realm('zulip') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) user_profile = self.example_user('hamlet') # Create a linkifier. url_format_string = r"https://trac.example.com/ticket/%(id)s" realm_filter = RealmFilter(realm=realm, pattern=r"#(?P<id>[0-9]{2,8})", url_format_string=url_format_string) realm_filter.save() self.assertEqual( realm_filter.__str__(), '<RealmFilter(zulip): #(?P<id>[0-9]{2,8})' ' https://trac.example.com/ticket/%(id)s>') # Create a user-group that potentially interferes with the pattern. 
user_id = user_profile.id user_group = self.create_user_group_for_test('support #123') content = "@**King Hamlet** @*support #123*" self.assertEqual(render_markdown(msg, content), '<p><span class="user-mention" ' f'data-user-id="{user_id}">' '@King Hamlet</span> ' '<span class="user-group-mention" ' f'data-user-group-id="{user_group.id}">' '@support #123</span></p>') self.assertEqual(msg.mentions_user_ids, {user_profile.id}) self.assertEqual(msg.mentions_user_group_ids, {user_group.id}) def test_possible_user_group_mentions(self) -> None: def assert_mentions(content: str, names: Set[str]) -> None: self.assertEqual(possible_user_group_mentions(content), names) assert_mentions('', set()) assert_mentions('boring', set()) assert_mentions('@**all**', set()) assert_mentions('smush@*steve*smush', set()) assert_mentions( '@*support* Hello @**King Hamlet** and @**Cordelia Lear**\n' '@**Foo van Barson** @**all**', {'support'}, ) assert_mentions( 'Attention @*support*, @*frontend* and @*backend*\ngroups.', {'support', 'frontend', 'backend'}, ) def test_user_group_mention_multiple(self) -> None: sender_user_profile = self.example_user('othello') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) support = self.create_user_group_for_test('support') backend = self.create_user_group_for_test('backend') content = "@*support* and @*backend*, check this out" self.assertEqual(render_markdown(msg, content), '<p>' '<span class="user-group-mention" ' f'data-user-group-id="{support.id}">' '@support</span> ' 'and ' '<span class="user-group-mention" ' f'data-user-group-id="{backend.id}">' '@backend</span>, ' 'check this out' '</p>') self.assertEqual(msg.mentions_user_group_ids, {support.id, backend.id}) def test_user_group_mention_edit(self) -> None: sender_user_profile = self.example_user('hamlet') user_profile = self.example_user('othello') self.create_user_group_for_test('support') self.login('hamlet') msg_id = self.send_stream_message(sender_user_profile, "Denmark", topic_name="editing", content='test') def update_message_and_check_flag(content: str, mentioned: bool) -> None: result = self.client_patch("/json/messages/" + str(msg_id), { 'message_id': msg_id, 'content': content, }) self.assert_json_success(result) um = UserMessage.objects.get( user_profile_id=user_profile.id, message_id=msg_id, ) if mentioned: self.assertIn('mentioned', um.flags_list()) else: self.assertNotIn('mentioned', um.flags_list()) update_message_and_check_flag("@*support*", True) update_message_and_check_flag("@*support-invalid* edited", False) update_message_and_check_flag("@*support* edited", True) update_message_and_check_flag("edited", False) update_message_and_check_flag("@*support*", True) def test_user_group_mention_invalid(self) -> None: sender_user_profile = self.example_user('othello') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) content = "Hey @*Nonexistent group*" self.assertEqual(render_markdown(msg, content), '<p>Hey @<em>Nonexistent group</em></p>') self.assertEqual(msg.mentions_user_group_ids, set()) def test_stream_single(self) -> None: denmark = get_stream('Denmark', get_realm('zulip')) sender_user_profile = self.example_user('othello') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) content = "#**Denmark**" self.assertEqual( render_markdown(msg, content), '<p><a class="stream" data-stream-id="{d.id}" href="/#narrow/stream/{d.id}-Denmark">#{d.name}</a></p>'.format( d=denmark, )) def test_stream_multiple(self) -> None: 
sender_user_profile = self.example_user('othello') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) realm = get_realm('zulip') denmark = get_stream('Denmark', realm) scotland = get_stream('Scotland', realm) content = "Look to #**Denmark** and #**Scotland**, there something" self.assertEqual(render_markdown(msg, content), '<p>Look to ' '<a class="stream" ' 'data-stream-id="{denmark.id}" ' 'href="/#narrow/stream/{denmark.id}-Denmark">#{denmark.name}</a> and ' '<a class="stream" ' 'data-stream-id="{scotland.id}" ' 'href="/#narrow/stream/{scotland.id}-Scotland">#{scotland.name}</a>, ' 'there something</p>'.format(denmark=denmark, scotland=scotland)) def test_stream_case_sensitivity(self) -> None: realm = get_realm('zulip') case_sens = Stream.objects.create(name='CaseSens', realm=realm) sender_user_profile = self.example_user('othello') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) content = "#**CaseSens**" self.assertEqual( render_markdown(msg, content), '<p><a class="stream" data-stream-id="{s.id}" href="/#narrow/stream/{s.id}-{s.name}">#{s.name}</a></p>'.format( s=case_sens, )) def test_stream_case_sensitivity_nonmatching(self) -> None: """#StreamName requires the stream be spelled with the correct case currently. If we change that in the future, we'll need to change this test.""" realm = get_realm('zulip') Stream.objects.create(name='CaseSens', realm=realm) sender_user_profile = self.example_user('othello') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) content = "#**casesens**" self.assertEqual( render_markdown(msg, content), '<p>#<strong>casesens</strong></p>') def test_topic_single(self) -> None: denmark = get_stream('Denmark', get_realm('zulip')) sender_user_profile = self.example_user('othello') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) content = "#**Denmark>some topic**" self.assertEqual( render_markdown(msg, content), '<p><a class="stream-topic" data-stream-id="{d.id}" href="/#narrow/stream/{d.id}-Denmark/topic/some.20topic">#{d.name} &gt; some topic</a></p>'.format( d=denmark, )) def test_topic_atomic_string(self) -> None: realm = get_realm('zulip') # Create a linkifier. sender_user_profile = self.example_user('othello') url_format_string = r"https://trac.example.com/ticket/%(id)s" realm_filter = RealmFilter(realm=realm, pattern=r"#(?P<id>[0-9]{2,8})", url_format_string=url_format_string) realm_filter.save() self.assertEqual( realm_filter.__str__(), '<RealmFilter(zulip): #(?P<id>[0-9]{2,8})' ' https://trac.example.com/ticket/%(id)s>') # Create a topic link that potentially interferes with the pattern. denmark = get_stream('Denmark', realm) msg = Message(sender=sender_user_profile, sending_client=get_client("test")) content = "#**Denmark>#1234**" self.assertEqual( render_markdown(msg, content), '<p><a class="stream-topic" data-stream-id="{d.id}" href="/#narrow/stream/{d.id}-Denmark/topic/.231234">#{d.name} &gt; #1234</a></p>'.format( d=denmark, )) def test_topic_multiple(self) -> None: denmark = get_stream('Denmark', get_realm('zulip')) scotland = get_stream('Scotland', get_realm('zulip')) sender_user_profile = self.example_user('othello') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) content = "This has two links: #**Denmark>some topic** and #**Scotland>other topic**." 
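        # Descriptive note (added): each #**stream>topic** reference should render as its
        # own stream-topic narrow link in the output checked below.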
self.assertEqual( render_markdown(msg, content), '<p>This has two links: ' '<a class="stream-topic" data-stream-id="{denmark.id}" ' 'href="/#narrow/stream/{denmark.id}-{denmark.name}/topic/some.20topic">' '#{denmark.name} &gt; some topic</a>' ' and ' '<a class="stream-topic" data-stream-id="{scotland.id}" ' 'href="/#narrow/stream/{scotland.id}-{scotland.name}/topic/other.20topic">' '#{scotland.name} &gt; other topic</a>' '.</p>'.format(denmark=denmark, scotland=scotland)) def test_possible_stream_names(self) -> None: content = '''#**test here** This mentions #**Denmark** too. #**garçon** #**천국** @**Ignore Person** ''' self.assertEqual( bugdown.possible_linked_stream_names(content), {'test here', 'Denmark', 'garçon', '천국'}, ) def test_stream_unicode(self) -> None: realm = get_realm('zulip') uni = Stream.objects.create(name='привет', realm=realm) sender_user_profile = self.example_user('othello') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) content = "#**привет**" quoted_name = '.D0.BF.D1.80.D0.B8.D0.B2.D0.B5.D1.82' href = f'/#narrow/stream/{uni.id}-{quoted_name}' self.assertEqual( render_markdown(msg, content), '<p><a class="stream" data-stream-id="{s.id}" href="{href}">#{s.name}</a></p>'.format( s=uni, href=href, )) def test_stream_atomic_string(self) -> None: realm = get_realm('zulip') # Create a linkifier. sender_user_profile = self.example_user('othello') url_format_string = r"https://trac.example.com/ticket/%(id)s" realm_filter = RealmFilter(realm=realm, pattern=r"#(?P<id>[0-9]{2,8})", url_format_string=url_format_string) realm_filter.save() self.assertEqual( realm_filter.__str__(), '<RealmFilter(zulip): #(?P<id>[0-9]{2,8})' ' https://trac.example.com/ticket/%(id)s>') # Create a stream that potentially interferes with the pattern. 
stream = Stream.objects.create(name='Stream #1234', realm=realm) msg = Message(sender=sender_user_profile, sending_client=get_client("test")) content = "#**Stream #1234**" href = f'/#narrow/stream/{stream.id}-Stream-.231234' self.assertEqual( render_markdown(msg, content), '<p><a class="stream" data-stream-id="{s.id}" href="{href}">#{s.name}</a></p>'.format( s=stream, href=href, )) def test_stream_invalid(self) -> None: sender_user_profile = self.example_user('othello') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) content = "There #**Nonexistentstream**" self.assertEqual(render_markdown(msg, content), '<p>There #<strong>Nonexistentstream</strong></p>') self.assertEqual(msg.mentions_user_ids, set()) def test_image_preview_title(self) -> None: msg = '[My favorite image](https://example.com/testimage.png)' converted = bugdown_convert(msg) self.assertEqual( converted, '<p>' '<a href="https://example.com/testimage.png">My favorite image</a>' '</p>\n' '<div class="message_inline_image">' '<a href="https://example.com/testimage.png" title="My favorite image">' '<img data-src-fullsize="/thumbnail?url=https%3A%2F%2Fexample.com%2Ftestimage.png&amp;size=full" src="/thumbnail?url=https%3A%2F%2Fexample.com%2Ftestimage.png&amp;size=thumbnail">' '</a>' '</div>', ) def test_mit_rendering(self) -> None: """Test the markdown configs for the MIT Zephyr mirroring system; verifies almost all inline patterns are disabled, but inline_interesting_links is still enabled""" msg = "**test**" realm = get_realm("zephyr") client = get_client("zephyr_mirror") message = Message(sending_client=client, sender=self.mit_user("sipbtest")) converted = bugdown.convert(msg, message_realm=realm, message=message) self.assertEqual( converted, "<p>**test**</p>", ) msg = "* test" converted = bugdown.convert(msg, message_realm=realm, message=message) self.assertEqual( converted, "<p>* test</p>", ) msg = "https://lists.debian.org/debian-ctte/2014/02/msg00173.html" converted = bugdown.convert(msg, message_realm=realm, message=message) self.assertEqual( converted, '<p><a href="https://lists.debian.org/debian-ctte/2014/02/msg00173.html">https://lists.debian.org/debian-ctte/2014/02/msg00173.html</a></p>', ) def test_url_to_a(self) -> None: url = 'javascript://example.com/invalidURL' converted = bugdown.url_to_a(db_data=None, url=url, text=url) self.assertEqual( converted, 'javascript://example.com/invalidURL', ) def test_disabled_code_block_processor(self) -> None: msg = "Hello,\n\n" + \ " I am writing this message to test something. I am writing this message to test something." converted = bugdown_convert(msg) expected_output = '<p>Hello,</p>\n' + \ '<div class="codehilite"><pre><span></span><code>I am writing this message to test something. I am writing this message to test something.\n' + \ '</code></pre></div>' self.assertEqual(converted, expected_output) realm = Realm.objects.create(string_id='code_block_processor_test') bugdown.maybe_update_markdown_engines(realm.id, True) converted = bugdown.convert(msg, message_realm=realm, email_gateway=True) expected_output = '<p>Hello,</p>\n' + \ '<p>I am writing this message to test something. 
I am writing this message to test something.</p>' self.assertEqual(converted, expected_output) def test_normal_link(self) -> None: realm = get_realm("zulip") sender_user_profile = self.example_user('othello') message = Message(sender=sender_user_profile, sending_client=get_client("test")) msg = "http://example.com/#settings/" self.assertEqual( bugdown.convert(msg, message_realm=realm, message=message), '<p><a href="http://example.com/#settings/">http://example.com/#settings/</a></p>', ) def test_relative_link(self) -> None: realm = get_realm("zulip") sender_user_profile = self.example_user('othello') message = Message(sender=sender_user_profile, sending_client=get_client("test")) msg = "http://zulip.testserver/#narrow/stream/999-hello" self.assertEqual( bugdown.convert(msg, message_realm=realm, message=message), '<p><a href="#narrow/stream/999-hello">http://zulip.testserver/#narrow/stream/999-hello</a></p>', ) def test_relative_link_streams_page(self) -> None: realm = get_realm("zulip") sender_user_profile = self.example_user('othello') message = Message(sender=sender_user_profile, sending_client=get_client("test")) msg = "http://zulip.testserver/#streams/all" self.assertEqual( bugdown.convert(msg, message_realm=realm, message=message), '<p><a href="#streams/all">http://zulip.testserver/#streams/all</a></p>', ) def test_md_relative_link(self) -> None: realm = get_realm("zulip") sender_user_profile = self.example_user('othello') message = Message(sender=sender_user_profile, sending_client=get_client("test")) msg = "[hello](http://zulip.testserver/#narrow/stream/999-hello)" self.assertEqual( bugdown.convert(msg, message_realm=realm, message=message), '<p><a href="#narrow/stream/999-hello">hello</a></p>', ) class BugdownApiTests(ZulipTestCase): def test_render_message_api(self) -> None: content = 'That is a **bold** statement' result = self.api_post( self.example_user("othello"), '/api/v1/messages/render', dict(content=content), ) self.assert_json_success(result) self.assertEqual(result.json()['rendered'], '<p>That is a <strong>bold</strong> statement</p>') def test_render_mention_stream_api(self) -> None: """Determines whether we're correctly passing the realm context""" content = 'This mentions #**Denmark** and @**King Hamlet**.' result = self.api_post( self.example_user("othello"), '/api/v1/messages/render', dict(content=content), ) self.assert_json_success(result) user_id = self.example_user('hamlet').id stream_id = get_stream('Denmark', get_realm('zulip')).id self.assertEqual(result.json()['rendered'], f'<p>This mentions <a class="stream" data-stream-id="{stream_id}" href="/#narrow/stream/{stream_id}-Denmark">#Denmark</a> and <span class="user-mention" data-user-id="{user_id}">@King Hamlet</span>.</p>') class BugdownErrorTests(ZulipTestCase): def test_bugdown_error_handling(self) -> None: with self.simulated_markdown_failure(): with self.assertRaises(BugdownRenderingException): bugdown_convert('') def test_send_message_errors(self) -> None: message = 'whatever' with self.simulated_markdown_failure(): # We don't use assertRaisesRegex because it seems to not # handle i18n properly here on some systems. 
with self.assertRaises(JsonableError): self.send_stream_message(self.example_user("othello"), "Denmark", message) def test_ultra_long_rendering(self) -> None: """A rendered message with an ultra-long lenght (> 10 * MAX_MESSAGE_LENGTH) throws an exception""" msg = 'mock rendered message\n' * MAX_MESSAGE_LENGTH with mock.patch('zerver.lib.bugdown.timeout', return_value=msg), \ mock.patch('zerver.lib.bugdown.bugdown_logger'): with self.assertRaises(BugdownRenderingException): bugdown_convert(msg) def test_curl_code_block_validation(self) -> None: processor = bugdown.fenced_code.FencedBlockPreprocessor(None) processor.run_content_validators = True # Simulate code formatting. processor.format_code = lambda lang, code: lang + ':' + code # type: ignore[assignment] # mypy doesn't allow monkey-patching functions processor.placeholder = lambda s: '**' + s.strip('\n') + '**' # type: ignore[assignment] # https://github.com/python/mypy/issues/708 markdown = [ '``` curl', 'curl {{ api_url }}/v1/register', ' -u BOT_EMAIL_ADDRESS:BOT_API_KEY', ' -d "queue_id=1375801870:2942"', '```', ] with self.assertRaises(BugdownRenderingException): processor.run(markdown) def test_curl_code_block_without_validation(self) -> None: processor = bugdown.fenced_code.FencedBlockPreprocessor(None) # Simulate code formatting. processor.format_code = lambda lang, code: lang + ':' + code # type: ignore[assignment] # mypy doesn't allow monkey-patching functions processor.placeholder = lambda s: '**' + s.strip('\n') + '**' # type: ignore[assignment] # https://github.com/python/mypy/issues/708 markdown = [ '``` curl', 'curl {{ api_url }}/v1/register', ' -u BOT_EMAIL_ADDRESS:BOT_API_KEY', ' -d "queue_id=1375801870:2942"', '```', ] expected = [ '', '**curl:curl {{ api_url }}/v1/register', ' -u BOT_EMAIL_ADDRESS:BOT_API_KEY', ' -d "queue_id=1375801870:2942"**', '', '', ] result = processor.run(markdown) self.assertEqual(result, expected) class BugdownAvatarTestCase(ZulipTestCase): def test_possible_avatar_emails(self) -> None: content = ''' hello !avatar(foo@example.com) my email is ignore@ignore.com !gravatar(bar@yo.tv) smushing!avatar(hamlet@example.org) is allowed ''' self.assertEqual( bugdown.possible_avatar_emails(content), {'foo@example.com', 'bar@yo.tv', 'hamlet@example.org'}, ) def test_avatar_with_id(self) -> None: sender_user_profile = self.example_user('othello') message = Message(sender=sender_user_profile, sending_client=get_client("test")) user_profile = self.example_user('hamlet') msg = f'!avatar({user_profile.email})' converted = bugdown.convert(msg, message=message) values = {'email': user_profile.email, 'id': user_profile.id} self.assertEqual( converted, '<p><img alt="{email}" class="message_body_gravatar" src="/avatar/{id}?s=30" title="{email}"></p>'.format(**values)) def test_avatar_of_unregistered_user(self) -> None: sender_user_profile = self.example_user('othello') message = Message(sender=sender_user_profile, sending_client=get_client("test")) email = 'fakeuser@example.com' msg = f'!avatar({email})' converted = bugdown.convert(msg, message=message) self.assertEqual( converted, '<p><img alt="{0}" class="message_body_gravatar" src="/avatar/{0}?s=30" title="{0}"></p>'.format(email))
52.011204
1845
0.635322
import copy import os import re from typing import Any, Dict, List, Optional, Set, Tuple from unittest import mock import ujson from django.conf import settings from django.test import TestCase, override_settings from zerver.lib import bugdown, mdiff from zerver.lib.actions import ( do_add_alert_words, do_remove_realm_emoji, do_set_realm_property, do_set_user_display_setting, ) from zerver.lib.alert_words import get_alert_word_automaton from zerver.lib.create_user import create_user from zerver.lib.emoji import get_emoji_url from zerver.lib.exceptions import BugdownRenderingException from zerver.lib.mention import possible_mentions, possible_user_group_mentions from zerver.lib.message import render_markdown from zerver.lib.request import JsonableError from zerver.lib.test_classes import ZulipTestCase from zerver.lib.test_runner import slow from zerver.lib.tex import render_tex from zerver.lib.user_groups import create_user_group from zerver.models import ( MAX_MESSAGE_LENGTH, Message, Realm, RealmEmoji, RealmFilter, Stream, UserGroup, UserMessage, UserProfile, flush_per_request_caches, flush_realm_filter, get_client, get_realm, get_stream, realm_filters_for_realm, realm_in_local_realm_filters_cache, ) class FencedBlockPreprocessorTest(TestCase): def test_simple_quoting(self) -> None: processor = bugdown.fenced_code.FencedBlockPreprocessor(None) markdown = [ '~~~ quote', 'hi', 'bye', '', '', ] expected = [ '', '> hi', '> bye', '', '', '', ] lines = processor.run(markdown) self.assertEqual(lines, expected) def test_serial_quoting(self) -> None: processor = bugdown.fenced_code.FencedBlockPreprocessor(None) markdown = [ '~~~ quote', 'hi', '~~~', '', '~~~ quote', 'bye', '', '', ] expected = [ '', '> hi', '', '', '', '> bye', '', '', '', ] lines = processor.run(markdown) self.assertEqual(lines, expected) def test_serial_code(self) -> None: processor = bugdown.fenced_code.FencedBlockPreprocessor(None) processor.format_code = lambda lang, code: lang + ':' + code ' + s.strip('\n') + '**' # type: ignore[assignment] # https://github.com/python/mypy/issues/708 markdown = [ '``` .py', 'hello()', '```', '', '```vb.net', 'goodbye()', '```', '', '```c 'weirdchar()', '```', '', '```', 'no-highlight()', '```', '', ] expected = [ '', '**py:hello()**', '', '', '', '**vb.net:goodbye()**', '', '', '', '**c '', '', '', '**:no-highlight()**', '', '', ] lines = processor.run(markdown) self.assertEqual(lines, expected) def test_nested_code(self) -> None: processor = bugdown.fenced_code.FencedBlockPreprocessor(None) # Simulate code formatting. processor.format_code = lambda lang, code: lang + ':' + code # type: ignore[assignment] # mypy doesn't allow monkey-patching functions processor.placeholder = lambda s: '**' + s.strip('\n') + '**' te', 'hi', '``` .py', 'hello()', '```', '', '', ] expected = [ '', '> hi', '', '> **py:hello()**', '', '', '', ] lines = processor.run(markdown) self.assertEqual(lines, expected) def bugdown_convert(content: str) -> str: return bugdown.convert( content=content, message_realm=get_realm('zulip'), ) class BugdownMiscTest(ZulipTestCase): def test_diffs_work_as_expected(self) -> None: str1 = "<p>The quick brown fox jumps over the lazy dog. Animal stories are fun, yeah</p>" str2 = "<p>The fast fox jumps over the lazy dogs and cats. Animal stories are fun</p>" expected_diff = "\u001b[34m-\u001b[0m <p>The \u001b[33mquick brown\u001b[0m fox jumps over the lazy dog. 
Animal stories are fun\u001b[31m, yeah\u001b[0m</p>\n\u001b[34m+\u001b[0m <p>The \u001b[33mfast\u001b[0m fox jumps over the lazy dog\u001b[32ms and cats\u001b[0m. Animal stories are fun</p>\n" self.assertEqual(mdiff.diff_strings(str1, str2), expected_diff) def test_get_possible_mentions_info(self) -> None: realm = get_realm('zulip') def make_user(email: str, full_name: str) -> UserProfile: return create_user( email=email, password='whatever', realm=realm, full_name=full_name, short_name='whatever', ) fred1 = make_user('fred1@example.com', 'Fred Flintstone') fred1.is_active = False fred1.save() fred2 = make_user('fred2@example.com', 'Fred Flintstone') fred3 = make_user('fred3@example.com', 'Fred Flintstone') fred3.is_active = False fred3.save() fred4 = make_user('fred4@example.com', 'Fred Flintstone') lst = bugdown.get_possible_mentions_info(realm.id, {'Fred Flintstone', 'cordelia LEAR', 'Not A User'}) set_of_names = set(map(lambda x: x['full_name'].lower(), lst)) self.assertEqual(set_of_names, {'fred flintstone', 'cordelia lear'}) by_id = { row['id']: row for row in lst } self.assertEqual(by_id.get(fred2.id), dict( email=fred2.email, full_name='Fred Flintstone', id=fred2.id, )) self.assertEqual(by_id.get(fred4.id), dict( email=fred4.email, full_name='Fred Flintstone', id=fred4.id, )) def test_mention_data(self) -> None: realm = get_realm('zulip') hamlet = self.example_user('hamlet') cordelia = self.example_user('cordelia') content = '@**King Hamlet** @**Cordelia lear**' mention_data = bugdown.MentionData(realm.id, content) self.assertEqual(mention_data.get_user_ids(), {hamlet.id, cordelia.id}) self.assertEqual(mention_data.get_user_by_id(hamlet.id), dict( email=hamlet.email, full_name=hamlet.full_name, id=hamlet.id, )) user = mention_data.get_user_by_name('king hamLET') assert(user is not None) self.assertEqual(user['email'], hamlet.email) self.assertFalse(mention_data.message_has_wildcards()) content = '@**King Hamlet** @**Cordelia lear** @**all**' mention_data = bugdown.MentionData(realm.id, content) self.assertTrue(mention_data.message_has_wildcards()) def test_invalid_katex_path(self) -> None: with self.settings(DEPLOY_ROOT="/nonexistent"): with mock.patch('logging.error') as mock_logger: render_tex("random text") mock_logger.assert_called_with("Cannot find KaTeX for latex rendering!") class BugdownListPreprocessorTest(ZulipTestCase): def split_message(self, msg: str) -> Tuple[List[str], List[str]]: original = msg.replace('<>', '').split('\n') expected = re.split(r'\n|<>', msg) return original, expected def test_basic_list(self) -> None: preprocessor = bugdown.BugdownListPreprocessor() original, expected = self.split_message('List without a gap\n<>* One\n* Two') self.assertEqual(preprocessor.run(original), expected) def test_list_after_quotes(self) -> None: preprocessor = bugdown.BugdownListPreprocessor() original, expected = self.split_message('```quote\nSomething\n```\n\nList without a gap\n<>* One\n* Two') self.assertEqual(preprocessor.run(original), expected) def test_list_in_code(self) -> None: preprocessor = bugdown.BugdownListPreprocessor() original, expected = self.split_message('```\nList without a gap\n* One\n* Two\n```') self.assertEqual(preprocessor.run(original), expected) def test_complex_nesting_with_different_fences(self) -> None: preprocessor = bugdown.BugdownListPreprocessor() msg = """```quote In quote. We should convert a list here:<> * one * two ~~~ This is a nested code fence, do not make changes here: * one * two ````quote Quote in code fence. 
Should not convert: * one * two ```` ~~~ Back in the quote. We should convert:<> * one * two ``` Outside. Should convert:<> * one * two """ original, expected = self.split_message(msg) self.assertEqual(preprocessor.run(original), expected) def test_complex_nesting_with_same_fence(self) -> None: preprocessor = bugdown.BugdownListPreprocessor() msg = """```quote In quote. We should convert a list here:<> * one * two ```python This is a nested code fence, do not make changes here: * one * two ```quote Quote in code fence. Should not convert: * one * two ``` ``` Back in the quote. We should convert:<> * one * two ``` Outside. Should convert:<> * one * two """ original, expected = self.split_message(msg) self.assertEqual(preprocessor.run(original), expected) class BugdownTest(ZulipTestCase): def setUp(self) -> None: super().setUp() bugdown.clear_state_for_testing() def assertEqual(self, first: Any, second: Any, msg: str = "") -> None: if isinstance(first, str) and isinstance(second, str): if first != second: raise AssertionError("Actual and expected outputs do not match; showing diff.\n" + mdiff.diff_strings(first, second) + msg) else: super().assertEqual(first, second) def load_bugdown_tests(self) -> Tuple[Dict[str, Any], List[List[str]]]: test_fixtures = {} with open(os.path.join(os.path.dirname(__file__), 'fixtures/markdown_test_cases.json')) as f: data = ujson.load(f) for test in data['regular_tests']: test_fixtures[test['name']] = test return test_fixtures, data['linkify_tests'] def test_bugdown_no_ignores(self) -> None: format_tests, linkify_tests = self.load_bugdown_tests() for name, test in format_tests.items(): message = f'Test "{name}" shouldn\'t be ignored.' is_ignored = test.get('ignore', False) self.assertFalse(is_ignored, message) @slow("Aggregate of runs dozens of individual markdown tests") def test_bugdown_fixtures(self) -> None: format_tests, linkify_tests = self.load_bugdown_tests() valid_keys = {"name", "input", "expected_output", "backend_only_rendering", "marked_expected_output", "text_content", "translate_emoticons", "ignore"} for name, test in format_tests.items(): with self.subTest(markdown_test_case=name): # Check that there aren't any unexpected keys as those are often typos self.assertEqual(len(set(test.keys()) - valid_keys), 0) if test.get('ignore', False): continue if test.get('translate_emoticons', False): user_profile = self.example_user('othello') do_set_user_display_setting(user_profile, 'translate_emoticons', True) msg = Message(sender=user_profile, sending_client=get_client("test")) converted = render_markdown(msg, test['input']) else: converted = bugdown_convert(test['input']) self.assertEqual(converted, test['expected_output']) def replaced(payload: str, url: str, phrase: str='') -> str: if url[:4] == 'http': href = url elif '@' in url: href = 'mailto:' + url else: href = 'http://' + url return payload % (f"<a href=\"{href}\">{url}</a>",) print("Running Bugdown Linkify tests") with mock.patch('zerver.lib.url_preview.preview.link_embed_data_from_cache', return_value=None): for inline_url, reference, url in linkify_tests: try: match = replaced(reference, url, phrase=inline_url) except TypeError: match = reference converted = bugdown_convert(inline_url) self.assertEqual(match, converted) def test_inline_file(self) -> None: msg = 'Check out this file file:///Volumes/myserver/Users/Shared/pi.py' converted = bugdown_convert(msg) self.assertEqual(converted, '<p>Check out this file <a 
href="file:///Volumes/myserver/Users/Shared/pi.py">file:///Volumes/myserver/Users/Shared/pi.py</a></p>') bugdown.clear_state_for_testing() with self.settings(ENABLE_FILE_LINKS=False): realm = Realm.objects.create(string_id='file_links_test') bugdown.maybe_update_markdown_engines(realm.id, False) converted = bugdown.convert(msg, message_realm=realm) self.assertEqual(converted, '<p>Check out this file file:///Volumes/myserver/Users/Shared/pi.py</p>') def test_inline_bitcoin(self) -> None: msg = 'To bitcoin:1A1zP1eP5QGefi2DMPTfTL5SLmv7DivfNa or not to bitcoin' converted = bugdown_convert(msg) self.assertEqual(converted, '<p>To <a href="bitcoin:1A1zP1eP5QGefi2DMPTfTL5SLmv7DivfNa">bitcoin:1A1zP1eP5QGefi2DMPTfTL5SLmv7DivfNa</a> or not to bitcoin</p>') def test_inline_youtube(self) -> None: msg = 'Check out the debate: http://www.youtube.com/watch?v=hx1mjT73xYE' converted = bugdown_convert(msg) self.assertEqual(converted, '<p>Check out the debate: <a href="http://www.youtube.com/watch?v=hx1mjT73xYE">http://www.youtube.com/watch?v=hx1mjT73xYE</a></p>\n<div class="youtube-video message_inline_image"><a data-id="hx1mjT73xYE" href="http://www.youtube.com/watch?v=hx1mjT73xYE"><img src="https://i.ytimg.com/vi/hx1mjT73xYE/default.jpg"></a></div>') msg = 'http://www.youtube.com/watch?v=hx1mjT73xYE' converted = bugdown_convert(msg) self.assertEqual(converted, '<p><a href="http://www.youtube.com/watch?v=hx1mjT73xYE">http://www.youtube.com/watch?v=hx1mjT73xYE</a></p>\n<div class="youtube-video message_inline_image"><a data-id="hx1mjT73xYE" href="http://www.youtube.com/watch?v=hx1mjT73xYE"><img src="https://i.ytimg.com/vi/hx1mjT73xYE/default.jpg"></a></div>') msg = 'https://youtu.be/hx1mjT73xYE' converted = bugdown_convert(msg) self.assertEqual(converted, '<p><a href="https://youtu.be/hx1mjT73xYE">https://youtu.be/hx1mjT73xYE</a></p>\n<div class="youtube-video message_inline_image"><a data-id="hx1mjT73xYE" href="https://youtu.be/hx1mjT73xYE"><img src="https://i.ytimg.com/vi/hx1mjT73xYE/default.jpg"></a></div>') msg = 'https://www.youtube.com/playlist?list=PL8dPuuaLjXtNlUrzyH5r6jN9ulIgZBpdo' not_converted = bugdown_convert(msg) self.assertEqual(not_converted, '<p><a href="https://www.youtube.com/playlist?list=PL8dPuuaLjXtNlUrzyH5r6jN9ulIgZBpdo">https://www.youtube.com/playlist?list=PL8dPuuaLjXtNlUrzyH5r6jN9ulIgZBpdo</a></p>') msg = 'https://www.youtube.com/playlist?v=O5nskjZ_GoI&list=PL8dPuuaLjXtNlUrzyH5r6jN9ulIgZBpdo' converted = bugdown_convert(msg) self.assertEqual(converted, '<p><a href="https://www.youtube.com/playlist?v=O5nskjZ_GoI&amp;list=PL8dPuuaLjXtNlUrzyH5r6jN9ulIgZBpdo">https://www.youtube.com/playlist?v=O5nskjZ_GoI&amp;list=PL8dPuuaLjXtNlUrzyH5r6jN9ulIgZBpdo</a></p>\n<div class="youtube-video message_inline_image"><a data-id="O5nskjZ_GoI" href="https://www.youtube.com/playlist?v=O5nskjZ_GoI&amp;list=PL8dPuuaLjXtNlUrzyH5r6jN9ulIgZBpdo"><img src="https://i.ytimg.com/vi/O5nskjZ_GoI/default.jpg"></a></div>') msg = 'http://www.youtube.com/watch_videos?video_ids=nOJgD4fcZhI,i96UO8-GFvw' converted = bugdown_convert(msg) self.assertEqual(converted, '<p><a href="http://www.youtube.com/watch_videos?video_ids=nOJgD4fcZhI,i96UO8-GFvw">http://www.youtube.com/watch_videos?video_ids=nOJgD4fcZhI,i96UO8-GFvw</a></p>\n<div class="youtube-video message_inline_image"><a data-id="nOJgD4fcZhI" href="http://www.youtube.com/watch_videos?video_ids=nOJgD4fcZhI,i96UO8-GFvw"><img src="https://i.ytimg.com/vi/nOJgD4fcZhI/default.jpg"></a></div>') @override_settings(INLINE_URL_EMBED_PREVIEW=False) def test_inline_vimeo(self) 
-> None: msg = 'Check out the debate: https://vimeo.com/246979354' converted = bugdown_convert(msg) self.assertEqual(converted, '<p>Check out the debate: <a href="https://vimeo.com/246979354">https://vimeo.com/246979354</a></p>') msg = 'https://vimeo.com/246979354' converted = bugdown_convert(msg) self.assertEqual(converted, '<p><a href="https://vimeo.com/246979354">https://vimeo.com/246979354</a></p>') @override_settings(INLINE_IMAGE_PREVIEW=True) def test_inline_image_thumbnail_url(self) -> None: realm = get_realm("zephyr") msg = '[foobar](/user_uploads/{realm_id}/50/w2G6ok9kr8AMCQCTNAUOFMln/IMG_0677.JPG)' msg = msg.format(realm_id=realm.id) thumbnail_img = '<img data-src-fullsize="/thumbnail?url=user_uploads%2F{realm_id}%2F50%2Fw2G6ok9kr8AMCQCTNAUOFMln%2FIMG_0677.JPG&amp;size=full" src="/thumbnail?url=user_uploads%2F{realm_id}%2F50%2Fw2G6ok9kr8AMCQCTNAUOFMln%2FIMG_0677.JPG&amp;size=thumbnail"><' thumbnail_img = thumbnail_img.format(realm_id=realm.id) converted = bugdown_convert(msg) self.assertIn(thumbnail_img, converted) msg = 'https://www.google.com/images/srpr/logo4w.png' thumbnail_img = '<img data-src-fullsize="/thumbnail?url=https%3A%2F%2Fwww.google.com%2Fimages%2Fsrpr%2Flogo4w.png&amp;size=full" src="/thumbnail?url=https%3A%2F%2Fwww.google.com%2Fimages%2Fsrpr%2Flogo4w.png&amp;size=thumbnail">' converted = bugdown_convert(msg) self.assertIn(thumbnail_img, converted) msg = 'www.google.com/images/srpr/logo4w.png' thumbnail_img = '<img data-src-fullsize="/thumbnail?url=http%3A%2F%2Fwww.google.com%2Fimages%2Fsrpr%2Flogo4w.png&amp;size=full" src="/thumbnail?url=http%3A%2F%2Fwww.google.com%2Fimages%2Fsrpr%2Flogo4w.png&amp;size=thumbnail">' converted = bugdown_convert(msg) self.assertIn(thumbnail_img, converted) msg = 'https://www.google.com/images/srpr/logo4w.png' thumbnail_img = '<div class="message_inline_image"><a href="https://www.google.com/images/srpr/logo4w.png"><img src="https://www.google.com/images/srpr/logo4w.png"></a></div>' with self.settings(THUMBNAIL_IMAGES=False): converted = bugdown_convert(msg) self.assertIn(thumbnail_img, converted) # /user_uploads/ is not thumbnailed msg = '[foobar](/static/images/cute/turtle.png)' thumbnail_img = '<div class="message_inline_image"><a href="/static/images/cute/turtle.png" title="foobar"><img src="/static/images/cute/turtle.png"></a></div>' converted = bugdown_convert(msg) self.assertIn(thumbnail_img, converted) msg = '[foobar](/user_avatars/{realm_id}/emoji/images/50.png)' msg = msg.format(realm_id=realm.id) thumbnail_img = '<div class="message_inline_image"><a href="/user_avatars/{realm_id}/emoji/images/50.png" title="foobar"><img src="/user_avatars/{realm_id}/emoji/images/50.png"></a></div>' thumbnail_img = thumbnail_img.format(realm_id=realm.id) converted = bugdown_convert(msg) self.assertIn(thumbnail_img, converted) @override_settings(INLINE_IMAGE_PREVIEW=True) def test_inline_image_preview(self) -> None: with_preview = '<div class="message_inline_image"><a href="http://cdn.wallpapersafari.com/13/6/16eVjx.jpeg"><img data-src-fullsize="/thumbnail?url=http%3A%2F%2Fcdn.wallpapersafari.com%2F13%2F6%2F16eVjx.jpeg&amp;size=full" src="/thumbnail?url=http%3A%2F%2Fcdn.wallpapersafari.com%2F13%2F6%2F16eVjx.jpeg&amp;size=thumbnail"></a></div>' without_preview = '<p><a href="http://cdn.wallpapersafari.com/13/6/16eVjx.jpeg">http://cdn.wallpapersafari.com/13/6/16eVjx.jpeg</a></p>' content = 'http://cdn.wallpapersafari.com/13/6/16eVjx.jpeg' sender_user_profile = self.example_user('othello') msg = Message(sender=sender_user_profile, 
sending_client=get_client("test")) converted = render_markdown(msg, content) self.assertEqual(converted, with_preview) realm = msg.get_realm() setattr(realm, 'inline_image_preview', False) realm.save() sender_user_profile = self.example_user('othello') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) converted = render_markdown(msg, content) self.assertEqual(converted, without_preview) @override_settings(INLINE_IMAGE_PREVIEW=True) def test_inline_image_quoted_blocks(self) -> None: content = 'http://cdn.wallpapersafari.com/13/6/16eVjx.jpeg' expected = '<div class="message_inline_image"><a href="http://cdn.wallpapersafari.com/13/6/16eVjx.jpeg"><img data-src-fullsize="/thumbnail?url=http%3A%2F%2Fcdn.wallpapersafari.com%2F13%2F6%2F16eVjx.jpeg&amp;size=full" src="/thumbnail?url=http%3A%2F%2Fcdn.wallpapersafari.com%2F13%2F6%2F16eVjx.jpeg&amp;size=thumbnail"></a></div>' sender_user_profile = self.example_user('othello') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) converted = render_markdown(msg, content) self.assertEqual(converted, expected) content = '>http://cdn.wallpapersafari.com/13/6/16eVjx.jpeg\n\nAwesome!' expected = '<blockquote>\n<p><a href="http://cdn.wallpapersafari.com/13/6/16eVjx.jpeg">http://cdn.wallpapersafari.com/13/6/16eVjx.jpeg</a></p>\n</blockquote>\n<p>Awesome!</p>' sender_user_profile = self.example_user('othello') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) converted = render_markdown(msg, content) self.assertEqual(converted, expected) content = '>* http://cdn.wallpapersafari.com/13/6/16eVjx.jpeg\n\nAwesome!' expected = '<blockquote>\n<ul>\n<li><a href="http://cdn.wallpapersafari.com/13/6/16eVjx.jpeg">http://cdn.wallpapersafari.com/13/6/16eVjx.jpeg</a></li>\n</ul>\n</blockquote>\n<p>Awesome!</p>' sender_user_profile = self.example_user('othello') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) converted = render_markdown(msg, content) self.assertEqual(converted, expected) @override_settings(INLINE_IMAGE_PREVIEW=True) def test_inline_image_preview_order(self) -> None: realm = get_realm("zulip") content = 'http://imaging.nikon.com/lineup/dslr/df/img/sample/img_01.jpg\nhttp://imaging.nikon.com/lineup/dslr/df/img/sample/img_02.jpg\nhttp://imaging.nikon.com/lineup/dslr/df/img/sample/img_03.jpg' expected = '<p><a href="http://imaging.nikon.com/lineup/dslr/df/img/sample/img_01.jpg">http://imaging.nikon.com/lineup/dslr/df/img/sample/img_01.jpg</a><br>\n<a href="http://imaging.nikon.com/lineup/dslr/df/img/sample/img_02.jpg">http://imaging.nikon.com/lineup/dslr/df/img/sample/img_02.jpg</a><br>\n<a href="http://imaging.nikon.com/lineup/dslr/df/img/sample/img_03.jpg">http://imaging.nikon.com/lineup/dslr/df/img/sample/img_03.jpg</a></p>\n<div class="message_inline_image"><a href="http://imaging.nikon.com/lineup/dslr/df/img/sample/img_01.jpg"><img data-src-fullsize="/thumbnail?url=http%3A%2F%2Fimaging.nikon.com%2Flineup%2Fdslr%2Fdf%2Fimg%2Fsample%2Fimg_01.jpg&amp;size=full" src="/thumbnail?url=http%3A%2F%2Fimaging.nikon.com%2Flineup%2Fdslr%2Fdf%2Fimg%2Fsample%2Fimg_01.jpg&amp;size=thumbnail"></a></div><div class="message_inline_image"><a href="http://imaging.nikon.com/lineup/dslr/df/img/sample/img_02.jpg"><img data-src-fullsize="/thumbnail?url=http%3A%2F%2Fimaging.nikon.com%2Flineup%2Fdslr%2Fdf%2Fimg%2Fsample%2Fimg_02.jpg&amp;size=full" 
src="/thumbnail?url=http%3A%2F%2Fimaging.nikon.com%2Flineup%2Fdslr%2Fdf%2Fimg%2Fsample%2Fimg_02.jpg&amp;size=thumbnail"></a></div><div class="message_inline_image"><a href="http://imaging.nikon.com/lineup/dslr/df/img/sample/img_03.jpg"><img data-src-fullsize="/thumbnail?url=http%3A%2F%2Fimaging.nikon.com%2Flineup%2Fdslr%2Fdf%2Fimg%2Fsample%2Fimg_03.jpg&amp;size=full" src="/thumbnail?url=http%3A%2F%2Fimaging.nikon.com%2Flineup%2Fdslr%2Fdf%2Fimg%2Fsample%2Fimg_03.jpg&amp;size=thumbnail"></a></div>' sender_user_profile = self.example_user('othello') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) converted = render_markdown(msg, content) self.assertEqual(converted, expected) content = 'http://imaging.nikon.com/lineup/dslr/df/img/sample/img_01.jpg\n\n>http://imaging.nikon.com/lineup/dslr/df/img/sample/img_02.jpg\n\n* http://imaging.nikon.com/lineup/dslr/df/img/sample/img_03.jpg\n* https://www.google.com/images/srpr/logo4w.png' expected = '<div class="message_inline_image"><a href="http://imaging.nikon.com/lineup/dslr/df/img/sample/img_01.jpg"><img data-src-fullsize="/thumbnail?url=http%3A%2F%2Fimaging.nikon.com%2Flineup%2Fdslr%2Fdf%2Fimg%2Fsample%2Fimg_01.jpg&amp;size=full" src="/thumbnail?url=http%3A%2F%2Fimaging.nikon.com%2Flineup%2Fdslr%2Fdf%2Fimg%2Fsample%2Fimg_01.jpg&amp;size=thumbnail"></a></div><blockquote>\n<p><a href="http://imaging.nikon.com/lineup/dslr/df/img/sample/img_02.jpg">http://imaging.nikon.com/lineup/dslr/df/img/sample/img_02.jpg</a></p>\n</blockquote>\n<ul>\n<li><div class="message_inline_image"><a href="http://imaging.nikon.com/lineup/dslr/df/img/sample/img_03.jpg"><img data-src-fullsize="/thumbnail?url=http%3A%2F%2Fimaging.nikon.com%2Flineup%2Fdslr%2Fdf%2Fimg%2Fsample%2Fimg_03.jpg&amp;size=full" src="/thumbnail?url=http%3A%2F%2Fimaging.nikon.com%2Flineup%2Fdslr%2Fdf%2Fimg%2Fsample%2Fimg_03.jpg&amp;size=thumbnail"></a></div></li>\n<li><div class="message_inline_image"><a href="https://www.google.com/images/srpr/logo4w.png"><img data-src-fullsize="/thumbnail?url=https%3A%2F%2Fwww.google.com%2Fimages%2Fsrpr%2Flogo4w.png&amp;size=full" src="/thumbnail?url=https%3A%2F%2Fwww.google.com%2Fimages%2Fsrpr%2Flogo4w.png&amp;size=thumbnail"></a></div></li>\n</ul>' sender_user_profile = self.example_user('othello') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) converted = render_markdown(msg, content) self.assertEqual(converted, expected) content = 'Test 1\n[21136101110_1dde1c1a7e_o.jpg](/user_uploads/{realm_id}/6d/F1PX6u16JA2P-nK45PyxHIYZ/21136101110_1dde1c1a7e_o.jpg) \n\nNext Image\n[IMG_20161116_023910.jpg](/user_uploads/{realm_id}/69/sh7L06e7uH7NaX6d5WFfVYQp/IMG_20161116_023910.jpg) \n\nAnother Screenshot\n[Screenshot-from-2016-06-01-16-22-42.png](/user_uploads/{realm_id}/70/_aZmIEWaN1iUaxwkDjkO7bpj/Screenshot-from-2016-06-01-16-22-42.png)' content = content.format(realm_id=realm.id) expected = '<p>Test 1<br>\n<a href="/user_uploads/{realm_id}/6d/F1PX6u16JA2P-nK45PyxHIYZ/21136101110_1dde1c1a7e_o.jpg">21136101110_1dde1c1a7e_o.jpg</a> </p>\n<div class="message_inline_image"><a href="/user_uploads/{realm_id}/6d/F1PX6u16JA2P-nK45PyxHIYZ/21136101110_1dde1c1a7e_o.jpg" title="21136101110_1dde1c1a7e_o.jpg"><img data-src-fullsize="/thumbnail?url=user_uploads%2F{realm_id}%2F6d%2FF1PX6u16JA2P-nK45PyxHIYZ%2F21136101110_1dde1c1a7e_o.jpg&amp;size=full" src="/thumbnail?url=user_uploads%2F{realm_id}%2F6d%2FF1PX6u16JA2P-nK45PyxHIYZ%2F21136101110_1dde1c1a7e_o.jpg&amp;size=thumbnail"></a></div><p>Next Image<br>\n<a 
href="/user_uploads/{realm_id}/69/sh7L06e7uH7NaX6d5WFfVYQp/IMG_20161116_023910.jpg">IMG_20161116_023910.jpg</a> </p>\n<div class="message_inline_image"><a href="/user_uploads/{realm_id}/69/sh7L06e7uH7NaX6d5WFfVYQp/IMG_20161116_023910.jpg" title="IMG_20161116_023910.jpg"><img data-src-fullsize="/thumbnail?url=user_uploads%2F{realm_id}%2F69%2Fsh7L06e7uH7NaX6d5WFfVYQp%2FIMG_20161116_023910.jpg&amp;size=full" src="/thumbnail?url=user_uploads%2F{realm_id}%2F69%2Fsh7L06e7uH7NaX6d5WFfVYQp%2FIMG_20161116_023910.jpg&amp;size=thumbnail"></a></div><p>Another Screenshot<br>\n<a href="/user_uploads/{realm_id}/70/_aZmIEWaN1iUaxwkDjkO7bpj/Screenshot-from-2016-06-01-16-22-42.png">Screenshot-from-2016-06-01-16-22-42.png</a></p>\n<div class="message_inline_image"><a href="/user_uploads/{realm_id}/70/_aZmIEWaN1iUaxwkDjkO7bpj/Screenshot-from-2016-06-01-16-22-42.png" title="Screenshot-from-2016-06-01-16-22-42.png"><img data-src-fullsize="/thumbnail?url=user_uploads%2F{realm_id}%2F70%2F_aZmIEWaN1iUaxwkDjkO7bpj%2FScreenshot-from-2016-06-01-16-22-42.png&amp;size=full" src="/thumbnail?url=user_uploads%2F{realm_id}%2F70%2F_aZmIEWaN1iUaxwkDjkO7bpj%2FScreenshot-from-2016-06-01-16-22-42.png&amp;size=thumbnail"></a></div>' expected = expected.format(realm_id=realm.id) msg = Message(sender=sender_user_profile, sending_client=get_client("test")) converted = render_markdown(msg, content) self.assertEqual(converted, expected) @override_settings(INLINE_IMAGE_PREVIEW=True) def test_corrected_image_source(self) -> None: # testing only wikipedia because linx.li urls can be expected to expire content = 'https://en.wikipedia.org/wiki/File:Wright_of_Derby,_The_Orrery.jpg' expected = '<div class="message_inline_image"><a href="https://en.wikipedia.org/wiki/Special:FilePath/File:Wright_of_Derby,_The_Orrery.jpg"><img data-src-fullsize="/thumbnail?url=https%3A%2F%2Fen.wikipedia.org%2Fwiki%2FSpecial%3AFilePath%2FFile%3AWright_of_Derby%2C_The_Orrery.jpg&amp;size=full" src="/thumbnail?url=https%3A%2F%2Fen.wikipedia.org%2Fwiki%2FSpecial%3AFilePath%2FFile%3AWright_of_Derby%2C_The_Orrery.jpg&amp;size=thumbnail"></a></div>' sender_user_profile = self.example_user('othello') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) converted = render_markdown(msg, content) self.assertEqual(converted, expected) @override_settings(INLINE_IMAGE_PREVIEW=False) def test_image_preview_enabled(self) -> None: ret = bugdown.image_preview_enabled() self.assertEqual(ret, False) settings.INLINE_IMAGE_PREVIEW = True sender_user_profile = self.example_user('othello') message = Message(sender=sender_user_profile, sending_client=get_client("test")) realm = message.get_realm() ret = bugdown.image_preview_enabled() self.assertEqual(ret, True) ret = bugdown.image_preview_enabled(no_previews=True) self.assertEqual(ret, False) ret = bugdown.image_preview_enabled(message, realm) self.assertEqual(ret, True) ret = bugdown.image_preview_enabled(message) self.assertEqual(ret, True) ret = bugdown.image_preview_enabled(message, realm, no_previews=True) self.assertEqual(ret, False) ret = bugdown.image_preview_enabled(message, no_previews=True) self.assertEqual(ret, False) @override_settings(INLINE_URL_EMBED_PREVIEW=False) def test_url_embed_preview_enabled(self) -> None: sender_user_profile = self.example_user('othello') message = copy.deepcopy(Message(sender=sender_user_profile, sending_client=get_client("test"))) realm = message.get_realm() realm.inline_url_embed_preview = True # off by default realm.save(update_fields=['inline_url_embed_preview']) 
ret = bugdown.url_embed_preview_enabled() self.assertEqual(ret, False) settings.INLINE_URL_EMBED_PREVIEW = True ret = bugdown.url_embed_preview_enabled() self.assertEqual(ret, True) ret = bugdown.image_preview_enabled(no_previews=True) self.assertEqual(ret, False) ret = bugdown.url_embed_preview_enabled(message, realm) self.assertEqual(ret, True) ret = bugdown.url_embed_preview_enabled(message) self.assertEqual(ret, True) ret = bugdown.url_embed_preview_enabled(message, no_previews=True) self.assertEqual(ret, False) def test_inline_dropbox(self) -> None: msg = 'Look at how hilarious our old office was: https://www.dropbox.com/s/ymdijjcg67hv2ta/IMG_0923.JPG' image_info = {'image': 'https://photos-4.dropbox.com/t/2/AABIre1oReJgPYuc_53iv0IHq1vUzRaDg2rrCfTpiWMccQ/12/129/jpeg/1024x1024/2/_/0/4/IMG_0923.JPG/CIEBIAEgAiAHKAIoBw/ymdijjcg67hv2ta/AABz2uuED1ox3vpWWvMpBxu6a/IMG_0923.JPG', 'desc': 'Shared with Dropbox', 'title': 'IMG_0923.JPG'} with mock.patch('zerver.lib.bugdown.fetch_open_graph_image', return_value=image_info): converted = bugdown_convert(msg) self.assertEqual(converted, '<p>Look at how hilarious our old office was: <a href="https://www.dropbox.com/s/ymdijjcg67hv2ta/IMG_0923.JPG">https://www.dropbox.com/s/ymdijjcg67hv2ta/IMG_0923.JPG</a></p>\n<div class="message_inline_image"><a href="https://www.dropbox.com/s/ymdijjcg67hv2ta/IMG_0923.JPG" title="IMG_0923.JPG"><img src="https://www.dropbox.com/s/ymdijjcg67hv2ta/IMG_0923.JPG?dl=1"></a></div>') msg = 'Look at my hilarious drawing folder: https://www.dropbox.com/sh/cm39k9e04z7fhim/AAAII5NK-9daee3FcF41anEua?dl=' image_info = {'image': 'https://cf.dropboxstatic.com/static/images/icons128/folder_dropbox.png', 'desc': 'Shared with Dropbox', 'title': 'Saves'} with mock.patch('zerver.lib.bugdown.fetch_open_graph_image', return_value=image_info): converted = bugdown_convert(msg) self.assertEqual(converted, '<p>Look at my hilarious drawing folder: <a href="https://www.dropbox.com/sh/cm39k9e04z7fhim/AAAII5NK-9daee3FcF41anEua?dl=">https://www.dropbox.com/sh/cm39k9e04z7fhim/AAAII5NK-9daee3FcF41anEua?dl=</a></p>\n<div class="message_inline_ref"><a href="https://www.dropbox.com/sh/cm39k9e04z7fhim/AAAII5NK-9daee3FcF41anEua?dl=" title="Saves"><img src="https://cf.dropboxstatic.com/static/images/icons128/folder_dropbox.png"></a><div><div class="message_inline_image_title">Saves</div><desc class="message_inline_image_desc"></desc></div></div>') def test_inline_dropbox_preview(self) -> None: # Test photo album previews msg = 'https://www.dropbox.com/sc/tditp9nitko60n5/03rEiZldy5' image_info = {'image': 'https://photos-6.dropbox.com/t/2/AAAlawaeD61TyNewO5vVi-DGf2ZeuayfyHFdNTNzpGq-QA/12/271544745/jpeg/1024x1024/2/_/0/5/baby-piglet.jpg/CKnjvYEBIAIgBygCKAc/tditp9nitko60n5/AADX03VAIrQlTl28CtujDcMla/0', 'desc': 'Shared with Dropbox', 'title': '1 photo'} with mock.patch('zerver.lib.bugdown.fetch_open_graph_image', return_value=image_info): converted = bugdown_convert(msg) self.assertEqual(converted, '<p><a href="https://www.dropbox.com/sc/tditp9nitko60n5/03rEiZldy5">https://www.dropbox.com/sc/tditp9nitko60n5/03rEiZldy5</a></p>\n<div class="message_inline_image"><a href="https://www.dropbox.com/sc/tditp9nitko60n5/03rEiZldy5" title="1 photo"><img src="https://photos-6.dropbox.com/t/2/AAAlawaeD61TyNewO5vVi-DGf2ZeuayfyHFdNTNzpGq-QA/12/271544745/jpeg/1024x1024/2/_/0/5/baby-piglet.jpg/CKnjvYEBIAIgBygCKAc/tditp9nitko60n5/AADX03VAIrQlTl28CtujDcMla/0"></a></div>') def test_inline_dropbox_negative(self) -> None: # Make sure we're not overzealous in our conversion: msg = 
'Look at the new dropbox logo: https://www.dropbox.com/static/images/home_logo.png' with mock.patch('zerver.lib.bugdown.fetch_open_graph_image', return_value=None): converted = bugdown_convert(msg) self.assertEqual(converted, '<p>Look at the new dropbox logo: <a href="https://www.dropbox.com/static/images/home_logo.png">https://www.dropbox.com/static/images/home_logo.png</a></p>\n<div class="message_inline_image"><a href="https://www.dropbox.com/static/images/home_logo.png"><img data-src-fullsize="/thumbnail?url=https%3A%2F%2Fwww.dropbox.com%2Fstatic%2Fimages%2Fhome_logo.png&amp;size=full" src="/thumbnail?url=https%3A%2F%2Fwww.dropbox.com%2Fstatic%2Fimages%2Fhome_logo.png&amp;size=thumbnail"></a></div>') def test_inline_dropbox_bad(self) -> None: msg = "https://zulip-test.dropbox.com/photos/cl/ROmr9K1XYtmpneM" with mock.patch('zerver.lib.bugdown.fetch_open_graph_image', return_value=None): converted = bugdown_convert(msg) self.assertEqual(converted, '<p><a href="https://zulip-test.dropbox.com/photos/cl/ROmr9K1XYtmpneM">https://zulip-test.dropbox.com/photos/cl/ROmr9K1XYtmpneM</a></p>') def test_inline_github_preview(self) -> None: # Test photo album previews msg = 'Test: https://github.com/zulip/zulip/blob/master/static/images/logo/zulip-icon-128x128.png' converted = bugdown_convert(msg) self.assertEqual(converted, '<p>Test: <a href="https://github.com/zulip/zulip/blob/master/static/images/logo/zulip-icon-128x128.png">https://github.com/zulip/zulip/blob/master/static/images/logo/zulip-icon-128x128.png</a></p>\n<div class="message_inline_image"><a href="https://github.com/zulip/zulip/blob/master/static/images/logo/zulip-icon-128x128.png"><img data-src-fullsize="/thumbnail?url=https%3A%2F%2Fraw.githubusercontent.com%2Fzulip%2Fzulip%2Fmaster%2Fstatic%2Fimages%2Flogo%2Fzulip-icon-128x128.png&amp;size=full" src="/thumbnail?url=https%3A%2F%2Fraw.githubusercontent.com%2Fzulip%2Fzulip%2Fmaster%2Fstatic%2Fimages%2Flogo%2Fzulip-icon-128x128.png&amp;size=thumbnail"></a></div>') msg = 'Test: https://developer.github.com/assets/images/hero-circuit-bg.png' converted = bugdown_convert(msg) self.assertEqual(converted, '<p>Test: <a href="https://developer.github.com/assets/images/hero-circuit-bg.png">https://developer.github.com/assets/images/hero-circuit-bg.png</a></p>\n<div class="message_inline_image"><a href="https://developer.github.com/assets/images/hero-circuit-bg.png"><img data-src-fullsize="/thumbnail?url=https%3A%2F%2Fdeveloper.github.com%2Fassets%2Fimages%2Fhero-circuit-bg.png&amp;size=full" src="/thumbnail?url=https%3A%2F%2Fdeveloper.github.com%2Fassets%2Fimages%2Fhero-circuit-bg.png&amp;size=thumbnail"></a></div>') def test_twitter_id_extraction(self) -> None: self.assertEqual(bugdown.get_tweet_id('http://twitter.com/#!/VizzQuotes/status/409030735191097344'), '409030735191097344') self.assertEqual(bugdown.get_tweet_id('http://twitter.com/VizzQuotes/status/409030735191097344'), '409030735191097344') self.assertEqual(bugdown.get_tweet_id('http://twitter.com/VizzQuotes/statuses/409030735191097344'), '409030735191097344') self.assertEqual(bugdown.get_tweet_id('https://twitter.com/wdaher/status/1017581858'), '1017581858') self.assertEqual(bugdown.get_tweet_id('https://twitter.com/wdaher/status/1017581858/'), '1017581858') self.assertEqual(bugdown.get_tweet_id('https://twitter.com/windyoona/status/410766290349879296/photo/1'), '410766290349879296') self.assertEqual(bugdown.get_tweet_id('https://twitter.com/windyoona/status/410766290349879296/'), '410766290349879296') def test_inline_interesting_links(self) -> None: def make_link(url: str) -> str: return f'<a
href="{url}">{url}</a>' normal_tweet_html = ('<a href="https://twitter.com/Twitter"' '>@Twitter</a> ' 'meets @seepicturely at '<a href="https://twitter.com/boscomonkey"' '>@boscomonkey</a> ' '<a href="https://twitter.com/episod"' '>@episod</a> ' '<a href="http://t.co/6J2EgYM"' '>http://instagr.am/p/MuW67/</a>') mention_in_link_tweet_html = """<a href="http://t.co/@foo">http://foo.com</a>""" media_tweet_html = ('<a href="http://t.co/xo7pAhK6n3">' 'http://twitter.com/NEVNBoston/status/421654515616849920/photo/1</a>') emoji_in_tweet_html = """Zulip is <span aria-label=\"100\" class="emoji emoji-1f4af" role=\"img\" title="100">:100:</span>% open-source!""" def make_inline_twitter_preview(url: str, tweet_html: str, image_html: str='') -> str: ## As of right now, all previews are mocked to be the exact same tweet return ('<div class="inline-preview-twitter">' '<div class="twitter-tweet">' f'<a href="{url}">' '<img class="twitter-avatar"' ' src="https://external-content.zulipcdn.net/external_content/1f7cd2436976d410eab8189ebceda87ae0b34ead/687474703a2f2f7062732e7477696d672e63' '6f6d2f70726f66696c655f696d616765732f313338303931323137332f53637265656e5f73686f745f323031312d30362d30335f61745f372e33352e33' '365f504d5f6e6f726d616c2e706e67">' '</a>' f'<p>{tweet_html}</p>' '<span>- Eoin McMillan (@imeoin)</span>' f'{image_html}' '</div>' '</div>') msg = 'http://www.twitter.com' converted = bugdown_convert(msg) self.assertEqual(converted, '<p>{}</p>'.format(make_link('http://www.twitter.com'))) msg = 'http://www.twitter.com/wdaher/' converted = bugdown_convert(msg) self.assertEqual(converted, '<p>{}</p>'.format(make_link('http://www.twitter.com/wdaher/'))) msg = 'http://www.twitter.com/wdaher/status/3' converted = bugdown_convert(msg) self.assertEqual(converted, '<p>{}</p>'.format(make_link('http://www.twitter.com/wdaher/status/3'))) # id too long msg = 'http://www.twitter.com/wdaher/status/2879779692873154569' converted = bugdown_convert(msg) self.assertEqual(converted, '<p>{}</p>'.format(make_link('http://www.twitter.com/wdaher/status/2879779692873154569'))) # id too large (i.e. 
tweet doesn't exist) msg = 'http://www.twitter.com/wdaher/status/999999999999999999' converted = bugdown_convert(msg) self.assertEqual(converted, '<p>{}</p>'.format(make_link('http://www.twitter.com/wdaher/status/999999999999999999'))) msg = 'http://www.twitter.com/wdaher/status/287977969287315456' converted = bugdown_convert(msg) self.assertEqual(converted, '<p>{}</p>\n{}'.format( make_link('http://www.twitter.com/wdaher/status/287977969287315456'), make_inline_twitter_preview('http://www.twitter.com/wdaher/status/287977969287315456', normal_tweet_html))) msg = 'https://www.twitter.com/wdaher/status/287977969287315456' converted = bugdown_convert(msg) self.assertEqual(converted, '<p>{}</p>\n{}'.format( make_link('https://www.twitter.com/wdaher/status/287977969287315456'), make_inline_twitter_preview('https://www.twitter.com/wdaher/status/287977969287315456', normal_tweet_html))) msg = 'http://twitter.com/wdaher/status/287977969287315456' converted = bugdown_convert(msg) self.assertEqual(converted, '<p>{}</p>\n{}'.format( make_link('http://twitter.com/wdaher/status/287977969287315456'), make_inline_twitter_preview('http://twitter.com/wdaher/status/287977969287315456', normal_tweet_html))) msg = ('http://twitter.com/wdaher/status/287977969287315456 ' 'http://twitter.com/wdaher/status/287977969287315457 ' 'http://twitter.com/wdaher/status/287977969287315457 ' 'http://twitter.com/wdaher/status/287977969287315457') converted = bugdown_convert(msg) self.assertEqual(converted, '<p>{} {} {} {}</p>\n{}{}'.format( make_link('http://twitter.com/wdaher/status/287977969287315456'), make_link('http://twitter.com/wdaher/status/287977969287315457'), make_link('http://twitter.com/wdaher/status/287977969287315457'), make_link('http://twitter.com/wdaher/status/287977969287315457'), make_inline_twitter_preview('http://twitter.com/wdaher/status/287977969287315456', normal_tweet_html), make_inline_twitter_preview('http://twitter.com/wdaher/status/287977969287315457', normal_tweet_html))) msg = ('http://twitter.com/wdaher/status/287977969287315456 ' 'http://twitter.com/wdaher/status/287977969287315457 ' 'https://twitter.com/wdaher/status/287977969287315456 ' 'http://twitter.com/wdaher/status/287977969287315460') converted = bugdown_convert(msg) self.assertEqual(converted, '<p>{} {} {} {}</p>\n{}{}{}'.format( make_link('http://twitter.com/wdaher/status/287977969287315456'), make_link('http://twitter.com/wdaher/status/287977969287315457'), make_link('https://twitter.com/wdaher/status/287977969287315456'), make_link('http://twitter.com/wdaher/status/287977969287315460'), make_inline_twitter_preview('http://twitter.com/wdaher/status/287977969287315456', normal_tweet_html), make_inline_twitter_preview('http://twitter.com/wdaher/status/287977969287315457', normal_tweet_html), make_inline_twitter_preview('https://twitter.com/wdaher/status/287977969287315456', normal_tweet_html))) msg = 'http://twitter.com/wdaher/status/287977969287315458' converted = bugdown_convert(msg) self.assertEqual(converted, '<p>{}</p>\n{}'.format( make_link('http://twitter.com/wdaher/status/287977969287315458'), make_inline_twitter_preview('http://twitter.com/wdaher/status/287977969287315458', mention_in_link_tweet_html))) msg = 'http://twitter.com/wdaher/status/287977969287315459' converted = bugdown_convert(msg) self.assertEqual(converted, '<p>{}</p>\n{}'.format( make_link('http://twitter.com/wdaher/status/287977969287315459'), make_inline_twitter_preview('http://twitter.com/wdaher/status/287977969287315459', media_tweet_html, ('<div 
class="twitter-image">' '<a href="http://t.co/xo7pAhK6n3">' '<img src="https://pbs.twimg.com/media/BdoEjD4IEAIq86Z.jpg:small">' '</a>' '</div>')))) msg = 'http://twitter.com/wdaher/status/287977969287315460' converted = bugdown_convert(msg) self.assertEqual(converted, '<p>{}</p>\n{}'.format( make_link('http://twitter.com/wdaher/status/287977969287315460'), make_inline_twitter_preview('http://twitter.com/wdaher/status/287977969287315460', emoji_in_tweet_html))) def test_fetch_tweet_data_settings_validation(self) -> None: with self.settings(TEST_SUITE=False, TWITTER_CONSUMER_KEY=None): self.assertIs(None, bugdown.fetch_tweet_data('287977969287315459')) def test_content_has_emoji(self) -> None: self.assertFalse(bugdown.content_has_emoji_syntax('boring')) self.assertFalse(bugdown.content_has_emoji_syntax('hello: world')) self.assertFalse(bugdown.content_has_emoji_syntax(':foobar')) self.assertFalse(bugdown.content_has_emoji_syntax('::: hello :::')) self.assertTrue(bugdown.content_has_emoji_syntax('foo :whatever:')) self.assertTrue(bugdown.content_has_emoji_syntax('\n:whatever:')) self.assertTrue(bugdown.content_has_emoji_syntax(':smile: ::::::')) def test_realm_emoji(self) -> None: def emoji_img(name: str, file_name: str, realm_id: int) -> str: return '<img alt="{}" class="emoji" src="{}" title="{}">'.format( name, get_emoji_url(file_name, realm_id), name[1:-1].replace("_", " ")) realm = get_realm('zulip') msg = Message(sender=self.example_user('hamlet')) converted = bugdown.convert(":green_tick:", message_realm=realm, message=msg) realm_emoji = RealmEmoji.objects.filter(realm=realm, name='green_tick', deactivated=False).get() self.assertEqual(converted, '<p>{}</p>'.format(emoji_img(':green_tick:', realm_emoji.file_name, realm.id))) # Deactivate realm emoji. do_remove_realm_emoji(realm, 'green_tick') converted = bugdown.convert(":green_tick:", message_realm=realm, message=msg) self.assertEqual(converted, '<p>:green_tick:</p>') def test_deactivated_realm_emoji(self) -> None: # Deactivate realm emoji. 
realm = get_realm('zulip') do_remove_realm_emoji(realm, 'green_tick') msg = Message(sender=self.example_user('hamlet')) converted = bugdown.convert(":green_tick:", message_realm=realm, message=msg) self.assertEqual(converted, '<p>:green_tick:</p>') def test_unicode_emoji(self) -> None: msg = '\u2615' # ☕ converted = bugdown_convert(msg) self.assertEqual(converted, '<p><span aria-label=\"coffee\" class="emoji emoji-2615" role=\"img\" title="coffee">:coffee:</span></p>') msg = '\u2615\u2615' # ☕☕ converted = bugdown_convert(msg) self.assertEqual(converted, '<p><span aria-label=\"coffee\" class="emoji emoji-2615" role=\"img\" title="coffee">:coffee:</span><span aria-label=\"coffee\" class="emoji emoji-2615" role=\"img\" title="coffee">:coffee:</span></p>') def test_no_translate_emoticons_if_off(self) -> None: user_profile = self.example_user('othello') do_set_user_display_setting(user_profile, 'translate_emoticons', False) msg = Message(sender=user_profile, sending_client=get_client("test")) content = ':)' expected = '<p>:)</p>' converted = render_markdown(msg, content) self.assertEqual(converted, expected) def test_same_markup(self) -> None: msg = '\u2615' # ☕ unicode_converted = bugdown_convert(msg) msg = ':coffee:' # ☕☕ converted = bugdown_convert(msg) self.assertEqual(converted, unicode_converted) def test_links_in_topic_name(self) -> None: realm = get_realm('zulip') msg = Message(sender=self.example_user('othello')) msg.set_topic_name("https://google.com/hello-world") converted_topic = bugdown.topic_links(realm.id, msg.topic_name()) self.assertEqual(converted_topic, ['https://google.com/hello-world']) msg.set_topic_name("http://google.com/hello-world") converted_topic = bugdown.topic_links(realm.id, msg.topic_name()) self.assertEqual(converted_topic, ['http://google.com/hello-world']) msg.set_topic_name("Without scheme google.com/hello-world") converted_topic = bugdown.topic_links(realm.id, msg.topic_name()) self.assertEqual(converted_topic, ['https://google.com/hello-world']) msg.set_topic_name("Without scheme random.words/hello-world") converted_topic = bugdown.topic_links(realm.id, msg.topic_name()) self.assertEqual(converted_topic, []) msg.set_topic_name("Try out http://ftp.debian.org, https://google.com/ and https://google.in/.") converted_topic = bugdown.topic_links(realm.id, msg.topic_name()) self.assertEqual(converted_topic, ['http://ftp.debian.org', 'https://google.com/', 'https://google.in/']) def test_realm_patterns(self) -> None: realm = get_realm('zulip') url_format_string = r"https://trac.example.com/ticket/%(id)s" realm_filter = RealmFilter(realm=realm, pattern=r"#(?P<id>[0-9]{2,8})", url_format_string=url_format_string) realm_filter.save() self.assertEqual( realm_filter.__str__(), '<RealmFilter(zulip): #(?P<id>[0-9]{2,8})' ' https://trac.example.com/ticket/%(id)s>') msg = Message(sender=self.example_user('othello')) msg.set_topic_name("#444") flush_per_request_caches() content = "We should fix #224 and #115, but not issue#124 or #1124z or [trac #15](https://trac.example.com/ticket/16) today."
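# Both the message body and the topic name should be linkified using the realm filter defined above.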
converted = bugdown.convert(content, message_realm=realm, message=msg) converted_topic = bugdown.topic_links(realm.id, msg.topic_name()) self.assertEqual(converted, '<p>We should fix <a href="https://trac.example.com/ticket/224">ps://google.com']) RealmFilter(realm=realm, pattern=r' url_format_string=r'https://trac.example.com/ticket/%(id)s').save() msg = Message(sender=self.example_user('hamlet')) content = 'ert(content, message_realm=realm, message=msg) self.assertEqual(converted, '<p><a href="https://trac.example.com/ticket/ZUL-123">tr, convert: bool=True) -> None: converted = bugdown.convert(content, message_realm=realm, message=msg) converted_topic = bugdown.topic_links(realm.id, content) if convert: self.assertTrue('trac.example.com' in converted) self.assertEqual(len(converted_topic), 1) self.assertTrue('trac.example.com' in converted_topic[0]) else: self.assertTrue('trac.example.com' not in converted) self.assertEqual(len(converted_topic), 0) assert_conversion('Hello assert_conversion('Hello assert_conversion('Hello assert_conversion('Hello # Ideally, these should be converted, but bugdown doesn't # whitespace for that correctly yet. assert_conversion('チケットは assert_conversion('チケットは assert_conversion('チケットは assert_conversion('チケットは assert_conversion('( assert_conversion(' assert_conversion('"#123"') assert_conversion(' assert_conversion(') assert_conversion('est nested realm patterns should avoid double matching RealmFilter(realm=realm, pattern=r'hello url_format_string=r'https://trac.example.com/hello/%(id)s').save() converted_topic = bugdown.topic_links(realm.id, 'hello self.assertEqual(converted_topic, ['https://trac.example.com/ticket/234', 'https://trac.example.com/hello/123']) def test_maybe_update_markdown_engines(self) -> None: realm = get_realm('zulip') url_format_string = r"https://trac.example.com/ticket/%(id)s" realm_filter = RealmFilter(realm=realm, pattern=r"#(?P<id>[0-9]{2,8})", url_format_string=url_format_string) realm_filter.save() bugdown.realm_filter_data = {} bugdown.maybe_update_markdown_engines(None, False) all_filters = bugdown.realm_filter_data zulip_filters = all_filters[realm.id] self.assertEqual(len(zulip_filters), 1) self.assertEqual(zulip_filters[0], (' def test_flush_realm_filter(self) -> None: realm = get_realm('zulip') def flush() -> None: class Instance: realm_id: Optional[int] = None instance = Instance() instance.realm_id = realm.id flush_realm_filter(sender=None, instance=instance) def save_new_realm_filter() -> None: realm_filter = RealmFilter(realm=realm, pattern=r"whatever", url_format_string='whatever') realm_filter.save() # start fresh for our realm flush() self.assertFalse(realm_in_local_realm_filters_cache(realm.id)) # call this just for side effects of populating the cache realm_filters_for_realm(realm.id) self.assertTrue(realm_in_local_realm_filters_cache(realm.id)) # Saving a new RealmFilter should have the side effect of # flushing the cache. 
save_new_realm_filter() self.assertFalse(realm_in_local_realm_filters_cache(realm.id)) # and flush it one more time, to make sure we don't get a KeyError flush() self.assertFalse(realm_in_local_realm_filters_cache(realm.id)) def test_realm_patterns_negative(self) -> None: realm = get_realm('zulip') RealmFilter(realm=realm, pattern=r"#(?P<id>[0-9]{2,8})", url_format_string=r"https://trac.example.com/ticket/%(id)s").save() boring_msg = Message(sender=self.example_user('othello')) boring_msg.set_topic_name("no match here") converted_boring_topic = bugdown.topic_links(realm.id, boring_msg.topic_name()) self.assertEqual(converted_boring_topic, []) def test_is_status_message(self) -> None: user_profile = self.example_user('othello') msg = Message(sender=user_profile, sending_client=get_client("test")) content = '/me makes a list\n* one\n* two' rendered_content = render_markdown(msg, content) self.assertEqual( rendered_content, '<p>/me makes a list</p>\n<ul>\n<li>one</li>\n<li>two</li>\n</ul>', ) self.assertTrue(Message.is_status_message(content, rendered_content)) content = '/me takes a walk' rendered_content = render_markdown(msg, content) self.assertEqual( rendered_content, '<p>/me takes a walk</p>', ) self.assertTrue(Message.is_status_message(content, rendered_content)) content = '/me writes a second line\nline' rendered_content = render_markdown(msg, content) self.assertEqual( rendered_content, '<p>/me writes a second line<br>\nline</p>', ) self.assertTrue(Message.is_status_message(content, rendered_content)) def test_alert_words(self) -> None: user_profile = self.example_user('othello') do_add_alert_words(user_profile, ["ALERTWORD", "scaryword"]) msg = Message(sender=user_profile, sending_client=get_client("test")) realm_alert_words_automaton = get_alert_word_automaton(user_profile.realm) def render(msg: Message, content: str) -> str: return render_markdown(msg, content, realm_alert_words_automaton=realm_alert_words_automaton) content = "We have an ALERTWORD day today!" self.assertEqual(render(msg, content), "<p>We have an ALERTWORD day today!</p>") self.assertEqual(msg.user_ids_with_alert_words, {user_profile.id}) msg = Message(sender=user_profile, sending_client=get_client("test")) content = "We have a NOTHINGWORD day today!" 
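# A message containing none of the user's alert words should leave user_ids_with_alert_words empty.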
self.assertEqual(render(msg, content), "<p>We have a NOTHINGWORD day today!</p>") self.assertEqual(msg.user_ids_with_alert_words, set()) def test_alert_words_returns_user_ids_with_alert_words(self) -> None: alert_words_for_users: Dict[str, List[str]] = { 'hamlet': ['how'], 'cordelia': ['this possible'], 'iago': ['hello'], 'prospero': ['hello'], 'othello': ['how are you'], 'aaron': ['hey'], } user_profiles: Dict[str, UserProfile] = {} for (username, alert_words) in alert_words_for_users.items(): user_profile = self.example_user(username) user_profiles.update({username: user_profile}) do_add_alert_words(user_profile, alert_words) sender_user_profile = self.example_user('polonius') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) realm_alert_words_automaton = get_alert_word_automaton(sender_user_profile.realm) def render(msg: Message, content: str) -> str: return render_markdown(msg, content, realm_alert_words_automaton=realm_alert_words_automaton) content = "hello how is this possible how are you doing today" render(msg, content) expected_user_ids: Set[int] = { user_profiles['hamlet'].id, user_profiles['cordelia'].id, user_profiles['iago'].id, user_profiles['prospero'].id, user_profiles['othello'].id, } self.assertEqual(msg.user_ids_with_alert_words, expected_user_ids) def test_alert_words_returns_user_ids_with_alert_words_1(self) -> None: alert_words_for_users: Dict[str, List[str]] = { 'hamlet': ['provisioning', 'Prod deployment'], 'cordelia': ['test', 'Prod'], 'iago': ['prod'], 'prospero': ['deployment'], 'othello': ['last'], } user_profiles: Dict[str, UserProfile] = {} for (username, alert_words) in alert_words_for_users.items(): user_profile = self.example_user(username) user_profiles.update({username: user_profile}) do_add_alert_words(user_profile, alert_words) sender_user_profile = self.example_user('polonius') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) realm_alert_words_automaton = get_alert_word_automaton(sender_user_profile.realm) def render(msg: Message, content: str) -> str: return render_markdown(msg, content, realm_alert_words_automaton=realm_alert_words_automaton) content = """Hello, everyone. 
Prod deployment has been completed And this is a new line to test out how markdown convert this into something line ending splitted array and this is a new line last""" render(msg, content) expected_user_ids: Set[int] = { user_profiles['hamlet'].id, user_profiles['cordelia'].id, user_profiles['iago'].id, user_profiles['prospero'].id, user_profiles['othello'].id, } self.assertEqual(msg.user_ids_with_alert_words, expected_user_ids) def test_alert_words_returns_user_ids_with_alert_words_in_french(self) -> None: alert_words_for_users: Dict[str, List[str]] = { 'hamlet': ['réglementaire', 'une politique', 'une merveille'], 'cordelia': ['énormément', 'Prod'], 'iago': ['prod'], 'prospero': ['deployment'], 'othello': ['last'], } user_profiles: Dict[str, UserProfile] = {} for (username, alert_words) in alert_words_for_users.items(): user_profile = self.example_user(username) user_profiles.update({username: user_profile}) do_add_alert_words(user_profile, alert_words) sender_user_profile = self.example_user('polonius') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) realm_alert_words_automaton = get_alert_word_automaton(sender_user_profile.realm) def render(msg: Message, content: str) -> str: return render_markdown(msg, content, realm_alert_words_automaton=realm_alert_words_automaton) content = """This is to test out alert words work in languages with accented characters too bonjour est (énormément) ce a quoi ressemble le français et j'espère qu'il n'y n' réglementaire a pas de mots d'alerte dans ce texte français """ render(msg, content) expected_user_ids: Set[int] = {user_profiles['hamlet'].id, user_profiles['cordelia'].id} # Only hamlet and cordelia have their alert-words appear in the message content self.assertEqual(msg.user_ids_with_alert_words, expected_user_ids) def test_alert_words_returns_empty_user_ids_with_alert_words(self) -> None: alert_words_for_users: Dict[str, List[str]] = { 'hamlet': [], 'cordelia': [], 'iago': [], 'prospero': [], 'othello': [], 'aaron': [], } user_profiles: Dict[str, UserProfile] = {} for (username, alert_words) in alert_words_for_users.items(): user_profile = self.example_user(username) user_profiles.update({username: user_profile}) do_add_alert_words(user_profile, alert_words) sender_user_profile = self.example_user('polonius') msg = Message(sender=user_profile, sending_client=get_client("test")) realm_alert_words_automaton = get_alert_word_automaton(sender_user_profile.realm) def render(msg: Message, content: str) -> str: return render_markdown(msg, content, realm_alert_words_automaton=realm_alert_words_automaton) content = """hello how is this possible how are you doing today This is to test that the no user_ids who have alrert wourldword is participating in sending of the message """ render(msg, content) expected_user_ids: Set[int] = set() # None of the users have their alert-words appear in the message content self.assertEqual(msg.user_ids_with_alert_words, expected_user_ids) def get_mock_alert_words(self, num_words: int, word_length: int) -> List[str]: alert_words = ['x' * word_length] * num_words # type List[str] return alert_words def test_alert_words_with_empty_alert_words(self) -> None: alert_words_for_users: Dict[str, List[str]] = { 'hamlet': [], 'cordelia': [], 'iago': [], 'othello': [], } user_profiles: Dict[str, UserProfile] = {} for (username, alert_words) in alert_words_for_users.items(): user_profile = self.example_user(username) user_profiles.update({username: user_profile}) do_add_alert_words(user_profile, 
alert_words) sender_user_profile = self.example_user('polonius') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) realm_alert_words_automaton = get_alert_word_automaton(sender_user_profile.realm) def render(msg: Message, content: str) -> str: return render_markdown(msg, content, realm_alert_words_automaton=realm_alert_words_automaton) content = """This is to test a empty alert words i.e. no user has any alert-words set""" render(msg, content) expected_user_ids: Set[int] = set() self.assertEqual(msg.user_ids_with_alert_words, expected_user_ids) def test_alert_words_retuns_user_ids_with_alert_words_with_huge_alert_words(self) -> None: alert_words_for_users: Dict[str, List[str]] = { 'hamlet': ['issue124'], 'cordelia': self.get_mock_alert_words(500, 10), 'iago': self.get_mock_alert_words(500, 10), 'othello': self.get_mock_alert_words(500, 10), } user_profiles: Dict[str, UserProfile] = {} for (username, alert_words) in alert_words_for_users.items(): user_profile = self.example_user(username) user_profiles.update({username: user_profile}) do_add_alert_words(user_profile, alert_words) sender_user_profile = self.example_user('polonius') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) realm_alert_words_automaton = get_alert_word_automaton(sender_user_profile.realm) def render(msg: Message, content: str) -> str: return render_markdown(msg, content, realm_alert_words_automaton=realm_alert_words_automaton) content = """The code above will print 10 random values of numbers between 1 and 100. The second line, for x in range(10), determines how many values will be printed (when you use range(x), the number that you use in place of x will be the amount of values that you'll have printed. if you want 20 values, use range(20). use range(5) if you only want 5 values returned, etc.). I was talking abou the issue124 on github. Then the third line: print random.randint(1,101) will automatically select a random integer between 1 and 100 for you. 
The process is fairly simple """ render(msg, content) expected_user_ids: Set[int] = {user_profiles['hamlet'].id} self.assertEqual(msg.user_ids_with_alert_words, expected_user_ids) def test_default_code_block_language(self) -> None: realm = get_realm('zulip') self.assertEqual(realm.default_code_block_language, None) text = "```{}\nconsole.log('Hello World');\n```\n" msg_with_js = bugdown_convert(text.format('js')) msg_with_python = bugdown_convert(text.format('python')) msg_without_language = bugdown_convert(text.format('')) msg_with_quote = bugdown_convert(text.format('quote')) msg_with_math = bugdown_convert(text.format('math')) do_set_realm_property(realm, 'default_code_block_language', 'javascript') msg_without_language_default_js = bugdown_convert(text.format('')) msg_with_python_default_js = bugdown_convert(text.format('python')) do_set_realm_property(realm, 'default_code_block_language', 'python') msg_without_language_default_py = bugdown_convert(text.format('')) msg_with_none_default_py = bugdown_convert(text.format('none')) do_set_realm_property(realm, 'default_code_block_language', 'quote') msg_without_language_default_quote = bugdown_convert(text.format('')) do_set_realm_property(realm, 'default_code_block_language', 'math') msg_without_language_default_math = bugdown_convert(text.format('')) do_set_realm_property(realm, 'default_code_block_language', None) msg_without_language_final = bugdown_convert(text.format('')) self.assertTrue(msg_with_js == msg_without_language_default_js) self.assertTrue(msg_with_python == msg_with_python_default_js == msg_without_language_default_py) self.assertTrue(msg_with_quote == msg_without_language_default_quote) self.assertTrue(msg_with_math == msg_without_language_default_math) self.assertTrue(msg_without_language == msg_with_none_default_py == msg_without_language_final) nested_text = "````quote\n\n{}\n\n{}````".format(text.format('js'), text.format('')) do_set_realm_property(realm, 'default_code_block_language', 'javascript') rendered = bugdown_convert(nested_text) with_language, without_language = re.findall(r'<pre>(.*?)$', rendered, re.MULTILINE) self.assertTrue(with_language == without_language) do_set_realm_property(realm, 'default_code_block_language', None) rendered = bugdown_convert(nested_text) with_language, without_language = re.findall(r'<pre>(.*?)$', rendered, re.MULTILINE) self.assertFalse(with_language == without_language) def test_mention_wildcard(self) -> None: user_profile = self.example_user('othello') msg = Message(sender=user_profile, sending_client=get_client("test")) content = "@**all** test" self.assertEqual(render_markdown(msg, content), '<p><span class="user-mention" data-user-id="*">' '@all' '</span> test</p>') self.assertTrue(msg.mentions_wildcard) def test_mention_everyone(self) -> None: user_profile = self.example_user('othello') msg = Message(sender=user_profile, sending_client=get_client("test")) content = "@**everyone** test" self.assertEqual(render_markdown(msg, content), '<p><span class="user-mention" data-user-id="*">' '@everyone' '</span> test</p>') self.assertTrue(msg.mentions_wildcard) def test_mention_stream(self) -> None: user_profile = self.example_user('othello') msg = Message(sender=user_profile, sending_client=get_client("test")) content = "@**stream** test" self.assertEqual(render_markdown(msg, content), '<p><span class="user-mention" data-user-id="*">' '@stream' '</span> test</p>') self.assertTrue(msg.mentions_wildcard) def test_mention_at_wildcard(self) -> None: user_profile = 
self.example_user('othello') msg = Message(sender=user_profile, sending_client=get_client("test")) content = "@all test" self.assertEqual(render_markdown(msg, content), '<p>@all test</p>') self.assertFalse(msg.mentions_wildcard) self.assertEqual(msg.mentions_user_ids, set()) def test_mention_at_everyone(self) -> None: user_profile = self.example_user('othello') msg = Message(sender=user_profile, sending_client=get_client("test")) content = "@everyone test" self.assertEqual(render_markdown(msg, content), '<p>@everyone test</p>') self.assertFalse(msg.mentions_wildcard) self.assertEqual(msg.mentions_user_ids, set()) def test_mention_word_starting_with_at_wildcard(self) -> None: user_profile = self.example_user('othello') msg = Message(sender=user_profile, sending_client=get_client("test")) content = "test @alleycat.com test" self.assertEqual(render_markdown(msg, content), '<p>test @alleycat.com test</p>') self.assertFalse(msg.mentions_wildcard) self.assertEqual(msg.mentions_user_ids, set()) def test_mention_at_normal_user(self) -> None: user_profile = self.example_user('othello') msg = Message(sender=user_profile, sending_client=get_client("test")) content = "@aaron test" self.assertEqual(render_markdown(msg, content), '<p>@aaron test</p>') self.assertFalse(msg.mentions_wildcard) self.assertEqual(msg.mentions_user_ids, set()) def test_mention_single(self) -> None: sender_user_profile = self.example_user('othello') user_profile = self.example_user('hamlet') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) user_id = user_profile.id content = "@**King Hamlet**" self.assertEqual(render_markdown(msg, content), '<p><span class="user-mention" ' f'data-user-id="{user_id}">' '@King Hamlet</span></p>') self.assertEqual(msg.mentions_user_ids, {user_profile.id}) def test_mention_silent(self) -> None: sender_user_profile = self.example_user('othello') user_profile = self.example_user('hamlet') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) user_id = user_profile.id content = "@_**King Hamlet**" self.assertEqual(render_markdown(msg, content), '<p><span class="user-mention silent" ' f'data-user-id="{user_id}">' 'King Hamlet</span></p>') self.assertEqual(msg.mentions_user_ids, set()) def test_possible_mentions(self) -> None: def assert_mentions(content: str, names: Set[str], has_wildcards: bool=False) -> None: self.assertEqual(possible_mentions(content), (names, has_wildcards)) assert_mentions('', set()) assert_mentions('boring', set()) assert_mentions('@**all**', set(), True) assert_mentions('smush@**steve**smush', set()) assert_mentions( 'Hello @**King Hamlet** and @**Cordelia Lear**\n@**Foo van Barson|1234** @**all**', {'King Hamlet', 'Cordelia Lear', 'Foo van Barson|1234'}, True, ) def test_mention_multiple(self) -> None: sender_user_profile = self.example_user('othello') hamlet = self.example_user('hamlet') cordelia = self.example_user('cordelia') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) content = "@**King Hamlet** and @**Cordelia Lear**, check this out" self.assertEqual(render_markdown(msg, content), '<p>' '<span class="user-mention" ' f'data-user-id="{hamlet.id}">@King Hamlet</span> and ' '<span class="user-mention" ' f'data-user-id="{cordelia.id}">@Cordelia Lear</span>, ' 'check this out</p>') self.assertEqual(msg.mentions_user_ids, {hamlet.id, cordelia.id}) def test_mention_in_quotes(self) -> None: othello = self.example_user('othello') hamlet = self.example_user('hamlet') cordelia = 
self.example_user('cordelia') msg = Message(sender=othello, sending_client=get_client("test")) content = "> @**King Hamlet** and @**Othello, the Moor of Venice**\n\n @**King Hamlet** and @**Cordelia Lear**" self.assertEqual(render_markdown(msg, content), '<blockquote>\n<p>' f'<span class="user-mention silent" data-user-id="{hamlet.id}">King Hamlet</span>' ' and ' f'<span class="user-mention silent" data-user-id="{othello.id}">Othello, the Moor of Venice</span>' '</p>\n</blockquote>\n' '<p>' f'<span class="user-mention" data-user-id="{hamlet.id}">@King Hamlet</span>' ' and ' f'<span class="user-mention" data-user-id="{cordelia.id}">@Cordelia Lear</span>' '</p>') self.assertEqual(msg.mentions_user_ids, {hamlet.id, cordelia.id}) expected = ('<blockquote>\n<p>' f'<span class="user-mention silent" data-user-id="{hamlet.id}">King Hamlet</span>' '</p>\n</blockquote>') content = "```quote\n@**King Hamlet**\n```" self.assertEqual(render_markdown(msg, content), expected) self.assertEqual(msg.mentions_user_ids, set()) content = "> @**King Hamlet**" self.assertEqual(render_markdown(msg, content), expected) self.assertEqual(msg.mentions_user_ids, set()) content = "```quote\n@_**King Hamlet**\n```" self.assertEqual(render_markdown(msg, content), expected) self.assertEqual(msg.mentions_user_ids, set()) content = "> @_**King Hamlet**" self.assertEqual(render_markdown(msg, content), expected) self.assertEqual(msg.mentions_user_ids, set()) def test_mention_duplicate_full_name(self) -> None: realm = get_realm('zulip') def make_user(email: str, full_name: str) -> UserProfile: return create_user( email=email, password='whatever', realm=realm, full_name=full_name, short_name='whatever', ) sender_user_profile = self.example_user('othello') twin1 = make_user('twin1@example.com', 'Mark Twin') twin2 = make_user('twin2@example.com', 'Mark Twin') cordelia = self.example_user('cordelia') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) content = f"@**Mark Twin|{twin1.id}**, @**Mark Twin|{twin2.id}** and @**Cordelia Lear**, hi." 
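# Appending |<user_id> to the name disambiguates mentions of users who share the same full name.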
self.assertEqual(render_markdown(msg, content), '<p>' '<span class="user-mention" ' f'data-user-id="{twin1.id}">@Mark Twin</span>, ' '<span class="user-mention" ' f'data-user-id="{twin2.id}">@Mark Twin</span> and ' '<span class="user-mention" ' f'data-user-id="{cordelia.id}">@Cordelia Lear</span>, ' 'hi.</p>') self.assertEqual(msg.mentions_user_ids, {twin1.id, twin2.id, cordelia.id}) def test_mention_invalid(self) -> None: sender_user_profile = self.example_user('othello') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) content = "Hey @**Nonexistent User**" self.assertEqual(render_markdown(msg, content), '<p>Hey @<strong>Nonexistent User</strong></p>') self.assertEqual(msg.mentions_user_ids, set()) def test_user_mention_atomic_string(self) -> None: sender_user_profile = self.example_user('othello') realm = get_realm('zulip') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) url_format_string = r"https://trac.example.com/ticket/%(id)s" realm_filter = RealmFilter(realm=realm, pattern=r"#(?P<id>[0-9]{2,8})", url_format_string=url_format_string) realm_filter.save() self.assertEqual( realm_filter.__str__(), '<RealmFilter(zulip): #(?P<id>[0-9]{2,8})' ' https://trac.example.com/ticket/%(id)s>') test_user = create_user(email='atomic@example.com', password='whatever', realm=realm, full_name='Atomic #123', short_name='whatever') content = "@**Atomic #123**" self.assertEqual(render_markdown(msg, content), '<p><span class="user-mention" ' f'data-user-id="{test_user.id}">' '@Atomic #123</span></p>') self.assertEqual(msg.mentions_user_ids, {test_user.id}) content = "@_**Atomic #123**" self.assertEqual(render_markdown(msg, content), '<p><span class="user-mention silent" ' f'data-user-id="{test_user.id}">' 'Atomic #123</span></p>') self.assertEqual(msg.mentions_user_ids, set()) def create_user_group_for_test(self, user_group_name: str) -> UserGroup: othello = self.example_user('othello') return create_user_group(user_group_name, [othello], get_realm('zulip')) def test_user_group_mention_single(self) -> None: sender_user_profile = self.example_user('othello') user_profile = self.example_user('hamlet') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) user_id = user_profile.id user_group = self.create_user_group_for_test('support') content = "@**King Hamlet** @*support*" self.assertEqual(render_markdown(msg, content), '<p><span class="user-mention" ' f'data-user-id="{user_id}">' '@King Hamlet</span> ' '<span class="user-group-mention" ' f'data-user-group-id="{user_group.id}">' '@support</span></p>') self.assertEqual(msg.mentions_user_ids, {user_profile.id}) self.assertEqual(msg.mentions_user_group_ids, {user_group.id}) def test_user_group_mention_atomic_string(self) -> None: sender_user_profile = self.example_user('othello') realm = get_realm('zulip') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) user_profile = self.example_user('hamlet') url_format_string = r"https://trac.example.com/ticket/%(id)s" realm_filter = RealmFilter(realm=realm, pattern=r"#(?P<id>[0-9]{2,8})", url_format_string=url_format_string) realm_filter.save() self.assertEqual( realm_filter.__str__(), '<RealmFilter(zulip): #(?P<id>[0-9]{2,8})' ' https://trac.example.com/ticket/%(id)s>') user_id = user_profile.id user_group = self.create_user_group_for_test('support #123') content = "@**King Hamlet** @*support #123*" self.assertEqual(render_markdown(msg, content), '<p><span class="user-mention" ' f'data-user-id="{user_id}">' 
'@King Hamlet</span> ' '<span class="user-group-mention" ' f'data-user-group-id="{user_group.id}">' '@support #123</span></p>') self.assertEqual(msg.mentions_user_ids, {user_profile.id}) self.assertEqual(msg.mentions_user_group_ids, {user_group.id}) def test_possible_user_group_mentions(self) -> None: def assert_mentions(content: str, names: Set[str]) -> None: self.assertEqual(possible_user_group_mentions(content), names) assert_mentions('', set()) assert_mentions('boring', set()) assert_mentions('@**all**', set()) assert_mentions('smush@*steve*smush', set()) assert_mentions( '@*support* Hello @**King Hamlet** and @**Cordelia Lear**\n' '@**Foo van Barson** @**all**', {'support'}, ) assert_mentions( 'Attention @*support*, @*frontend* and @*backend*\ngroups.', {'support', 'frontend', 'backend'}, ) def test_user_group_mention_multiple(self) -> None: sender_user_profile = self.example_user('othello') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) support = self.create_user_group_for_test('support') backend = self.create_user_group_for_test('backend') content = "@*support* and @*backend*, check this out" self.assertEqual(render_markdown(msg, content), '<p>' '<span class="user-group-mention" ' f'data-user-group-id="{support.id}">' '@support</span> ' 'and ' '<span class="user-group-mention" ' f'data-user-group-id="{backend.id}">' '@backend</span>, ' 'check this out' '</p>') self.assertEqual(msg.mentions_user_group_ids, {support.id, backend.id}) def test_user_group_mention_edit(self) -> None: sender_user_profile = self.example_user('hamlet') user_profile = self.example_user('othello') self.create_user_group_for_test('support') self.login('hamlet') msg_id = self.send_stream_message(sender_user_profile, "Denmark", topic_name="editing", content='test') def update_message_and_check_flag(content: str, mentioned: bool) -> None: result = self.client_patch("/json/messages/" + str(msg_id), { 'message_id': msg_id, 'content': content, }) self.assert_json_success(result) um = UserMessage.objects.get( user_profile_id=user_profile.id, message_id=msg_id, ) if mentioned: self.assertIn('mentioned', um.flags_list()) else: self.assertNotIn('mentioned', um.flags_list()) update_message_and_check_flag("@*support*", True) update_message_and_check_flag("@*support-invalid* edited", False) update_message_and_check_flag("@*support* edited", True) update_message_and_check_flag("edited", False) update_message_and_check_flag("@*support*", True) def test_user_group_mention_invalid(self) -> None: sender_user_profile = self.example_user('othello') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) content = "Hey @*Nonexistent group*" self.assertEqual(render_markdown(msg, content), '<p>Hey @<em>Nonexistent group</em></p>') self.assertEqual(msg.mentions_user_group_ids, set()) def test_stream_single(self) -> None: denmark = get_stream('Denmark', get_realm('zulip')) sender_user_profile = self.example_user('othello') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) content = "#**Denmark**" self.assertEqual( render_markdown(msg, content), '<p><a class="stream" data-stream-id="{d.id}" href="/#narrow/stream/{d.id}-Denmark">#{d.name}</a></p>'.format( d=denmark, )) def test_stream_multiple(self) -> None: sender_user_profile = self.example_user('othello') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) realm = get_realm('zulip') denmark = get_stream('Denmark', realm) scotland = get_stream('Scotland', realm) content = "Look to 
#**Denmark** and #**Scotland**, there something" self.assertEqual(render_markdown(msg, content), '<p>Look to ' '<a class="stream" ' 'data-stream-id="{denmark.id}" ' 'href="/#narrow/stream/{denmark.id}-Denmark">#{denmark.name}</a> and ' '<a class="stream" ' 'data-stream-id="{scotland.id}" ' 'href="/#narrow/stream/{scotland.id}-Scotland">#{scotland.name}</a>, ' 'there something</p>'.format(denmark=denmark, scotland=scotland)) def test_stream_case_sensitivity(self) -> None: realm = get_realm('zulip') case_sens = Stream.objects.create(name='CaseSens', realm=realm) sender_user_profile = self.example_user('othello') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) content = "#**CaseSens**" self.assertEqual( render_markdown(msg, content), '<p><a class="stream" data-stream-id="{s.id}" href="/#narrow/stream/{s.id}-{s.name}">#{s.name}</a></p>'.format( s=case_sens, )) def test_stream_case_sensitivity_nonmatching(self) -> None: realm = get_realm('zulip') Stream.objects.create(name='CaseSens', realm=realm) sender_user_profile = self.example_user('othello') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) content = "#**casesens**" self.assertEqual( render_markdown(msg, content), '<p>#<strong>casesens</strong></p>') def test_topic_single(self) -> None: denmark = get_stream('Denmark', get_realm('zulip')) sender_user_profile = self.example_user('othello') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) content = "#**Denmark>some topic**" self.assertEqual( render_markdown(msg, content), '<p><a class="stream-topic" data-stream-id="{d.id}" href="/#narrow/stream/{d.id}-Denmark/topic/some.20topic">#{d.name} &gt; some topic</a></p>'.format( d=denmark, )) def test_topic_atomic_string(self) -> None: realm = get_realm('zulip') sender_user_profile = self.example_user('othello') url_format_string = r"https://trac.example.com/ticket/%(id)s" realm_filter = RealmFilter(realm=realm, pattern=r"#(?P<id>[0-9]{2,8})", url_format_string=url_format_string) realm_filter.save() self.assertEqual( realm_filter.__str__(), '<RealmFilter(zulip): #(?P<id>[0-9]{2,8})' ' https://trac.example.com/ticket/%(id)s>') denmark = get_stream('Denmark', realm) msg = Message(sender=sender_user_profile, sending_client=get_client("test")) content = "#**Denmark>#1234**" self.assertEqual( render_markdown(msg, content), '<p><a class="stream-topic" data-stream-id="{d.id}" href="/#narrow/stream/{d.id}-Denmark/topic/.231234">#{d.name} &gt; #1234</a></p>'.format( d=denmark, )) def test_topic_multiple(self) -> None: denmark = get_stream('Denmark', get_realm('zulip')) scotland = get_stream('Scotland', get_realm('zulip')) sender_user_profile = self.example_user('othello') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) content = "This has two links: #**Denmark>some topic** and #**Scotland>other topic**." self.assertEqual( render_markdown(msg, content), '<p>This has two links: ' '<a class="stream-topic" data-stream-id="{denmark.id}" ' 'href="/#narrow/stream/{denmark.id}-{denmark.name}/topic/some.20topic">' '#{denmark.name} &gt; some topic</a>' ' and ' '<a class="stream-topic" data-stream-id="{scotland.id}" ' 'href="/#narrow/stream/{scotland.id}-{scotland.name}/topic/other.20topic">' '#{scotland.name} &gt; other topic</a>' '.</p>'.format(denmark=denmark, scotland=scotland)) def test_possible_stream_names(self) -> None: content = '''#**test here** This mentions #**Denmark** too. 
#**garçon** #**천국** @**Ignore Person** ''' self.assertEqual( bugdown.possible_linked_stream_names(content), {'test here', 'Denmark', 'garçon', '천국'}, ) def test_stream_unicode(self) -> None: realm = get_realm('zulip') uni = Stream.objects.create(name='привет', realm=realm) sender_user_profile = self.example_user('othello') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) content = "#**привет**" quoted_name = '.D0.BF.D1.80.D0.B8.D0.B2.D0.B5.D1.82' href = f'/#narrow/stream/{uni.id}-{quoted_name}' self.assertEqual( render_markdown(msg, content), '<p><a class="stream" data-stream-id="{s.id}" href="{href}">#{s.name}</a></p>'.format( s=uni, href=href, )) def test_stream_atomic_string(self) -> None: realm = get_realm('zulip') sender_user_profile = self.example_user('othello') url_format_string = r"https://trac.example.com/ticket/%(id)s" realm_filter = RealmFilter(realm=realm, pattern=r"#(?P<id>[0-9]{2,8})", url_format_string=url_format_string) realm_filter.save() self.assertEqual( realm_filter.__str__(), '<RealmFilter(zulip): #(?P<id>[0-9]{2,8})' ' https://trac.example.com/ticket/%(id)s>') stream = Stream.objects.create(name='Stream #1234', realm=realm) msg = Message(sender=sender_user_profile, sending_client=get_client("test")) content = "#**Stream #1234**" href = f'/#narrow/stream/{stream.id}-Stream-.231234' self.assertEqual( render_markdown(msg, content), '<p><a class="stream" data-stream-id="{s.id}" href="{href}">#{s.name}</a></p>'.format( s=stream, href=href, )) def test_stream_invalid(self) -> None: sender_user_profile = self.example_user('othello') msg = Message(sender=sender_user_profile, sending_client=get_client("test")) content = "There #**Nonexistentstream**" self.assertEqual(render_markdown(msg, content), '<p>There #<strong>Nonexistentstream</strong></p>') self.assertEqual(msg.mentions_user_ids, set()) def test_image_preview_title(self) -> None: msg = '[My favorite image](https://example.com/testimage.png)' converted = bugdown_convert(msg) self.assertEqual( converted, '<p>' '<a href="https://example.com/testimage.png">My favorite image</a>' '</p>\n' '<div class="message_inline_image">' '<a href="https://example.com/testimage.png" title="My favorite image">' '<img data-src-fullsize="/thumbnail?url=https%3A%2F%2Fexample.com%2Ftestimage.png&amp;size=full" src="/thumbnail?url=https%3A%2F%2Fexample.com%2Ftestimage.png&amp;size=thumbnail">' '</a>' '</div>', ) def test_mit_rendering(self) -> None: msg = "**test**" realm = get_realm("zephyr") client = get_client("zephyr_mirror") message = Message(sending_client=client, sender=self.mit_user("sipbtest")) converted = bugdown.convert(msg, message_realm=realm, message=message) self.assertEqual( converted, "<p>**test**</p>", ) msg = "* test" converted = bugdown.convert(msg, message_realm=realm, message=message) self.assertEqual( converted, "<p>* test</p>", ) msg = "https://lists.debian.org/debian-ctte/2014/02/msg00173.html" converted = bugdown.convert(msg, message_realm=realm, message=message) self.assertEqual( converted, '<p><a href="https://lists.debian.org/debian-ctte/2014/02/msg00173.html">https://lists.debian.org/debian-ctte/2014/02/msg00173.html</a></p>', ) def test_url_to_a(self) -> None: url = 'javascript://example.com/invalidURL' converted = bugdown.url_to_a(db_data=None, url=url, text=url) self.assertEqual( converted, 'javascript://example.com/invalidURL', ) def test_disabled_code_block_processor(self) -> None: msg = "Hello,\n\n" + \ " I am writing this message to test something. 
I am writing this message to test something." converted = bugdown_convert(msg) expected_output = '<p>Hello,</p>\n' + \ '<div class="codehilite"><pre><span></span><code>I am writing this message to test something. I am writing this message to test something.\n' + \ '</code></pre></div>' self.assertEqual(converted, expected_output) realm = Realm.objects.create(string_id='code_block_processor_test') bugdown.maybe_update_markdown_engines(realm.id, True) converted = bugdown.convert(msg, message_realm=realm, email_gateway=True) expected_output = '<p>Hello,</p>\n' + \ '<p>I am writing this message to test something. I am writing this message to test something.</p>' self.assertEqual(converted, expected_output) def test_normal_link(self) -> None: realm = get_realm("zulip") sender_user_profile = self.example_user('othello') message = Message(sender=sender_user_profile, sending_client=get_client("test")) msg = "http://example.com/#settings/" self.assertEqual( bugdown.convert(msg, message_realm=realm, message=message), '<p><a href="http://example.com/#settings/">http://example.com/#settings/</a></p>', ) def test_relative_link(self) -> None: realm = get_realm("zulip") sender_user_profile = self.example_user('othello') message = Message(sender=sender_user_profile, sending_client=get_client("test")) msg = "http://zulip.testserver/#narrow/stream/999-hello" self.assertEqual( bugdown.convert(msg, message_realm=realm, message=message), '<p><a href="#narrow/stream/999-hello">http://zulip.testserver/#narrow/stream/999-hello</a></p>', ) def test_relative_link_streams_page(self) -> None: realm = get_realm("zulip") sender_user_profile = self.example_user('othello') message = Message(sender=sender_user_profile, sending_client=get_client("test")) msg = "http://zulip.testserver/#streams/all" self.assertEqual( bugdown.convert(msg, message_realm=realm, message=message), '<p><a href="#streams/all">http://zulip.testserver/#streams/all</a></p>', ) def test_md_relative_link(self) -> None: realm = get_realm("zulip") sender_user_profile = self.example_user('othello') message = Message(sender=sender_user_profile, sending_client=get_client("test")) msg = "[hello](http://zulip.testserver/#narrow/stream/999-hello)" self.assertEqual( bugdown.convert(msg, message_realm=realm, message=message), '<p><a href="#narrow/stream/999-hello">hello</a></p>', ) class BugdownApiTests(ZulipTestCase): def test_render_message_api(self) -> None: content = 'That is a **bold** statement' result = self.api_post( self.example_user("othello"), '/api/v1/messages/render', dict(content=content), ) self.assert_json_success(result) self.assertEqual(result.json()['rendered'], '<p>That is a <strong>bold</strong> statement</p>') def test_render_mention_stream_api(self) -> None: content = 'This mentions #**Denmark** and @**King Hamlet**.' 
result = self.api_post( self.example_user("othello"), '/api/v1/messages/render', dict(content=content), ) self.assert_json_success(result) user_id = self.example_user('hamlet').id stream_id = get_stream('Denmark', get_realm('zulip')).id self.assertEqual(result.json()['rendered'], f'<p>This mentions <a class="stream" data-stream-id="{stream_id}" href="/#narrow/stream/{stream_id}-Denmark">#Denmark</a> and <span class="user-mention" data-user-id="{user_id}">@King Hamlet</span>.</p>') class BugdownErrorTests(ZulipTestCase): def test_bugdown_error_handling(self) -> None: with self.simulated_markdown_failure(): with self.assertRaises(BugdownRenderingException): bugdown_convert('') def test_send_message_errors(self) -> None: message = 'whatever' with self.simulated_markdown_failure(): # We don't use assertRaisesRegex because it seems to not # handle i18n properly here on some systems. with self.assertRaises(JsonableError): self.send_stream_message(self.example_user("othello"), "Denmark", message) def test_ultra_long_rendering(self) -> None: msg = 'mock rendered message\n' * MAX_MESSAGE_LENGTH with mock.patch('zerver.lib.bugdown.timeout', return_value=msg), \ mock.patch('zerver.lib.bugdown.bugdown_logger'): with self.assertRaises(BugdownRenderingException): bugdown_convert(msg) def test_curl_code_block_validation(self) -> None: processor = bugdown.fenced_code.FencedBlockPreprocessor(None) processor.run_content_validators = True # Simulate code formatting. processor.format_code = lambda lang, code: lang + ':' + code # type: ignore[assignment] # mypy doesn't allow monkey-patching functions processor.placeholder = lambda s: '**' + s.strip('\n') + '**' # type: ignore[assignment] # https://github.com/python/mypy/issues/708 markdown = [ '``` curl', 'curl {{ api_url }}/v1/register', ' -u BOT_EMAIL_ADDRESS:BOT_API_KEY', ' -d "queue_id=1375801870:2942"', '```', ] with self.assertRaises(BugdownRenderingException): processor.run(markdown) def test_curl_code_block_without_validation(self) -> None: processor = bugdown.fenced_code.FencedBlockPreprocessor(None) processor.format_code = lambda lang, code: lang + ':' + code # type: ignore[assignment] # mypy doesn't allow monkey-patching functions processor.placeholder = lambda s: '**' + s.strip('\n') + '**' # type: ignore[assignment] # https://github.com/python/mypy/issues/708 markdown = [ '``` curl', 'curl {{ api_url }}/v1/register', ' -u BOT_EMAIL_ADDRESS:BOT_API_KEY', ' -d "queue_id=1375801870:2942"', '```', ] expected = [ '', '**curl:curl {{ api_url }}/v1/register', ' -u BOT_EMAIL_ADDRESS:BOT_API_KEY', ' -d "queue_id=1375801870:2942"**', '', '', ] result = processor.run(markdown) self.assertEqual(result, expected) class BugdownAvatarTestCase(ZulipTestCase): def test_possible_avatar_emails(self) -> None: content = ''' hello !avatar(foo@example.com) my email is ignore@ignore.com !gravatar(bar@yo.tv) smushing!avatar(hamlet@example.org) is allowed ''' self.assertEqual( bugdown.possible_avatar_emails(content), {'foo@example.com', 'bar@yo.tv', 'hamlet@example.org'}, ) def test_avatar_with_id(self) -> None: sender_user_profile = self.example_user('othello') message = Message(sender=sender_user_profile, sending_client=get_client("test")) user_profile = self.example_user('hamlet') msg = f'!avatar({user_profile.email})' converted = bugdown.convert(msg, message=message) values = {'email': user_profile.email, 'id': user_profile.id} self.assertEqual( converted, '<p><img alt="{email}" class="message_body_gravatar" src="/avatar/{id}?s=30" title="{email}"></p>'.format(**values)) def test_avatar_of_unregistered_user(self) -> None: sender_user_profile = self.example_user('othello') message = Message(sender=sender_user_profile, sending_client=get_client("test")) email = 'fakeuser@example.com' msg = f'!avatar({email})' converted = 
bugdown.convert(msg, message=message) self.assertEqual( converted, '<p><img alt="{0}" class="message_body_gravatar" src="/avatar/{0}?s=30" title="{0}"></p>'.format(email))
true
true
f711d5c3ee19c95b26ff713f3e6d532c9a88b33a
109
py
Python
python/karps/__init__.py
tjhunter/karps
7c74c3bf5b566264d6fed6e17fb1716216467a50
[ "Apache-2.0" ]
5
2017-10-25T10:53:47.000Z
2019-01-12T19:32:36.000Z
python/karps/__init__.py
tjhunter/karps
7c74c3bf5b566264d6fed6e17fb1716216467a50
[ "Apache-2.0" ]
15
2017-06-22T20:53:50.000Z
2017-10-22T00:47:00.000Z
python/karps/__init__.py
tjhunter/karps
7c74c3bf5b566264d6fed6e17fb1716216467a50
[ "Apache-2.0" ]
1
2018-08-23T04:25:57.000Z
2018-08-23T04:25:57.000Z
from .types import * from .row import * from .column import * from .utils import scope from .session import *
21.8
24
0.743119
from .types import * from .row import * from .column import * from .utils import scope from .session import *
true
true
f711d72a06f5cf2315676d521154b060c9767ffb
798
py
Python
xlsxwriter/test/worksheet/test_write_sheet_format_pr.py
eddiechapman/XlsxWriter
c636117ab30e64e4b7b824c9105595c42887c2c9
[ "BSD-2-Clause-FreeBSD" ]
1
2021-03-27T11:14:47.000Z
2021-03-27T11:14:47.000Z
xlsxwriter/test/worksheet/test_write_sheet_format_pr.py
xiaolanmeng86/XlsxWriter
6c3ea23a410e8216eab8f5751e5544ffb444b3da
[ "BSD-2-Clause-FreeBSD" ]
null
null
null
xlsxwriter/test/worksheet/test_write_sheet_format_pr.py
xiaolanmeng86/XlsxWriter
6c3ea23a410e8216eab8f5751e5544ffb444b3da
[ "BSD-2-Clause-FreeBSD" ]
null
null
null
############################################################################### # # Tests for XlsxWriter. # # Copyright (c), 2013-2021, John McNamara, jmcnamara@cpan.org # import unittest from ...compatibility import StringIO from ...worksheet import Worksheet class TestWriteSheetFormatPr(unittest.TestCase): """ Test the Worksheet _write_sheet_format_pr() method. """ def setUp(self): self.fh = StringIO() self.worksheet = Worksheet() self.worksheet._set_filehandle(self.fh) def test_write_sheet_format_pr(self): """Test the _write_sheet_format_pr() method""" self.worksheet._write_sheet_format_pr() exp = """<sheetFormatPr defaultRowHeight="15"/>""" got = self.fh.getvalue() self.assertEqual(got, exp)
24.181818
79
0.602757
true
true
f711d89cf242da66c3ff5569e7204d04c896b8f8
2,089
py
Python
google/ads/google_ads/v1/proto/services/location_view_service_pb2_grpc.py
jwygoda/google-ads-python
863892b533240cb45269d9c2cceec47e2c5a8b68
[ "Apache-2.0" ]
null
null
null
google/ads/google_ads/v1/proto/services/location_view_service_pb2_grpc.py
jwygoda/google-ads-python
863892b533240cb45269d9c2cceec47e2c5a8b68
[ "Apache-2.0" ]
null
null
null
google/ads/google_ads/v1/proto/services/location_view_service_pb2_grpc.py
jwygoda/google-ads-python
863892b533240cb45269d9c2cceec47e2c5a8b68
[ "Apache-2.0" ]
null
null
null
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! import grpc from google.ads.google_ads.v1.proto.resources import location_view_pb2 as google_dot_ads_dot_googleads__v1_dot_proto_dot_resources_dot_location__view__pb2 from google.ads.google_ads.v1.proto.services import location_view_service_pb2 as google_dot_ads_dot_googleads__v1_dot_proto_dot_services_dot_location__view__service__pb2 class LocationViewServiceStub(object): """Service to fetch location views. """ def __init__(self, channel): """Constructor. Args: channel: A grpc.Channel. """ self.GetLocationView = channel.unary_unary( '/google.ads.googleads.v1.services.LocationViewService/GetLocationView', request_serializer=google_dot_ads_dot_googleads__v1_dot_proto_dot_services_dot_location__view__service__pb2.GetLocationViewRequest.SerializeToString, response_deserializer=google_dot_ads_dot_googleads__v1_dot_proto_dot_resources_dot_location__view__pb2.LocationView.FromString, ) class LocationViewServiceServicer(object): """Service to fetch location views. """ def GetLocationView(self, request, context): """Returns the requested location view in full detail. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def add_LocationViewServiceServicer_to_server(servicer, server): rpc_method_handlers = { 'GetLocationView': grpc.unary_unary_rpc_method_handler( servicer.GetLocationView, request_deserializer=google_dot_ads_dot_googleads__v1_dot_proto_dot_services_dot_location__view__service__pb2.GetLocationViewRequest.FromString, response_serializer=google_dot_ads_dot_googleads__v1_dot_proto_dot_resources_dot_location__view__pb2.LocationView.SerializeToString, ), } generic_handler = grpc.method_handlers_generic_handler( 'google.ads.googleads.v1.services.LocationViewService', rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,))
43.520833
169
0.812829
import grpc from google.ads.google_ads.v1.proto.resources import location_view_pb2 as google_dot_ads_dot_googleads__v1_dot_proto_dot_resources_dot_location__view__pb2 from google.ads.google_ads.v1.proto.services import location_view_service_pb2 as google_dot_ads_dot_googleads__v1_dot_proto_dot_services_dot_location__view__service__pb2 class LocationViewServiceStub(object): def __init__(self, channel): self.GetLocationView = channel.unary_unary( '/google.ads.googleads.v1.services.LocationViewService/GetLocationView', request_serializer=google_dot_ads_dot_googleads__v1_dot_proto_dot_services_dot_location__view__service__pb2.GetLocationViewRequest.SerializeToString, response_deserializer=google_dot_ads_dot_googleads__v1_dot_proto_dot_resources_dot_location__view__pb2.LocationView.FromString, ) class LocationViewServiceServicer(object): def GetLocationView(self, request, context): context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def add_LocationViewServiceServicer_to_server(servicer, server): rpc_method_handlers = { 'GetLocationView': grpc.unary_unary_rpc_method_handler( servicer.GetLocationView, request_deserializer=google_dot_ads_dot_googleads__v1_dot_proto_dot_services_dot_location__view__service__pb2.GetLocationViewRequest.FromString, response_serializer=google_dot_ads_dot_googleads__v1_dot_proto_dot_resources_dot_location__view__pb2.LocationView.SerializeToString, ), } generic_handler = grpc.method_handlers_generic_handler( 'google.ads.googleads.v1.services.LocationViewService', rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,))
true
true
f711d9b850675003d1af9b25eba4e09079c0755f
201
py
Python
mettingapp/mettingapp/doctype/test/test_test.py
hieudola/git
732c5ce1c3104d997e51e170266669dd49adbc2c
[ "MIT" ]
null
null
null
mettingapp/mettingapp/doctype/test/test_test.py
hieudola/git
732c5ce1c3104d997e51e170266669dd49adbc2c
[ "MIT" ]
null
null
null
mettingapp/mettingapp/doctype/test/test_test.py
hieudola/git
732c5ce1c3104d997e51e170266669dd49adbc2c
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- # Copyright (c) 2018, frappe and Contributors # See license.txt from __future__ import unicode_literals import frappe import unittest class TestTest(unittest.TestCase): pass
18.272727
45
0.761194
from __future__ import unicode_literals import frappe import unittest class TestTest(unittest.TestCase): pass
true
true
f711d9cea94806cf0e1017d5f400a33609a5c611
5,488
py
Python
doc/conf.py
timmykuo/mitpipeline
3021bc4e4eb7b2e8dace736b743717bcd5497442
[ "MIT" ]
2
2019-06-10T01:57:48.000Z
2019-11-18T14:50:38.000Z
doc/conf.py
timmykuo/mitpipeline
3021bc4e4eb7b2e8dace736b743717bcd5497442
[ "MIT" ]
2
2019-07-15T09:10:35.000Z
2019-07-15T18:10:31.000Z
doc/conf.py
timmykuo/mitpipeline
3021bc4e4eb7b2e8dace736b743717bcd5497442
[ "MIT" ]
2
2019-06-10T21:00:48.000Z
2019-07-15T03:57:07.000Z
# -*- coding: utf-8 -*- # # Configuration file for the Sphinx documentation builder. # # This file does only contain a selection of the most common options. For a # full list see the documentation: # http://www.sphinx-doc.org/en/master/config # -- Path setup -------------------------------------------------------------- # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # import os import sys if os.environ.get('READTHEDOCS', None) == 'True': # Run sphinx-apidoc automatically in readthedocs # Taken from this: https://lists.torproject.org/pipermail/tor-commits/2012-September/046695.html os.system('sphinx-apidoc -o api -T ../mitopipeline --separate') sys.path.insert(0, os.path.abspath(os.path.pardir)) # -- Project information ----------------------------------------------------- project = u'mitopipeline' copyright = u'2019, Timothy Kuo' author = u'Timothy Kuo' # The short X.Y version version = u'' # The full version, including alpha/beta/rc tags release = u'0.1' # -- General configuration --------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. # # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ ] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # # source_suffix = ['.rst', '.md'] source_suffix = '.rst' # The master toctree document. master_doc = 'index' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. language = None # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path. exclude_patterns = [u'_build', 'Thumbs.db', '.DS_Store', 'README.rst'] # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # html_theme = 'sphinx_rtd_theme' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # # html_theme_options = {} # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # Custom sidebar templates, must be a dictionary that maps document names # to template names. # # The default sidebars (for documents that don't match any pattern) are # defined by theme itself. Builtin themes are using these templates by # default: ``['localtoc.html', 'relations.html', 'sourcelink.html', # 'searchbox.html']``. 
# # html_sidebars = {} # -- Options for HTMLHelp output --------------------------------------------- # Output file base name for HTML help builder. htmlhelp_basename = 'mitopipelinedoc' # -- Options for LaTeX output ------------------------------------------------ latex_elements = { # The paper size ('letterpaper' or 'a4paper'). # # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). # # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. # # 'preamble': '', # Latex figure (float) alignment # # 'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ (master_doc, 'mitopipeline.tex', u'mitopipeline Documentation', u'Timothy Kuo', 'manual'), ] # -- Options for manual page output ------------------------------------------ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ (master_doc, 'mitopipeline', u'mitopipeline Documentation', [author], 1) ] # -- Options for Texinfo output ---------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ (master_doc, 'mitopipeline', u'mitopipeline Documentation', author, 'mitopipeline', 'One line description of project.', 'Miscellaneous'), ] # -- Options for Epub output ------------------------------------------------- # Bibliographic Dublin Core info. epub_title = project # The unique identifier of the text. This can be a ISBN number # or the project homepage. # # epub_identifier = '' # A unique identification for the text. # # epub_uid = '' # A list of files that should not be packed into the epub file. epub_exclude_files = ['search.html']
30.488889
100
0.660532
import os import sys if os.environ.get('READTHEDOCS', None) == 'True': os.system('sphinx-apidoc -o api -T ../mitopipeline --separate') sys.path.insert(0, os.path.abspath(os.path.pardir)) project = u'mitopipeline' copyright = u'2019, Timothy Kuo' author = u'Timothy Kuo' version = u'' release = u'0.1' extensions = [ ] templates_path = ['_templates'] source_suffix = '.rst' master_doc = 'index' language = None exclude_patterns = [u'_build', 'Thumbs.db', '.DS_Store', 'README.rst'] pygments_style = 'sphinx' html_theme = 'sphinx_rtd_theme' html_static_path = ['_static'] # defined by theme itself. Builtin themes are using these templates by # default: ``['localtoc.html', 'relations.html', 'sourcelink.html', # 'searchbox.html']``. # # html_sidebars = {} # -- Options for HTMLHelp output --------------------------------------------- # Output file base name for HTML help builder. htmlhelp_basename = 'mitopipelinedoc' # -- Options for LaTeX output ------------------------------------------------ latex_elements = { # The paper size ('letterpaper' or 'a4paper'). # # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). # # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. # # 'preamble': '', # Latex figure (float) alignment # # 'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ (master_doc, 'mitopipeline.tex', u'mitopipeline Documentation', u'Timothy Kuo', 'manual'), ] # -- Options for manual page output ------------------------------------------ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ (master_doc, 'mitopipeline', u'mitopipeline Documentation', [author], 1) ] # -- Options for Texinfo output ---------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ (master_doc, 'mitopipeline', u'mitopipeline Documentation', author, 'mitopipeline', 'One line description of project.', 'Miscellaneous'), ] # -- Options for Epub output ------------------------------------------------- # Bibliographic Dublin Core info. epub_title = project # The unique identifier of the text. This can be a ISBN number # or the project homepage. # # epub_identifier = '' # A unique identification for the text. # # epub_uid = '' # A list of files that should not be packed into the epub file. epub_exclude_files = ['search.html']
true
true
f711da7d70558fb2381e412775be6dba3b75cf0b
852
py
Python
models/storemodel.py
vitahoang/learn-flash
8fad7cc66432140f8861aa2bdf23096dc6c0f23a
[ "MIT" ]
null
null
null
models/storemodel.py
vitahoang/learn-flash
8fad7cc66432140f8861aa2bdf23096dc6c0f23a
[ "MIT" ]
null
null
null
models/storemodel.py
vitahoang/learn-flash
8fad7cc66432140f8861aa2bdf23096dc6c0f23a
[ "MIT" ]
null
null
null
import sqlite3 from db import db class StoreModel(db.Model): __tablename__ = 'stores' id = db.Column(db.Integer, primary_key=True) name = db.Column(db.String(80)) items = db.relationship('ItemModel', lazy='dynamic') def __init__(self, _id, name): self.id = _id self.name = name def json_items(self): return {'id': self.id, 'name': self.name, 'item': [item.json() for item in self.items.all()]} def json(self): return {'id': self.id, 'name': self.name} @classmethod def find_by_name(cls, name): return cls.query.filter_by(name=name).first() # SELECT * from __tablename__ WHERE name=name LIMIT 1 def save_to_db(self): db.session.add(self) db.session.commit() def delete_from_db(self): db.session.delete(self) db.session.commit()
26.625
108
0.625587
import sqlite3 from db import db class StoreModel(db.Model): __tablename__ = 'stores' id = db.Column(db.Integer, primary_key=True) name = db.Column(db.String(80)) items = db.relationship('ItemModel', lazy='dynamic') def __init__(self, _id, name): self.id = _id self.name = name def json_items(self): return {'id': self.id, 'name': self.name, 'item': [item.json() for item in self.items.all()]} def json(self): return {'id': self.id, 'name': self.name} @classmethod def find_by_name(cls, name): return cls.query.filter_by(name=name).first() def save_to_db(self): db.session.add(self) db.session.commit() def delete_from_db(self): db.session.delete(self) db.session.commit()
true
true
f711dadecdd59577c69f15e06fce58b3de92566e
1,532
py
Python
nas4candle/nasapi/evaluator/process_data.py
scrlnas2019/nas4candle
318959424cc66819c816054a87bd1cb5d426e2e7
[ "BSD-3-Clause" ]
1
2021-01-22T04:03:00.000Z
2021-01-22T04:03:00.000Z
nas4candle/nasapi/evaluator/process_data.py
scrlnas2019/nas4candle
318959424cc66819c816054a87bd1cb5d426e2e7
[ "BSD-3-Clause" ]
1
2021-01-23T00:14:17.000Z
2021-01-23T00:14:17.000Z
nas4candle/nasapi/evaluator/process_data.py
scrlnas2019/nas4candle
318959424cc66819c816054a87bd1cb5d426e2e7
[ "BSD-3-Clause" ]
2
2019-11-27T04:42:00.000Z
2021-01-22T04:06:59.000Z
import sys import json import datetime from terminalplot import plot from balsam.launcher.dag import BalsamJob now = '_'.join(str(datetime.datetime.now(datetime.timezone.utc)).split(" ")) def max_list(l): rl = [l[0]] mx = l[0] for i in range(1, len(l)): mx = max(mx, l[i]) rl.append(mx) return rl def rm_none(l): return list(filter(lambda e: e != None, list(l))) def process_data(workflow): data = BalsamJob.objects.filter(workflow=workflow).values_list('data__reward', flat=True) print(f'data len: {len(data)}') raw_rewards = list(filter(lambda e: e != None, rm_none(data))) if len(raw_rewards) == 0: print(f'no rewards for : {workflow}') return -1 plot([i for i in range(len(raw_rewards))], raw_rewards) max_rewards = max_list(raw_rewards) plot([i for i in range(len(max_rewards))], max_rewards) data = BalsamJob.objects.filter(workflow=workflow).values_list('data__arch_seq', flat=True) arch_seq = rm_none(data) data = BalsamJob.objects.filter(workflow=workflow).values_list('data__id_worker', flat=True) w = rm_none(data) filename = f'wf-{workflow}_{now}' print(f'filename: {filename}') with open('data/'+filename+'.json', "w") as f: data = dict( fig=filename, raw_rewards=raw_rewards, max_rewards=max_rewards, arch_seq=arch_seq, id_worker=w ) json.dump(data, f) return 0 for wf in sys.argv[1:]: process_data(wf)
27.854545
96
0.633812
import sys import json import datetime from terminalplot import plot from balsam.launcher.dag import BalsamJob now = '_'.join(str(datetime.datetime.now(datetime.timezone.utc)).split(" ")) def max_list(l): rl = [l[0]] mx = l[0] for i in range(1, len(l)): mx = max(mx, l[i]) rl.append(mx) return rl def rm_none(l): return list(filter(lambda e: e != None, list(l))) def process_data(workflow): data = BalsamJob.objects.filter(workflow=workflow).values_list('data__reward', flat=True) print(f'data len: {len(data)}') raw_rewards = list(filter(lambda e: e != None, rm_none(data))) if len(raw_rewards) == 0: print(f'no rewards for : {workflow}') return -1 plot([i for i in range(len(raw_rewards))], raw_rewards) max_rewards = max_list(raw_rewards) plot([i for i in range(len(max_rewards))], max_rewards) data = BalsamJob.objects.filter(workflow=workflow).values_list('data__arch_seq', flat=True) arch_seq = rm_none(data) data = BalsamJob.objects.filter(workflow=workflow).values_list('data__id_worker', flat=True) w = rm_none(data) filename = f'wf-{workflow}_{now}' print(f'filename: {filename}') with open('data/'+filename+'.json', "w") as f: data = dict( fig=filename, raw_rewards=raw_rewards, max_rewards=max_rewards, arch_seq=arch_seq, id_worker=w ) json.dump(data, f) return 0 for wf in sys.argv[1:]: process_data(wf)
true
true
f711daec253f0a9a5300ad42651a97d52e285afa
559
py
Python
PythonAPI/carissma_project/lib/python3.5/site-packages/mpl_toolkits/axes_grid/__init__.py
AbdulHoffmann/carla_carissma
8d382769ffa02a6c61a22c57160285505f5ff0a4
[ "MIT" ]
445
2019-01-26T13:50:26.000Z
2022-03-18T05:17:38.000Z
venv/lib/python3.7/site-packages/mpl_toolkits/axes_grid/__init__.py
John1001Song/Big-Data-Robo-Adviser
9444dce96954c546333d5aecc92a06c3bfd19aa5
[ "MIT" ]
242
2019-01-29T15:48:27.000Z
2022-03-31T22:09:21.000Z
venv/lib/python3.7/site-packages/mpl_toolkits/axes_grid/__init__.py
John1001Song/Big-Data-Robo-Adviser
9444dce96954c546333d5aecc92a06c3bfd19aa5
[ "MIT" ]
64
2018-04-25T08:51:57.000Z
2022-01-29T14:13:57.000Z
from . import axes_size as Size from .axes_divider import Divider, SubplotDivider, LocatableAxes, \ make_axes_locatable from .axes_grid import Grid, ImageGrid, AxesGrid #from axes_divider import make_axes_locatable from matplotlib.cbook import warn_deprecated warn_deprecated(since='2.1', name='mpl_toolkits.axes_grid', alternative='mpl_toolkits.axes_grid1 and' ' mpl_toolkits.axisartist, which provide' ' the same functionality', obj_type='module')
43
69
0.665474
from . import axes_size as Size from .axes_divider import Divider, SubplotDivider, LocatableAxes, \ make_axes_locatable from .axes_grid import Grid, ImageGrid, AxesGrid from matplotlib.cbook import warn_deprecated warn_deprecated(since='2.1', name='mpl_toolkits.axes_grid', alternative='mpl_toolkits.axes_grid1 and' ' mpl_toolkits.axisartist, which provide' ' the same functionality', obj_type='module')
true
true
f711dc7f11c9c664766121e41afbc2c4dcf3ea5a
742
py
Python
python/day005.py
jrrickerson/adventofcode2019
978f70f7808539b2a8ea84336f9743c83f9bf49f
[ "MIT" ]
1
2020-02-07T03:04:01.000Z
2020-02-07T03:04:01.000Z
python/day005.py
jrrickerson/adventofcode2019
978f70f7808539b2a8ea84336f9743c83f9bf49f
[ "MIT" ]
null
null
null
python/day005.py
jrrickerson/adventofcode2019
978f70f7808539b2a8ea84336f9743c83f9bf49f
[ "MIT" ]
1
2020-02-07T03:04:03.000Z
2020-02-07T03:04:03.000Z
import intcode INPUT_FILE = 'day005.in' def part1(filename): source = intcode.load_from_file(filename) i, o = [], [] # AC Unit input value i.append(1) modified = intcode.run_intcode(source, i, o) return modified[0], i, o def part2(filename): source = intcode.load_from_file(filename) i, o = [], [] # Thermal Radiator Controller i.append(5) modified = intcode.run_intcode(source, i, o) return modified[0], i, o if __name__ == '__main__': result_code, instream, outstream = part1(INPUT_FILE) print('Part1: ', result_code, 'IN', instream, 'OUT', outstream) result_code, instream, outstream = part2(INPUT_FILE) print('Part2: ', result_code, 'IN', instream, 'OUT', outstream)
25.586207
67
0.654987
import intcode INPUT_FILE = 'day005.in' def part1(filename): source = intcode.load_from_file(filename) i, o = [], [] i.append(1) modified = intcode.run_intcode(source, i, o) return modified[0], i, o def part2(filename): source = intcode.load_from_file(filename) i, o = [], [] i.append(5) modified = intcode.run_intcode(source, i, o) return modified[0], i, o if __name__ == '__main__': result_code, instream, outstream = part1(INPUT_FILE) print('Part1: ', result_code, 'IN', instream, 'OUT', outstream) result_code, instream, outstream = part2(INPUT_FILE) print('Part2: ', result_code, 'IN', instream, 'OUT', outstream)
true
true
f711dca3e3f0da5ab4a025c63c872b7d0c7f2c3b
3,282
py
Python
armyguys/aws/subnet.py
jtpaasch/armyguys
4f138420ba42d6d9864176a87c709af014034429
[ "MIT" ]
null
null
null
armyguys/aws/subnet.py
jtpaasch/armyguys
4f138420ba42d6d9864176a87c709af014034429
[ "MIT" ]
null
null
null
armyguys/aws/subnet.py
jtpaasch/armyguys
4f138420ba42d6d9864176a87c709af014034429
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- """Utilities for working with VPC subnets.""" from . import client as boto3client def create(profile, cidr_block, vpc, availability_zone=None): """Create a subnet in a VPC. Args: profile A profile to connect to AWS with. cidr_block The network range for the subnet, in CIDR notation. For instance, "10.0.0.0/24". vpc The ID of the VPC you want to create the subnet in. availability_zone The name of the availability zone to create the subnet in. If None, Amazon will pick one for you. Returns: The JSON response returned by boto3. """ client = boto3client.get("ec2", profile) params = {} params["CidrBlock"] = cidr_block params["VpcId"] = vpc if availability_zone: params["AvailabilityZone"] = availability_zone return client.create_subnet(**params) def delete(profile, subnet): """Delete a subnet from a VPC. Args: profile A profile to connect to AWS with. subnet The ID of the subnet you want to delete. """ client = boto3client.get("ec2", profile) params = {} params["SubnetId"] = subnet return client.delete_subnet(**params) def get(profile, filters=None): """Get a list of all subnets. Args: profile A profile to connect to AWS with. filters Filters to apply to the request. Returns: The JSON response returned by boto3. """ client = boto3client.get("ec2", profile) params = {} if filters: params["Filters"] = filters return client.describe_subnets(**params) def enable_public_ips(profile, subnet): """Set the subnet to give instances public IPs by default. Args: profile A profile to connect to AWS with. subnet The ID of the subnet. Returns: The JSON response returned by boto3. """ client = boto3client.get("ec2", profile) params = {} params["SubnetId"] = subnet params["MapPublicIpOnLaunch"] = {"Value": True} return client.modify_subnet_attribute(**params) def disable_public_ips(profile, subnet): """Set the subnet not to give instances public IPs by default. Args: profile A profile to connect to AWS with. subnet The ID of the subnet. Returns: The JSON response returned by boto3. """ client = boto3client.get("ec2", profile) params = {} params["SubnetId"] = subnet params["MapPublicIpOnLaunch"] = {"Value": False} return client.modify_subnet_attribute(**params) def tag(profile, subnet, key, value): """Add a tag to a subnet. Args: profile A profile to connect to AWS with. subnet The ID of the subnet you want to tag. key The key/name of the tag. value The value of the tag. Returns: The response returned by boto3. """ client = boto3client.get("ec2", profile) params = {} params["Resources"] = [subnet] params["Tags"] = [{"Key": key, "Value": value}] return client.create_tags(**params)
21.88
70
0.595064
from . import client as boto3client def create(profile, cidr_block, vpc, availability_zone=None): client = boto3client.get("ec2", profile) params = {} params["CidrBlock"] = cidr_block params["VpcId"] = vpc if availability_zone: params["AvailabilityZone"] = availability_zone return client.create_subnet(**params) def delete(profile, subnet): client = boto3client.get("ec2", profile) params = {} params["SubnetId"] = subnet return client.delete_subnet(**params) def get(profile, filters=None): client = boto3client.get("ec2", profile) params = {} if filters: params["Filters"] = filters return client.describe_subnets(**params) def enable_public_ips(profile, subnet): client = boto3client.get("ec2", profile) params = {} params["SubnetId"] = subnet params["MapPublicIpOnLaunch"] = {"Value": True} return client.modify_subnet_attribute(**params) def disable_public_ips(profile, subnet): client = boto3client.get("ec2", profile) params = {} params["SubnetId"] = subnet params["MapPublicIpOnLaunch"] = {"Value": False} return client.modify_subnet_attribute(**params) def tag(profile, subnet, key, value): client = boto3client.get("ec2", profile) params = {} params["Resources"] = [subnet] params["Tags"] = [{"Key": key, "Value": value}] return client.create_tags(**params)
true
true
f711dd398300cfead29056966de347f4614f3052
20,794
py
Python
tools/gn.py
fossabot/sdk
938c939554d00c87d4fd1d1647d965e20b5abe79
[ "BSD-Source-Code" ]
1
2021-05-08T18:02:11.000Z
2021-05-08T18:02:11.000Z
tools/gn.py
fossabot/sdk
938c939554d00c87d4fd1d1647d965e20b5abe79
[ "BSD-Source-Code" ]
null
null
null
tools/gn.py
fossabot/sdk
938c939554d00c87d4fd1d1647d965e20b5abe79
[ "BSD-Source-Code" ]
null
null
null
#!/usr/bin/env python3 # Copyright 2016 The Dart project authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import argparse import os import subprocess import sys import time import utils HOST_OS = utils.GuessOS() HOST_ARCH = utils.GuessArchitecture() SCRIPT_DIR = os.path.dirname(sys.argv[0]) DART_ROOT = os.path.realpath(os.path.join(SCRIPT_DIR, '..')) AVAILABLE_ARCHS = utils.ARCH_FAMILY.keys() GN = os.path.join(DART_ROOT, 'buildtools', 'gn') # Environment variables for default settings. DART_USE_TOOLCHAIN = "DART_USE_TOOLCHAIN" # Use instread of --toolchain-prefix DART_USE_SYSROOT = "DART_USE_SYSROOT" # Use instead of --target-sysroot DART_USE_CRASHPAD = "DART_USE_CRASHPAD" # Use instead of --use-crashpad # use instead of --platform-sdk DART_MAKE_PLATFORM_SDK = "DART_MAKE_PLATFORM_SDK" DART_GN_ARGS = "DART_GN_ARGS" def ToolchainPrefix(args): if args.toolchain_prefix: return args.toolchain_prefix return os.environ.get(DART_USE_TOOLCHAIN) def TargetSysroot(args): if args.target_sysroot: return args.target_sysroot return os.environ.get(DART_USE_SYSROOT) def MakePlatformSDK(): return DART_MAKE_PLATFORM_SDK in os.environ def GetGNArgs(args): if args.gn_args != None: return args.gn_args args = os.environ.get(DART_GN_ARGS) or "" return args.split() def GetOutDir(mode, arch, target_os, sanitizer): return utils.GetBuildRoot(HOST_OS, mode, arch, target_os, sanitizer) def ToCommandLine(gn_args): def merge(key, value): if type(value) is bool: return '%s=%s' % (key, 'true' if value else 'false') elif type(value) is int: return '%s=%d' % (key, value) return '%s="%s"' % (key, value) return [merge(x, y) for x, y in gn_args.items()] def HostCpuForArch(arch): if arch in ['ia32', 'arm', 'armv6', 'simarm', 'simarmv6', 'simarm_x64']: return 'x86' if arch in [ 'x64', 'arm64', 'simarm64', 'arm_x64', 'x64c', 'arm64c', 'simarm64c' ]: return 'x64' # The C compiler's target. def TargetCpuForArch(arch, target_os): if arch in ['ia32', 'simarm', 'simarmv6']: return 'x86' if arch in ['x64', 'simarm64', 'simarm_x64', 'x64c', 'simarm64c']: return 'x64' if arch == 'arm_x64': return 'arm' if arch == 'arm64c': return 'arm64' return arch # The Dart compiler's target. def DartTargetCpuForArch(arch): if arch in ['ia32']: return 'ia32' if arch in ['x64', 'x64c']: return 'x64' if arch in ['arm', 'simarm', 'simarm_x64', 'arm_x64']: return 'arm' if arch in ['armv6', 'simarmv6']: return 'armv6' if arch in ['arm64', 'simarm64', 'arm64c', 'simarm64c']: return 'arm64' return arch def IsCompressedPointerArch(arch): return arch in ['x64c', 'arm64c', 'simarm64c'] def HostOsForGn(host_os): if host_os.startswith('macos'): return 'mac' if host_os.startswith('win'): return 'win' return host_os # Where string_map is formatted as X1=Y1,X2=Y2 etc. # If key is X1, returns Y1. def ParseStringMap(key, string_map): for m in string_map.split(','): l = m.split('=') if l[0] == key: return l[1] return None def UseSysroot(args, gn_args): # Don't try to use a Linux sysroot if we aren't on Linux. if gn_args['target_os'] != 'linux' and HOST_OS != 'linux': return False # Don't use the sysroot if we're given another sysroot. if TargetSysroot(args): return False # Our Debian Jesse sysroot doesn't work with GCC 9 if not gn_args['is_clang']: return False # Our Debian Jesse sysroot has incorrect annotations on realloc. if gn_args['is_ubsan']: return False # Otherwise use the sysroot. 
return True def ToGnArgs(args, mode, arch, target_os, sanitizer, verify_sdk_hash): gn_args = {} host_os = HostOsForGn(HOST_OS) if target_os == 'host': gn_args['target_os'] = host_os else: gn_args['target_os'] = target_os gn_args['host_cpu'] = HostCpuForArch(arch) gn_args['target_cpu'] = TargetCpuForArch(arch, target_os) gn_args['dart_target_arch'] = DartTargetCpuForArch(arch) gn_args['dart_use_compressed_pointers'] = IsCompressedPointerArch(arch) # Configure Crashpad library if it is used. gn_args['dart_use_crashpad'] = (args.use_crashpad or DART_USE_CRASHPAD in os.environ) if gn_args['dart_use_crashpad']: # Tell Crashpad's BUILD files which checkout layout to use. gn_args['crashpad_dependencies'] = 'dart' if DartTargetCpuForArch(arch) != HostCpuForArch(arch): # Training an app-jit snapshot under a simulator is slow. Use script # snapshots instead. gn_args['dart_snapshot_kind'] = 'kernel' else: gn_args['dart_snapshot_kind'] = 'app-jit' # We only want the fallback root certs in the standalone VM on # Linux and Windows. if gn_args['target_os'] in ['linux', 'win']: gn_args['dart_use_fallback_root_certificates'] = True # Use tcmalloc only when targeting Linux and when not using ASAN. gn_args['dart_use_tcmalloc'] = ((gn_args['target_os'] == 'linux') and sanitizer == 'none') if gn_args['target_os'] == 'linux': if gn_args['target_cpu'] == 'arm': # Default to -mfloat-abi=hard and -mfpu=neon for arm on Linux as we're # specifying a gnueabihf compiler in //build/toolchain/linux/BUILD.gn. floatabi = 'hard' if args.arm_float_abi == '' else args.arm_float_abi gn_args['arm_version'] = 7 gn_args['arm_float_abi'] = floatabi gn_args['arm_use_neon'] = True elif gn_args['target_cpu'] == 'armv6': floatabi = 'softfp' if args.arm_float_abi == '' else args.arm_float_abi gn_args['target_cpu'] = 'arm' gn_args['arm_version'] = 6 gn_args['arm_float_abi'] = floatabi gn_args['is_debug'] = mode == 'debug' gn_args['is_release'] = mode == 'release' gn_args['is_product'] = mode == 'product' gn_args['dart_debug'] = mode == 'debug' # This setting is only meaningful for Flutter. Standalone builds of the VM # should leave this set to 'develop', which causes the build to defer to # 'is_debug', 'is_release' and 'is_product'. if mode == 'product': gn_args['dart_runtime_mode'] = 'release' else: gn_args['dart_runtime_mode'] = 'develop' gn_args['exclude_kernel_service'] = args.exclude_kernel_service gn_args['is_clang'] = args.clang enable_code_coverage = args.code_coverage and gn_args['is_clang'] gn_args['dart_vm_code_coverage'] = enable_code_coverage gn_args['is_asan'] = sanitizer == 'asan' gn_args['is_lsan'] = sanitizer == 'lsan' gn_args['is_msan'] = sanitizer == 'msan' gn_args['is_tsan'] = sanitizer == 'tsan' gn_args['is_ubsan'] = sanitizer == 'ubsan' gn_args['is_qemu'] = args.use_qemu if not args.platform_sdk: gn_args['dart_platform_sdk'] = args.platform_sdk # We don't support stripping on Windows if host_os != 'win': gn_args['dart_stripped_binary'] = 'exe.stripped/dart' gn_args['dart_precompiled_runtime_stripped_binary'] = ( 'exe.stripped/dart_precompiled_runtime_product') gn_args['gen_snapshot_stripped_binary'] = ( 'exe.stripped/gen_snapshot_product') # Setup the user-defined sysroot. 
if UseSysroot(args, gn_args): gn_args['dart_use_debian_sysroot'] = True else: sysroot = TargetSysroot(args) if sysroot: gn_args['target_sysroot'] = ParseStringMap(arch, sysroot) toolchain = ToolchainPrefix(args) if toolchain: gn_args['toolchain_prefix'] = ParseStringMap(arch, toolchain) goma_dir = os.environ.get('GOMA_DIR') # Search for goma in depot_tools in path goma_depot_tools_dir = None for path in os.environ.get('PATH', '').split(os.pathsep): if os.path.basename(path) == 'depot_tools': cipd_bin = os.path.join(path, '.cipd_bin') if os.path.isfile(os.path.join(cipd_bin, 'gomacc')): goma_depot_tools_dir = cipd_bin break # Otherwise use goma from home directory. # TODO(whesse): Remove support for goma installed in home directory. # Goma will only be distributed through depot_tools. goma_home_dir = os.path.join(os.getenv('HOME', ''), 'goma') if args.goma and goma_dir: gn_args['use_goma'] = True gn_args['goma_dir'] = goma_dir elif args.goma and goma_depot_tools_dir: gn_args['use_goma'] = True gn_args['goma_dir'] = goma_depot_tools_dir elif args.goma and os.path.exists(goma_home_dir): gn_args['use_goma'] = True gn_args['goma_dir'] = goma_home_dir else: gn_args['use_goma'] = False gn_args['goma_dir'] = None if gn_args['target_os'] == 'mac' and gn_args['use_goma']: gn_args['mac_use_goma_rbe'] = True # Code coverage requires -O0 to be set. if enable_code_coverage: gn_args['dart_debug_optimization_level'] = 0 gn_args['debug_optimization_level'] = 0 elif args.debug_opt_level: gn_args['dart_debug_optimization_level'] = args.debug_opt_level gn_args['debug_optimization_level'] = args.debug_opt_level gn_args['verify_sdk_hash'] = verify_sdk_hash return gn_args def ProcessOsOption(os_name): if os_name == 'host': return HOST_OS return os_name def ProcessOptions(args): if args.arch == 'all': args.arch = 'ia32,x64,simarm,simarm64,x64c,simarm64c' if args.mode == 'all': args.mode = 'debug,release,product' if args.os == 'all': args.os = 'host,android,fuchsia' if args.sanitizer == 'all': args.sanitizer = 'none,asan,lsan,msan,tsan,ubsan' args.mode = args.mode.split(',') args.arch = args.arch.split(',') args.os = args.os.split(',') args.sanitizer = args.sanitizer.split(',') for mode in args.mode: if not mode in ['debug', 'release', 'product']: print("Unknown mode %s" % mode) return False for i, arch in enumerate(args.arch): if not arch in AVAILABLE_ARCHS: # Normalise to lower case form to make it less case-picky. arch_lower = arch.lower() if arch_lower in AVAILABLE_ARCHS: args.arch[i] = arch_lower continue print("Unknown arch %s" % arch) return False oses = [ProcessOsOption(os_name) for os_name in args.os] for os_name in oses: if not os_name in [ 'android', 'freebsd', 'linux', 'macos', 'win32', 'fuchsia' ]: print("Unknown os %s" % os_name) return False if os_name == 'android': if not HOST_OS in ['linux', 'macos']: print( "Cross-compilation to %s is not supported on host os %s." % (os_name, HOST_OS)) return False if not arch in [ 'ia32', 'x64', 'arm', 'arm_x64', 'armv6', 'arm64', 'x64c', 'arm64c' ]: print( "Cross-compilation to %s is not supported for architecture %s." % (os_name, arch)) return False elif os_name == 'fuchsia': if HOST_OS != 'linux': print( "Cross-compilation to %s is not supported on host os %s." % (os_name, HOST_OS)) return False if not arch in ['x64', 'arm64', 'x64c', 'arm64c']: print( "Cross-compilation to %s is not supported for architecture %s." 
% (os_name, arch)) return False elif os_name != HOST_OS: print("Unsupported target os %s" % os_name) return False if HOST_OS != 'win' and args.use_crashpad: print("Crashpad is only supported on Windows") return False return True def os_has_ide(host_os): return host_os.startswith('win') or host_os.startswith('mac') def ide_switch(host_os): if host_os.startswith('win'): return '--ide=vs' elif host_os.startswith('mac'): return '--ide=xcode' else: return '--ide=json' def AddCommonGnOptionArgs(parser): """Adds arguments that will change the default GN arguments.""" parser.add_argument('--goma', help='Use goma', action='store_true') parser.add_argument('--no-goma', help='Disable goma', dest='goma', action='store_false') parser.set_defaults(goma=True) parser.add_argument('--verify-sdk-hash', help='Enable SDK hash checks (default)', dest='verify_sdk_hash', action='store_true') parser.add_argument('-nvh', '--no-verify-sdk-hash', help='Disable SDK hash checks', dest='verify_sdk_hash', action='store_false') parser.set_defaults(verify_sdk_hash=True) parser.add_argument('--clang', help='Use Clang', action='store_true') parser.add_argument('--no-clang', help='Disable Clang', dest='clang', action='store_false') parser.set_defaults(clang=True) parser.add_argument( '--platform-sdk', help='Directs the create_sdk target to create a smaller "Platform" SDK', default=MakePlatformSDK(), action='store_true') parser.add_argument('--use-crashpad', default=False, dest='use_crashpad', action='store_true') parser.add_argument('--use-qemu', default=False, dest='use_qemu', action='store_true') parser.add_argument('--exclude-kernel-service', help='Exclude the kernel service.', default=False, dest='exclude_kernel_service', action='store_true') parser.add_argument('--arm-float-abi', type=str, help='The ARM float ABI (soft, softfp, hard)', metavar='[soft,softfp,hard]', default='') parser.add_argument('--code-coverage', help='Enable code coverage for the standalone VM', default=False, dest="code_coverage", action='store_true') parser.add_argument('--debug-opt-level', '-d', help='The optimization level to use for debug builds', type=str) parser.add_argument('--gn-args', help='Set extra GN args', dest='gn_args', action='append') parser.add_argument( '--toolchain-prefix', '-t', type=str, help='Comma-separated list of arch=/path/to/toolchain-prefix mappings') parser.add_argument('--ide', help='Generate an IDE file.', default=os_has_ide(HOST_OS), action='store_true') parser.add_argument('--export-compile-commands', help='Export compile_commands.json database file.', default=False, action='store_true') parser.add_argument( '--target-sysroot', '-s', type=str, help='Comma-separated list of arch=/path/to/sysroot mappings') def AddCommonConfigurationArgs(parser): """Adds arguments that influence which configuration will be built.""" parser.add_argument("-a", "--arch", type=str, help='Target architectures (comma-separated).', metavar='[all,' + ','.join(AVAILABLE_ARCHS) + ']', default=utils.GuessArchitecture()) parser.add_argument('--mode', '-m', type=str, help='Build variants (comma-separated).', metavar='[all,debug,release,product]', default='debug') parser.add_argument('--os', type=str, help='Target OSs (comma-separated).', metavar='[all,host,android,fuchsia]', default='host') parser.add_argument('--sanitizer', type=str, help='Build variants (comma-separated).', metavar='[all,none,asan,lsan,msan,tsan,ubsan]', default='none') def AddOtherArgs(parser): """Adds miscellaneous arguments to the parser.""" parser.add_argument("-v", 
"--verbose", help='Verbose output.', default=False, action="store_true") def parse_args(args): args = args[1:] parser = argparse.ArgumentParser( description='A script to run `gn gen`.', formatter_class=argparse.ArgumentDefaultsHelpFormatter) config_group = parser.add_argument_group('Configuration Related Arguments') AddCommonConfigurationArgs(config_group) gn_group = parser.add_argument_group('GN Related Arguments') AddCommonGnOptionArgs(gn_group) other_group = parser.add_argument_group('Other Arguments') AddOtherArgs(other_group) options = parser.parse_args(args) if not ProcessOptions(options): parser.print_help() return None return options def BuildGnCommand(args, mode, arch, target_os, sanitizer, out_dir): gn = os.path.join(DART_ROOT, 'buildtools', 'gn.exe' if utils.IsWindows() else 'gn') if not os.path.isfile(gn): raise Exception("Couldn't find the gn binary at path: " + gn) # TODO(infra): Re-enable --check. Many targets fail to use # public_deps to re-expose header files to their dependents. # See dartbug.com/32364 command = [gn, 'gen', out_dir] gn_args = ToCommandLine( ToGnArgs(args, mode, arch, target_os, sanitizer, args.verify_sdk_hash)) gn_args += GetGNArgs(args) if args.ide: command.append(ide_switch(HOST_OS)) if args.export_compile_commands: command.append('--export-compile-commands') command.append('--args=%s' % ' '.join(gn_args)) return command def RunGnOnConfiguredConfigurations(args): commands = [] for target_os in args.os: for mode in args.mode: for arch in args.arch: for sanitizer in args.sanitizer: out_dir = GetOutDir(mode, arch, target_os, sanitizer) commands.append( BuildGnCommand(args, mode, arch, target_os, sanitizer, out_dir)) if args.verbose: print("gn gen --check in %s" % out_dir) active_commands = [] def cleanup(command): print("Command failed: " + ' '.join(command)) for (_, process) in active_commands: process.terminate() for command in commands: try: process = subprocess.Popen(command, cwd=DART_ROOT) active_commands.append([command, process]) except Exception as e: print('Error: %s' % e) cleanup(command) return 1 while active_commands: time.sleep(0.1) for active_command in active_commands: (command, process) = active_command if process.poll() is not None: active_commands.remove(active_command) if process.returncode != 0: cleanup(command) return 1 return 0 def Main(argv): starttime = time.time() args = parse_args(argv) if args is None: return 1 result = RunGnOnConfiguredConfigurations(args) if args.verbose: endtime = time.time() print("GN Time: %.3f seconds" % (endtime - starttime)) return result if __name__ == '__main__': sys.exit(Main(sys.argv))
34.889262
83
0.58387
import argparse import os import subprocess import sys import time import utils HOST_OS = utils.GuessOS() HOST_ARCH = utils.GuessArchitecture() SCRIPT_DIR = os.path.dirname(sys.argv[0]) DART_ROOT = os.path.realpath(os.path.join(SCRIPT_DIR, '..')) AVAILABLE_ARCHS = utils.ARCH_FAMILY.keys() GN = os.path.join(DART_ROOT, 'buildtools', 'gn') DART_USE_TOOLCHAIN = "DART_USE_TOOLCHAIN" DART_USE_SYSROOT = "DART_USE_SYSROOT" DART_USE_CRASHPAD = "DART_USE_CRASHPAD" DART_MAKE_PLATFORM_SDK = "DART_MAKE_PLATFORM_SDK" DART_GN_ARGS = "DART_GN_ARGS" def ToolchainPrefix(args): if args.toolchain_prefix: return args.toolchain_prefix return os.environ.get(DART_USE_TOOLCHAIN) def TargetSysroot(args): if args.target_sysroot: return args.target_sysroot return os.environ.get(DART_USE_SYSROOT) def MakePlatformSDK(): return DART_MAKE_PLATFORM_SDK in os.environ def GetGNArgs(args): if args.gn_args != None: return args.gn_args args = os.environ.get(DART_GN_ARGS) or "" return args.split() def GetOutDir(mode, arch, target_os, sanitizer): return utils.GetBuildRoot(HOST_OS, mode, arch, target_os, sanitizer) def ToCommandLine(gn_args): def merge(key, value): if type(value) is bool: return '%s=%s' % (key, 'true' if value else 'false') elif type(value) is int: return '%s=%d' % (key, value) return '%s="%s"' % (key, value) return [merge(x, y) for x, y in gn_args.items()] def HostCpuForArch(arch): if arch in ['ia32', 'arm', 'armv6', 'simarm', 'simarmv6', 'simarm_x64']: return 'x86' if arch in [ 'x64', 'arm64', 'simarm64', 'arm_x64', 'x64c', 'arm64c', 'simarm64c' ]: return 'x64' def TargetCpuForArch(arch, target_os): if arch in ['ia32', 'simarm', 'simarmv6']: return 'x86' if arch in ['x64', 'simarm64', 'simarm_x64', 'x64c', 'simarm64c']: return 'x64' if arch == 'arm_x64': return 'arm' if arch == 'arm64c': return 'arm64' return arch # The Dart compiler's target. def DartTargetCpuForArch(arch): if arch in ['ia32']: return 'ia32' if arch in ['x64', 'x64c']: return 'x64' if arch in ['arm', 'simarm', 'simarm_x64', 'arm_x64']: return 'arm' if arch in ['armv6', 'simarmv6']: return 'armv6' if arch in ['arm64', 'simarm64', 'arm64c', 'simarm64c']: return 'arm64' return arch def IsCompressedPointerArch(arch): return arch in ['x64c', 'arm64c', 'simarm64c'] def HostOsForGn(host_os): if host_os.startswith('macos'): return 'mac' if host_os.startswith('win'): return 'win' return host_os def ParseStringMap(key, string_map): for m in string_map.split(','): l = m.split('=') if l[0] == key: return l[1] return None def UseSysroot(args, gn_args): if gn_args['target_os'] != 'linux' and HOST_OS != 'linux': return False if TargetSysroot(args): return False if not gn_args['is_clang']: return False # Our Debian Jesse sysroot has incorrect annotations on realloc. if gn_args['is_ubsan']: return False # Otherwise use the sysroot. return True def ToGnArgs(args, mode, arch, target_os, sanitizer, verify_sdk_hash): gn_args = {} host_os = HostOsForGn(HOST_OS) if target_os == 'host': gn_args['target_os'] = host_os else: gn_args['target_os'] = target_os gn_args['host_cpu'] = HostCpuForArch(arch) gn_args['target_cpu'] = TargetCpuForArch(arch, target_os) gn_args['dart_target_arch'] = DartTargetCpuForArch(arch) gn_args['dart_use_compressed_pointers'] = IsCompressedPointerArch(arch) # Configure Crashpad library if it is used. gn_args['dart_use_crashpad'] = (args.use_crashpad or DART_USE_CRASHPAD in os.environ) if gn_args['dart_use_crashpad']: # Tell Crashpad's BUILD files which checkout layout to use. 
gn_args['crashpad_dependencies'] = 'dart' if DartTargetCpuForArch(arch) != HostCpuForArch(arch): gn_args['dart_snapshot_kind'] = 'kernel' else: gn_args['dart_snapshot_kind'] = 'app-jit' if gn_args['target_os'] in ['linux', 'win']: gn_args['dart_use_fallback_root_certificates'] = True gn_args['dart_use_tcmalloc'] = ((gn_args['target_os'] == 'linux') and sanitizer == 'none') if gn_args['target_os'] == 'linux': if gn_args['target_cpu'] == 'arm': # specifying a gnueabihf compiler in //build/toolchain/linux/BUILD.gn. floatabi = 'hard' if args.arm_float_abi == '' else args.arm_float_abi gn_args['arm_version'] = 7 gn_args['arm_float_abi'] = floatabi gn_args['arm_use_neon'] = True elif gn_args['target_cpu'] == 'armv6': floatabi = 'softfp' if args.arm_float_abi == '' else args.arm_float_abi gn_args['target_cpu'] = 'arm' gn_args['arm_version'] = 6 gn_args['arm_float_abi'] = floatabi gn_args['is_debug'] = mode == 'debug' gn_args['is_release'] = mode == 'release' gn_args['is_product'] = mode == 'product' gn_args['dart_debug'] = mode == 'debug' # This setting is only meaningful for Flutter. Standalone builds of the VM # should leave this set to 'develop', which causes the build to defer to # 'is_debug', 'is_release' and 'is_product'. if mode == 'product': gn_args['dart_runtime_mode'] = 'release' else: gn_args['dart_runtime_mode'] = 'develop' gn_args['exclude_kernel_service'] = args.exclude_kernel_service gn_args['is_clang'] = args.clang enable_code_coverage = args.code_coverage and gn_args['is_clang'] gn_args['dart_vm_code_coverage'] = enable_code_coverage gn_args['is_asan'] = sanitizer == 'asan' gn_args['is_lsan'] = sanitizer == 'lsan' gn_args['is_msan'] = sanitizer == 'msan' gn_args['is_tsan'] = sanitizer == 'tsan' gn_args['is_ubsan'] = sanitizer == 'ubsan' gn_args['is_qemu'] = args.use_qemu if not args.platform_sdk: gn_args['dart_platform_sdk'] = args.platform_sdk # We don't support stripping on Windows if host_os != 'win': gn_args['dart_stripped_binary'] = 'exe.stripped/dart' gn_args['dart_precompiled_runtime_stripped_binary'] = ( 'exe.stripped/dart_precompiled_runtime_product') gn_args['gen_snapshot_stripped_binary'] = ( 'exe.stripped/gen_snapshot_product') if UseSysroot(args, gn_args): gn_args['dart_use_debian_sysroot'] = True else: sysroot = TargetSysroot(args) if sysroot: gn_args['target_sysroot'] = ParseStringMap(arch, sysroot) toolchain = ToolchainPrefix(args) if toolchain: gn_args['toolchain_prefix'] = ParseStringMap(arch, toolchain) goma_dir = os.environ.get('GOMA_DIR') goma_depot_tools_dir = None for path in os.environ.get('PATH', '').split(os.pathsep): if os.path.basename(path) == 'depot_tools': cipd_bin = os.path.join(path, '.cipd_bin') if os.path.isfile(os.path.join(cipd_bin, 'gomacc')): goma_depot_tools_dir = cipd_bin break goma_home_dir = os.path.join(os.getenv('HOME', ''), 'goma') if args.goma and goma_dir: gn_args['use_goma'] = True gn_args['goma_dir'] = goma_dir elif args.goma and goma_depot_tools_dir: gn_args['use_goma'] = True gn_args['goma_dir'] = goma_depot_tools_dir elif args.goma and os.path.exists(goma_home_dir): gn_args['use_goma'] = True gn_args['goma_dir'] = goma_home_dir else: gn_args['use_goma'] = False gn_args['goma_dir'] = None if gn_args['target_os'] == 'mac' and gn_args['use_goma']: gn_args['mac_use_goma_rbe'] = True if enable_code_coverage: gn_args['dart_debug_optimization_level'] = 0 gn_args['debug_optimization_level'] = 0 elif args.debug_opt_level: gn_args['dart_debug_optimization_level'] = args.debug_opt_level gn_args['debug_optimization_level'] = 
args.debug_opt_level gn_args['verify_sdk_hash'] = verify_sdk_hash return gn_args def ProcessOsOption(os_name): if os_name == 'host': return HOST_OS return os_name def ProcessOptions(args): if args.arch == 'all': args.arch = 'ia32,x64,simarm,simarm64,x64c,simarm64c' if args.mode == 'all': args.mode = 'debug,release,product' if args.os == 'all': args.os = 'host,android,fuchsia' if args.sanitizer == 'all': args.sanitizer = 'none,asan,lsan,msan,tsan,ubsan' args.mode = args.mode.split(',') args.arch = args.arch.split(',') args.os = args.os.split(',') args.sanitizer = args.sanitizer.split(',') for mode in args.mode: if not mode in ['debug', 'release', 'product']: print("Unknown mode %s" % mode) return False for i, arch in enumerate(args.arch): if not arch in AVAILABLE_ARCHS: arch_lower = arch.lower() if arch_lower in AVAILABLE_ARCHS: args.arch[i] = arch_lower continue print("Unknown arch %s" % arch) return False oses = [ProcessOsOption(os_name) for os_name in args.os] for os_name in oses: if not os_name in [ 'android', 'freebsd', 'linux', 'macos', 'win32', 'fuchsia' ]: print("Unknown os %s" % os_name) return False if os_name == 'android': if not HOST_OS in ['linux', 'macos']: print( "Cross-compilation to %s is not supported on host os %s." % (os_name, HOST_OS)) return False if not arch in [ 'ia32', 'x64', 'arm', 'arm_x64', 'armv6', 'arm64', 'x64c', 'arm64c' ]: print( "Cross-compilation to %s is not supported for architecture %s." % (os_name, arch)) return False elif os_name == 'fuchsia': if HOST_OS != 'linux': print( "Cross-compilation to %s is not supported on host os %s." % (os_name, HOST_OS)) return False if not arch in ['x64', 'arm64', 'x64c', 'arm64c']: print( "Cross-compilation to %s is not supported for architecture %s." % (os_name, arch)) return False elif os_name != HOST_OS: print("Unsupported target os %s" % os_name) return False if HOST_OS != 'win' and args.use_crashpad: print("Crashpad is only supported on Windows") return False return True def os_has_ide(host_os): return host_os.startswith('win') or host_os.startswith('mac') def ide_switch(host_os): if host_os.startswith('win'): return '--ide=vs' elif host_os.startswith('mac'): return '--ide=xcode' else: return '--ide=json' def AddCommonGnOptionArgs(parser): parser.add_argument('--goma', help='Use goma', action='store_true') parser.add_argument('--no-goma', help='Disable goma', dest='goma', action='store_false') parser.set_defaults(goma=True) parser.add_argument('--verify-sdk-hash', help='Enable SDK hash checks (default)', dest='verify_sdk_hash', action='store_true') parser.add_argument('-nvh', '--no-verify-sdk-hash', help='Disable SDK hash checks', dest='verify_sdk_hash', action='store_false') parser.set_defaults(verify_sdk_hash=True) parser.add_argument('--clang', help='Use Clang', action='store_true') parser.add_argument('--no-clang', help='Disable Clang', dest='clang', action='store_false') parser.set_defaults(clang=True) parser.add_argument( '--platform-sdk', help='Directs the create_sdk target to create a smaller "Platform" SDK', default=MakePlatformSDK(), action='store_true') parser.add_argument('--use-crashpad', default=False, dest='use_crashpad', action='store_true') parser.add_argument('--use-qemu', default=False, dest='use_qemu', action='store_true') parser.add_argument('--exclude-kernel-service', help='Exclude the kernel service.', default=False, dest='exclude_kernel_service', action='store_true') parser.add_argument('--arm-float-abi', type=str, help='The ARM float ABI (soft, softfp, hard)', metavar='[soft,softfp,hard]', 
default='') parser.add_argument('--code-coverage', help='Enable code coverage for the standalone VM', default=False, dest="code_coverage", action='store_true') parser.add_argument('--debug-opt-level', '-d', help='The optimization level to use for debug builds', type=str) parser.add_argument('--gn-args', help='Set extra GN args', dest='gn_args', action='append') parser.add_argument( '--toolchain-prefix', '-t', type=str, help='Comma-separated list of arch=/path/to/toolchain-prefix mappings') parser.add_argument('--ide', help='Generate an IDE file.', default=os_has_ide(HOST_OS), action='store_true') parser.add_argument('--export-compile-commands', help='Export compile_commands.json database file.', default=False, action='store_true') parser.add_argument( '--target-sysroot', '-s', type=str, help='Comma-separated list of arch=/path/to/sysroot mappings') def AddCommonConfigurationArgs(parser): parser.add_argument("-a", "--arch", type=str, help='Target architectures (comma-separated).', metavar='[all,' + ','.join(AVAILABLE_ARCHS) + ']', default=utils.GuessArchitecture()) parser.add_argument('--mode', '-m', type=str, help='Build variants (comma-separated).', metavar='[all,debug,release,product]', default='debug') parser.add_argument('--os', type=str, help='Target OSs (comma-separated).', metavar='[all,host,android,fuchsia]', default='host') parser.add_argument('--sanitizer', type=str, help='Build variants (comma-separated).', metavar='[all,none,asan,lsan,msan,tsan,ubsan]', default='none') def AddOtherArgs(parser): parser.add_argument("-v", "--verbose", help='Verbose output.', default=False, action="store_true") def parse_args(args): args = args[1:] parser = argparse.ArgumentParser( description='A script to run `gn gen`.', formatter_class=argparse.ArgumentDefaultsHelpFormatter) config_group = parser.add_argument_group('Configuration Related Arguments') AddCommonConfigurationArgs(config_group) gn_group = parser.add_argument_group('GN Related Arguments') AddCommonGnOptionArgs(gn_group) other_group = parser.add_argument_group('Other Arguments') AddOtherArgs(other_group) options = parser.parse_args(args) if not ProcessOptions(options): parser.print_help() return None return options def BuildGnCommand(args, mode, arch, target_os, sanitizer, out_dir): gn = os.path.join(DART_ROOT, 'buildtools', 'gn.exe' if utils.IsWindows() else 'gn') if not os.path.isfile(gn): raise Exception("Couldn't find the gn binary at path: " + gn) # TODO(infra): Re-enable --check. Many targets fail to use # public_deps to re-expose header files to their dependents. 
# See dartbug.com/32364 command = [gn, 'gen', out_dir] gn_args = ToCommandLine( ToGnArgs(args, mode, arch, target_os, sanitizer, args.verify_sdk_hash)) gn_args += GetGNArgs(args) if args.ide: command.append(ide_switch(HOST_OS)) if args.export_compile_commands: command.append('--export-compile-commands') command.append('--args=%s' % ' '.join(gn_args)) return command def RunGnOnConfiguredConfigurations(args): commands = [] for target_os in args.os: for mode in args.mode: for arch in args.arch: for sanitizer in args.sanitizer: out_dir = GetOutDir(mode, arch, target_os, sanitizer) commands.append( BuildGnCommand(args, mode, arch, target_os, sanitizer, out_dir)) if args.verbose: print("gn gen --check in %s" % out_dir) active_commands = [] def cleanup(command): print("Command failed: " + ' '.join(command)) for (_, process) in active_commands: process.terminate() for command in commands: try: process = subprocess.Popen(command, cwd=DART_ROOT) active_commands.append([command, process]) except Exception as e: print('Error: %s' % e) cleanup(command) return 1 while active_commands: time.sleep(0.1) for active_command in active_commands: (command, process) = active_command if process.poll() is not None: active_commands.remove(active_command) if process.returncode != 0: cleanup(command) return 1 return 0 def Main(argv): starttime = time.time() args = parse_args(argv) if args is None: return 1 result = RunGnOnConfiguredConfigurations(args) if args.verbose: endtime = time.time() print("GN Time: %.3f seconds" % (endtime - starttime)) return result if __name__ == '__main__': sys.exit(Main(sys.argv))
true
true
f711de27c6895e9adc35199a54ad636bb55fdab2
1,568
py
Python
tk_box.py
policmajsterdev/game
24a3bcb37cc812e3e3cdf96c0fbd7190d69cae4a
[ "MIT" ]
2
2020-09-11T16:56:02.000Z
2020-09-29T08:17:14.000Z
tk_box.py
policmajsterdev/game
24a3bcb37cc812e3e3cdf96c0fbd7190d69cae4a
[ "MIT" ]
null
null
null
tk_box.py
policmajsterdev/game
24a3bcb37cc812e3e3cdf96c0fbd7190d69cae4a
[ "MIT" ]
null
null
null
import os
from tkinter import *

import db_save

filepath = os.path.dirname(__file__)
icon_eq = os.path.join(filepath, "data\\pics\\pleczak.ico")
tlos = os.path.join(filepath, "data\\pics\\hg.png")
tloe = os.path.join(filepath, "data\\pics\\hp.png")


def informacja(tresc, zrodlo_pliku):
    eq = "☆ Otrzymujesz " + tresc + " ☆"
    tytul_okna = "Ekwipunek/Statystyki"
    root = Tk()
    root.iconbitmap(icon_eq)
    root.geometry("350x150")
    root.title(tytul_okna)
    tlo = PhotoImage(file=tloe)
    label_image = Label(root, image=tlo)
    label_image.place(x=0, y=0, relwidth=1, relheight=1)
    text3_lbl = Label(root, text=eq, foreground="#FFFFFF", bg="#000000")
    text3_lbl.pack()
    przedmiot = PhotoImage(file=zrodlo_pliku)
    graph_lbl = Label(root, image=przedmiot, relief="flat", bd=0)
    graph_lbl.pack()
    root.mainloop()


def zapiski(osoba, tabela):
    """ Pobiera informacje o osobie """
    tytul_okna = "Zapiski na temat " + osoba
    dane = db_save.pobierz_dane(tabela)
    root = Tk()
    root.iconbitmap(icon_eq)
    root.geometry("400x400")
    root.title(tytul_okna)
    tlo = PhotoImage(file=tlos)
    label_image = Label(root, image=tlo)
    label_image.place(x=0, y=0, relwidth=1, relheight=1)
    label_head = Label(root, width=0, foreground="#000000", bg="#000000")
    label_head.pack()
    for x in dane:
        tex = x[0] + " : " + x[1]
        label = Label(root, text=tex, foreground="#FFFFFF", bg="#000000")  # set your text
        label.pack()
    root.mainloop()
30.745098
91
0.63074
import os
from tkinter import *

import db_save

filepath = os.path.dirname(__file__)
icon_eq = os.path.join(filepath, "data\\pics\\pleczak.ico")
tlos = os.path.join(filepath, "data\\pics\\hg.png")
tloe = os.path.join(filepath, "data\\pics\\hp.png")


def informacja(tresc, zrodlo_pliku):
    eq = "☆ Otrzymujesz " + tresc + " ☆"
    tytul_okna = "Ekwipunek/Statystyki"
    root = Tk()
    root.iconbitmap(icon_eq)
    root.geometry("350x150")
    root.title(tytul_okna)
    tlo = PhotoImage(file=tloe)
    label_image = Label(root, image=tlo)
    label_image.place(x=0, y=0, relwidth=1, relheight=1)
    text3_lbl = Label(root, text=eq, foreground="#FFFFFF", bg="#000000")
    text3_lbl.pack()
    przedmiot = PhotoImage(file=zrodlo_pliku)
    graph_lbl = Label(root, image=przedmiot, relief="flat", bd=0)
    graph_lbl.pack()
    root.mainloop()


def zapiski(osoba, tabela):
    tytul_okna = "Zapiski na temat " + osoba
    dane = db_save.pobierz_dane(tabela)
    root = Tk()
    root.iconbitmap(icon_eq)
    root.geometry("400x400")
    root.title(tytul_okna)
    tlo = PhotoImage(file=tlos)
    label_image = Label(root, image=tlo)
    label_image.place(x=0, y=0, relwidth=1, relheight=1)
    label_head = Label(root, width=0, foreground="#000000", bg="#000000")
    label_head.pack()
    for x in dane:
        tex = x[0] + " : " + x[1]
        label = Label(root, text=tex, foreground="#FFFFFF", bg="#000000")
        label.pack()
    root.mainloop()
true
true
f711deb58308265e9c2f3caba88c5213cd8faeeb
25,599
py
Python
secondary_indexes_test.py
knifewine/cassandra-dtest
c59b97e8f587b484df945ec9e3a17e9316b2a5df
[ "Apache-2.0" ]
null
null
null
secondary_indexes_test.py
knifewine/cassandra-dtest
c59b97e8f587b484df945ec9e3a17e9316b2a5df
[ "Apache-2.0" ]
null
null
null
secondary_indexes_test.py
knifewine/cassandra-dtest
c59b97e8f587b484df945ec9e3a17e9316b2a5df
[ "Apache-2.0" ]
null
null
null
import random, re, time, uuid from dtest import Tester, debug from pytools import since from pyassertions import assert_invalid from cassandra import InvalidRequest from cassandra.query import BatchStatement, SimpleStatement from cassandra.protocol import ConfigurationException class TestSecondaryIndexes(Tester): def bug3367_test(self): cluster = self.cluster cluster.populate(1).start() [node1] = cluster.nodelist() cursor = self.patient_cql_connection(node1) self.create_ks(cursor, 'ks', 1) columns = {"password": "varchar", "gender": "varchar", "session_token": "varchar", "state": "varchar", "birth_year": "bigint"} self.create_cf(cursor, 'users', columns=columns) # insert data cursor.execute("INSERT INTO users (KEY, password, gender, state, birth_year) VALUES ('user1', 'ch@ngem3a', 'f', 'TX', 1968);") cursor.execute("INSERT INTO users (KEY, password, gender, state, birth_year) VALUES ('user2', 'ch@ngem3b', 'm', 'CA', 1971);") # create index cursor.execute("CREATE INDEX gender_key ON users (gender);") cursor.execute("CREATE INDEX state_key ON users (state);") cursor.execute("CREATE INDEX birth_year_key ON users (birth_year);") # insert data cursor.execute("INSERT INTO users (KEY, password, gender, state, birth_year) VALUES ('user3', 'ch@ngem3c', 'f', 'FL', 1978);") cursor.execute("INSERT INTO users (KEY, password, gender, state, birth_year) VALUES ('user4', 'ch@ngem3d', 'm', 'TX', 1974);") result = cursor.execute("SELECT * FROM users;") assert len(result) == 4, "Expecting 4 users, got" + str(result) result = cursor.execute("SELECT * FROM users WHERE state='TX';") assert len(result) == 2, "Expecting 2 users, got" + str(result) result = cursor.execute("SELECT * FROM users WHERE state='CA';") assert len(result) == 1, "Expecting 1 users, got" + str(result) @since('2.1') def test_low_cardinality_indexes(self): """ Checks that low-cardinality secondary index subqueries are executed concurrently """ cluster = self.cluster cluster.populate(3).start() node1, node2, node3 = cluster.nodelist() conn = self.patient_cql_connection(node1, version='3.0.0') cursor = conn cursor.max_trace_wait = 120 cursor.execute("CREATE KEYSPACE ks WITH REPLICATION = {'class': 'SimpleStrategy', 'replication_factor': '1'};") cursor.execute("CREATE TABLE ks.cf (a text PRIMARY KEY, b text);") cursor.execute("CREATE INDEX b_index ON ks.cf (b);") num_rows = 100 for i in range(num_rows): indexed_value = i % (num_rows / 3) # use the same indexed value three times cursor.execute("INSERT INTO ks.cf (a, b) VALUES ('%d', '%d');" % (i, indexed_value)) cluster.flush() def check_trace_events(trace): # we should see multiple requests get enqueued prior to index scan # execution happening # Look for messages like: # Submitting range requests on 769 ranges with a concurrency of 769 (0.0070312 rows per range expected) regex = r"Submitting range requests on [0-9]+ ranges with a concurrency of (\d+) \(([0-9.]+) rows per range expected\)" for event in trace.events: desc = event.description match = re.match(regex, desc) if match: concurrency = int(match.group(1)) expected_per_range = float(match.group(2)) self.assertTrue(concurrency > 1, "Expected more than 1 concurrent range request, got %d" % concurrency) self.assertTrue(expected_per_range > 0) break else: self.fail("Didn't find matching trace event") query = SimpleStatement("SELECT * FROM ks.cf WHERE b='1';") result = cursor.execute(query, trace=True) self.assertEqual(3, len(result)) check_trace_events(query.trace) query = SimpleStatement("SELECT * FROM ks.cf WHERE b='1' LIMIT 100;") result = 
cursor.execute(query, trace=True) self.assertEqual(3, len(result)) check_trace_events(query.trace) query = SimpleStatement("SELECT * FROM ks.cf WHERE b='1' LIMIT 3;") result = cursor.execute(query, trace=True) self.assertEqual(3, len(result)) check_trace_events(query.trace) for limit in (1, 2): result = cursor.execute("SELECT * FROM ks.cf WHERE b='1' LIMIT %d;" % (limit,)) self.assertEqual(limit, len(result)) @since('2.1') def test_6924_dropping_ks(self): """Tests CASSANDRA-6924 Data inserted immediately after dropping and recreating a keyspace with an indexed column familiy is not included in the index. """ # Reproducing requires at least 3 nodes: cluster = self.cluster cluster.populate(3).start() node1, node2, node3 = cluster.nodelist() conn = self.patient_cql_connection(node1) cursor = conn #This only occurs when dropping and recreating with #the same name, so loop through this test a few times: for i in range(10): debug("round %s" % i) try: cursor.execute("DROP KEYSPACE ks") except ConfigurationException: pass self.create_ks(cursor, 'ks', 1) cursor.execute("CREATE TABLE ks.cf (key text PRIMARY KEY, col1 text);") cursor.execute("CREATE INDEX on ks.cf (col1);") for r in range(10): stmt = "INSERT INTO ks.cf (key, col1) VALUES ('%s','asdf');" % r cursor.execute(stmt) self.wait_for_schema_agreement(cursor) rows = cursor.execute("select count(*) from ks.cf WHERE col1='asdf'") count = rows[0][0] self.assertEqual(count, 10) @since('2.1') def test_6924_dropping_cf(self): """Tests CASSANDRA-6924 Data inserted immediately after dropping and recreating an indexed column family is not included in the index. """ # Reproducing requires at least 3 nodes: cluster = self.cluster cluster.populate(3).start() node1, node2, node3 = cluster.nodelist() conn = self.patient_cql_connection(node1) cursor = conn self.create_ks(cursor, 'ks', 1) #This only occurs when dropping and recreating with #the same name, so loop through this test a few times: for i in range(10): debug("round %s" % i) try: cursor.execute("DROP COLUMNFAMILY ks.cf") except InvalidRequest: pass cursor.execute("CREATE TABLE ks.cf (key text PRIMARY KEY, col1 text);") cursor.execute("CREATE INDEX on ks.cf (col1);") for r in range(10): stmt = "INSERT INTO ks.cf (key, col1) VALUES ('%s','asdf');" % r cursor.execute(stmt) self.wait_for_schema_agreement(cursor) rows = cursor.execute("select count(*) from ks.cf WHERE col1='asdf'") count = rows[0][0] self.assertEqual(count, 10) @since('2.0') def test_8280_validate_indexed_values(self): """Tests CASSANDRA-8280 Reject inserts & updates where values of any indexed column is > 64k """ cluster = self.cluster cluster.populate(1).start() node1 = cluster.nodelist()[0] conn = self.patient_cql_connection(node1) cursor = conn self.create_ks(cursor, 'ks', 1) self.insert_row_with_oversize_value("CREATE TABLE %s(a int, b int, c text, PRIMARY KEY (a))", "CREATE INDEX ON %s(c)", "INSERT INTO %s (a, b, c) VALUES (0, 0, ?)", cursor) self.insert_row_with_oversize_value("CREATE TABLE %s(a int, b text, c int, PRIMARY KEY (a, b))", "CREATE INDEX ON %s(b)", "INSERT INTO %s (a, b, c) VALUES (0, ?, 0)", cursor) self.insert_row_with_oversize_value("CREATE TABLE %s(a text, b int, c int, PRIMARY KEY ((a, b)))", "CREATE INDEX ON %s(a)", "INSERT INTO %s (a, b, c) VALUES (?, 0, 0)", cursor) self.insert_row_with_oversize_value("CREATE TABLE %s(a int, b text, PRIMARY KEY (a)) WITH COMPACT STORAGE", "CREATE INDEX ON %s(b)", "INSERT INTO %s (a, b) VALUES (0, ?)", cursor) def insert_row_with_oversize_value(self, create_table_cql, 
create_index_cql, insert_cql, cursor): """ Validate two variations of the supplied insert statement, first as it is and then again transformed into a conditional statement """ table_name = "table_" + str(int(round(time.time() * 1000))) cursor.execute(create_table_cql % table_name) cursor.execute(create_index_cql % table_name) value = "X" * 65536 self._assert_invalid_request(cursor, insert_cql % table_name, value) self._assert_invalid_request(cursor, (insert_cql % table_name) + ' IF NOT EXISTS', value) def _assert_invalid_request(self, cursor, insert_cql, value): """ Perform two executions of the supplied statement, as a single statement and again as part of a batch """ prepared = cursor.prepare(insert_cql) self._execute_and_fail(lambda: cursor.execute(prepared, [value]), insert_cql) batch = BatchStatement() batch.add(prepared, [value]) self._execute_and_fail(lambda: cursor.execute(batch), insert_cql) def _execute_and_fail(self, operation, cql_string): try: operation() assert False, "Expecting query %s to be invalid" % cql_string except AssertionError as e: raise e except InvalidRequest: pass def wait_for_schema_agreement(self, cursor): rows = cursor.execute("SELECT schema_version FROM system.local") local_version = rows[0] all_match = True rows = cursor.execute("SELECT schema_version FROM system.peers") for peer_version in rows: if peer_version != local_version: all_match = False break if all_match: return else: time.sleep(0.10) self.wait_for_schema_agreement(cursor) class TestSecondaryIndexesOnCollections(Tester): def __init__(self, *args, **kwargs): Tester.__init__(self, *args, **kwargs) @since('2.1') def test_list_indexes(self): """ Checks that secondary indexes on lists work for querying. """ cluster = self.cluster cluster.populate(1).start() [node1] = cluster.nodelist() cursor = self.patient_cql_connection(node1) self.create_ks(cursor, 'list_index_search', 1) stmt = ("CREATE TABLE list_index_search.users (" "user_id uuid PRIMARY KEY," "email text," "uuids list<uuid>" ");") cursor.execute(stmt) # no index present yet, make sure there's an error trying to query column stmt = ("SELECT * from list_index_search.users where uuids contains {some_uuid}" ).format(some_uuid=uuid.uuid4()) assert_invalid(cursor, stmt, 'No secondary indexes on the restricted columns support the provided operators') # add index and query again (even though there are no rows in the table yet) stmt = "CREATE INDEX user_uuids on list_index_search.users (uuids);" cursor.execute(stmt) stmt = ("SELECT * from list_index_search.users where uuids contains {some_uuid}").format(some_uuid=uuid.uuid4()) row = cursor.execute(stmt) self.assertEqual(0, len(row)) # add a row which doesn't specify data for the indexed column, and query again user1_uuid = uuid.uuid4() stmt = ("INSERT INTO list_index_search.users (user_id, email)" "values ({user_id}, 'test@example.com')" ).format(user_id=user1_uuid) cursor.execute(stmt) stmt = ("SELECT * from list_index_search.users where uuids contains {some_uuid}").format(some_uuid=uuid.uuid4()) row = cursor.execute(stmt) self.assertEqual(0, len(row)) _id = uuid.uuid4() # alter the row to add a single item to the indexed list stmt = ("UPDATE list_index_search.users set uuids = [{id}] where user_id = {user_id}" ).format(id=_id, user_id=user1_uuid) cursor.execute(stmt) stmt = ("SELECT * from list_index_search.users where uuids contains {some_uuid}").format(some_uuid=_id) row = cursor.execute(stmt) self.assertEqual(1, len(row)) # add a bunch of user records and query them back shared_uuid = 
uuid.uuid4() # this uuid will be on all records log = [] for i in range(50000): user_uuid = uuid.uuid4() unshared_uuid = uuid.uuid4() # give each record a unique email address using the int index stmt = ("INSERT INTO list_index_search.users (user_id, email, uuids)" "values ({user_uuid}, '{prefix}@example.com', [{s_uuid}, {u_uuid}])" ).format(user_uuid=user_uuid, prefix=i, s_uuid=shared_uuid, u_uuid=unshared_uuid) cursor.execute(stmt) log.append( {'user_id': user_uuid, 'email':str(i)+'@example.com', 'unshared_uuid':unshared_uuid} ) # confirm there is now 50k rows with the 'shared' uuid above in the secondary index stmt = ("SELECT * from list_index_search.users where uuids contains {shared_uuid}").format(shared_uuid=shared_uuid) rows = cursor.execute(stmt) result = [row for row in rows] self.assertEqual(50000, len(result)) # shuffle the log in-place, and double-check a slice of records by querying the secondary index random.shuffle(log) for log_entry in log[:1000]: stmt = ("SELECT user_id, email, uuids FROM list_index_search.users where uuids contains {unshared_uuid}" ).format(unshared_uuid=log_entry['unshared_uuid']) rows = cursor.execute(stmt) self.assertEqual(1, len(rows)) db_user_id, db_email, db_uuids = rows[0] self.assertEqual(db_user_id, log_entry['user_id']) self.assertEqual(db_email, log_entry['email']) self.assertEqual(str(db_uuids[0]), str(shared_uuid)) self.assertEqual(str(db_uuids[1]), str(log_entry['unshared_uuid'])) @since('2.1') def test_set_indexes(self): """ Checks that secondary indexes on sets work for querying. """ cluster = self.cluster cluster.populate(1).start() [node1] = cluster.nodelist() cursor = self.patient_cql_connection(node1) self.create_ks(cursor, 'set_index_search', 1) stmt = ("CREATE TABLE set_index_search.users (" "user_id uuid PRIMARY KEY," "email text," "uuids set<uuid>);") cursor.execute(stmt) # no index present yet, make sure there's an error trying to query column stmt = ("SELECT * from set_index_search.users where uuids contains {some_uuid}").format(some_uuid=uuid.uuid4()) assert_invalid(cursor, stmt, 'No secondary indexes on the restricted columns support the provided operators') # add index and query again (even though there are no rows in the table yet) stmt = "CREATE INDEX user_uuids on set_index_search.users (uuids);" cursor.execute(stmt) stmt = ("SELECT * from set_index_search.users where uuids contains {some_uuid}").format(some_uuid=uuid.uuid4()) row = cursor.execute(stmt) self.assertEqual(0, len(row)) # add a row which doesn't specify data for the indexed column, and query again user1_uuid = uuid.uuid4() stmt = ("INSERT INTO set_index_search.users (user_id, email) values ({user_id}, 'test@example.com')" ).format(user_id=user1_uuid) cursor.execute(stmt) stmt = ("SELECT * from set_index_search.users where uuids contains {some_uuid}").format(some_uuid=uuid.uuid4()) row = cursor.execute(stmt) self.assertEqual(0, len(row)) _id = uuid.uuid4() # alter the row to add a single item to the indexed set stmt = ("UPDATE set_index_search.users set uuids = {{{id}}} where user_id = {user_id}").format(id=_id, user_id=user1_uuid) cursor.execute(stmt) stmt = ("SELECT * from set_index_search.users where uuids contains {some_uuid}").format(some_uuid=_id) row = cursor.execute(stmt) self.assertEqual(1, len(row)) # add a bunch of user records and query them back shared_uuid = uuid.uuid4() # this uuid will be on all records log = [] for i in range(50000): user_uuid = uuid.uuid4() unshared_uuid = uuid.uuid4() # give each record a unique email address using the int 
index stmt = ("INSERT INTO set_index_search.users (user_id, email, uuids)" "values ({user_uuid}, '{prefix}@example.com', {{{s_uuid}, {u_uuid}}})" ).format(user_uuid=user_uuid, prefix=i, s_uuid=shared_uuid, u_uuid=unshared_uuid) cursor.execute(stmt) log.append( {'user_id': user_uuid, 'email':str(i)+'@example.com', 'unshared_uuid':unshared_uuid} ) # confirm there is now 50k rows with the 'shared' uuid above in the secondary index stmt = ("SELECT * from set_index_search.users where uuids contains {shared_uuid}").format(shared_uuid=shared_uuid) rows = cursor.execute(stmt) result = [row for row in rows] self.assertEqual(50000, len(result)) # shuffle the log in-place, and double-check a slice of records by querying the secondary index random.shuffle(log) for log_entry in log[:1000]: stmt = ("SELECT user_id, email, uuids FROM set_index_search.users where uuids contains {unshared_uuid}" ).format(unshared_uuid=log_entry['unshared_uuid']) rows = cursor.execute(stmt) self.assertEqual(1, len(rows)) db_user_id, db_email, db_uuids = rows[0] self.assertEqual(db_user_id, log_entry['user_id']) self.assertEqual(db_email, log_entry['email']) self.assertTrue(shared_uuid in db_uuids) self.assertTrue(log_entry['unshared_uuid'] in db_uuids) @since('2.1') def test_map_indexes(self): """ Checks that secondary indexes on maps work for querying on both keys and values """ cluster = self.cluster cluster.populate(1).start() [node1] = cluster.nodelist() cursor = self.patient_cql_connection(node1) self.create_ks(cursor, 'map_index_search', 1) stmt = ("CREATE TABLE map_index_search.users (" "user_id uuid PRIMARY KEY," "email text," "uuids map<uuid, uuid>);") cursor.execute(stmt) # no index present yet, make sure there's an error trying to query column stmt = ("SELECT * from map_index_search.users where uuids contains {some_uuid}").format(some_uuid=uuid.uuid4()) assert_invalid(cursor, stmt, 'No secondary indexes on the restricted columns support the provided operators') stmt = ("SELECT * from map_index_search.users where uuids contains key {some_uuid}" ).format(some_uuid=uuid.uuid4()) assert_invalid(cursor, stmt, 'No secondary indexes on the restricted columns support the provided operators') # add index on keys and query again (even though there are no rows in the table yet) stmt = "CREATE INDEX user_uuids on map_index_search.users (KEYS(uuids));" cursor.execute(stmt) stmt = "SELECT * from map_index_search.users where uuids contains key {some_uuid}".format(some_uuid=uuid.uuid4()) rows = cursor.execute(stmt) self.assertEqual(0, len(rows)) # add a row which doesn't specify data for the indexed column, and query again user1_uuid = uuid.uuid4() stmt = ("INSERT INTO map_index_search.users (user_id, email)" "values ({user_id}, 'test@example.com')" ).format(user_id=user1_uuid) cursor.execute(stmt) stmt = ("SELECT * from map_index_search.users where uuids contains key {some_uuid}").format(some_uuid=uuid.uuid4()) rows = cursor.execute(stmt) self.assertEqual(0, len(rows)) _id = uuid.uuid4() # alter the row to add a single item to the indexed map stmt = ("UPDATE map_index_search.users set uuids = {{{id}:{user_id}}} where user_id = {user_id}" ).format(id=_id, user_id=user1_uuid) cursor.execute(stmt) stmt = ("SELECT * from map_index_search.users where uuids contains key {some_uuid}").format(some_uuid=_id) rows = cursor.execute(stmt) self.assertEqual(1, len(rows)) # add a bunch of user records and query them back shared_uuid = uuid.uuid4() # this uuid will be on all records log = [] for i in range(50000): user_uuid = uuid.uuid4() 
unshared_uuid1 = uuid.uuid4() unshared_uuid2 = uuid.uuid4() # give each record a unique email address using the int index, add unique ids for keys and values stmt = ("INSERT INTO map_index_search.users (user_id, email, uuids)" "values ({user_uuid}, '{prefix}@example.com', {{{u_uuid1}:{u_uuid2}, {s_uuid}:{s_uuid}}})" ).format(user_uuid=user_uuid, prefix=i, s_uuid=shared_uuid, u_uuid1=unshared_uuid1, u_uuid2=unshared_uuid2) cursor.execute(stmt) log.append( {'user_id': user_uuid, 'email':str(i)+'@example.com', 'unshared_uuid1':unshared_uuid1, 'unshared_uuid2':unshared_uuid2} ) # confirm there is now 50k rows with the 'shared' uuid above in the secondary index stmt = ("SELECT * from map_index_search.users where uuids contains key {shared_uuid}" ).format(shared_uuid=shared_uuid) rows = cursor.execute(stmt) result = [row for row in rows] self.assertEqual(50000, len(result)) # shuffle the log in-place, and double-check a slice of records by querying the secondary index on keys random.shuffle(log) for log_entry in log[:1000]: stmt = ("SELECT user_id, email, uuids FROM map_index_search.users where uuids contains key {unshared_uuid1}" ).format(unshared_uuid1=log_entry['unshared_uuid1']) row = cursor.execute(stmt) rows = self.assertEqual(1, len(row)) db_user_id, db_email, db_uuids = row[0] self.assertEqual(db_user_id, log_entry['user_id']) self.assertEqual(db_email, log_entry['email']) self.assertTrue(shared_uuid in db_uuids) self.assertTrue(log_entry['unshared_uuid1'] in db_uuids) # attempt to add an index on map values as well (should fail) stmt = "CREATE INDEX user_uuids on map_index_search.users (uuids);" matching = "Cannot create index on uuids values, an index on uuids keys already exists and indexing a map on both keys and values at the same time is not currently supported" assert_invalid(cursor, stmt, matching) # since cannot have index on map keys and values remove current index on keys stmt = "DROP INDEX user_uuids;" cursor.execute(stmt) # add index on values (will index rows added prior) stmt = "CREATE INDEX user_uids on map_index_search.users (uuids);" cursor.execute(stmt) # shuffle the log in-place, and double-check a slice of records by querying the secondary index random.shuffle(log) time.sleep(10) # since we already inserted unique ids for values as well, check that appropriate recors are found for log_entry in log[:1000]: stmt = ("SELECT user_id, email, uuids FROM map_index_search.users where uuids contains {unshared_uuid2}" ).format(unshared_uuid2=log_entry['unshared_uuid2']) rows = cursor.execute(stmt) self.assertEqual(1, len(rows)) db_user_id, db_email, db_uuids = rows[0] self.assertEqual(db_user_id, log_entry['user_id']) self.assertEqual(db_email, log_entry['email']) self.assertTrue(shared_uuid in db_uuids) self.assertTrue(log_entry['unshared_uuid2'] in db_uuids.values())
43.168634
183
0.607953
import random, re, time, uuid from dtest import Tester, debug from pytools import since from pyassertions import assert_invalid from cassandra import InvalidRequest from cassandra.query import BatchStatement, SimpleStatement from cassandra.protocol import ConfigurationException class TestSecondaryIndexes(Tester): def bug3367_test(self): cluster = self.cluster cluster.populate(1).start() [node1] = cluster.nodelist() cursor = self.patient_cql_connection(node1) self.create_ks(cursor, 'ks', 1) columns = {"password": "varchar", "gender": "varchar", "session_token": "varchar", "state": "varchar", "birth_year": "bigint"} self.create_cf(cursor, 'users', columns=columns) cursor.execute("INSERT INTO users (KEY, password, gender, state, birth_year) VALUES ('user1', 'ch@ngem3a', 'f', 'TX', 1968);") cursor.execute("INSERT INTO users (KEY, password, gender, state, birth_year) VALUES ('user2', 'ch@ngem3b', 'm', 'CA', 1971);") cursor.execute("CREATE INDEX gender_key ON users (gender);") cursor.execute("CREATE INDEX state_key ON users (state);") cursor.execute("CREATE INDEX birth_year_key ON users (birth_year);") cursor.execute("INSERT INTO users (KEY, password, gender, state, birth_year) VALUES ('user3', 'ch@ngem3c', 'f', 'FL', 1978);") cursor.execute("INSERT INTO users (KEY, password, gender, state, birth_year) VALUES ('user4', 'ch@ngem3d', 'm', 'TX', 1974);") result = cursor.execute("SELECT * FROM users;") assert len(result) == 4, "Expecting 4 users, got" + str(result) result = cursor.execute("SELECT * FROM users WHERE state='TX';") assert len(result) == 2, "Expecting 2 users, got" + str(result) result = cursor.execute("SELECT * FROM users WHERE state='CA';") assert len(result) == 1, "Expecting 1 users, got" + str(result) @since('2.1') def test_low_cardinality_indexes(self): cluster = self.cluster cluster.populate(3).start() node1, node2, node3 = cluster.nodelist() conn = self.patient_cql_connection(node1, version='3.0.0') cursor = conn cursor.max_trace_wait = 120 cursor.execute("CREATE KEYSPACE ks WITH REPLICATION = {'class': 'SimpleStrategy', 'replication_factor': '1'};") cursor.execute("CREATE TABLE ks.cf (a text PRIMARY KEY, b text);") cursor.execute("CREATE INDEX b_index ON ks.cf (b);") num_rows = 100 for i in range(num_rows): indexed_value = i % (num_rows / 3) cursor.execute("INSERT INTO ks.cf (a, b) VALUES ('%d', '%d');" % (i, indexed_value)) cluster.flush() def check_trace_events(trace): regex = r"Submitting range requests on [0-9]+ ranges with a concurrency of (\d+) \(([0-9.]+) rows per range expected\)" for event in trace.events: desc = event.description match = re.match(regex, desc) if match: concurrency = int(match.group(1)) expected_per_range = float(match.group(2)) self.assertTrue(concurrency > 1, "Expected more than 1 concurrent range request, got %d" % concurrency) self.assertTrue(expected_per_range > 0) break else: self.fail("Didn't find matching trace event") query = SimpleStatement("SELECT * FROM ks.cf WHERE b='1';") result = cursor.execute(query, trace=True) self.assertEqual(3, len(result)) check_trace_events(query.trace) query = SimpleStatement("SELECT * FROM ks.cf WHERE b='1' LIMIT 100;") result = cursor.execute(query, trace=True) self.assertEqual(3, len(result)) check_trace_events(query.trace) query = SimpleStatement("SELECT * FROM ks.cf WHERE b='1' LIMIT 3;") result = cursor.execute(query, trace=True) self.assertEqual(3, len(result)) check_trace_events(query.trace) for limit in (1, 2): result = cursor.execute("SELECT * FROM ks.cf WHERE b='1' LIMIT %d;" % (limit,)) 
self.assertEqual(limit, len(result)) @since('2.1') def test_6924_dropping_ks(self): # Reproducing requires at least 3 nodes: cluster = self.cluster cluster.populate(3).start() node1, node2, node3 = cluster.nodelist() conn = self.patient_cql_connection(node1) cursor = conn #This only occurs when dropping and recreating with #the same name, so loop through this test a few times: for i in range(10): debug("round %s" % i) try: cursor.execute("DROP KEYSPACE ks") except ConfigurationException: pass self.create_ks(cursor, 'ks', 1) cursor.execute("CREATE TABLE ks.cf (key text PRIMARY KEY, col1 text);") cursor.execute("CREATE INDEX on ks.cf (col1);") for r in range(10): stmt = "INSERT INTO ks.cf (key, col1) VALUES ('%s','asdf');" % r cursor.execute(stmt) self.wait_for_schema_agreement(cursor) rows = cursor.execute("select count(*) from ks.cf WHERE col1='asdf'") count = rows[0][0] self.assertEqual(count, 10) @since('2.1') def test_6924_dropping_cf(self): # Reproducing requires at least 3 nodes: cluster = self.cluster cluster.populate(3).start() node1, node2, node3 = cluster.nodelist() conn = self.patient_cql_connection(node1) cursor = conn self.create_ks(cursor, 'ks', 1) #This only occurs when dropping and recreating with #the same name, so loop through this test a few times: for i in range(10): debug("round %s" % i) try: cursor.execute("DROP COLUMNFAMILY ks.cf") except InvalidRequest: pass cursor.execute("CREATE TABLE ks.cf (key text PRIMARY KEY, col1 text);") cursor.execute("CREATE INDEX on ks.cf (col1);") for r in range(10): stmt = "INSERT INTO ks.cf (key, col1) VALUES ('%s','asdf');" % r cursor.execute(stmt) self.wait_for_schema_agreement(cursor) rows = cursor.execute("select count(*) from ks.cf WHERE col1='asdf'") count = rows[0][0] self.assertEqual(count, 10) @since('2.0') def test_8280_validate_indexed_values(self): cluster = self.cluster cluster.populate(1).start() node1 = cluster.nodelist()[0] conn = self.patient_cql_connection(node1) cursor = conn self.create_ks(cursor, 'ks', 1) self.insert_row_with_oversize_value("CREATE TABLE %s(a int, b int, c text, PRIMARY KEY (a))", "CREATE INDEX ON %s(c)", "INSERT INTO %s (a, b, c) VALUES (0, 0, ?)", cursor) self.insert_row_with_oversize_value("CREATE TABLE %s(a int, b text, c int, PRIMARY KEY (a, b))", "CREATE INDEX ON %s(b)", "INSERT INTO %s (a, b, c) VALUES (0, ?, 0)", cursor) self.insert_row_with_oversize_value("CREATE TABLE %s(a text, b int, c int, PRIMARY KEY ((a, b)))", "CREATE INDEX ON %s(a)", "INSERT INTO %s (a, b, c) VALUES (?, 0, 0)", cursor) self.insert_row_with_oversize_value("CREATE TABLE %s(a int, b text, PRIMARY KEY (a)) WITH COMPACT STORAGE", "CREATE INDEX ON %s(b)", "INSERT INTO %s (a, b) VALUES (0, ?)", cursor) def insert_row_with_oversize_value(self, create_table_cql, create_index_cql, insert_cql, cursor): table_name = "table_" + str(int(round(time.time() * 1000))) cursor.execute(create_table_cql % table_name) cursor.execute(create_index_cql % table_name) value = "X" * 65536 self._assert_invalid_request(cursor, insert_cql % table_name, value) self._assert_invalid_request(cursor, (insert_cql % table_name) + ' IF NOT EXISTS', value) def _assert_invalid_request(self, cursor, insert_cql, value): prepared = cursor.prepare(insert_cql) self._execute_and_fail(lambda: cursor.execute(prepared, [value]), insert_cql) batch = BatchStatement() batch.add(prepared, [value]) self._execute_and_fail(lambda: cursor.execute(batch), insert_cql) def _execute_and_fail(self, operation, cql_string): try: operation() assert False, "Expecting query %s to be 
invalid" % cql_string except AssertionError as e: raise e except InvalidRequest: pass def wait_for_schema_agreement(self, cursor): rows = cursor.execute("SELECT schema_version FROM system.local") local_version = rows[0] all_match = True rows = cursor.execute("SELECT schema_version FROM system.peers") for peer_version in rows: if peer_version != local_version: all_match = False break if all_match: return else: time.sleep(0.10) self.wait_for_schema_agreement(cursor) class TestSecondaryIndexesOnCollections(Tester): def __init__(self, *args, **kwargs): Tester.__init__(self, *args, **kwargs) @since('2.1') def test_list_indexes(self): cluster = self.cluster cluster.populate(1).start() [node1] = cluster.nodelist() cursor = self.patient_cql_connection(node1) self.create_ks(cursor, 'list_index_search', 1) stmt = ("CREATE TABLE list_index_search.users (" "user_id uuid PRIMARY KEY," "email text," "uuids list<uuid>" ");") cursor.execute(stmt) # no index present yet, make sure there's an error trying to query column stmt = ("SELECT * from list_index_search.users where uuids contains {some_uuid}" ).format(some_uuid=uuid.uuid4()) assert_invalid(cursor, stmt, 'No secondary indexes on the restricted columns support the provided operators') stmt = "CREATE INDEX user_uuids on list_index_search.users (uuids);" cursor.execute(stmt) stmt = ("SELECT * from list_index_search.users where uuids contains {some_uuid}").format(some_uuid=uuid.uuid4()) row = cursor.execute(stmt) self.assertEqual(0, len(row)) user1_uuid = uuid.uuid4() stmt = ("INSERT INTO list_index_search.users (user_id, email)" "values ({user_id}, 'test@example.com')" ).format(user_id=user1_uuid) cursor.execute(stmt) stmt = ("SELECT * from list_index_search.users where uuids contains {some_uuid}").format(some_uuid=uuid.uuid4()) row = cursor.execute(stmt) self.assertEqual(0, len(row)) _id = uuid.uuid4() # alter the row to add a single item to the indexed list stmt = ("UPDATE list_index_search.users set uuids = [{id}] where user_id = {user_id}" ).format(id=_id, user_id=user1_uuid) cursor.execute(stmt) stmt = ("SELECT * from list_index_search.users where uuids contains {some_uuid}").format(some_uuid=_id) row = cursor.execute(stmt) self.assertEqual(1, len(row)) # add a bunch of user records and query them back shared_uuid = uuid.uuid4() # this uuid will be on all records log = [] for i in range(50000): user_uuid = uuid.uuid4() unshared_uuid = uuid.uuid4() # give each record a unique email address using the int index stmt = ("INSERT INTO list_index_search.users (user_id, email, uuids)" "values ({user_uuid}, '{prefix}@example.com', [{s_uuid}, {u_uuid}])" ).format(user_uuid=user_uuid, prefix=i, s_uuid=shared_uuid, u_uuid=unshared_uuid) cursor.execute(stmt) log.append( {'user_id': user_uuid, 'email':str(i)+'@example.com', 'unshared_uuid':unshared_uuid} ) # confirm there is now 50k rows with the 'shared' uuid above in the secondary index stmt = ("SELECT * from list_index_search.users where uuids contains {shared_uuid}").format(shared_uuid=shared_uuid) rows = cursor.execute(stmt) result = [row for row in rows] self.assertEqual(50000, len(result)) # shuffle the log in-place, and double-check a slice of records by querying the secondary index random.shuffle(log) for log_entry in log[:1000]: stmt = ("SELECT user_id, email, uuids FROM list_index_search.users where uuids contains {unshared_uuid}" ).format(unshared_uuid=log_entry['unshared_uuid']) rows = cursor.execute(stmt) self.assertEqual(1, len(rows)) db_user_id, db_email, db_uuids = rows[0] 
self.assertEqual(db_user_id, log_entry['user_id']) self.assertEqual(db_email, log_entry['email']) self.assertEqual(str(db_uuids[0]), str(shared_uuid)) self.assertEqual(str(db_uuids[1]), str(log_entry['unshared_uuid'])) @since('2.1') def test_set_indexes(self): cluster = self.cluster cluster.populate(1).start() [node1] = cluster.nodelist() cursor = self.patient_cql_connection(node1) self.create_ks(cursor, 'set_index_search', 1) stmt = ("CREATE TABLE set_index_search.users (" "user_id uuid PRIMARY KEY," "email text," "uuids set<uuid>);") cursor.execute(stmt) # no index present yet, make sure there's an error trying to query column stmt = ("SELECT * from set_index_search.users where uuids contains {some_uuid}").format(some_uuid=uuid.uuid4()) assert_invalid(cursor, stmt, 'No secondary indexes on the restricted columns support the provided operators') stmt = "CREATE INDEX user_uuids on set_index_search.users (uuids);" cursor.execute(stmt) stmt = ("SELECT * from set_index_search.users where uuids contains {some_uuid}").format(some_uuid=uuid.uuid4()) row = cursor.execute(stmt) self.assertEqual(0, len(row)) user1_uuid = uuid.uuid4() stmt = ("INSERT INTO set_index_search.users (user_id, email) values ({user_id}, 'test@example.com')" ).format(user_id=user1_uuid) cursor.execute(stmt) stmt = ("SELECT * from set_index_search.users where uuids contains {some_uuid}").format(some_uuid=uuid.uuid4()) row = cursor.execute(stmt) self.assertEqual(0, len(row)) _id = uuid.uuid4() # alter the row to add a single item to the indexed set stmt = ("UPDATE set_index_search.users set uuids = {{{id}}} where user_id = {user_id}").format(id=_id, user_id=user1_uuid) cursor.execute(stmt) stmt = ("SELECT * from set_index_search.users where uuids contains {some_uuid}").format(some_uuid=_id) row = cursor.execute(stmt) self.assertEqual(1, len(row)) # add a bunch of user records and query them back shared_uuid = uuid.uuid4() # this uuid will be on all records log = [] for i in range(50000): user_uuid = uuid.uuid4() unshared_uuid = uuid.uuid4() # give each record a unique email address using the int index stmt = ("INSERT INTO set_index_search.users (user_id, email, uuids)" "values ({user_uuid}, '{prefix}@example.com', {{{s_uuid}, {u_uuid}}})" ).format(user_uuid=user_uuid, prefix=i, s_uuid=shared_uuid, u_uuid=unshared_uuid) cursor.execute(stmt) log.append( {'user_id': user_uuid, 'email':str(i)+'@example.com', 'unshared_uuid':unshared_uuid} ) # confirm there is now 50k rows with the 'shared' uuid above in the secondary index stmt = ("SELECT * from set_index_search.users where uuids contains {shared_uuid}").format(shared_uuid=shared_uuid) rows = cursor.execute(stmt) result = [row for row in rows] self.assertEqual(50000, len(result)) # shuffle the log in-place, and double-check a slice of records by querying the secondary index random.shuffle(log) for log_entry in log[:1000]: stmt = ("SELECT user_id, email, uuids FROM set_index_search.users where uuids contains {unshared_uuid}" ).format(unshared_uuid=log_entry['unshared_uuid']) rows = cursor.execute(stmt) self.assertEqual(1, len(rows)) db_user_id, db_email, db_uuids = rows[0] self.assertEqual(db_user_id, log_entry['user_id']) self.assertEqual(db_email, log_entry['email']) self.assertTrue(shared_uuid in db_uuids) self.assertTrue(log_entry['unshared_uuid'] in db_uuids) @since('2.1') def test_map_indexes(self): cluster = self.cluster cluster.populate(1).start() [node1] = cluster.nodelist() cursor = self.patient_cql_connection(node1) self.create_ks(cursor, 'map_index_search', 1) stmt = 
("CREATE TABLE map_index_search.users (" "user_id uuid PRIMARY KEY," "email text," "uuids map<uuid, uuid>);") cursor.execute(stmt) # no index present yet, make sure there's an error trying to query column stmt = ("SELECT * from map_index_search.users where uuids contains {some_uuid}").format(some_uuid=uuid.uuid4()) assert_invalid(cursor, stmt, 'No secondary indexes on the restricted columns support the provided operators') stmt = ("SELECT * from map_index_search.users where uuids contains key {some_uuid}" ).format(some_uuid=uuid.uuid4()) assert_invalid(cursor, stmt, 'No secondary indexes on the restricted columns support the provided operators') stmt = "CREATE INDEX user_uuids on map_index_search.users (KEYS(uuids));" cursor.execute(stmt) stmt = "SELECT * from map_index_search.users where uuids contains key {some_uuid}".format(some_uuid=uuid.uuid4()) rows = cursor.execute(stmt) self.assertEqual(0, len(rows)) user1_uuid = uuid.uuid4() stmt = ("INSERT INTO map_index_search.users (user_id, email)" "values ({user_id}, 'test@example.com')" ).format(user_id=user1_uuid) cursor.execute(stmt) stmt = ("SELECT * from map_index_search.users where uuids contains key {some_uuid}").format(some_uuid=uuid.uuid4()) rows = cursor.execute(stmt) self.assertEqual(0, len(rows)) _id = uuid.uuid4() # alter the row to add a single item to the indexed map stmt = ("UPDATE map_index_search.users set uuids = {{{id}:{user_id}}} where user_id = {user_id}" ).format(id=_id, user_id=user1_uuid) cursor.execute(stmt) stmt = ("SELECT * from map_index_search.users where uuids contains key {some_uuid}").format(some_uuid=_id) rows = cursor.execute(stmt) self.assertEqual(1, len(rows)) # add a bunch of user records and query them back shared_uuid = uuid.uuid4() # this uuid will be on all records log = [] for i in range(50000): user_uuid = uuid.uuid4() unshared_uuid1 = uuid.uuid4() unshared_uuid2 = uuid.uuid4() # give each record a unique email address using the int index, add unique ids for keys and values stmt = ("INSERT INTO map_index_search.users (user_id, email, uuids)" "values ({user_uuid}, '{prefix}@example.com', {{{u_uuid1}:{u_uuid2}, {s_uuid}:{s_uuid}}})" ).format(user_uuid=user_uuid, prefix=i, s_uuid=shared_uuid, u_uuid1=unshared_uuid1, u_uuid2=unshared_uuid2) cursor.execute(stmt) log.append( {'user_id': user_uuid, 'email':str(i)+'@example.com', 'unshared_uuid1':unshared_uuid1, 'unshared_uuid2':unshared_uuid2} ) # confirm there is now 50k rows with the 'shared' uuid above in the secondary index stmt = ("SELECT * from map_index_search.users where uuids contains key {shared_uuid}" ).format(shared_uuid=shared_uuid) rows = cursor.execute(stmt) result = [row for row in rows] self.assertEqual(50000, len(result)) # shuffle the log in-place, and double-check a slice of records by querying the secondary index on keys random.shuffle(log) for log_entry in log[:1000]: stmt = ("SELECT user_id, email, uuids FROM map_index_search.users where uuids contains key {unshared_uuid1}" ).format(unshared_uuid1=log_entry['unshared_uuid1']) row = cursor.execute(stmt) rows = self.assertEqual(1, len(row)) db_user_id, db_email, db_uuids = row[0] self.assertEqual(db_user_id, log_entry['user_id']) self.assertEqual(db_email, log_entry['email']) self.assertTrue(shared_uuid in db_uuids) self.assertTrue(log_entry['unshared_uuid1'] in db_uuids) # attempt to add an index on map values as well (should fail) stmt = "CREATE INDEX user_uuids on map_index_search.users (uuids);" matching = "Cannot create index on uuids values, an index on uuids keys already exists 
and indexing a map on both keys and values at the same time is not currently supported" assert_invalid(cursor, stmt, matching) # since we cannot index both map keys and values on the same column, remove the current index on keys stmt = "DROP INDEX user_uuids;" cursor.execute(stmt) # add index on values (will index rows added prior) stmt = "CREATE INDEX user_uids on map_index_search.users (uuids);" cursor.execute(stmt) # shuffle the log in-place, and double-check a slice of records by querying the secondary index random.shuffle(log) time.sleep(10) # since we already inserted unique ids for values as well, check that appropriate records are found for log_entry in log[:1000]: stmt = ("SELECT user_id, email, uuids FROM map_index_search.users where uuids contains {unshared_uuid2}" ).format(unshared_uuid2=log_entry['unshared_uuid2']) rows = cursor.execute(stmt) self.assertEqual(1, len(rows)) db_user_id, db_email, db_uuids = rows[0] self.assertEqual(db_user_id, log_entry['user_id']) self.assertEqual(db_email, log_entry['email']) self.assertTrue(shared_uuid in db_uuids) self.assertTrue(log_entry['unshared_uuid2'] in db_uuids.values())
true
true
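The test flattened into the record above exercises Cassandra secondary indexes on a map column: an index created with KEYS(uuids) serves CONTAINS KEY lookups, an index on the column itself serves CONTAINS lookups, and the two cannot coexist on one column. The sketch below restates that CQL pattern through the DataStax Python driver; the contact point, keyspace name, and replication settings are placeholder assumptions rather than part of the original test.

import uuid
from cassandra.cluster import Cluster  # assumes the cassandra-driver package is installed

session = Cluster(['127.0.0.1']).connect()
session.execute("CREATE KEYSPACE IF NOT EXISTS demo WITH replication = "
                "{'class': 'SimpleStrategy', 'replication_factor': 1}")
session.execute("CREATE TABLE IF NOT EXISTS demo.users ("
                "user_id uuid PRIMARY KEY, email text, uuids map<uuid, uuid>)")

# Indexing the map *keys* is what makes 'WHERE uuids CONTAINS KEY ?' a legal query.
session.execute("CREATE INDEX user_uuid_keys ON demo.users (KEYS(uuids))")
rows = session.execute("SELECT user_id FROM demo.users WHERE uuids CONTAINS KEY %s",
                       (uuid.uuid4(),))

# An index on the values would instead be 'CREATE INDEX ... ON demo.users (uuids)' and be
# queried with 'WHERE uuids CONTAINS %s'; creating it while the KEYS index still exists is
# rejected, which is exactly what the assert_invalid call in the test checks.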
f711def72eb84cc8aea41771bc929db02aee579c
1,540
py
Python
fasp/scripts/FASPScript15.py
lifebit-ai/fasp-scripts
572b0944372f0973979a3029e5c6dbeb01073d56
[ "Apache-2.0" ]
null
null
null
fasp/scripts/FASPScript15.py
lifebit-ai/fasp-scripts
572b0944372f0973979a3029e5c6dbeb01073d56
[ "Apache-2.0" ]
null
null
null
fasp/scripts/FASPScript15.py
lifebit-ai/fasp-scripts
572b0944372f0973979a3029e5c6dbeb01073d56
[ "Apache-2.0" ]
null
null
null
''' Compute on ANVIL GTEX files''' # IMPORTS import sys import json from fasp.runner import FASPRunner # The implementations we're using from fasp.loc import Gen3DRSClient from fasp.workflow import GCPLSsamtools from fasp.loc import anvilDRSClient class localSearchClient: def __init__(self): # edit the following for your local copy of the manifest file with open('/mnt/shared/gcp-user/session_data/manifest.json') as f: self.data = json.load(f) def runQuery(self, query): # return only the first record for now # edit this once you're ready to run this on all the files results = [] for f in self.data[:1]: results.append([f['file_name'],f['object_id']]) return results def main(argv): # edit the following line for where you put your credentials file from anvil credentials_file = '/mnt/shared/gcp-user/session_data/credentials.json' faspRunner = FASPRunner(pauseSecs=0) settings = faspRunner.settings # Step 1 - Discovery # query for relevant DRS objects searchClient = localSearchClient() #drsClient = DRSMetaResolver() drsClient = anvilDRSClient(credentials_file, settings['GCPProject'], 'gs') location = 'projects/{}/locations/{}'.format(settings['GCPProject'], settings['GCPPipelineRegion']) print(location) print(settings['GCPOutputBucket']) workflowClient = GCPLSsamtools(location, settings['GCPOutputBucket'], debug=True) faspRunner.configure(searchClient, drsClient, workflowClient) faspRunner.runQuery('', 'Anvil GTEX Test') if __name__ == "__main__": main(sys.argv[1:])
27.5
100
0.746753
import sys import json from fasp.runner import FASPRunner from fasp.loc import Gen3DRSClient from fasp.workflow import GCPLSsamtools from fasp.loc import anvilDRSClient class localSearchClient: def __init__(self): # edit the following for your local copy of the manifest file with open('/mnt/shared/gcp-user/session_data/manifest.json') as f: self.data = json.load(f) def runQuery(self, query): # return only the first record for now # edit this once you're ready to run this on all the files results = [] for f in self.data[:1]: results.append([f['file_name'],f['object_id']]) return results def main(argv): # edit the following line for where you put your credentials file from anvil credentials_file = '/mnt/shared/gcp-user/session_data/credentials.json' faspRunner = FASPRunner(pauseSecs=0) settings = faspRunner.settings # Step 1 - Discovery # query for relevant DRS objects searchClient = localSearchClient() #drsClient = DRSMetaResolver() drsClient = anvilDRSClient(credentials_file, settings['GCPProject'], 'gs') location = 'projects/{}/locations/{}'.format(settings['GCPProject'], settings['GCPPipelineRegion']) print(location) print(settings['GCPOutputBucket']) workflowClient = GCPLSsamtools(location, settings['GCPOutputBucket'], debug=True) faspRunner.configure(searchClient, drsClient, workflowClient) faspRunner.runQuery('', 'Anvil GTEX Test') if __name__ == "__main__": main(sys.argv[1:])
true
true
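localSearchClient in the record above deliberately truncates the manifest to its first entry (self.data[:1]), and its comments invite widening the query later. A minimal sketch of that widened variant follows; the class name is hypothetical, and it assumes, as the script does, that the manifest is a JSON array of objects carrying 'file_name' and 'object_id' fields.

import json

class FullManifestSearchClient:
    """Hypothetical drop-in for localSearchClient that returns every manifest entry."""

    def __init__(self, manifest_path='/mnt/shared/gcp-user/session_data/manifest.json'):
        with open(manifest_path) as f:
            self.data = json.load(f)

    def runQuery(self, query):
        # same (file_name, object_id) pairs as before, just over the whole manifest
        return [[entry['file_name'], entry['object_id']] for entry in self.data]

Since the script only relies on runQuery, this object can presumably be handed to faspRunner.configure in place of localSearchClient without other changes.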
f711df443022c3db620cefea6c80ebd73ea4c993
1,616
py
Python
reporter.py
mehdishekoohi/my-smart-plants
55e97e37b064656175c7abe1aa6e6e8d5723f394
[ "MIT" ]
null
null
null
reporter.py
mehdishekoohi/my-smart-plants
55e97e37b064656175c7abe1aa6e6e8d5723f394
[ "MIT" ]
null
null
null
reporter.py
mehdishekoohi/my-smart-plants
55e97e37b064656175c7abe1aa6e6e8d5723f394
[ "MIT" ]
null
null
null
from apscheduler.schedulers.blocking import BlockingScheduler from internals.sensors import get_sensors_data from internals.constants import plants_csv, moisture_alarm, template_email, output_email, \ from_email, to_email, interval_minutes from internals.utils import get_dry_plants, insert_text_into_mail_body, generate_random_values, \ get_plants_name_from_csv, get_values_percentage from internals.mailing import send_email from datetime import datetime import os my_plants = get_plants_name_from_csv(plants_csv) def plants_report(): sensor_data = get_sensors_data() values = get_values_percentage(sensor_data) # values = generate_random_values(sensors_number=5) date_format = '%Y-%m-%d, %H:%M' date = datetime.now().strftime(date_format) print(f'{date}: Checking moisture values') dry_plants = get_dry_plants(my_plants, values, moisture_alarm) if len(dry_plants): print(f'Found dry plants:') for p in dry_plants: print(p) insert_text_into_mail_body(mail_template=template_email, output_file=output_email, plants_complete_list=dry_plants) response_code = send_email(email_file=output_email, from_email=from_email, to_email=to_email) if response_code == 202: os.remove(output_email) else: print('No dry plants, all is good!') print('--------------------------') # run for the first time plants_report() # set scheduler and repeat it scheduler = BlockingScheduler() scheduler.add_job(plants_report, 'interval', minutes=interval_minutes) scheduler.start()
37.581395
101
0.731436
from apscheduler.schedulers.blocking import BlockingScheduler from internals.sensors import get_sensors_data from internals.constants import plants_csv, moisture_alarm, template_email, output_email, \ from_email, to_email, interval_minutes from internals.utils import get_dry_plants, insert_text_into_mail_body, generate_random_values, \ get_plants_name_from_csv, get_values_percentage from internals.mailing import send_email from datetime import datetime import os my_plants = get_plants_name_from_csv(plants_csv) def plants_report(): sensor_data = get_sensors_data() values = get_values_percentage(sensor_data) date_format = '%Y-%m-%d, %H:%M' date = datetime.now().strftime(date_format) print(f'{date}: Checking moisture values') dry_plants = get_dry_plants(my_plants, values, moisture_alarm) if len(dry_plants): print(f'Found dry plants:') for p in dry_plants: print(p) insert_text_into_mail_body(mail_template=template_email, output_file=output_email, plants_complete_list=dry_plants) response_code = send_email(email_file=output_email, from_email=from_email, to_email=to_email) if response_code == 202: os.remove(output_email) else: print('No dry plants, all is good!') print('--------------------------') plants_report() scheduler = BlockingScheduler() scheduler.add_job(plants_report, 'interval', minutes=interval_minutes) scheduler.start()
true
true
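The reporter script above depends on get_dry_plants from internals.utils, which is not included in this record. The sketch below is a purely illustrative guess at such a helper; pairing plant names with moisture percentages and comparing against moisture_alarm with <= are assumptions about that module, not its actual code.

def get_dry_plants(plant_names, moisture_percentages, alarm_threshold):
    # Hypothetical helper: keep every plant whose moisture reading (percent)
    # has dropped to or below the alarm threshold.
    return [name
            for name, moisture in zip(plant_names, moisture_percentages)
            if moisture <= alarm_threshold]

# e.g. get_dry_plants(['basil', 'mint'], [12.0, 55.0], 20) would return ['basil']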
f711e0360364b8abc0862ca55da868e7c61c1ee4
12,851
py
Python
venv/lib/python3.6/site-packages/flask_login/utils.py
aitoehigie/britecore_flask
eef1873dbe6b2cc21f770bc6dec783007ae4493b
[ "MIT" ]
null
null
null
venv/lib/python3.6/site-packages/flask_login/utils.py
aitoehigie/britecore_flask
eef1873dbe6b2cc21f770bc6dec783007ae4493b
[ "MIT" ]
1
2021-06-01T23:32:38.000Z
2021-06-01T23:32:38.000Z
venv/lib/python3.6/site-packages/flask_login/utils.py
aitoehigie/britecore_flask
eef1873dbe6b2cc21f770bc6dec783007ae4493b
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- """ flask_login.utils ----------------- General utilities. """ import hmac from hashlib import sha512 from functools import wraps from werkzeug.local import LocalProxy from werkzeug.security import safe_str_cmp from werkzeug.urls import url_decode, url_encode from flask import ( _request_ctx_stack, current_app, request, session, url_for, has_request_context, ) from ._compat import text_type, urlparse, urlunparse from .config import COOKIE_NAME, EXEMPT_METHODS from .signals import user_logged_in, user_logged_out, user_login_confirmed #: A proxy for the current user. If no user is logged in, this will be an #: anonymous user current_user = LocalProxy(lambda: _get_user()) def encode_cookie(payload): """ This will encode a ``unicode`` value into a cookie, and sign that cookie with the app's secret key. :param payload: The value to encode, as `unicode`. :type payload: unicode """ return u"{0}|{1}".format(payload, _cookie_digest(payload)) def decode_cookie(cookie): """ This decodes a cookie given by `encode_cookie`. If verification of the cookie fails, ``None`` will be implicitly returned. :param cookie: An encoded cookie. :type cookie: str """ try: payload, digest = cookie.rsplit(u"|", 1) if hasattr(digest, "decode"): digest = digest.decode("ascii") # pragma: no cover except ValueError: return if safe_str_cmp(_cookie_digest(payload), digest): return payload def make_next_param(login_url, current_url): """ Reduces the scheme and host from a given URL so it can be passed to the given `login` URL more efficiently. :param login_url: The login URL being redirected to. :type login_url: str :param current_url: The URL to reduce. :type current_url: str """ l = urlparse(login_url) c = urlparse(current_url) if (not l.scheme or l.scheme == c.scheme) and ( not l.netloc or l.netloc == c.netloc ): return urlunparse(("", "", c.path, c.params, c.query, "")) return current_url def expand_login_view(login_view): """ Returns the url for the login view, expanding the view name to a url if needed. :param login_view: The name of the login view or a URL for the login view. :type login_view: str """ if login_view.startswith(("https://", "http://", "/")): return login_view else: return url_for(login_view) def login_url(login_view, next_url=None, next_field="next"): """ Creates a URL for redirecting to a login page. If only `login_view` is provided, this will just return the URL for it. If `next_url` is provided, however, this will append a ``next=URL`` parameter to the query string so that the login view can redirect back to that URL. Flask-Login's default unauthorized handler uses this function when redirecting to your login url. To force the host name used, set `FORCE_HOST_FOR_REDIRECTS` to a host. This prevents from redirecting to external sites if request headers Host or X-Forwarded-For are present. :param login_view: The name of the login view. (Alternately, the actual URL to the login view.) :type login_view: str :param next_url: The URL to give the login view for redirection. :type next_url: str :param next_field: What field to store the next URL in. (It defaults to ``next``.) 
:type next_field: str """ base = expand_login_view(login_view) if next_url is None: return base parsed_result = urlparse(base) md = url_decode(parsed_result.query) md[next_field] = make_next_param(base, next_url) netloc = current_app.config.get("FORCE_HOST_FOR_REDIRECTS") or parsed_result.netloc parsed_result = parsed_result._replace( netloc=netloc, query=url_encode(md, sort=True) ) return urlunparse(parsed_result) def login_fresh(): """ This returns ``True`` if the current login is fresh. """ return session.get("_fresh", False) def login_user(user, remember=False, duration=None, force=False, fresh=True): """ Logs a user in. You should pass the actual user object to this. If the user's `is_active` property is ``False``, they will not be logged in unless `force` is ``True``. This will return ``True`` if the log in attempt succeeds, and ``False`` if it fails (i.e. because the user is inactive). :param user: The user object to log in. :type user: object :param remember: Whether to remember the user after their session expires. Defaults to ``False``. :type remember: bool :param duration: The amount of time before the remember cookie expires. If ``None`` the value set in the settings is used. Defaults to ``None``. :type duration: :class:`datetime.timedelta` :param force: If the user is inactive, setting this to ``True`` will log them in regardless. Defaults to ``False``. :type force: bool :param fresh: setting this to ``False`` will log in the user with a session marked as not "fresh". Defaults to ``True``. :type fresh: bool """ if not force and not user.is_active: return False user_id = getattr(user, current_app.login_manager.id_attribute)() session["user_id"] = user_id session["_fresh"] = fresh session["_id"] = current_app.login_manager._session_identifier_generator() if remember: session["remember"] = "set" if duration is not None: try: # equal to timedelta.total_seconds() but works with Python 2.6 session["remember_seconds"] = ( duration.microseconds + (duration.seconds + duration.days * 24 * 3600) * 10 ** 6 ) / 10.0 ** 6 except AttributeError: raise Exception( "duration must be a datetime.timedelta, " "instead got: {0}".format(duration) ) _request_ctx_stack.top.user = user user_logged_in.send(current_app._get_current_object(), user=_get_user()) return True def logout_user(): """ Logs a user out. (You do not need to pass the actual user.) This will also clean up the remember me cookie if it exists. """ user = _get_user() if "user_id" in session: session.pop("user_id") if "_fresh" in session: session.pop("_fresh") cookie_name = current_app.config.get("REMEMBER_COOKIE_NAME", COOKIE_NAME) if cookie_name in request.cookies: session["remember"] = "clear" if "remember_seconds" in session: session.pop("remember_seconds") user_logged_out.send(current_app._get_current_object(), user=user) current_app.login_manager.reload_user() return True def confirm_login(): """ This sets the current session as fresh. Sessions become stale when they are reloaded from a cookie. """ session["_fresh"] = True session["_id"] = current_app.login_manager._session_identifier_generator() user_login_confirmed.send(current_app._get_current_object()) def login_required(func): """ If you decorate a view with this, it will ensure that the current user is logged in and authenticated before calling the actual view. (If they are not, it calls the :attr:`LoginManager.unauthorized` callback.) 
For example:: @app.route('/post') @login_required def post(): pass If there are only certain times you need to require that your user is logged in, you can do so with:: if not current_user.is_authenticated: return current_app.login_manager.unauthorized() ...which is essentially the code that this function adds to your views. It can be convenient to globally turn off authentication when unit testing. To enable this, if the application configuration variable `LOGIN_DISABLED` is set to `True`, this decorator will be ignored. .. Note :: Per `W3 guidelines for CORS preflight requests <http://www.w3.org/TR/cors/#cross-origin-request-with-preflight-0>`_, HTTP ``OPTIONS`` requests are exempt from login checks. :param func: The view function to decorate. :type func: function """ @wraps(func) def decorated_view(*args, **kwargs): if request.method in EXEMPT_METHODS: return func(*args, **kwargs) elif current_app.login_manager._login_disabled: return func(*args, **kwargs) elif not current_user.is_authenticated: return current_app.login_manager.unauthorized() return func(*args, **kwargs) return decorated_view def fresh_login_required(func): """ If you decorate a view with this, it will ensure that the current user's login is fresh - i.e. their session was not restored from a 'remember me' cookie. Sensitive operations, like changing a password or e-mail, should be protected with this, to impede the efforts of cookie thieves. If the user is not authenticated, :meth:`LoginManager.unauthorized` is called as normal. If they are authenticated, but their session is not fresh, it will call :meth:`LoginManager.needs_refresh` instead. (In that case, you will need to provide a :attr:`LoginManager.refresh_view`.) Behaves identically to the :func:`login_required` decorator with respect to configutation variables. .. Note :: Per `W3 guidelines for CORS preflight requests <http://www.w3.org/TR/cors/#cross-origin-request-with-preflight-0>`_, HTTP ``OPTIONS`` requests are exempt from login checks. :param func: The view function to decorate. :type func: function """ @wraps(func) def decorated_view(*args, **kwargs): if request.method in EXEMPT_METHODS: return func(*args, **kwargs) elif current_app.login_manager._login_disabled: return func(*args, **kwargs) elif not current_user.is_authenticated: return current_app.login_manager.unauthorized() elif not login_fresh(): return current_app.login_manager.needs_refresh() return func(*args, **kwargs) return decorated_view def set_login_view(login_view, blueprint=None): """ Sets the login view for the app or blueprint. If a blueprint is passed, the login view is set for this blueprint on ``blueprint_login_views``. :param login_view: The user object to log in. :type login_view: str :param blueprint: The blueprint which this login view should be set on. Defaults to ``None``. 
:type blueprint: object """ num_login_views = len(current_app.login_manager.blueprint_login_views) if blueprint is not None or num_login_views != 0: (current_app.login_manager.blueprint_login_views[blueprint.name]) = login_view if ( current_app.login_manager.login_view is not None and None not in current_app.login_manager.blueprint_login_views ): ( current_app.login_manager.blueprint_login_views[None] ) = current_app.login_manager.login_view current_app.login_manager.login_view = None else: current_app.login_manager.login_view = login_view def _get_user(): if has_request_context() and not hasattr(_request_ctx_stack.top, "user"): current_app.login_manager._load_user() return getattr(_request_ctx_stack.top, "user", None) def _cookie_digest(payload, key=None): key = _secret_key(key) return hmac.new(key, payload.encode("utf-8"), sha512).hexdigest() def _get_remote_addr(): address = request.headers.get("X-Forwarded-For", request.remote_addr) if address is not None: # An 'X-Forwarded-For' header includes a comma separated list of the # addresses, the first address being the actual remote address. address = address.encode("utf-8").split(b",")[0].strip() return address def _create_identifier(): user_agent = request.headers.get("User-Agent") if user_agent is not None: user_agent = user_agent.encode("utf-8") base = "{0}|{1}".format(_get_remote_addr(), user_agent) if str is bytes: base = text_type(base, "utf-8", errors="replace") # pragma: no cover h = sha512() h.update(base.encode("utf8")) return h.hexdigest() def _user_context_processor(): return dict(current_user=_get_user()) def _secret_key(key=None): if key is None: key = current_app.config["SECRET_KEY"] if isinstance(key, text_type): # pragma: no cover key = key.encode("latin1") # ensure bytes return key
32.699746
87
0.668119
import hmac from hashlib import sha512 from functools import wraps from werkzeug.local import LocalProxy from werkzeug.security import safe_str_cmp from werkzeug.urls import url_decode, url_encode from flask import ( _request_ctx_stack, current_app, request, session, url_for, has_request_context, ) from ._compat import text_type, urlparse, urlunparse from .config import COOKIE_NAME, EXEMPT_METHODS from .signals import user_logged_in, user_logged_out, user_login_confirmed current_user = LocalProxy(lambda: _get_user()) def encode_cookie(payload): return u"{0}|{1}".format(payload, _cookie_digest(payload)) def decode_cookie(cookie): try: payload, digest = cookie.rsplit(u"|", 1) if hasattr(digest, "decode"): digest = digest.decode("ascii") except ValueError: return if safe_str_cmp(_cookie_digest(payload), digest): return payload def make_next_param(login_url, current_url): l = urlparse(login_url) c = urlparse(current_url) if (not l.scheme or l.scheme == c.scheme) and ( not l.netloc or l.netloc == c.netloc ): return urlunparse(("", "", c.path, c.params, c.query, "")) return current_url def expand_login_view(login_view): if login_view.startswith(("https://", "http://", "/")): return login_view else: return url_for(login_view) def login_url(login_view, next_url=None, next_field="next"): base = expand_login_view(login_view) if next_url is None: return base parsed_result = urlparse(base) md = url_decode(parsed_result.query) md[next_field] = make_next_param(base, next_url) netloc = current_app.config.get("FORCE_HOST_FOR_REDIRECTS") or parsed_result.netloc parsed_result = parsed_result._replace( netloc=netloc, query=url_encode(md, sort=True) ) return urlunparse(parsed_result) def login_fresh(): return session.get("_fresh", False) def login_user(user, remember=False, duration=None, force=False, fresh=True): if not force and not user.is_active: return False user_id = getattr(user, current_app.login_manager.id_attribute)() session["user_id"] = user_id session["_fresh"] = fresh session["_id"] = current_app.login_manager._session_identifier_generator() if remember: session["remember"] = "set" if duration is not None: try: session["remember_seconds"] = ( duration.microseconds + (duration.seconds + duration.days * 24 * 3600) * 10 ** 6 ) / 10.0 ** 6 except AttributeError: raise Exception( "duration must be a datetime.timedelta, " "instead got: {0}".format(duration) ) _request_ctx_stack.top.user = user user_logged_in.send(current_app._get_current_object(), user=_get_user()) return True def logout_user(): user = _get_user() if "user_id" in session: session.pop("user_id") if "_fresh" in session: session.pop("_fresh") cookie_name = current_app.config.get("REMEMBER_COOKIE_NAME", COOKIE_NAME) if cookie_name in request.cookies: session["remember"] = "clear" if "remember_seconds" in session: session.pop("remember_seconds") user_logged_out.send(current_app._get_current_object(), user=user) current_app.login_manager.reload_user() return True def confirm_login(): session["_fresh"] = True session["_id"] = current_app.login_manager._session_identifier_generator() user_login_confirmed.send(current_app._get_current_object()) def login_required(func): @wraps(func) def decorated_view(*args, **kwargs): if request.method in EXEMPT_METHODS: return func(*args, **kwargs) elif current_app.login_manager._login_disabled: return func(*args, **kwargs) elif not current_user.is_authenticated: return current_app.login_manager.unauthorized() return func(*args, **kwargs) return decorated_view def fresh_login_required(func): @wraps(func) def 
decorated_view(*args, **kwargs): if request.method in EXEMPT_METHODS: return func(*args, **kwargs) elif current_app.login_manager._login_disabled: return func(*args, **kwargs) elif not current_user.is_authenticated: return current_app.login_manager.unauthorized() elif not login_fresh(): return current_app.login_manager.needs_refresh() return func(*args, **kwargs) return decorated_view def set_login_view(login_view, blueprint=None): num_login_views = len(current_app.login_manager.blueprint_login_views) if blueprint is not None or num_login_views != 0: (current_app.login_manager.blueprint_login_views[blueprint.name]) = login_view if ( current_app.login_manager.login_view is not None and None not in current_app.login_manager.blueprint_login_views ): ( current_app.login_manager.blueprint_login_views[None] ) = current_app.login_manager.login_view current_app.login_manager.login_view = None else: current_app.login_manager.login_view = login_view def _get_user(): if has_request_context() and not hasattr(_request_ctx_stack.top, "user"): current_app.login_manager._load_user() return getattr(_request_ctx_stack.top, "user", None) def _cookie_digest(payload, key=None): key = _secret_key(key) return hmac.new(key, payload.encode("utf-8"), sha512).hexdigest() def _get_remote_addr(): address = request.headers.get("X-Forwarded-For", request.remote_addr) if address is not None: address = address.encode("utf-8").split(b",")[0].strip() return address def _create_identifier(): user_agent = request.headers.get("User-Agent") if user_agent is not None: user_agent = user_agent.encode("utf-8") base = "{0}|{1}".format(_get_remote_addr(), user_agent) if str is bytes: base = text_type(base, "utf-8", errors="replace") h = sha512() h.update(base.encode("utf8")) return h.hexdigest() def _user_context_processor(): return dict(current_user=_get_user()) def _secret_key(key=None): if key is None: key = current_app.config["SECRET_KEY"] if isinstance(key, text_type): key = key.encode("latin1") return key
true
true
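The flask_login helpers in the record above (login_user, logout_user, login_required, login_url and friends) are meant to be driven from a Flask application. A minimal, self-contained sketch of that usage follows; the secret key, user id, and route names are placeholders chosen for the example.

from flask import Flask, redirect
from flask_login import LoginManager, UserMixin, current_user, login_required, login_user

app = Flask(__name__)
app.secret_key = 'change-me'            # signs the session and remember cookies
login_manager = LoginManager(app)
login_manager.login_view = 'login'      # the default unauthorized handler redirects here via login_url()

class User(UserMixin):
    def __init__(self, user_id):
        self.id = user_id

@login_manager.user_loader
def load_user(user_id):
    return User(user_id)

@app.route('/login')
def login():
    login_user(User('42'), remember=True)   # stores user_id in the session and sets the remember cookie
    return redirect('/secret')

@app.route('/secret')
@login_required                              # the decorator defined in the record above
def secret():
    return 'hello ' + current_user.get_id()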
f711e09f98417b900c7b6eb63431d1bc6bce250d
435
py
Python
examples/pybullet/examples/integrate.py
felipeek/bullet3
6a59241074720e9df119f2f86bc01765917feb1e
[ "Zlib" ]
9,136
2015-01-02T00:41:45.000Z
2022-03-31T15:30:02.000Z
examples/pybullet/examples/integrate.py
felipeek/bullet3
6a59241074720e9df119f2f86bc01765917feb1e
[ "Zlib" ]
2,424
2015-01-05T08:55:58.000Z
2022-03-30T19:34:55.000Z
examples/pybullet/examples/integrate.py
felipeek/bullet3
6a59241074720e9df119f2f86bc01765917feb1e
[ "Zlib" ]
2,921
2015-01-02T10:19:30.000Z
2022-03-31T02:48:42.000Z
import pybullet as p import pybullet_data p.connect(p.GUI) p.setAdditionalSearchPath(pybullet_data.getDataPath()) cube = p.loadURDF("cube.urdf") frequency = 240 timeStep = 1. / frequency p.setGravity(0, 0, -9.8) p.changeDynamics(cube, -1, linearDamping=0, angularDamping=0) p.setPhysicsEngineParameter(fixedTimeStep=timeStep) for i in range(frequency): p.stepSimulation() pos, orn = p.getBasePositionAndOrientation(cube) print(pos)
27.1875
61
0.783908
import pybullet as p import pybullet_data p.connect(p.GUI) p.setAdditionalSearchPath(pybullet_data.getDataPath()) cube = p.loadURDF("cube.urdf") frequency = 240 timeStep = 1. / frequency p.setGravity(0, 0, -9.8) p.changeDynamics(cube, -1, linearDamping=0, angularDamping=0) p.setPhysicsEngineParameter(fixedTimeStep=timeStep) for i in range(frequency): p.stepSimulation() pos, orn = p.getBasePositionAndOrientation(cube) print(pos)
true
true
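integrate.py above drops a cube under gravity for one simulated second at 240 steps per second and prints its final position. Because damping is zeroed and nothing else is loaded, the result can be compared with analytic free fall, z(t) = z0 - g*t^2/2. The sketch below repeats the run headlessly and prints both numbers; the assumption that the cube starts at the world origin with nothing to collide against comes from the script itself.

import pybullet as p
import pybullet_data

p.connect(p.DIRECT)                               # headless; the original uses p.GUI
p.setAdditionalSearchPath(pybullet_data.getDataPath())
cube = p.loadURDF("cube.urdf")
p.setGravity(0, 0, -9.8)
p.changeDynamics(cube, -1, linearDamping=0, angularDamping=0)
p.setPhysicsEngineParameter(fixedTimeStep=1. / 240)

for _ in range(240):
    p.stepSimulation()

pos, _ = p.getBasePositionAndOrientation(cube)
expected_z = -0.5 * 9.8 * 1.0 ** 2                # roughly -4.9 m of free fall after one second
print(pos[2], expected_z)                         # the two should agree up to integration error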
f711e12fa6f296395e89cfcd5dca7f4594f6d5eb
399
py
Python
shielding/routing.py
gustavosilva-gss/shielding
476f0a30c1363730604f24428262670ebede39ad
[ "MIT" ]
null
null
null
shielding/routing.py
gustavosilva-gss/shielding
476f0a30c1363730604f24428262670ebede39ad
[ "MIT" ]
null
null
null
shielding/routing.py
gustavosilva-gss/shielding
476f0a30c1363730604f24428262670ebede39ad
[ "MIT" ]
null
null
null
#https://channels.readthedocs.io/en/latest/installation.html from channels.auth import AuthMiddlewareStack from channels.routing import ProtocolTypeRouter, URLRouter import website.routing application = ProtocolTypeRouter({ # (http->django views is added by default) 'websocket': AuthMiddlewareStack( URLRouter( website.routing.websocket_urlpatterns ) ), })
28.5
60
0.739348
from channels.auth import AuthMiddlewareStack from channels.routing import ProtocolTypeRouter, URLRouter import website.routing application = ProtocolTypeRouter({ 'websocket': AuthMiddlewareStack( URLRouter( website.routing.websocket_urlpatterns ) ), })
true
true
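The routing module above wraps website.routing.websocket_urlpatterns in AuthMiddlewareStack and URLRouter, but that companion module is not part of this record. Below is a hypothetical sketch of what it commonly contains; UpdatesConsumer and the ws/updates/ path are invented names, and the .as_asgi() call applies to Channels 3 style consumers (Channels 2 consumers are referenced directly instead).

# website/routing.py (hypothetical companion module)
from django.urls import re_path

from . import consumers

websocket_urlpatterns = [
    re_path(r'^ws/updates/$', consumers.UpdatesConsumer.as_asgi()),
]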
f711e15b750f7aca7716ed5cd772e97d6c665d99
19,759
py
Python
domain_adaptation/corruptions/corruptions.py
DexiongYung/robustnav_AE
f2b1b5bb8780e4e6ae5f81c127b7589cfc949801
[ "MIT" ]
null
null
null
domain_adaptation/corruptions/corruptions.py
DexiongYung/robustnav_AE
f2b1b5bb8780e4e6ae5f81c127b7589cfc949801
[ "MIT" ]
null
null
null
domain_adaptation/corruptions/corruptions.py
DexiongYung/robustnav_AE
f2b1b5bb8780e4e6ae5f81c127b7589cfc949801
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- import numpy as np from PIL import Image # /////////////// Corruption Helpers /////////////// import skimage as sk from torchvision import transforms import torchvision.transforms.functional as F from skimage.filters import gaussian from io import BytesIO from wand.image import Image as WandImage from wand.api import library as wandlibrary import wand.color as WandColor import ctypes from PIL import Image as PILImage import cv2 from scipy.ndimage import zoom as scizoom from scipy.ndimage.interpolation import map_coordinates import warnings import os from pkg_resources import resource_filename warnings.simplefilter("ignore", UserWarning) def disk(radius, alias_blur=0.1, dtype=np.float32): # 17 x 17 kernel causes seg fault in opencv # if radius <= 8: # L = np.arange(-8, 8 + 1) # ksize = (3, 3) if radius <= 5: L = np.arange(-5, 5 + 1) ksize = (3, 3) else: L = np.arange(-radius, radius + 1) ksize = (5, 5) X, Y = np.meshgrid(L, L) aliased_disk = np.array((X ** 2 + Y ** 2) <= radius ** 2, dtype=dtype) aliased_disk /= np.sum(aliased_disk) # supersample disk to antialias return cv2.GaussianBlur(aliased_disk, ksize=ksize, sigmaX=alias_blur) # Tell Python about the C method wandlibrary.MagickMotionBlurImage.argtypes = (ctypes.c_void_p, # wand ctypes.c_double, # radius ctypes.c_double, # sigma ctypes.c_double) # angle # Extend wand.image.Image class to include method signature class MotionImage(WandImage): def motion_blur(self, radius=0.0, sigma=0.0, angle=0.0): wandlibrary.MagickMotionBlurImage(self.wand, radius, sigma, angle) # modification of https://github.com/FLHerne/mapgen/blob/master/diamondsquare.py def plasma_fractal(mapsize=512, wibbledecay=3): """ Generate a heightmap using diamond-square algorithm. Return square 2d array, side length 'mapsize', of floats in range 0-255. 'mapsize' must be a power of two. 
""" assert (mapsize & (mapsize - 1) == 0) maparray = np.empty((mapsize, mapsize), dtype=np.float_) maparray[0, 0] = 0 stepsize = mapsize wibble = 100 def wibbledmean(array): return array / 4 + wibble * np.random.uniform(-wibble, wibble, array.shape) def fillsquares(): """For each square of points stepsize apart, calculate middle value as mean of points + wibble""" cornerref = maparray[0:mapsize:stepsize, 0:mapsize:stepsize] squareaccum = cornerref + np.roll(cornerref, shift=-1, axis=0) squareaccum += np.roll(squareaccum, shift=-1, axis=1) maparray[stepsize // 2:mapsize:stepsize, stepsize // 2:mapsize:stepsize] = wibbledmean(squareaccum) def filldiamonds(): """For each diamond of points stepsize apart, calculate middle value as mean of points + wibble""" mapsize = maparray.shape[0] drgrid = maparray[stepsize // 2:mapsize:stepsize, stepsize // 2:mapsize:stepsize] ulgrid = maparray[0:mapsize:stepsize, 0:mapsize:stepsize] ldrsum = drgrid + np.roll(drgrid, 1, axis=0) lulsum = ulgrid + np.roll(ulgrid, -1, axis=1) ltsum = ldrsum + lulsum maparray[0:mapsize:stepsize, stepsize // 2:mapsize:stepsize] = wibbledmean(ltsum) tdrsum = drgrid + np.roll(drgrid, 1, axis=1) tulsum = ulgrid + np.roll(ulgrid, -1, axis=0) ttsum = tdrsum + tulsum maparray[stepsize // 2:mapsize:stepsize, 0:mapsize:stepsize] = wibbledmean(ttsum) while stepsize >= 2: fillsquares() filldiamonds() stepsize //= 2 wibble /= wibbledecay maparray -= maparray.min() return maparray / maparray.max() def clipped_zoom(img, zoom_factor): h = img.shape[0] # ceil crop height(= crop width) ch = int(np.ceil(h / float(zoom_factor))) top = (h - ch) // 2 img = scizoom(img[top:top + ch, top:top + ch], (zoom_factor, zoom_factor, 1), order=1) # trim off any extra pixels trim_top = (img.shape[0] - h) // 2 return img[trim_top:trim_top + h, trim_top:trim_top + h] # /////////////// End Corruption Helpers /////////////// # /////////////// Corruptions /////////////// def gaussian_noise(x, severity=1): # c = [.08, .12, 0.18, 0.26, 0.38][severity - 1] c = [0.04, 0.06, .08, .09, .10][severity - 1] x = np.array(x) / 255. return np.clip(x + np.random.normal(size=x.shape, scale=c), 0, 1) * 255 def shot_noise(x, severity=1): # c = [60, 25, 12, 5, 3][severity - 1] c = [500, 250, 100, 75, 50][severity - 1] x = np.array(x) / 255. return np.clip(np.random.poisson(x * c) / float(c), 0, 1) * 255 def impulse_noise(x, severity=1): # c = [.03, .06, .09, 0.17, 0.27][severity - 1] c = [.01, .02, .03, .05, .07][severity - 1] x = sk.util.random_noise(np.array(x) / 255., mode='s&p', amount=c) return np.clip(x, 0, 1) * 255 def speckle_noise(x, severity=1): # c = [.15, .2, 0.35, 0.45, 0.6][severity - 1] c = [.06, .1, .12, .16, .2][severity - 1] x = np.array(x) / 255. 
return np.clip(x + x * np.random.normal(size=x.shape, scale=c), 0, 1) * 255 def gaussian_blur(x, severity=1): # c = [1, 2, 3, 4, 6][severity - 1] c = [.4, .6, 0.7, .8, 1][severity - 1] x = gaussian(np.array(x) / 255., sigma=c, multichannel=True) return np.clip(x, 0, 1) * 255 def glass_blur(x, severity=1): # sigma, max_delta, iterations # c = [(0.7, 1, 2), (0.9, 2, 1), (1, 2, 3), (1.1, 3, 2), (1.5, 4, 2)][severity - 1] c = [(0.05,1,1), (0.25,1,1), (0.4,1,1), (0.25,1,2), (0.4,1,2)][severity - 1] x = np.uint8(gaussian(np.array(x) / 255., sigma=c[0], multichannel=True) * 255) size = x.shape[0] # locally shuffle pixels for i in range(c[2]): for h in range(size - c[1], c[1], -1): for w in range(size - c[1], c[1], -1): dx, dy = np.random.randint(-c[1], c[1], size=(2,)) h_prime, w_prime = h + dy, w + dx # swap x[h, w], x[h_prime, w_prime] = x[h_prime, w_prime], x[h, w] return np.clip(gaussian(x / 255., sigma=c[0], multichannel=True), 0, 1) * 255 def defocus_blur(x, severity=1): # c = [(3, 0.1), (4, 0.5), (6, 0.5), (8, 0.5), (10, 0.5)][severity - 1] c = [(0.3, 0.4), (0.4, 0.5), (0.5, 0.6), (1, 0.2), (1.5, 0.1)][severity - 1] x = np.array(x) / 255. kernel = disk(radius=c[0], alias_blur=c[1]) channels = [] for d in range(3): channels.append(cv2.filter2D(x[:, :, d].astype(np.float32), -1, kernel)) channels = np.array(channels).transpose((1, 2, 0)) # 3x32x32 -> 32x32x3 return np.clip(channels, 0, 1) * 255 def motion_blur(x, severity=1): # c = [(10, 3), (15, 5), (15, 8), (15, 12), (20, 15)][severity - 1] c = [(6,1), (6,1.5), (6,2), (8,2), (9,2.5)][severity - 1] output = BytesIO() Image.fromarray(x).save(output, format='PNG') x = MotionImage(blob=output.getvalue()) x.motion_blur(radius=c[0], sigma=c[1], angle=np.random.uniform(-45, 45)) x = cv2.imdecode(np.fromstring(x.make_blob(), np.uint8), cv2.IMREAD_UNCHANGED) if x.shape != (512, 512): return np.clip(x[..., [2, 1, 0]], 0, 255) # BGR to RGB else: # greyscale to RGB return np.clip(np.array([x, x, x]).transpose((1, 2, 0)), 0, 255) def zoom_blur(x, severity=1): # c = [np.arange(1, 1.11, 0.01), # np.arange(1, 1.16, 0.01), # np.arange(1, 1.21, 0.02), # np.arange(1, 1.26, 0.02), # np.arange(1, 1.31, 0.03)][severity - 1] c = [np.arange(1, 1.06, 0.01), np.arange(1, 1.11, 0.01), np.arange(1, 1.16, 0.01), np.arange(1, 1.21, 0.01), np.arange(1, 1.26, 0.01)][severity - 1] x = (np.array(x) / 255.).astype(np.float32) out = np.zeros_like(x) for zoom_factor in c: out += clipped_zoom(x, zoom_factor) x = (x + out) / (len(c) + 1) return np.clip(x, 0, 1) * 255 def fog(x, severity=1): # c = [(1.5, 2), (2., 2), (2.5, 1.7), (2.5, 1.5), (3., 1.4)][severity - 1] c = [(.2,3), (.5,3), (0.75,2.5), (1,2), (1.5,1.75)][severity - 1] size = x.shape[0] x = np.array(x) / 255. 
max_val = x.max() x += c[0] * plasma_fractal(wibbledecay=c[1])[:size, :size][..., np.newaxis] return np.clip(x * max_val / (max_val + c[0]), 0, 1) # *255 def frost(x, severity=1): size = x.shape[0] # c = [(1, 0.4), # (0.8, 0.6), # (0.7, 0.7), # (0.65, 0.7), # (0.6, 0.75)][severity - 1] c = [(1, 0.2), (1, 0.3), (0.9, 0.4), (0.85, 0.4), (0.75, 0.45)][severity - 1] idx = np.random.randint(5) filename = [resource_filename(__name__, 'frost/frost1.png'), resource_filename(__name__, 'frost/frost2.png'), resource_filename(__name__, 'frost/frost3.png'), resource_filename(__name__, 'frost/frost4.jpg'), resource_filename(__name__, 'frost/frost5.jpg'), resource_filename(__name__, 'frost/frost6.jpg')][idx] frost = cv2.imread(filename) # randomly crop and convert to rgb x_start, y_start = np.random.randint(0, frost.shape[0] - size), np.random.randint(0, frost.shape[1] - size) frost = frost[x_start:x_start + size, y_start:y_start + size][..., [2, 1, 0]] return np.clip(c[0] * np.array(x) + c[1] * frost, 0, 255) / 255 def snow(x, severity=1): # c = [(0.1, 0.3, 3, 0.5, 10, 4, 0.8), # (0.2, 0.3, 2, 0.5, 12, 4, 0.7), # (0.55, 0.3, 4, 0.9, 12, 8, 0.7), # (0.55, 0.3, 4.5, 0.85, 12, 8, 0.65), # (0.55, 0.3, 2.5, 0.85, 12, 12, 0.55)][severity - 1] c = [(0.1,0.2,1,0.6,8,3,0.95), (0.1,0.2,1,0.5,10,4,0.9), (0.15,0.3,1.75,0.55,10,4,0.9), (0.25,0.3,2.25,0.6,12,6,0.85), (0.3,0.3,1.25,0.65,14,12,0.8)][severity - 1] size = x.shape[0] x = np.array(x, dtype=np.float32) / 255. snow_layer = np.random.normal(size=x.shape[:2], loc=c[0], scale=c[1]) # [:2] for monochrome snow_layer = clipped_zoom(snow_layer[..., np.newaxis], c[2]) snow_layer[snow_layer < c[3]] = 0 snow_layer = PILImage.fromarray((np.clip(snow_layer.squeeze(), 0, 1) * 255).astype(np.uint8), mode='L') output = BytesIO() snow_layer.save(output, format='PNG') snow_layer = MotionImage(blob=output.getvalue()) snow_layer.motion_blur(radius=c[4], sigma=c[5], angle=np.random.uniform(-135, -45)) snow_layer = cv2.imdecode(np.fromstring(snow_layer.make_blob(), np.uint8), cv2.IMREAD_UNCHANGED) / 255. snow_layer = snow_layer[..., np.newaxis] x = c[6] * x + (1 - c[6]) * np.maximum(x, cv2.cvtColor(x, code = cv2.COLOR_RGB2GRAY).reshape(size, size, 1) * 1.5 + 0.5) return np.clip(x + snow_layer + np.rot90(snow_layer, k=2), 0, 1) * 255 def spatter(x, severity=1): # c = [(0.65, 0.3, 4, 0.69, 0.6, 0), # (0.65, 0.3, 3, 0.68, 0.6, 0), # (0.65, 0.3, 2, 0.68, 0.5, 0), # (0.65, 0.3, 1, 0.65, 1.5, 1), # (0.67, 0.4, 1, 0.65, 1.5, 1)][severity - 1] c = [(0.62,0.1,0.7,0.7,0.5,0), (0.65,0.1,0.8,0.7,0.5,0), (0.65,0.3,1,0.69,0.5,0), (0.65,0.1,0.7,0.69,0.6,1), (0.65,0.1,0.5,0.68,0.6,1)][severity - 1] x = np.array(x, dtype=np.float32) / 255. liquid_layer = np.random.normal(size=x.shape[:2], loc=c[0], scale=c[1]) liquid_layer = gaussian(liquid_layer, sigma=c[2]) liquid_layer[liquid_layer < c[3]] = 0 if c[5] == 0: liquid_layer = (liquid_layer * 255).astype(np.uint8) dist = 255 - cv2.Canny(liquid_layer, 50, 150) dist = cv2.distanceTransform(dist, cv2.DIST_L2, 5) _, dist = cv2.threshold(dist, 20, 20, cv2.THRESH_TRUNC) dist = cv2.blur(dist, (3, 3)).astype(np.uint8) dist = cv2.equalizeHist(dist) ker = np.array([[-2, -1, 0], [-1, 1, 1], [0, 1, 2]]) dist = cv2.filter2D(dist, cv2.CV_8U, ker) dist = cv2.blur(dist, (3, 3)).astype(np.float32) m = cv2.cvtColor(liquid_layer * dist, cv2.COLOR_GRAY2BGRA) m /= np.max(m, axis=(0, 1)) m *= c[4] # water is pale turqouise color = np.concatenate((175 / 255. * np.ones_like(m[..., :1]), 238 / 255. * np.ones_like(m[..., :1]), 238 / 255. 
* np.ones_like(m[..., :1])), axis=2) color = cv2.cvtColor(color, cv2.COLOR_BGR2BGRA) x = cv2.cvtColor(x, cv2.COLOR_BGR2BGRA) return cv2.cvtColor(np.clip(x + m * color, 0, 1), cv2.COLOR_BGRA2BGR) * 255 else: m = np.where(liquid_layer > c[3], 1, 0) m = gaussian(m.astype(np.float32), sigma=c[4]) m[m < 0.8] = 0 # mud brown color = np.concatenate((63 / 255. * np.ones_like(x[..., :1]), 42 / 255. * np.ones_like(x[..., :1]), 20 / 255. * np.ones_like(x[..., :1])), axis=2) color *= m[..., np.newaxis] x *= (1 - m[..., np.newaxis]) return np.clip(x + color, 0, 1) * 255 def contrast(x, severity=1): # c = [0.4, .3, .2, .1, .05][severity - 1] c = [.75, .5, .4, .3, 0.15][severity - 1] x = np.array(x) / 255. means = np.mean(x, axis=(0, 1), keepdims=True) return np.clip((x - means) * c + means, 0, 1) * 255 def generate_random_lines(imshape,slant,drop_length,rain_type): drops=[] area=imshape[0]*imshape[1] no_of_drops=area//600 if rain_type.lower()=='drizzle': no_of_drops=area//770 drop_length=10 elif rain_type.lower()=='heavy': drop_length=30 elif rain_type.lower()=='torrential': no_of_drops=area//500 drop_length=60 for i in range(no_of_drops): ## If You want heavy rain, try increasing this if slant<0: x= np.random.randint(slant,imshape[1]) else: x= np.random.randint(0,imshape[1]-slant) y= np.random.randint(0,imshape[0]-drop_length) drops.append((x,y)) return drops,drop_length def rain_process(image,slant,drop_length,drop_color,drop_width,rain_drops): imshape = image.shape image_t = image.copy() for rain_drop in rain_drops: cv2.line(image_t,(rain_drop[0],rain_drop[1]),(rain_drop[0]+slant,rain_drop[1]+drop_length),drop_color,drop_width) image= cv2.blur(image_t,(7,7)) ## rainy view are blurry brightness_coefficient = 0.7 ## rainy days are usually shady image_HLS = hls(image) ## Conversion to HLS image_HLS[:,:,1] = image_HLS[:,:,1]*brightness_coefficient ## scale pixel values down for channel 1(Lightness) image_RGB= rgb(image_HLS,'hls') ## Conversion to RGB return image_RGB def hls(image,src='RGB'): image_HLS = eval('cv2.cvtColor(image,cv2.COLOR_'+src.upper()+'2HLS)') return image_HLS def rgb(image, src='BGR'): image_RGB= eval('cv2.cvtColor(image,cv2.COLOR_'+src.upper()+'2RGB)') return image_RGB def rain(image, slant=-1,drop_length=20,drop_width=1,drop_color=(200,200,200),rain_type='torrential'): ## (200,200,200) a shade of gray # verify_image(image) slant_extreme=slant # if not(is_numeric(slant_extreme) and (slant_extreme>=-20 and slant_extreme<=20)or slant_extreme==-1): # raise Exception(err_rain_slant) # if not(is_numeric(drop_width) and drop_width>=1 and drop_width<=5): # raise Exception(err_rain_width) # if not(is_numeric(drop_length) and drop_length>=0 and drop_length<=100): # raise Exception(err_rain_length) imshape = image.shape if slant_extreme==-1: slant= np.random.randint(-10,10) ##generate random slant if no slant value is given rain_drops, drop_length= generate_random_lines(imshape,slant,drop_length,rain_type) output = rain_process(image,slant_extreme,drop_length,drop_color,drop_width,rain_drops) return output def brightness(x, severity=1): # c = [.1, .2, .3, .4, .5][severity - 1] c = [.05, .1, .15, .2, .3][severity - 1] x = np.array(x) / 255. x = sk.color.rgb2hsv(x) x[:, :, 2] = np.clip(x[:, :, 2] + c, 0, 1) x = sk.color.hsv2rgb(x) return np.clip(x, 0, 1) * 255 def saturate(x, severity=1): # c = [(0.3, 0), (0.1, 0), (2, 0), (5, 0.1), (20, 0.2)][severity - 1] c = [(0.3, 0), (0.1, 0), (1.5, 0), (2, 0.1), (2.5, 0.2)][severity - 1] x = np.array(x) / 255. 
x = sk.color.rgb2hsv(x) x[:, :, 1] = np.clip(x[:, :, 1] * c[0] + c[1], 0, 1) x = sk.color.hsv2rgb(x) return np.clip(x, 0, 1) * 255 def jpeg_compression(x, severity=1): # c = [25, 18, 15, 10, 7][severity - 1] c = [80, 65, 58, 50, 40][severity - 1] output = BytesIO() Image.fromarray(x).save(output, 'JPEG', quality=c) x = np.array(PILImage.open(output)) return x def pixelate(x, severity=1): size = x.shape[0] # c = [0.6, 0.5, 0.4, 0.3, 0.25][severity - 1] c = [0.95, 0.9, 0.85, 0.75, 0.65][severity - 1] x = Image.fromarray(x) x = x.resize((int(size * c), int(size * c)),resample=Image.BILINEAR) x = x.resize((size, size),Image.NEAREST) return np.array(x) # mod of https://gist.github.com/erniejunior/601cdf56d2b424757de5 def elastic_transform(image, severity=1): c = [(244 * 2, 244 * 0.7, 244 * 0.1), # 244 should have been 512, but ultimately nothing is incorrect (244 * 2, 244 * 0.08, 244 * 0.2), (244 * 0.05, 244 * 0.01, 244 * 0.02), (244 * 0.07, 244 * 0.01, 244 * 0.02), (244 * 0.12, 244 * 0.01, 244 * 0.02)][severity - 1] image = np.array(image, dtype=np.float32) / 255. shape = image.shape shape_size = shape[:2] # random affine center_square = np.float32(shape_size) // 2 square_size = min(shape_size) // 3 pts1 = np.float32([center_square + square_size, [center_square[0] + square_size, center_square[1] - square_size], center_square - square_size]) pts2 = pts1 + np.random.uniform(-c[2], c[2], size=pts1.shape).astype(np.float32) M = cv2.getAffineTransform(pts1, pts2) image = cv2.warpAffine(image, M, shape_size[::-1], borderMode=cv2.BORDER_REFLECT_101) dx = (gaussian(np.random.uniform(-1, 1, size=shape[:2]), c[1], mode='reflect', truncate=3) * c[0]).astype(np.float32) dy = (gaussian(np.random.uniform(-1, 1, size=shape[:2]), c[1], mode='reflect', truncate=3) * c[0]).astype(np.float32) dx, dy = dx[..., np.newaxis], dy[..., np.newaxis] x, y, z = np.meshgrid(np.arange(shape[1]), np.arange(shape[0]), np.arange(shape[2])) indices = np.reshape(y + dy, (-1, 1)), np.reshape(x + dx, (-1, 1)), np.reshape(z, (-1, 1)) return np.clip(map_coordinates(image, indices, order=1, mode='reflect').reshape(shape), 0, 1) * 255 def blackoutNoise(image, severity=1): image = np.zeros(image.shape, dtype=np.uint8) m = (severity, severity, severity) s = (severity, severity, severity) image = np.clip(cv2.randn(image, m, s), 0, 255) return image def additiveGaussianNoise(image, severity=1): m = (severity, severity, severity) s = (severity, severity, severity) corr = cv2.randn(np.zeros(image.shape, dtype=np.uint8), m, s) image = np.clip(image.copy() + corr, 0, 255) return image def occlusion(image, severity=1): mask = np.ones(image.shape, dtype=np.uint8) x = int(image.shape[0] * np.random.rand()) y = int(image.shape[1] * np.random.rand()) r = int((min(image.shape[:2]) / 4) * np.random.rand() + (min(image.shape[:2]) / 4)) cv2.circle(mask, (x, y), r, 0, -1) image = np.clip(image.copy() * mask, 0, 255) return image
35.990893
135
0.572144
import numpy as np from PIL import Image import skimage as sk from torchvision import transforms import torchvision.transforms.functional as F from skimage.filters import gaussian from io import BytesIO from wand.image import Image as WandImage from wand.api import library as wandlibrary import wand.color as WandColor import ctypes from PIL import Image as PILImage import cv2 from scipy.ndimage import zoom as scizoom from scipy.ndimage.interpolation import map_coordinates import warnings import os from pkg_resources import resource_filename warnings.simplefilter("ignore", UserWarning) def disk(radius, alias_blur=0.1, dtype=np.float32): if radius <= 5: L = np.arange(-5, 5 + 1) ksize = (3, 3) else: L = np.arange(-radius, radius + 1) ksize = (5, 5) X, Y = np.meshgrid(L, L) aliased_disk = np.array((X ** 2 + Y ** 2) <= radius ** 2, dtype=dtype) aliased_disk /= np.sum(aliased_disk) return cv2.GaussianBlur(aliased_disk, ksize=ksize, sigmaX=alias_blur) wandlibrary.MagickMotionBlurImage.argtypes = (ctypes.c_void_p, ctypes.c_double, ctypes.c_double, ctypes.c_double) class MotionImage(WandImage): def motion_blur(self, radius=0.0, sigma=0.0, angle=0.0): wandlibrary.MagickMotionBlurImage(self.wand, radius, sigma, angle) def plasma_fractal(mapsize=512, wibbledecay=3): assert (mapsize & (mapsize - 1) == 0) maparray = np.empty((mapsize, mapsize), dtype=np.float_) maparray[0, 0] = 0 stepsize = mapsize wibble = 100 def wibbledmean(array): return array / 4 + wibble * np.random.uniform(-wibble, wibble, array.shape) def fillsquares(): cornerref = maparray[0:mapsize:stepsize, 0:mapsize:stepsize] squareaccum = cornerref + np.roll(cornerref, shift=-1, axis=0) squareaccum += np.roll(squareaccum, shift=-1, axis=1) maparray[stepsize // 2:mapsize:stepsize, stepsize // 2:mapsize:stepsize] = wibbledmean(squareaccum) def filldiamonds(): mapsize = maparray.shape[0] drgrid = maparray[stepsize // 2:mapsize:stepsize, stepsize // 2:mapsize:stepsize] ulgrid = maparray[0:mapsize:stepsize, 0:mapsize:stepsize] ldrsum = drgrid + np.roll(drgrid, 1, axis=0) lulsum = ulgrid + np.roll(ulgrid, -1, axis=1) ltsum = ldrsum + lulsum maparray[0:mapsize:stepsize, stepsize // 2:mapsize:stepsize] = wibbledmean(ltsum) tdrsum = drgrid + np.roll(drgrid, 1, axis=1) tulsum = ulgrid + np.roll(ulgrid, -1, axis=0) ttsum = tdrsum + tulsum maparray[stepsize // 2:mapsize:stepsize, 0:mapsize:stepsize] = wibbledmean(ttsum) while stepsize >= 2: fillsquares() filldiamonds() stepsize //= 2 wibble /= wibbledecay maparray -= maparray.min() return maparray / maparray.max() def clipped_zoom(img, zoom_factor): h = img.shape[0] ch = int(np.ceil(h / float(zoom_factor))) top = (h - ch) // 2 img = scizoom(img[top:top + ch, top:top + ch], (zoom_factor, zoom_factor, 1), order=1) trim_top = (img.shape[0] - h) // 2 return img[trim_top:trim_top + h, trim_top:trim_top + h] def gaussian_noise(x, severity=1): c = [0.04, 0.06, .08, .09, .10][severity - 1] x = np.array(x) / 255. return np.clip(x + np.random.normal(size=x.shape, scale=c), 0, 1) * 255 def shot_noise(x, severity=1): c = [500, 250, 100, 75, 50][severity - 1] x = np.array(x) / 255. return np.clip(np.random.poisson(x * c) / float(c), 0, 1) * 255 def impulse_noise(x, severity=1): c = [.01, .02, .03, .05, .07][severity - 1] x = sk.util.random_noise(np.array(x) / 255., mode='s&p', amount=c) return np.clip(x, 0, 1) * 255 def speckle_noise(x, severity=1): c = [.06, .1, .12, .16, .2][severity - 1] x = np.array(x) / 255. 
return np.clip(x + x * np.random.normal(size=x.shape, scale=c), 0, 1) * 255 def gaussian_blur(x, severity=1): c = [.4, .6, 0.7, .8, 1][severity - 1] x = gaussian(np.array(x) / 255., sigma=c, multichannel=True) return np.clip(x, 0, 1) * 255 def glass_blur(x, severity=1): c = [(0.05,1,1), (0.25,1,1), (0.4,1,1), (0.25,1,2), (0.4,1,2)][severity - 1] x = np.uint8(gaussian(np.array(x) / 255., sigma=c[0], multichannel=True) * 255) size = x.shape[0] for i in range(c[2]): for h in range(size - c[1], c[1], -1): for w in range(size - c[1], c[1], -1): dx, dy = np.random.randint(-c[1], c[1], size=(2,)) h_prime, w_prime = h + dy, w + dx x[h, w], x[h_prime, w_prime] = x[h_prime, w_prime], x[h, w] return np.clip(gaussian(x / 255., sigma=c[0], multichannel=True), 0, 1) * 255 def defocus_blur(x, severity=1): c = [(0.3, 0.4), (0.4, 0.5), (0.5, 0.6), (1, 0.2), (1.5, 0.1)][severity - 1] x = np.array(x) / 255. kernel = disk(radius=c[0], alias_blur=c[1]) channels = [] for d in range(3): channels.append(cv2.filter2D(x[:, :, d].astype(np.float32), -1, kernel)) channels = np.array(channels).transpose((1, 2, 0)) return np.clip(channels, 0, 1) * 255 def motion_blur(x, severity=1): c = [(6,1), (6,1.5), (6,2), (8,2), (9,2.5)][severity - 1] output = BytesIO() Image.fromarray(x).save(output, format='PNG') x = MotionImage(blob=output.getvalue()) x.motion_blur(radius=c[0], sigma=c[1], angle=np.random.uniform(-45, 45)) x = cv2.imdecode(np.fromstring(x.make_blob(), np.uint8), cv2.IMREAD_UNCHANGED) if x.shape != (512, 512): return np.clip(x[..., [2, 1, 0]], 0, 255) else: return np.clip(np.array([x, x, x]).transpose((1, 2, 0)), 0, 255) def zoom_blur(x, severity=1): c = [np.arange(1, 1.06, 0.01), np.arange(1, 1.11, 0.01), np.arange(1, 1.16, 0.01), np.arange(1, 1.21, 0.01), np.arange(1, 1.26, 0.01)][severity - 1] x = (np.array(x) / 255.).astype(np.float32) out = np.zeros_like(x) for zoom_factor in c: out += clipped_zoom(x, zoom_factor) x = (x + out) / (len(c) + 1) return np.clip(x, 0, 1) * 255 def fog(x, severity=1): c = [(.2,3), (.5,3), (0.75,2.5), (1,2), (1.5,1.75)][severity - 1] size = x.shape[0] x = np.array(x) / 255. max_val = x.max() x += c[0] * plasma_fractal(wibbledecay=c[1])[:size, :size][..., np.newaxis] return np.clip(x * max_val / (max_val + c[0]), 0, 1) def frost(x, severity=1): size = x.shape[0] c = [(1, 0.2), (1, 0.3), (0.9, 0.4), (0.85, 0.4), (0.75, 0.45)][severity - 1] idx = np.random.randint(5) filename = [resource_filename(__name__, 'frost/frost1.png'), resource_filename(__name__, 'frost/frost2.png'), resource_filename(__name__, 'frost/frost3.png'), resource_filename(__name__, 'frost/frost4.jpg'), resource_filename(__name__, 'frost/frost5.jpg'), resource_filename(__name__, 'frost/frost6.jpg')][idx] frost = cv2.imread(filename) x_start, y_start = np.random.randint(0, frost.shape[0] - size), np.random.randint(0, frost.shape[1] - size) frost = frost[x_start:x_start + size, y_start:y_start + size][..., [2, 1, 0]] return np.clip(c[0] * np.array(x) + c[1] * frost, 0, 255) / 255 def snow(x, severity=1): c = [(0.1,0.2,1,0.6,8,3,0.95), (0.1,0.2,1,0.5,10,4,0.9), (0.15,0.3,1.75,0.55,10,4,0.9), (0.25,0.3,2.25,0.6,12,6,0.85), (0.3,0.3,1.25,0.65,14,12,0.8)][severity - 1] size = x.shape[0] x = np.array(x, dtype=np.float32) / 255. 
snow_layer = np.random.normal(size=x.shape[:2], loc=c[0], scale=c[1]) snow_layer = clipped_zoom(snow_layer[..., np.newaxis], c[2]) snow_layer[snow_layer < c[3]] = 0 snow_layer = PILImage.fromarray((np.clip(snow_layer.squeeze(), 0, 1) * 255).astype(np.uint8), mode='L') output = BytesIO() snow_layer.save(output, format='PNG') snow_layer = MotionImage(blob=output.getvalue()) snow_layer.motion_blur(radius=c[4], sigma=c[5], angle=np.random.uniform(-135, -45)) snow_layer = cv2.imdecode(np.fromstring(snow_layer.make_blob(), np.uint8), cv2.IMREAD_UNCHANGED) / 255. snow_layer = snow_layer[..., np.newaxis] x = c[6] * x + (1 - c[6]) * np.maximum(x, cv2.cvtColor(x, code = cv2.COLOR_RGB2GRAY).reshape(size, size, 1) * 1.5 + 0.5) return np.clip(x + snow_layer + np.rot90(snow_layer, k=2), 0, 1) * 255 def spatter(x, severity=1): c = [(0.62,0.1,0.7,0.7,0.5,0), (0.65,0.1,0.8,0.7,0.5,0), (0.65,0.3,1,0.69,0.5,0), (0.65,0.1,0.7,0.69,0.6,1), (0.65,0.1,0.5,0.68,0.6,1)][severity - 1] x = np.array(x, dtype=np.float32) / 255. liquid_layer = np.random.normal(size=x.shape[:2], loc=c[0], scale=c[1]) liquid_layer = gaussian(liquid_layer, sigma=c[2]) liquid_layer[liquid_layer < c[3]] = 0 if c[5] == 0: liquid_layer = (liquid_layer * 255).astype(np.uint8) dist = 255 - cv2.Canny(liquid_layer, 50, 150) dist = cv2.distanceTransform(dist, cv2.DIST_L2, 5) _, dist = cv2.threshold(dist, 20, 20, cv2.THRESH_TRUNC) dist = cv2.blur(dist, (3, 3)).astype(np.uint8) dist = cv2.equalizeHist(dist) ker = np.array([[-2, -1, 0], [-1, 1, 1], [0, 1, 2]]) dist = cv2.filter2D(dist, cv2.CV_8U, ker) dist = cv2.blur(dist, (3, 3)).astype(np.float32) m = cv2.cvtColor(liquid_layer * dist, cv2.COLOR_GRAY2BGRA) m /= np.max(m, axis=(0, 1)) m *= c[4] color = np.concatenate((175 / 255. * np.ones_like(m[..., :1]), 238 / 255. * np.ones_like(m[..., :1]), 238 / 255. * np.ones_like(m[..., :1])), axis=2) color = cv2.cvtColor(color, cv2.COLOR_BGR2BGRA) x = cv2.cvtColor(x, cv2.COLOR_BGR2BGRA) return cv2.cvtColor(np.clip(x + m * color, 0, 1), cv2.COLOR_BGRA2BGR) * 255 else: m = np.where(liquid_layer > c[3], 1, 0) m = gaussian(m.astype(np.float32), sigma=c[4]) m[m < 0.8] = 0 color = np.concatenate((63 / 255. * np.ones_like(x[..., :1]), 42 / 255. * np.ones_like(x[..., :1]), 20 / 255. * np.ones_like(x[..., :1])), axis=2) color *= m[..., np.newaxis] x *= (1 - m[..., np.newaxis]) return np.clip(x + color, 0, 1) * 255 def contrast(x, severity=1): c = [.75, .5, .4, .3, 0.15][severity - 1] x = np.array(x) / 255. 
means = np.mean(x, axis=(0, 1), keepdims=True) return np.clip((x - means) * c + means, 0, 1) * 255 def generate_random_lines(imshape,slant,drop_length,rain_type): drops=[] area=imshape[0]*imshape[1] no_of_drops=area//600 if rain_type.lower()=='drizzle': no_of_drops=area//770 drop_length=10 elif rain_type.lower()=='heavy': drop_length=30 elif rain_type.lower()=='torrential': no_of_drops=area//500 drop_length=60 for i in range(no_of_drops): if slant<0: x= np.random.randint(slant,imshape[1]) else: x= np.random.randint(0,imshape[1]-slant) y= np.random.randint(0,imshape[0]-drop_length) drops.append((x,y)) return drops,drop_length def rain_process(image,slant,drop_length,drop_color,drop_width,rain_drops): imshape = image.shape image_t = image.copy() for rain_drop in rain_drops: cv2.line(image_t,(rain_drop[0],rain_drop[1]),(rain_drop[0]+slant,rain_drop[1]+drop_length),drop_color,drop_width) image= cv2.blur(image_t,(7,7)) brightness_coefficient = 0.7 image_HLS = hls(image) image_HLS[:,:,1] = image_HLS[:,:,1]*brightness_coefficient image_RGB= rgb(image_HLS,'hls') return image_RGB def hls(image,src='RGB'): image_HLS = eval('cv2.cvtColor(image,cv2.COLOR_'+src.upper()+'2HLS)') return image_HLS def rgb(image, src='BGR'): image_RGB= eval('cv2.cvtColor(image,cv2.COLOR_'+src.upper()+'2RGB)') return image_RGB def rain(image, slant=-1,drop_length=20,drop_width=1,drop_color=(200,200,200),rain_type='torrential'): slant_extreme=slant imshape = image.shape if slant_extreme==-1: slant= np.random.randint(-10,10) rain_drops, drop_length= generate_random_lines(imshape,slant,drop_length,rain_type) output = rain_process(image,slant_extreme,drop_length,drop_color,drop_width,rain_drops) return output def brightness(x, severity=1): c = [.05, .1, .15, .2, .3][severity - 1] x = np.array(x) / 255. x = sk.color.rgb2hsv(x) x[:, :, 2] = np.clip(x[:, :, 2] + c, 0, 1) x = sk.color.hsv2rgb(x) return np.clip(x, 0, 1) * 255 def saturate(x, severity=1): c = [(0.3, 0), (0.1, 0), (1.5, 0), (2, 0.1), (2.5, 0.2)][severity - 1] x = np.array(x) / 255. x = sk.color.rgb2hsv(x) x[:, :, 1] = np.clip(x[:, :, 1] * c[0] + c[1], 0, 1) x = sk.color.hsv2rgb(x) return np.clip(x, 0, 1) * 255 def jpeg_compression(x, severity=1): c = [80, 65, 58, 50, 40][severity - 1] output = BytesIO() Image.fromarray(x).save(output, 'JPEG', quality=c) x = np.array(PILImage.open(output)) return x def pixelate(x, severity=1): size = x.shape[0] c = [0.95, 0.9, 0.85, 0.75, 0.65][severity - 1] x = Image.fromarray(x) x = x.resize((int(size * c), int(size * c)),resample=Image.BILINEAR) x = x.resize((size, size),Image.NEAREST) return np.array(x) def elastic_transform(image, severity=1): c = [(244 * 2, 244 * 0.7, 244 * 0.1), (244 * 2, 244 * 0.08, 244 * 0.2), (244 * 0.05, 244 * 0.01, 244 * 0.02), (244 * 0.07, 244 * 0.01, 244 * 0.02), (244 * 0.12, 244 * 0.01, 244 * 0.02)][severity - 1] image = np.array(image, dtype=np.float32) / 255.
shape = image.shape shape_size = shape[:2] center_square = np.float32(shape_size) // 2 square_size = min(shape_size) // 3 pts1 = np.float32([center_square + square_size, [center_square[0] + square_size, center_square[1] - square_size], center_square - square_size]) pts2 = pts1 + np.random.uniform(-c[2], c[2], size=pts1.shape).astype(np.float32) M = cv2.getAffineTransform(pts1, pts2) image = cv2.warpAffine(image, M, shape_size[::-1], borderMode=cv2.BORDER_REFLECT_101) dx = (gaussian(np.random.uniform(-1, 1, size=shape[:2]), c[1], mode='reflect', truncate=3) * c[0]).astype(np.float32) dy = (gaussian(np.random.uniform(-1, 1, size=shape[:2]), c[1], mode='reflect', truncate=3) * c[0]).astype(np.float32) dx, dy = dx[..., np.newaxis], dy[..., np.newaxis] x, y, z = np.meshgrid(np.arange(shape[1]), np.arange(shape[0]), np.arange(shape[2])) indices = np.reshape(y + dy, (-1, 1)), np.reshape(x + dx, (-1, 1)), np.reshape(z, (-1, 1)) return np.clip(map_coordinates(image, indices, order=1, mode='reflect').reshape(shape), 0, 1) * 255 def blackoutNoise(image, severity=1): image = np.zeros(image.shape, dtype=np.uint8) m = (severity, severity, severity) s = (severity, severity, severity) image = np.clip(cv2.randn(image, m, s), 0, 255) return image def additiveGaussianNoise(image, severity=1): m = (severity, severity, severity) s = (severity, severity, severity) corr = cv2.randn(np.zeros(image.shape, dtype=np.uint8), m, s) image = np.clip(image.copy() + corr, 0, 255) return image def occlusion(image, severity=1): mask = np.ones(image.shape, dtype=np.uint8) x = int(image.shape[0] * np.random.rand()) y = int(image.shape[1] * np.random.rand()) r = int((min(image.shape[:2]) / 4) * np.random.rand() + (min(image.shape[:2]) / 4)) cv2.circle(mask, (x, y), r, 0, -1) image = np.clip(image.copy() * mask, 0, 255) return image
true
true
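Illustrative sketch (not part of the record above): one way the severity-indexed corruption functions in that file might be driven. The stand-in image and the re-declared contrast() are assumptions made only so the snippet is self-contained.

import numpy as np

def contrast(x, severity=1):
    # Same severity-indexed shape as the contrast corruption in the record above.
    c = [.75, .5, .4, .3, 0.15][severity - 1]
    x = np.array(x) / 255.
    means = np.mean(x, axis=(0, 1), keepdims=True)
    return np.clip((x - means) * c + means, 0, 1) * 255

rng = np.random.default_rng(0)
img = rng.integers(0, 256, size=(224, 224, 3), dtype=np.uint8)  # hypothetical test image

for severity in range(1, 6):
    out = contrast(img, severity=severity).astype(np.uint8)
    print(severity, float(out.std()))  # spread around the channel means shrinks as severity grows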
f711e1ad9b6684e8626d6ab0a391f039494f773d
726
py
Python
gym/pendulum_test.py
simondlevy/TinyNEF
2e42754cf22996c86f1e35780d77591ec2bbb658
[ "MIT" ]
2
2020-07-27T19:08:47.000Z
2020-08-11T08:46:45.000Z
gym/pendulum_test.py
steroal/TinyNEF
2e42754cf22996c86f1e35780d77591ec2bbb658
[ "MIT" ]
null
null
null
gym/pendulum_test.py
steroal/TinyNEF
2e42754cf22996c86f1e35780d77591ec2bbb658
[ "MIT" ]
1
2022-02-28T07:56:31.000Z
2022-02-28T07:56:31.000Z
#!/usr/bin/env python3 ''' Use the Neural Engineering framework to solve Pendulum via an elitist GA Copyright (C) 2020 Simon D. Levy MIT License ''' from lib import NefGym from sys import argv import pickle import numpy as np from sueap.algorithms.elitist import Elitist class NefPendulum(NefGym): def __init__(self, neurons=20, seed=None): NefGym.__init__(self, 'Pendulum-v0', neurons, seed) def activate(self, x): return np.clip(x, -2, +2) if __name__ == '__main__': if len(argv) < 2: print('Usage: python3 %s FILE' % argv[0]) exit(0) problem = NefPendulum() net = pickle.load(open(argv[1], 'rb')) print('Got reward %.3f in %d steps' % problem.test(net))
20.166667
72
0.657025
from lib import NefGym from sys import argv import pickle import numpy as np from sueap.algorithms.elitist import Elitist class NefPendulum(NefGym): def __init__(self, neurons=20, seed=None): NefGym.__init__(self, 'Pendulum-v0', neurons, seed) def activate(self, x): return np.clip(x, -2, +2) if __name__ == '__main__': if len(argv) < 2: print('Usage: python3 %s FILE' % argv[0]) exit(0) problem = NefPendulum() net = pickle.load(open(argv[1], 'rb')) print('Got reward %.3f in %d steps' % problem.test(net))
true
true
f711e2d3df36363fa62ae46ba94f0ec3e0ce9190
218
py
Python
06/solve.py
englhardt/adventofcode2019
b401b39020e23482bc88e947a361274809a61f79
[ "MIT" ]
null
null
null
06/solve.py
englhardt/adventofcode2019
b401b39020e23482bc88e947a361274809a61f79
[ "MIT" ]
null
null
null
06/solve.py
englhardt/adventofcode2019
b401b39020e23482bc88e947a361274809a61f79
[ "MIT" ]
null
null
null
import networkx as nx g = nx.Graph([x.split(")") for x in open("input.txt").read().splitlines()]) print(sum([nx.shortest_path_length(g, "COM", x) for x in g.nodes])) print(nx.shortest_path_length(g, "YOU", "SAN") - 2)
43.6
75
0.669725
import networkx as nx g = nx.Graph([x.split(")") for x in open("input.txt").read().splitlines()]) print(sum([nx.shortest_path_length(g, "COM", x) for x in g.nodes])) print(nx.shortest_path_length(g, "YOU", "SAN") - 2)
true
true
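Illustrative sketch (not from the record): the same networkx approach applied to a small inline orbit map instead of input.txt, to make the expected "PARENT)CHILD" input format explicit.

import networkx as nx

orbit_map = ["COM)B", "B)C", "C)D", "D)YOU", "C)SAN"]  # hypothetical miniature input
g = nx.Graph([line.split(")") for line in orbit_map])

# Part 1: total direct plus indirect orbits (sum of depths from COM).
print(sum(nx.shortest_path_length(g, "COM", node) for node in g.nodes))
# Part 2: orbital transfers between YOU and SAN, excluding the two endpoints.
print(nx.shortest_path_length(g, "YOU", "SAN") - 2)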
f711e3c5433ef571361309d1fd37dd4b8afe5f08
594
py
Python
opencv/commercial/Examples/Face/utils/__init__.py
SSG-DRD-IOT/commercial-iot-security-system
0c3d89b35d0468d4d3cc5ce2653b3f0ac82652a9
[ "MIT" ]
null
null
null
opencv/commercial/Examples/Face/utils/__init__.py
SSG-DRD-IOT/commercial-iot-security-system
0c3d89b35d0468d4d3cc5ce2653b3f0ac82652a9
[ "MIT" ]
null
null
null
opencv/commercial/Examples/Face/utils/__init__.py
SSG-DRD-IOT/commercial-iot-security-system
0c3d89b35d0468d4d3cc5ce2653b3f0ac82652a9
[ "MIT" ]
3
2022-01-22T05:02:41.000Z
2022-03-31T08:13:06.000Z
############################################################################### # Author: Daniil Budanov # Contact: danbudanov@gmail.com # Summer Internship - 2016 ############################################################################### # Title: __init__.py # Project: Security System # Description: # package dependency components # Last Modified: 7.14.2016 ############################################################################### # from onlinevid import * from trigger import * from sys import argv from buff import * from dtinfo import currDate, currTime from args import *
31.263158
79
0.454545
true
true
f711e3e2fd5f305700b2272fceb788f6e04b68d1
6,559
py
Python
alpha_vantage/timeseries.py
LaudateCorpus1/alpha_vantage
c637657579950d72605320c68ded42a447566cdf
[ "MIT" ]
3,865
2017-05-20T01:27:02.000Z
2022-03-30T20:50:25.000Z
alpha_vantage/timeseries.py
EmmaMuhleman1/alpha_vantage
a70f110c4883ffe66f2d1f36571a61c2a90e563d
[ "MIT" ]
286
2017-05-21T09:10:58.000Z
2022-03-26T14:26:58.000Z
alpha_vantage/timeseries.py
EmmaMuhleman1/alpha_vantage
a70f110c4883ffe66f2d1f36571a61c2a90e563d
[ "MIT" ]
743
2017-05-22T12:17:05.000Z
2022-03-26T13:22:08.000Z
from .alphavantage import AlphaVantage as av class TimeSeries(av): """This class implements all the api calls to times series """ @av._output_format @av._call_api_on_func def get_intraday(self, symbol, interval='15min', outputsize='compact'): """ Return intraday time series in two json objects as data and meta_data. It raises ValueError when problems arise Keyword Arguments: symbol: the symbol for the equity we want to get its data interval: time interval between two conscutive values, supported values are '1min', '5min', '15min', '30min', '60min' (default '15min') outputsize: The size of the call, supported values are 'compact' and 'full; the first returns the last 100 points in the data series, and 'full' returns the full-length intraday times series, commonly above 1MB (default 'compact') """ _FUNCTION_KEY = "TIME_SERIES_INTRADAY" return _FUNCTION_KEY, "Time Series ({})".format(interval), 'Meta Data' @av._output_format @av._call_api_on_func def get_intraday_extended(self, symbol, interval='15min', slice='year1month1', adjusted=True): """ Return extended intraday time series in one csv_reader object. It raises ValueError when problems arise Keyword Arguments: symbol: the symbol for the equity we want to get its data interval: time interval between two conscutive values, supported values are '1min', '5min', '15min', '30min', '60min' (default '15min') slice: the trailing 2 years of intraday data is evenly divided into 24 "slices" - year1month1, year1month2, ..., year2month12 adjusted: By default, adjusted=true and the output time series is adjusted by historical split and dividend events. Set adjusted=false to query raw (as-traded) intraday values. """ _FUNCTION_KEY = "TIME_SERIES_INTRADAY_EXTENDED" return _FUNCTION_KEY, "Time Series ({})".format(interval), 'Meta Data' @av._output_format @av._call_api_on_func def get_daily(self, symbol, outputsize='compact'): """ Return daily time series in two json objects as data and meta_data. It raises ValueError when problems arise Keyword Arguments: symbol: the symbol for the equity we want to get its data outputsize: The size of the call, supported values are 'compact' and 'full; the first returns the last 100 points in the data series, and 'full' returns the full-length daily times series, commonly above 1MB (default 'compact') """ _FUNCTION_KEY = "TIME_SERIES_DAILY" return _FUNCTION_KEY, 'Time Series (Daily)', 'Meta Data' @av._output_format @av._call_api_on_func def get_daily_adjusted(self, symbol, outputsize='compact'): """ Return daily adjusted (date, daily open, daily high, daily low, daily close, daily split/dividend-adjusted close, daily volume) time series in two json objects as data and meta_data. It raises ValueError when problems arise Keyword Arguments: symbol: the symbol for the equity we want to get its data outputsize: The size of the call, supported values are 'compact' and 'full; the first returns the last 100 points in the data series, and 'full' returns the full-length daily times series, commonly above 1MB (default 'compact') """ _FUNCTION_KEY = "TIME_SERIES_DAILY_ADJUSTED" return _FUNCTION_KEY, 'Time Series (Daily)', 'Meta Data' @av._output_format @av._call_api_on_func def get_weekly(self, symbol): """ Return weekly time series in two json objects as data and meta_data. 
It raises ValueError when problems arise Keyword Arguments: symbol: the symbol for the equity we want to get its data """ _FUNCTION_KEY = "TIME_SERIES_WEEKLY" return _FUNCTION_KEY, 'Weekly Time Series', 'Meta Data' @av._output_format @av._call_api_on_func def get_weekly_adjusted(self, symbol): """ weekly adjusted time series (last trading day of each week, weekly open, weekly high, weekly low, weekly close, weekly adjusted close, weekly volume, weekly dividend) of the equity specified, covering up to 20 years of historical data. Keyword Arguments: symbol: the symbol for the equity we want to get its data """ _FUNCTION_KEY = "TIME_SERIES_WEEKLY_ADJUSTED" return _FUNCTION_KEY, 'Weekly Adjusted Time Series', 'Meta Data' @av._output_format @av._call_api_on_func def get_monthly(self, symbol): """ Return monthly time series in two json objects as data and meta_data. It raises ValueError when problems arise Keyword Arguments: symbol: the symbol for the equity we want to get its data """ _FUNCTION_KEY = "TIME_SERIES_MONTHLY" return _FUNCTION_KEY, 'Monthly Time Series', 'Meta Data' @av._output_format @av._call_api_on_func def get_monthly_adjusted(self, symbol): """ Return monthly time series in two json objects as data and meta_data. It raises ValueError when problems arise Keyword Arguments: symbol: the symbol for the equity we want to get its data """ _FUNCTION_KEY = "TIME_SERIES_MONTHLY_ADJUSTED" return _FUNCTION_KEY, 'Monthly Adjusted Time Series', 'Meta Data' @av._output_format @av._call_api_on_func def get_quote_endpoint(self, symbol): """ Return the latest price and volume information for a security of your choice Keyword Arguments: symbol: the symbol for the equity we want to get its data """ _FUNCTION_KEY = "GLOBAL_QUOTE" return _FUNCTION_KEY, 'Global Quote', None @av._output_format @av._call_api_on_func def get_symbol_search(self, keywords): """ Return best matching symbols and market information based on keywords. It raises ValueError when problems arise Keyword Arguments: keywords: the keywords to query on """ _FUNCTION_KEY = "SYMBOL_SEARCH" return _FUNCTION_KEY, 'bestMatches', None
41.251572
98
0.650099
from .alphavantage import AlphaVantage as av class TimeSeries(av): @av._output_format @av._call_api_on_func def get_intraday(self, symbol, interval='15min', outputsize='compact'): _FUNCTION_KEY = "TIME_SERIES_INTRADAY" return _FUNCTION_KEY, "Time Series ({})".format(interval), 'Meta Data' @av._output_format @av._call_api_on_func def get_intraday_extended(self, symbol, interval='15min', slice='year1month1', adjusted=True): _FUNCTION_KEY = "TIME_SERIES_INTRADAY_EXTENDED" return _FUNCTION_KEY, "Time Series ({})".format(interval), 'Meta Data' @av._output_format @av._call_api_on_func def get_daily(self, symbol, outputsize='compact'): _FUNCTION_KEY = "TIME_SERIES_DAILY" return _FUNCTION_KEY, 'Time Series (Daily)', 'Meta Data' @av._output_format @av._call_api_on_func def get_daily_adjusted(self, symbol, outputsize='compact'): _FUNCTION_KEY = "TIME_SERIES_DAILY_ADJUSTED" return _FUNCTION_KEY, 'Time Series (Daily)', 'Meta Data' @av._output_format @av._call_api_on_func def get_weekly(self, symbol): _FUNCTION_KEY = "TIME_SERIES_WEEKLY" return _FUNCTION_KEY, 'Weekly Time Series', 'Meta Data' @av._output_format @av._call_api_on_func def get_weekly_adjusted(self, symbol): _FUNCTION_KEY = "TIME_SERIES_WEEKLY_ADJUSTED" return _FUNCTION_KEY, 'Weekly Adjusted Time Series', 'Meta Data' @av._output_format @av._call_api_on_func def get_monthly(self, symbol): _FUNCTION_KEY = "TIME_SERIES_MONTHLY" return _FUNCTION_KEY, 'Monthly Time Series', 'Meta Data' @av._output_format @av._call_api_on_func def get_monthly_adjusted(self, symbol): _FUNCTION_KEY = "TIME_SERIES_MONTHLY_ADJUSTED" return _FUNCTION_KEY, 'Monthly Adjusted Time Series', 'Meta Data' @av._output_format @av._call_api_on_func def get_quote_endpoint(self, symbol): _FUNCTION_KEY = "GLOBAL_QUOTE" return _FUNCTION_KEY, 'Global Quote', None @av._output_format @av._call_api_on_func def get_symbol_search(self, keywords): _FUNCTION_KEY = "SYMBOL_SEARCH" return _FUNCTION_KEY, 'bestMatches', None
true
true
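Illustrative sketch (not from the record): typical use of the TimeSeries wrapper defined above. 'YOUR_API_KEY' is a placeholder; a live Alpha Vantage key and network access would be needed for the call to return data.

from alpha_vantage.timeseries import TimeSeries

ts = TimeSeries(key='YOUR_API_KEY', output_format='pandas')      # placeholder key
data, meta_data = ts.get_daily(symbol='MSFT', outputsize='compact')  # last 100 daily bars
print(data.head())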
f711e4440525a6691974c10a06753a57f193ef38
5,090
py
Python
docs/.src/programs/skyplot_proj/skyplotv1.py
astrax/astro2019
c1f5309415c80fbd986d6760bcb8bc095898beda
[ "MIT" ]
null
null
null
docs/.src/programs/skyplot_proj/skyplotv1.py
astrax/astro2019
c1f5309415c80fbd986d6760bcb8bc095898beda
[ "MIT" ]
null
null
null
docs/.src/programs/skyplot_proj/skyplotv1.py
astrax/astro2019
c1f5309415c80fbd986d6760bcb8bc095898beda
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- """ Spyder Editor This is a temporary script file. """ import astropy from scipy.spatial import cKDTree import numpy as np import matplotlib.pyplot as plt data=np.genfromtxt('ybs.degbv',names=True) messier=np.genfromtxt('Messierdec.txt',names=True) vlim=4.5 magscale=10 starsize=magscale*(vlim-data['v']) #norm = ((-data['v'])-( (-data['v'])).min())/(data['v'].max()-data['v'].min()) #starsize=vlim+norm*starsize import astropy from astropy import units as u from astropy.time import Time from astropy.coordinates import SkyCoord, EarthLocation, AltAz starcoords=SkyCoord(ra=data['ra']*u.degree,dec=data['dec']*u.degree) mcoords=SkyCoord(ra=messier['Mra']*15.*u.degree,dec=messier['Mdec']*u.degree) CT=EarthLocation(lat=-30.159*u.deg,lon=-70.809*u.deg,height=2207.*u.m) KP=EarthLocation(lat=31.98*u.deg,lon=-111.60*u.deg,height=2097.*u.m) RM=EarthLocation(lat=28.7569*u.deg,lon=-17.8925*u.deg,height=2267.*u.m) sitecodes=['CT','KP','RM'] sitenames=['Cerro Tololo','Kitt Peak', 'La Palma'] for site in range(0,2): if site==0: obsloc=CT if site==1: obsloc=KP utcoffset=-5.0*u.hour showtime = Time('2015-7-21 22:00:00') - utcoffset showtime=Time.now() print(showtime.iso) staraltaz=starcoords.transform_to(AltAz(obstime=showtime,location=obsloc)) az2plot=np.pi/2.+np.array((3.1415926/180.)*u.degree*staraltaz.az) zd2plot=np.array(90.*u.degree-staraltaz.alt) #pos4kd=np.array([[az2plot],[zd2plot]]) upind=(zd2plot < 90.).nonzero() plt.clf() plt.figure(site+1) ax=plt.subplot(111,polar=True) ax.grid(False) ax.set_xticklabels(['W', '', 'N', '', 'E', '', 'S', '']) #plt.fill_between([0,90],[0,0],[360,360],facecolor='0') plt.scatter(az2plot[upind],zd2plot[upind],s=starsize[upind],c=data['bv'][upind],cmap='rainbow',linewidth=0,vmax=1.2,vmin=-0.5) plt.ylim([0.,90.]) cb=plt.colorbar(pad=0.10) cb.set_label('Star color, B-V') #plt.tick_params(axis='x',labelbottom='off') plt.tick_params(axis='y',labelleft='off') ax.set_xticklabels(['W', '', 'N', '', 'E', '', 'S', '']) # add parallels of declination every 30 degrees for jdec in range(5): pardeg=60.-30.*jdec parra=np.array(range(361)) skpar=SkyCoord(ra=parra*u.degree,dec=pardeg*u.degree) paraltaz=skpar.transform_to(AltAz(obstime=showtime,location=obsloc)) paraz2plot=np.pi/2.+np.array((3.14159265/180.)*u.degree*paraltaz.az) parzd2plot=np.array(90.*u.degree-paraltaz.alt) plt.plot(paraz2plot,parzd2plot,linewidth=1,color='gray',linestyle=':') # plot Messier objects maltaz=mcoords.transform_to(AltAz(obstime=showtime,location=obsloc)) maz2plot=np.pi/2.+np.array((3.1415926/180.)*u.degree*maltaz.az) mzd2plot=np.array(90.*u.degree-maltaz.alt) upm=(mzd2plot < 90.).nonzero() #plt.scatter(maz2plot[upm],mzd2plot[upm],s=100,c=messier['Mclass'][upm],cmap='rainbow',alpha=0.4,linewidth=0) plt.title(str(sitenames[site])+' '+showtime.iso+' UT\n') labelcolors=np.array(['blue','blue','green','orange','red']) mlabels=np.array(['{0}'.format(i+1) for i in range(110)]) for j in range(110): plt.annotate(mlabels[j],xy=(maz2plot[j],mzd2plot[j]),xytext=(0,0),textcoords='offset points',color=labelcolors[messier['Mclass'][j]],size='small') #add Magellanic clouds sklmc=SkyCoord(ra=15.0*5.25*u.degree,dec=-68.7*u.degree) sksmc=SkyCoord(ra=0.77*15.0*u.degree,dec=-73.0*u.degree) lmcaltaz=sklmc.transform_to(AltAz(obstime=showtime,location=obsloc)) smcaltaz=sksmc.transform_to(AltAz(obstime=showtime,location=obsloc)) plt.scatter(np.pi/2.+np.array((3.1415926/180.)*u.degree*lmcaltaz.az),90.*u.degree-lmcaltaz.alt,s=250,c='green',alpha=0.3) 
plt.scatter(np.pi/2.+np.array((3.1415926/180.)*u.degree*smcaltaz.az),90.*u.degree-smcaltaz.alt,s=120,c='green',alpha=0.3) #add constellation lines conlines=np.genfromtxt('constellations.txt',names="star1, star2") nstar1=np.array(conlines['star1']) nstar2=np.array(conlines['star2']) nstars=nstar1.size starnumbers=np.array(data['starnum']) for jstar in range(nstars): indexstar1=np.where(starnumbers==nstar1[jstar])[0] indexstar2=np.where(data['starnum']==nstar2[jstar])[0] plotx=np.array((az2plot[indexstar1],az2plot[indexstar2])) ploty=np.array((zd2plot[indexstar1],zd2plot[indexstar2])) plt.plot(plotx,ploty,linewidth=1,color='black',zorder=0) plt.annotate('Messier Objects:',xy=(0.04,0.18),xycoords='figure fraction') plt.annotate('Nebula',xy=(0.05,0.145),xycoords='figure fraction',color='blue') plt.annotate('Galaxy',xy=(0.05,0.11),xycoords='figure fraction',color='green') plt.annotate('Open cluster',xy=(0.05,0.075),xycoords='figure fraction',color='orange') plt.annotate('Globular cluster',xy=(0.05,0.04),xycoords='figure fraction',color='red') plt.show() if site==0: plt.savefig('SkyplotCTIO.png') if site==1: plt.savefig('SkyplotKPNO.png')
41.048387
154
0.665815
import astropy from scipy.spatial import cKDTree import numpy as np import matplotlib.pyplot as plt data=np.genfromtxt('ybs.degbv',names=True) messier=np.genfromtxt('Messierdec.txt',names=True) vlim=4.5 magscale=10 starsize=magscale*(vlim-data['v']) import astropy from astropy import units as u from astropy.time import Time from astropy.coordinates import SkyCoord, EarthLocation, AltAz starcoords=SkyCoord(ra=data['ra']*u.degree,dec=data['dec']*u.degree) mcoords=SkyCoord(ra=messier['Mra']*15.*u.degree,dec=messier['Mdec']*u.degree) CT=EarthLocation(lat=-30.159*u.deg,lon=-70.809*u.deg,height=2207.*u.m) KP=EarthLocation(lat=31.98*u.deg,lon=-111.60*u.deg,height=2097.*u.m) RM=EarthLocation(lat=28.7569*u.deg,lon=-17.8925*u.deg,height=2267.*u.m) sitecodes=['CT','KP','RM'] sitenames=['Cerro Tololo','Kitt Peak', 'La Palma'] for site in range(0,2): if site==0: obsloc=CT if site==1: obsloc=KP utcoffset=-5.0*u.hour showtime = Time('2015-7-21 22:00:00') - utcoffset showtime=Time.now() print(showtime.iso) staraltaz=starcoords.transform_to(AltAz(obstime=showtime,location=obsloc)) az2plot=np.pi/2.+np.array((3.1415926/180.)*u.degree*staraltaz.az) zd2plot=np.array(90.*u.degree-staraltaz.alt) upind=(zd2plot < 90.).nonzero() plt.clf() plt.figure(site+1) ax=plt.subplot(111,polar=True) ax.grid(False) ax.set_xticklabels(['W', '', 'N', '', 'E', '', 'S', '']) plt.scatter(az2plot[upind],zd2plot[upind],s=starsize[upind],c=data['bv'][upind],cmap='rainbow',linewidth=0,vmax=1.2,vmin=-0.5) plt.ylim([0.,90.]) cb=plt.colorbar(pad=0.10) cb.set_label('Star color, B-V') plt.tick_params(axis='y',labelleft='off') ax.set_xticklabels(['W', '', 'N', '', 'E', '', 'S', '']) for jdec in range(5): pardeg=60.-30.*jdec parra=np.array(range(361)) skpar=SkyCoord(ra=parra*u.degree,dec=pardeg*u.degree) paraltaz=skpar.transform_to(AltAz(obstime=showtime,location=obsloc)) paraz2plot=np.pi/2.+np.array((3.14159265/180.)*u.degree*paraltaz.az) parzd2plot=np.array(90.*u.degree-paraltaz.alt) plt.plot(paraz2plot,parzd2plot,linewidth=1,color='gray',linestyle=':') maltaz=mcoords.transform_to(AltAz(obstime=showtime,location=obsloc)) maz2plot=np.pi/2.+np.array((3.1415926/180.)*u.degree*maltaz.az) mzd2plot=np.array(90.*u.degree-maltaz.alt) upm=(mzd2plot < 90.).nonzero() plt.title(str(sitenames[site])+' '+showtime.iso+' UT\n') labelcolors=np.array(['blue','blue','green','orange','red']) mlabels=np.array(['{0}'.format(i+1) for i in range(110)]) for j in range(110): plt.annotate(mlabels[j],xy=(maz2plot[j],mzd2plot[j]),xytext=(0,0),textcoords='offset points',color=labelcolors[messier['Mclass'][j]],size='small') sklmc=SkyCoord(ra=15.0*5.25*u.degree,dec=-68.7*u.degree) sksmc=SkyCoord(ra=0.77*15.0*u.degree,dec=-73.0*u.degree) lmcaltaz=sklmc.transform_to(AltAz(obstime=showtime,location=obsloc)) smcaltaz=sksmc.transform_to(AltAz(obstime=showtime,location=obsloc)) plt.scatter(np.pi/2.+np.array((3.1415926/180.)*u.degree*lmcaltaz.az),90.*u.degree-lmcaltaz.alt,s=250,c='green',alpha=0.3) plt.scatter(np.pi/2.+np.array((3.1415926/180.)*u.degree*smcaltaz.az),90.*u.degree-smcaltaz.alt,s=120,c='green',alpha=0.3) conlines=np.genfromtxt('constellations.txt',names="star1, star2") nstar1=np.array(conlines['star1']) nstar2=np.array(conlines['star2']) nstars=nstar1.size starnumbers=np.array(data['starnum']) for jstar in range(nstars): indexstar1=np.where(starnumbers==nstar1[jstar])[0] indexstar2=np.where(data['starnum']==nstar2[jstar])[0] plotx=np.array((az2plot[indexstar1],az2plot[indexstar2])) ploty=np.array((zd2plot[indexstar1],zd2plot[indexstar2])) 
plt.plot(plotx,ploty,linewidth=1,color='black',zorder=0) plt.annotate('Messier Objects:',xy=(0.04,0.18),xycoords='figure fraction') plt.annotate('Nebula',xy=(0.05,0.145),xycoords='figure fraction',color='blue') plt.annotate('Galaxy',xy=(0.05,0.11),xycoords='figure fraction',color='green') plt.annotate('Open cluster',xy=(0.05,0.075),xycoords='figure fraction',color='orange') plt.annotate('Globular cluster',xy=(0.05,0.04),xycoords='figure fraction',color='red') plt.show() if site==0: plt.savefig('SkyplotCTIO.png') if site==1: plt.savefig('SkyplotKPNO.png')
true
true
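Illustrative sketch (not from the record): the single coordinate-transform step that the plotting script above repeats for stars, declination parallels and Messier objects, reduced to one object at the script's CTIO site. The target coordinates and observation time are arbitrary choices for illustration.

import astropy.units as u
from astropy.time import Time
from astropy.coordinates import SkyCoord, EarthLocation, AltAz

ctio = EarthLocation(lat=-30.159 * u.deg, lon=-70.809 * u.deg, height=2207. * u.m)
target = SkyCoord(ra=101.287 * u.deg, dec=-16.716 * u.deg)  # roughly Sirius, arbitrary example
altaz = target.transform_to(AltAz(obstime=Time('2015-07-21 03:00:00'), location=ctio))
print(altaz.alt.deg, altaz.az.deg)  # altitude and azimuth in degrees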
f711e4d09ef2807e9e9b94a7a6071bf563e0de28
19,340
py
Python
stable_baselines/a2c/a2c.py
ClementRolinat/stable-baselines
333c59379f23e1f5c5c9e8bf93cbfa56ac52d13b
[ "MIT" ]
1
2020-03-25T14:02:31.000Z
2020-03-25T14:02:31.000Z
stable_baselines/a2c/a2c.py
ClementRolinat/stable-baselines
333c59379f23e1f5c5c9e8bf93cbfa56ac52d13b
[ "MIT" ]
null
null
null
stable_baselines/a2c/a2c.py
ClementRolinat/stable-baselines
333c59379f23e1f5c5c9e8bf93cbfa56ac52d13b
[ "MIT" ]
1
2019-10-07T22:18:00.000Z
2019-10-07T22:18:00.000Z
import time from collections import deque import gym import numpy as np import tensorflow as tf from stable_baselines import logger from stable_baselines.common import explained_variance, tf_util, ActorCriticRLModel, SetVerbosity, TensorboardWriter from stable_baselines.common.policies import ActorCriticPolicy, RecurrentActorCriticPolicy from stable_baselines.common.runners import AbstractEnvRunner from stable_baselines.a2c.utils import discount_with_dones, Scheduler, find_trainable_variables, mse, \ total_episode_reward_logger from stable_baselines.ppo2.ppo2 import safe_mean class A2C(ActorCriticRLModel): """ The A2C (Advantage Actor Critic) model class, https://arxiv.org/abs/1602.01783 :param policy: (ActorCriticPolicy or str) The policy model to use (MlpPolicy, CnnPolicy, CnnLstmPolicy, ...) :param env: (Gym environment or str) The environment to learn from (if registered in Gym, can be str) :param gamma: (float) Discount factor :param n_steps: (int) The number of steps to run for each environment per update (i.e. batch size is n_steps * n_env where n_env is number of environment copies running in parallel) :param vf_coef: (float) Value function coefficient for the loss calculation :param ent_coef: (float) Entropy coefficient for the loss caculation :param max_grad_norm: (float) The maximum value for the gradient clipping :param learning_rate: (float) The learning rate :param alpha: (float) RMSProp decay parameter (default: 0.99) :param epsilon: (float) RMSProp epsilon (stabilizes square root computation in denominator of RMSProp update) (default: 1e-5) :param lr_schedule: (str) The type of scheduler for the learning rate update ('linear', 'constant', 'double_linear_con', 'middle_drop' or 'double_middle_drop') :param verbose: (int) the verbosity level: 0 none, 1 training information, 2 tensorflow debug :param tensorboard_log: (str) the log location for tensorboard (if None, no logging) :param _init_setup_model: (bool) Whether or not to build the network at the creation of the instance (used only for loading) :param policy_kwargs: (dict) additional arguments to be passed to the policy on creation :param full_tensorboard_log: (bool) enable additional logging when using tensorboard WARNING: this logging can take a lot of space quickly """ def __init__(self, policy, env, gamma=0.99, n_steps=5, vf_coef=0.25, ent_coef=0.01, max_grad_norm=0.5, learning_rate=7e-4, alpha=0.99, epsilon=1e-5, lr_schedule='constant', verbose=0, tensorboard_log=None, _init_setup_model=True, policy_kwargs=None, full_tensorboard_log=False): super(A2C, self).__init__(policy=policy, env=env, verbose=verbose, requires_vec_env=True, _init_setup_model=_init_setup_model, policy_kwargs=policy_kwargs) self.n_steps = n_steps self.gamma = gamma self.vf_coef = vf_coef self.ent_coef = ent_coef self.max_grad_norm = max_grad_norm self.alpha = alpha self.epsilon = epsilon self.lr_schedule = lr_schedule self.learning_rate = learning_rate self.tensorboard_log = tensorboard_log self.full_tensorboard_log = full_tensorboard_log self.graph = None self.sess = None self.learning_rate_ph = None self.n_batch = None self.actions_ph = None self.advs_ph = None self.rewards_ph = None self.pg_loss = None self.vf_loss = None self.entropy = None self.params = None self.apply_backprop = None self.train_model = None self.step_model = None self.step = None self.proba_step = None self.value = None self.initial_state = None self.learning_rate_schedule = None self.summary = None self.episode_reward = None # if we are loading, it is possible the 
environment is not known, however the obs and action space are known if _init_setup_model: self.setup_model() def _get_pretrain_placeholders(self): policy = self.train_model if isinstance(self.action_space, gym.spaces.Discrete): return policy.obs_ph, self.actions_ph, policy.policy return policy.obs_ph, self.actions_ph, policy.deterministic_action def setup_model(self): with SetVerbosity(self.verbose): assert issubclass(self.policy, ActorCriticPolicy), "Error: the input policy for the A2C model must be an " \ "instance of common.policies.ActorCriticPolicy." self.graph = tf.Graph() with self.graph.as_default(): self.sess = tf_util.make_session(graph=self.graph) self.n_batch = self.n_envs * self.n_steps n_batch_step = None n_batch_train = None if issubclass(self.policy, RecurrentActorCriticPolicy): n_batch_step = self.n_envs n_batch_train = self.n_envs * self.n_steps step_model = self.policy(self.sess, self.observation_space, self.action_space, self.n_envs, 1, n_batch_step, reuse=False, **self.policy_kwargs) with tf.variable_scope("train_model", reuse=True, custom_getter=tf_util.outer_scope_getter("train_model")): train_model = self.policy(self.sess, self.observation_space, self.action_space, self.n_envs, self.n_steps, n_batch_train, reuse=True, **self.policy_kwargs) with tf.variable_scope("loss", reuse=False): self.actions_ph = train_model.pdtype.sample_placeholder([None], name="action_ph") self.advs_ph = tf.placeholder(tf.float32, [None], name="advs_ph") self.rewards_ph = tf.placeholder(tf.float32, [None], name="rewards_ph") self.learning_rate_ph = tf.placeholder(tf.float32, [], name="learning_rate_ph") neglogpac = train_model.proba_distribution.neglogp(self.actions_ph) self.entropy = tf.reduce_mean(train_model.proba_distribution.entropy()) self.pg_loss = tf.reduce_mean(self.advs_ph * neglogpac) self.vf_loss = mse(tf.squeeze(train_model.value_flat), self.rewards_ph) # https://arxiv.org/pdf/1708.04782.pdf#page=9, https://arxiv.org/pdf/1602.01783.pdf#page=4 # and https://github.com/dennybritz/reinforcement-learning/issues/34 # suggest to add an entropy component in order to improve exploration. 
loss = self.pg_loss - self.entropy * self.ent_coef + self.vf_loss * self.vf_coef tf.summary.scalar('entropy_loss', self.entropy) tf.summary.scalar('policy_gradient_loss', self.pg_loss) tf.summary.scalar('value_function_loss', self.vf_loss) tf.summary.scalar('loss', loss) self.params = find_trainable_variables("model") grads = tf.gradients(loss, self.params) if self.max_grad_norm is not None: grads, _ = tf.clip_by_global_norm(grads, self.max_grad_norm) grads = list(zip(grads, self.params)) with tf.variable_scope("input_info", reuse=False): tf.summary.scalar('discounted_rewards', tf.reduce_mean(self.rewards_ph)) tf.summary.scalar('learning_rate', tf.reduce_mean(self.learning_rate)) tf.summary.scalar('advantage', tf.reduce_mean(self.advs_ph)) if self.full_tensorboard_log: tf.summary.histogram('discounted_rewards', self.rewards_ph) tf.summary.histogram('learning_rate', self.learning_rate) tf.summary.histogram('advantage', self.advs_ph) if tf_util.is_image(self.observation_space): tf.summary.image('observation', train_model.obs_ph) else: tf.summary.histogram('observation', train_model.obs_ph) trainer = tf.train.RMSPropOptimizer(learning_rate=self.learning_rate_ph, decay=self.alpha, epsilon=self.epsilon) self.apply_backprop = trainer.apply_gradients(grads) self.train_model = train_model self.step_model = step_model self.step = step_model.step self.proba_step = step_model.proba_step self.value = step_model.value self.initial_state = step_model.initial_state tf.global_variables_initializer().run(session=self.sess) self.summary = tf.summary.merge_all() def _train_step(self, obs, states, rewards, masks, actions, values, update, writer=None): """ applies a training step to the model :param obs: ([float]) The input observations :param states: ([float]) The states (used for recurrent policies) :param rewards: ([float]) The rewards from the environment :param masks: ([bool]) Whether or not the episode is over (used for recurrent policies) :param actions: ([float]) The actions taken :param values: ([float]) The logits values :param update: (int) the current step iteration :param writer: (TensorFlow Summary.writer) the writer for tensorboard :return: (float, float, float) policy loss, value loss, policy entropy """ advs = rewards - values cur_lr = None for _ in range(len(obs)): cur_lr = self.learning_rate_schedule.value() assert cur_lr is not None, "Error: the observation input array cannon be empty" td_map = {self.train_model.obs_ph: obs, self.actions_ph: actions, self.advs_ph: advs, self.rewards_ph: rewards, self.learning_rate_ph: cur_lr} if states is not None: td_map[self.train_model.states_ph] = states td_map[self.train_model.dones_ph] = masks if writer is not None: # run loss backprop with summary, but once every 10 runs save the metadata (memory, compute time, ...) 
if self.full_tensorboard_log and (1 + update) % 10 == 0: run_options = tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE) run_metadata = tf.RunMetadata() summary, policy_loss, value_loss, policy_entropy, _ = self.sess.run( [self.summary, self.pg_loss, self.vf_loss, self.entropy, self.apply_backprop], td_map, options=run_options, run_metadata=run_metadata) writer.add_run_metadata(run_metadata, 'step%d' % (update * (self.n_batch + 1))) else: summary, policy_loss, value_loss, policy_entropy, _ = self.sess.run( [self.summary, self.pg_loss, self.vf_loss, self.entropy, self.apply_backprop], td_map) writer.add_summary(summary, update * (self.n_batch + 1)) else: policy_loss, value_loss, policy_entropy, _ = self.sess.run( [self.pg_loss, self.vf_loss, self.entropy, self.apply_backprop], td_map) return policy_loss, value_loss, policy_entropy def learn(self, total_timesteps, callback=None, seed=None, log_interval=100, tb_log_name="A2C", reset_num_timesteps=True): new_tb_log = self._init_num_timesteps(reset_num_timesteps) with SetVerbosity(self.verbose), TensorboardWriter(self.graph, self.tensorboard_log, tb_log_name, new_tb_log) \ as writer: self._setup_learn(seed) self.learning_rate_schedule = Scheduler(initial_value=self.learning_rate, n_values=total_timesteps, schedule=self.lr_schedule) runner = A2CRunner(self.env, self, n_steps=self.n_steps, gamma=self.gamma) self.episode_reward = np.zeros((self.n_envs,)) # Training stats (when using Monitor wrapper) ep_info_buf = deque(maxlen=100) t_start = time.time() for update in range(1, total_timesteps // self.n_batch + 1): # true_reward is the reward without discount obs, states, rewards, masks, actions, values, ep_infos, true_reward = runner.run() ep_info_buf.extend(ep_infos) _, value_loss, policy_entropy = self._train_step(obs, states, rewards, masks, actions, values, self.num_timesteps // (self.n_batch + 1), writer) n_seconds = time.time() - t_start fps = int((update * self.n_batch) / n_seconds) if writer is not None: self.episode_reward = total_episode_reward_logger(self.episode_reward, true_reward.reshape((self.n_envs, self.n_steps)), masks.reshape((self.n_envs, self.n_steps)), writer, self.num_timesteps) self.num_timesteps += self.n_batch + 1 if callback is not None: # Only stop training if return value is False, not when it is None. This is for backwards # compatibility with callbacks that have no return statement. 
if callback(locals(), globals()) is False: break if self.verbose >= 1 and (update % log_interval == 0 or update == 1): explained_var = explained_variance(values, rewards) logger.record_tabular("nupdates", update) logger.record_tabular("total_timesteps", self.num_timesteps) logger.record_tabular("fps", fps) logger.record_tabular("policy_entropy", float(policy_entropy)) logger.record_tabular("value_loss", float(value_loss)) logger.record_tabular("explained_variance", float(explained_var)) if len(ep_info_buf) > 0 and len(ep_info_buf[0]) > 0: logger.logkv('ep_reward_mean', safe_mean([ep_info['r'] for ep_info in ep_info_buf])) logger.logkv('ep_len_mean', safe_mean([ep_info['l'] for ep_info in ep_info_buf])) logger.dump_tabular() return self def save(self, save_path): data = { "gamma": self.gamma, "n_steps": self.n_steps, "vf_coef": self.vf_coef, "ent_coef": self.ent_coef, "max_grad_norm": self.max_grad_norm, "learning_rate": self.learning_rate, "alpha": self.alpha, "epsilon": self.epsilon, "lr_schedule": self.lr_schedule, "verbose": self.verbose, "policy": self.policy, "observation_space": self.observation_space, "action_space": self.action_space, "n_envs": self.n_envs, "_vectorize_action": self._vectorize_action, "policy_kwargs": self.policy_kwargs } params = self.sess.run(self.params) self._save_to_file(save_path, data=data, params=params) class A2CRunner(AbstractEnvRunner): def __init__(self, env, model, n_steps=5, gamma=0.99): """ A runner to learn the policy of an environment for an a2c model :param env: (Gym environment) The environment to learn from :param model: (Model) The model to learn :param n_steps: (int) The number of steps to run for each environment :param gamma: (float) Discount factor """ super(A2CRunner, self).__init__(env=env, model=model, n_steps=n_steps) self.gamma = gamma def run(self): """ Run a learning step of the model :return: ([float], [float], [float], [bool], [float], [float]) observations, states, rewards, masks, actions, values """ mb_obs, mb_rewards, mb_actions, mb_values, mb_dones = [], [], [], [], [] mb_states = self.states ep_infos = [] for _ in range(self.n_steps): actions, values, states, _ = self.model.step(self.obs, self.states, self.dones) mb_obs.append(np.copy(self.obs)) mb_actions.append(actions) mb_values.append(values) mb_dones.append(self.dones) clipped_actions = actions # Clip the actions to avoid out of bound error if isinstance(self.env.action_space, gym.spaces.Box): clipped_actions = np.clip(actions, self.env.action_space.low, self.env.action_space.high) obs, rewards, dones, infos = self.env.step(clipped_actions) for info in infos: maybe_ep_info = info.get('episode') if maybe_ep_info is not None: ep_infos.append(maybe_ep_info) self.states = states self.dones = dones self.obs = obs mb_rewards.append(rewards) mb_dones.append(self.dones) # batch of steps to batch of rollouts mb_obs = np.asarray(mb_obs, dtype=self.obs.dtype).swapaxes(1, 0).reshape(self.batch_ob_shape) mb_rewards = np.asarray(mb_rewards, dtype=np.float32).swapaxes(0, 1) mb_actions = np.asarray(mb_actions, dtype=self.env.action_space.dtype).swapaxes(0, 1) mb_values = np.asarray(mb_values, dtype=np.float32).swapaxes(0, 1) mb_dones = np.asarray(mb_dones, dtype=np.bool).swapaxes(0, 1) mb_masks = mb_dones[:, :-1] mb_dones = mb_dones[:, 1:] true_rewards = np.copy(mb_rewards) last_values = self.model.value(self.obs, self.states, self.dones).tolist() # discount/bootstrap off value fn for n, (rewards, dones, value) in enumerate(zip(mb_rewards, mb_dones, last_values)): rewards = 
rewards.tolist() dones = dones.tolist() if dones[-1] == 0: rewards = discount_with_dones(rewards + [value], dones + [0], self.gamma)[:-1] else: rewards = discount_with_dones(rewards, dones, self.gamma) mb_rewards[n] = rewards # convert from [n_env, n_steps, ...] to [n_steps * n_env, ...] mb_rewards = mb_rewards.reshape(-1, *mb_rewards.shape[2:]) mb_actions = mb_actions.reshape(-1, *mb_actions.shape[2:]) mb_values = mb_values.reshape(-1, *mb_values.shape[2:]) mb_masks = mb_masks.reshape(-1, *mb_masks.shape[2:]) true_rewards = true_rewards.reshape(-1, *true_rewards.shape[2:]) return mb_obs, mb_states, mb_rewards, mb_masks, mb_actions, mb_values, ep_infos, true_rewards
52.12938
120
0.611996
import time from collections import deque import gym import numpy as np import tensorflow as tf from stable_baselines import logger from stable_baselines.common import explained_variance, tf_util, ActorCriticRLModel, SetVerbosity, TensorboardWriter from stable_baselines.common.policies import ActorCriticPolicy, RecurrentActorCriticPolicy from stable_baselines.common.runners import AbstractEnvRunner from stable_baselines.a2c.utils import discount_with_dones, Scheduler, find_trainable_variables, mse, \ total_episode_reward_logger from stable_baselines.ppo2.ppo2 import safe_mean class A2C(ActorCriticRLModel): def __init__(self, policy, env, gamma=0.99, n_steps=5, vf_coef=0.25, ent_coef=0.01, max_grad_norm=0.5, learning_rate=7e-4, alpha=0.99, epsilon=1e-5, lr_schedule='constant', verbose=0, tensorboard_log=None, _init_setup_model=True, policy_kwargs=None, full_tensorboard_log=False): super(A2C, self).__init__(policy=policy, env=env, verbose=verbose, requires_vec_env=True, _init_setup_model=_init_setup_model, policy_kwargs=policy_kwargs) self.n_steps = n_steps self.gamma = gamma self.vf_coef = vf_coef self.ent_coef = ent_coef self.max_grad_norm = max_grad_norm self.alpha = alpha self.epsilon = epsilon self.lr_schedule = lr_schedule self.learning_rate = learning_rate self.tensorboard_log = tensorboard_log self.full_tensorboard_log = full_tensorboard_log self.graph = None self.sess = None self.learning_rate_ph = None self.n_batch = None self.actions_ph = None self.advs_ph = None self.rewards_ph = None self.pg_loss = None self.vf_loss = None self.entropy = None self.params = None self.apply_backprop = None self.train_model = None self.step_model = None self.step = None self.proba_step = None self.value = None self.initial_state = None self.learning_rate_schedule = None self.summary = None self.episode_reward = None if _init_setup_model: self.setup_model() def _get_pretrain_placeholders(self): policy = self.train_model if isinstance(self.action_space, gym.spaces.Discrete): return policy.obs_ph, self.actions_ph, policy.policy return policy.obs_ph, self.actions_ph, policy.deterministic_action def setup_model(self): with SetVerbosity(self.verbose): assert issubclass(self.policy, ActorCriticPolicy), "Error: the input policy for the A2C model must be an " \ "instance of common.policies.ActorCriticPolicy." 
self.graph = tf.Graph() with self.graph.as_default(): self.sess = tf_util.make_session(graph=self.graph) self.n_batch = self.n_envs * self.n_steps n_batch_step = None n_batch_train = None if issubclass(self.policy, RecurrentActorCriticPolicy): n_batch_step = self.n_envs n_batch_train = self.n_envs * self.n_steps step_model = self.policy(self.sess, self.observation_space, self.action_space, self.n_envs, 1, n_batch_step, reuse=False, **self.policy_kwargs) with tf.variable_scope("train_model", reuse=True, custom_getter=tf_util.outer_scope_getter("train_model")): train_model = self.policy(self.sess, self.observation_space, self.action_space, self.n_envs, self.n_steps, n_batch_train, reuse=True, **self.policy_kwargs) with tf.variable_scope("loss", reuse=False): self.actions_ph = train_model.pdtype.sample_placeholder([None], name="action_ph") self.advs_ph = tf.placeholder(tf.float32, [None], name="advs_ph") self.rewards_ph = tf.placeholder(tf.float32, [None], name="rewards_ph") self.learning_rate_ph = tf.placeholder(tf.float32, [], name="learning_rate_ph") neglogpac = train_model.proba_distribution.neglogp(self.actions_ph) self.entropy = tf.reduce_mean(train_model.proba_distribution.entropy()) self.pg_loss = tf.reduce_mean(self.advs_ph * neglogpac) self.vf_loss = mse(tf.squeeze(train_model.value_flat), self.rewards_ph) loss = self.pg_loss - self.entropy * self.ent_coef + self.vf_loss * self.vf_coef tf.summary.scalar('entropy_loss', self.entropy) tf.summary.scalar('policy_gradient_loss', self.pg_loss) tf.summary.scalar('value_function_loss', self.vf_loss) tf.summary.scalar('loss', loss) self.params = find_trainable_variables("model") grads = tf.gradients(loss, self.params) if self.max_grad_norm is not None: grads, _ = tf.clip_by_global_norm(grads, self.max_grad_norm) grads = list(zip(grads, self.params)) with tf.variable_scope("input_info", reuse=False): tf.summary.scalar('discounted_rewards', tf.reduce_mean(self.rewards_ph)) tf.summary.scalar('learning_rate', tf.reduce_mean(self.learning_rate)) tf.summary.scalar('advantage', tf.reduce_mean(self.advs_ph)) if self.full_tensorboard_log: tf.summary.histogram('discounted_rewards', self.rewards_ph) tf.summary.histogram('learning_rate', self.learning_rate) tf.summary.histogram('advantage', self.advs_ph) if tf_util.is_image(self.observation_space): tf.summary.image('observation', train_model.obs_ph) else: tf.summary.histogram('observation', train_model.obs_ph) trainer = tf.train.RMSPropOptimizer(learning_rate=self.learning_rate_ph, decay=self.alpha, epsilon=self.epsilon) self.apply_backprop = trainer.apply_gradients(grads) self.train_model = train_model self.step_model = step_model self.step = step_model.step self.proba_step = step_model.proba_step self.value = step_model.value self.initial_state = step_model.initial_state tf.global_variables_initializer().run(session=self.sess) self.summary = tf.summary.merge_all() def _train_step(self, obs, states, rewards, masks, actions, values, update, writer=None): advs = rewards - values cur_lr = None for _ in range(len(obs)): cur_lr = self.learning_rate_schedule.value() assert cur_lr is not None, "Error: the observation input array cannon be empty" td_map = {self.train_model.obs_ph: obs, self.actions_ph: actions, self.advs_ph: advs, self.rewards_ph: rewards, self.learning_rate_ph: cur_lr} if states is not None: td_map[self.train_model.states_ph] = states td_map[self.train_model.dones_ph] = masks if writer is not None: if self.full_tensorboard_log and (1 + update) % 10 == 0: run_options = 
tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE) run_metadata = tf.RunMetadata() summary, policy_loss, value_loss, policy_entropy, _ = self.sess.run( [self.summary, self.pg_loss, self.vf_loss, self.entropy, self.apply_backprop], td_map, options=run_options, run_metadata=run_metadata) writer.add_run_metadata(run_metadata, 'step%d' % (update * (self.n_batch + 1))) else: summary, policy_loss, value_loss, policy_entropy, _ = self.sess.run( [self.summary, self.pg_loss, self.vf_loss, self.entropy, self.apply_backprop], td_map) writer.add_summary(summary, update * (self.n_batch + 1)) else: policy_loss, value_loss, policy_entropy, _ = self.sess.run( [self.pg_loss, self.vf_loss, self.entropy, self.apply_backprop], td_map) return policy_loss, value_loss, policy_entropy def learn(self, total_timesteps, callback=None, seed=None, log_interval=100, tb_log_name="A2C", reset_num_timesteps=True): new_tb_log = self._init_num_timesteps(reset_num_timesteps) with SetVerbosity(self.verbose), TensorboardWriter(self.graph, self.tensorboard_log, tb_log_name, new_tb_log) \ as writer: self._setup_learn(seed) self.learning_rate_schedule = Scheduler(initial_value=self.learning_rate, n_values=total_timesteps, schedule=self.lr_schedule) runner = A2CRunner(self.env, self, n_steps=self.n_steps, gamma=self.gamma) self.episode_reward = np.zeros((self.n_envs,)) ep_info_buf = deque(maxlen=100) t_start = time.time() for update in range(1, total_timesteps // self.n_batch + 1): obs, states, rewards, masks, actions, values, ep_infos, true_reward = runner.run() ep_info_buf.extend(ep_infos) _, value_loss, policy_entropy = self._train_step(obs, states, rewards, masks, actions, values, self.num_timesteps // (self.n_batch + 1), writer) n_seconds = time.time() - t_start fps = int((update * self.n_batch) / n_seconds) if writer is not None: self.episode_reward = total_episode_reward_logger(self.episode_reward, true_reward.reshape((self.n_envs, self.n_steps)), masks.reshape((self.n_envs, self.n_steps)), writer, self.num_timesteps) self.num_timesteps += self.n_batch + 1 if callback is not None: if callback(locals(), globals()) is False: break if self.verbose >= 1 and (update % log_interval == 0 or update == 1): explained_var = explained_variance(values, rewards) logger.record_tabular("nupdates", update) logger.record_tabular("total_timesteps", self.num_timesteps) logger.record_tabular("fps", fps) logger.record_tabular("policy_entropy", float(policy_entropy)) logger.record_tabular("value_loss", float(value_loss)) logger.record_tabular("explained_variance", float(explained_var)) if len(ep_info_buf) > 0 and len(ep_info_buf[0]) > 0: logger.logkv('ep_reward_mean', safe_mean([ep_info['r'] for ep_info in ep_info_buf])) logger.logkv('ep_len_mean', safe_mean([ep_info['l'] for ep_info in ep_info_buf])) logger.dump_tabular() return self def save(self, save_path): data = { "gamma": self.gamma, "n_steps": self.n_steps, "vf_coef": self.vf_coef, "ent_coef": self.ent_coef, "max_grad_norm": self.max_grad_norm, "learning_rate": self.learning_rate, "alpha": self.alpha, "epsilon": self.epsilon, "lr_schedule": self.lr_schedule, "verbose": self.verbose, "policy": self.policy, "observation_space": self.observation_space, "action_space": self.action_space, "n_envs": self.n_envs, "_vectorize_action": self._vectorize_action, "policy_kwargs": self.policy_kwargs } params = self.sess.run(self.params) self._save_to_file(save_path, data=data, params=params) class A2CRunner(AbstractEnvRunner): def __init__(self, env, model, n_steps=5, gamma=0.99): super(A2CRunner, 
self).__init__(env=env, model=model, n_steps=n_steps) self.gamma = gamma def run(self): mb_obs, mb_rewards, mb_actions, mb_values, mb_dones = [], [], [], [], [] mb_states = self.states ep_infos = [] for _ in range(self.n_steps): actions, values, states, _ = self.model.step(self.obs, self.states, self.dones) mb_obs.append(np.copy(self.obs)) mb_actions.append(actions) mb_values.append(values) mb_dones.append(self.dones) clipped_actions = actions if isinstance(self.env.action_space, gym.spaces.Box): clipped_actions = np.clip(actions, self.env.action_space.low, self.env.action_space.high) obs, rewards, dones, infos = self.env.step(clipped_actions) for info in infos: maybe_ep_info = info.get('episode') if maybe_ep_info is not None: ep_infos.append(maybe_ep_info) self.states = states self.dones = dones self.obs = obs mb_rewards.append(rewards) mb_dones.append(self.dones) mb_obs = np.asarray(mb_obs, dtype=self.obs.dtype).swapaxes(1, 0).reshape(self.batch_ob_shape) mb_rewards = np.asarray(mb_rewards, dtype=np.float32).swapaxes(0, 1) mb_actions = np.asarray(mb_actions, dtype=self.env.action_space.dtype).swapaxes(0, 1) mb_values = np.asarray(mb_values, dtype=np.float32).swapaxes(0, 1) mb_dones = np.asarray(mb_dones, dtype=np.bool).swapaxes(0, 1) mb_masks = mb_dones[:, :-1] mb_dones = mb_dones[:, 1:] true_rewards = np.copy(mb_rewards) last_values = self.model.value(self.obs, self.states, self.dones).tolist() for n, (rewards, dones, value) in enumerate(zip(mb_rewards, mb_dones, last_values)): rewards = rewards.tolist() dones = dones.tolist() if dones[-1] == 0: rewards = discount_with_dones(rewards + [value], dones + [0], self.gamma)[:-1] else: rewards = discount_with_dones(rewards, dones, self.gamma) mb_rewards[n] = rewards mb_rewards = mb_rewards.reshape(-1, *mb_rewards.shape[2:]) mb_actions = mb_actions.reshape(-1, *mb_actions.shape[2:]) mb_values = mb_values.reshape(-1, *mb_values.shape[2:]) mb_masks = mb_masks.reshape(-1, *mb_masks.shape[2:]) true_rewards = true_rewards.reshape(-1, *true_rewards.shape[2:]) return mb_obs, mb_states, mb_rewards, mb_masks, mb_actions, mb_values, ep_infos, true_rewards
true
true
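Illustrative sketch (not from the record): a minimal training call against the A2C class above, using the stable-baselines v2 (TensorFlow 1.x) API. CartPole-v1, the timestep budget, and the save path are arbitrary choices.

import gym
from stable_baselines import A2C
from stable_baselines.common.policies import MlpPolicy
from stable_baselines.common.vec_env import DummyVecEnv

env = DummyVecEnv([lambda: gym.make('CartPole-v1')])  # A2C expects a vectorized environment
model = A2C(MlpPolicy, env, verbose=1)                # other hyperparameters use the defaults above
model.learn(total_timesteps=10000)
model.save("a2c_cartpole")                            # arbitrary output path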
f711e50db3b1e0ae05786cda0ee212109cd501ae
8,321
py
Python
squash/dashboard/viz/api_helper.py
lsst-sqre/qa-dashboard
57d40a33f1d6fdc04fb8f5e6e7e4fcfaee25340c
[ "MIT" ]
2
2016-12-29T18:17:55.000Z
2017-03-01T20:20:52.000Z
squash/dashboard/viz/api_helper.py
lsst-sqre/qa-dashboard
57d40a33f1d6fdc04fb8f5e6e7e4fcfaee25340c
[ "MIT" ]
14
2016-04-08T17:11:06.000Z
2017-06-29T23:29:03.000Z
squash/dashboard/viz/api_helper.py
lsst-sqre/qa-dashboard
57d40a33f1d6fdc04fb8f5e6e7e4fcfaee25340c
[ "MIT" ]
1
2016-05-03T22:52:02.000Z
2016-05-03T22:52:02.000Z
import os import pandas as pd import requests from datetime import datetime from furl import furl SQUASH_API_URL = os.environ.get('SQUASH_API_URL', 'http://localhost:8000/dashboard/api/') def get_endpoint_urls(): """ Lookup API endpoint URLs """ r = requests.get(SQUASH_API_URL) r.raise_for_status() return r.json() def get_data(endpoint, params=None): """Return data as a dict from an API endpoint """ api = get_endpoint_urls() # e.g. http://localhost:8000/AMx?ci_id=1&ci_dataset=cfht&metric=AM1 r = requests.get(api[endpoint], params=params) r.raise_for_status() return r.json() def get_data_as_pandas_df(endpoint, params=None): """ Return data as a pandas dataframe from an API endpoint """ result = get_data(endpoint, params) data = pd.DataFrame.from_dict(result, orient='index').transpose() return data def get_datasets(default=None): """Get a list of datasets from the API and a default value Returns ------- datasets : list list of dataset names default : str if a valid default value is provided, overwrite the default value obtained from the API """ datasets = get_data('datasets') default_dataset = get_data('defaults')['ci_dataset'] if default: if default in datasets: default_dataset = default return {'datasets': datasets, 'default': default_dataset} def get_metrics(default=None): """Get the list of metrics from the API and a default value Returns ------- metrics : list list of metric names default : str if a valid default value is provided, overwrite the default value returned from the API """ r = get_data('metrics') metrics = [m['metric'] for m in r['results']] default_metric = get_data('defaults')['metric'] if default: if default in metrics: default_metric = default return {'metrics': metrics, 'default': default_metric} def get_value(specs, name): """ Helper function to unpack metric specification values Parameters ---------- specs: dict a dict with keys value and name name: str the spec name Return ------ value: float or None value of the spec if exists, None otherwise """ value = None for s in specs: if s['name'] == name: value = s['value'] break return value def get_specs(name): """Get metric specifications thresholds from its name Parameters ---------- name: str a valid metric name Returns ------- unit: str metric unit description: metric description minimum: float metric minimum specification design: float metric design specification stretch: float metric stretch goal """ r = get_data('metrics') unit = str() description = str() specs = [] minimum = None design = None stretch = None for m in r['results']: if m['metric'] == name: unit = m['unit'] description = m['description'] specs = eval(str(m['specs'])) break if specs: minimum = get_value(specs, 'minimum') design = get_value(specs, 'design') stretch = get_value(specs, 'stretch') return {'unit': unit, 'description': description, 'minimum': minimum, 'design': design, 'stretch': stretch} def get_url_args(doc, defaults=None): """Return url args recovered from django_full_path cookie in the bokeh request header. 
If defaults values are provided, overwrite the default values obtained from the API """ args = get_data('defaults') # overwrite api default values if defaults: for key in defaults: args[key] = defaults[key] r = doc().session_context.request if r: if 'django_full_path' in r.cookies: django_full_path = r.cookies['django_full_path'].value tmp = furl(django_full_path).args for key in tmp: # overwrite default values with those passed # as url args, make sure the url arg (key) is valid if key in args: args[key] = tmp[key] # the bokeh app name is the second segment of the url path args['bokeh_app'] = furl(django_full_path).path.segments[1] return args # TODO: these functions are used by the monitor app and need refactoring def get_initial_page(page_size, num_pages, window): # Page size in hours assuming CI_TIME_INTERVAL CI_TIME_INTERVAL = 8 page_window = page_size * CI_TIME_INTERVAL if window == 'weeks': initial_page = num_pages - int((24*7)/page_window) elif window == 'months': # maximum window of 3 months initial_page = num_pages - int((24*30*3)/page_window) elif window == 'years': # maximum window of 1 year initial_page = num_pages - int((24*365)/page_window) else: # everything initial_page = 1 # Make sure we have enough pages for the input time window if initial_page < 1: initial_page = 1 return initial_page def get_meas_by_dataset_and_metric(selected_dataset, selected_metric, window): """ Get measurements for a given dataset and metric from the measurements api endpoint Parameters ---------- selected_dataset : str the current selected dataset selected_metric : str the current selected metric Returns ------- ci_id : list list of job ids from the CI system dates : list list of datetimes for each job measurement measurements : list flat list of dicts where the key is the metric and the value is its measurement ci_url : list list of URLs for the jobs in the CI system """ api = get_endpoint_urls() # http://localhost:8000/dashboard/api/measurements/?job__ci_dataset=cfht&metric=AM1 r = requests.get(api['measurements'], params={'job__ci_dataset': selected_dataset, 'metric': selected_metric}) r.raise_for_status() results = r.json() # results are paginated, walk through each page # TODO: figure out how to retrieve the number of pages in DRF count = results['count'] page_size = len(results['results']) measurements = [] if page_size > 0: # ceiling integer num_pages = int(count/page_size) + (count % page_size > 0) initial_page = get_initial_page(page_size, num_pages, window) for page in range(initial_page, num_pages + 1): r = requests.get( api['measurements'], params={'job__ci_dataset': selected_dataset, 'metric': selected_metric, 'page': page}) r.raise_for_status() measurements.extend(r.json()['results']) ci_ids = [int(m['ci_id']) for m in measurements] # 2016-08-10T05:22:37.700146Z # after DM-7517 jobs return is sorted by date and the same is done for # the measurements dates = [datetime.strptime(m['date'], '%Y-%m-%dT%H:%M:%S.%fZ') for m in measurements] values = [m['value'] for m in measurements] ci_urls = [m['ci_url'] for m in measurements] packages = [m['changed_packages'] for m in measurements] # list of package names, name is the first element in the tuple names = [] for i, sublist in enumerate(packages): names.append([]) for package in sublist: names[i].append(package[0]) # list of git urls, git package commit sha and base url are the second and # third elements in the tuple git_urls = [] for i, sublist in enumerate(packages): git_urls.append([]) for package in sublist: 
git_urls[i].append("{}/commit/{}".format(package[2].strip('.git'), package[1])) return {'ci_ids': ci_ids, 'dates': dates, 'values': values, 'ci_urls': ci_urls, 'names': names, 'git_urls': git_urls}
26.415873
87
0.604855
import os import pandas as pd import requests from datetime import datetime from furl import furl SQUASH_API_URL = os.environ.get('SQUASH_API_URL', 'http://localhost:8000/dashboard/api/') def get_endpoint_urls(): r = requests.get(SQUASH_API_URL) r.raise_for_status() return r.json() def get_data(endpoint, params=None): api = get_endpoint_urls() r = requests.get(api[endpoint], params=params) r.raise_for_status() return r.json() def get_data_as_pandas_df(endpoint, params=None): result = get_data(endpoint, params) data = pd.DataFrame.from_dict(result, orient='index').transpose() return data def get_datasets(default=None): datasets = get_data('datasets') default_dataset = get_data('defaults')['ci_dataset'] if default: if default in datasets: default_dataset = default return {'datasets': datasets, 'default': default_dataset} def get_metrics(default=None): r = get_data('metrics') metrics = [m['metric'] for m in r['results']] default_metric = get_data('defaults')['metric'] if default: if default in metrics: default_metric = default return {'metrics': metrics, 'default': default_metric} def get_value(specs, name): value = None for s in specs: if s['name'] == name: value = s['value'] break return value def get_specs(name): r = get_data('metrics') unit = str() description = str() specs = [] minimum = None design = None stretch = None for m in r['results']: if m['metric'] == name: unit = m['unit'] description = m['description'] specs = eval(str(m['specs'])) break if specs: minimum = get_value(specs, 'minimum') design = get_value(specs, 'design') stretch = get_value(specs, 'stretch') return {'unit': unit, 'description': description, 'minimum': minimum, 'design': design, 'stretch': stretch} def get_url_args(doc, defaults=None): args = get_data('defaults') if defaults: for key in defaults: args[key] = defaults[key] r = doc().session_context.request if r: if 'django_full_path' in r.cookies: django_full_path = r.cookies['django_full_path'].value tmp = furl(django_full_path).args for key in tmp: if key in args: args[key] = tmp[key] args['bokeh_app'] = furl(django_full_path).path.segments[1] return args def get_initial_page(page_size, num_pages, window): CI_TIME_INTERVAL = 8 page_window = page_size * CI_TIME_INTERVAL if window == 'weeks': initial_page = num_pages - int((24*7)/page_window) elif window == 'months': initial_page = num_pages - int((24*30*3)/page_window) elif window == 'years': initial_page = num_pages - int((24*365)/page_window) else: initial_page = 1 if initial_page < 1: initial_page = 1 return initial_page def get_meas_by_dataset_and_metric(selected_dataset, selected_metric, window): api = get_endpoint_urls() r = requests.get(api['measurements'], params={'job__ci_dataset': selected_dataset, 'metric': selected_metric}) r.raise_for_status() results = r.json() count = results['count'] page_size = len(results['results']) measurements = [] if page_size > 0: num_pages = int(count/page_size) + (count % page_size > 0) initial_page = get_initial_page(page_size, num_pages, window) for page in range(initial_page, num_pages + 1): r = requests.get( api['measurements'], params={'job__ci_dataset': selected_dataset, 'metric': selected_metric, 'page': page}) r.raise_for_status() measurements.extend(r.json()['results']) ci_ids = [int(m['ci_id']) for m in measurements] dates = [datetime.strptime(m['date'], '%Y-%m-%dT%H:%M:%S.%fZ') for m in measurements] values = [m['value'] for m in measurements] ci_urls = [m['ci_url'] for m in measurements] packages = [m['changed_packages'] for m in measurements] names = [] for 
i, sublist in enumerate(packages): names.append([]) for package in sublist: names[i].append(package[0]) git_urls = [] for i, sublist in enumerate(packages): git_urls.append([]) for package in sublist: git_urls[i].append("{}/commit/{}".format(package[2][:-4] if package[2].endswith('.git') else package[2], package[1])) return {'ci_ids': ci_ids, 'dates': dates, 'values': values, 'ci_urls': ci_urls, 'names': names, 'git_urls': git_urls}
true
true
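The get_initial_page() helper in the record above maps a requested time window ('weeks', 'months', 'years') onto the first DRF result page worth fetching. The sketch below reproduces that arithmetic in isolation so it can be sanity-checked; CI_TIME_INTERVAL, the page size and the page count are assumed illustrative values, not data from the SQuaSH API.

```python
# Standalone sketch of the paging-window arithmetic in get_initial_page() above.
# All numbers here are illustrative assumptions, not SQuaSH data.
CI_TIME_INTERVAL = 8  # assumed hours between CI runs, as in the module above

def first_page_for_window(page_size, num_pages, window):
    page_window = page_size * CI_TIME_INTERVAL  # hours covered by one result page
    hours = {'weeks': 24 * 7, 'months': 24 * 30 * 3, 'years': 24 * 365}
    if window in hours:
        initial_page = num_pages - int(hours[window] / page_window)
    else:
        initial_page = 1  # 'everything'
    return max(initial_page, 1)  # never start before the first page

# With 5 measurements per page and 200 pages, a one-week window starts 4 pages
# from the end: 24*7 = 168 hours, int(168 / (5*8)) = 4, 200 - 4 = 196.
print(first_page_for_window(5, 200, 'weeks'))  # -> 196
```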
f711e542c62977ca0298efb71fdbcddc78077b81
186
py
Python
Run Files/multi-sub.py
shahmari/Prevention_and_Quarantine_on_SIR
9173fd3feaa86a79d829ee653ec2c70f678e2ac3
[ "MIT" ]
1
2021-08-25T09:56:10.000Z
2021-08-25T09:56:10.000Z
Run Files/multi-sub.py
shahmari/Prevention_and_Quarantine_on_SIR
9173fd3feaa86a79d829ee653ec2c70f678e2ac3
[ "MIT" ]
null
null
null
Run Files/multi-sub.py
shahmari/Prevention_and_Quarantine_on_SIR
9173fd3feaa86a79d829ee653ec2c70f678e2ac3
[ "MIT" ]
null
null
null
import time import subprocess jobNumber=10 for i in range(jobNumber): qsub_command = "qsub job.sh" print(qsub_command) exit_status = subprocess.call(qsub_command, shell=True) time.sleep(6)
26.571429
59
0.725806
import time import subprocess jobNumber=10 for i in range(jobNumber): qsub_command = "qsub job.sh" print(qsub_command) exit_status = subprocess.call(qsub_command, shell=True) time.sleep(6)
true
true
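The submission loop in the record above calls subprocess.call without importing subprocess (the missing import is added in the record's content above). The sketch below is an illustrative, standalone variant rather than code from the repository: it submits the same way with subprocess.run and checks the exit status; "qsub", "job.sh" and the job count are placeholders.

```python
# Illustrative standalone variant of the multi-sub loop above; "qsub", "job.sh"
# and the job count are placeholders, not values taken from the record.
import subprocess
import time

JOB_COUNT = 10
for i in range(JOB_COUNT):
    # An argument list avoids shell=True, since no shell features are needed here.
    result = subprocess.run(["qsub", "job.sh"])
    if result.returncode != 0:
        print("submission {} failed with exit code {}".format(i, result.returncode))
    time.sleep(6)  # throttle submissions, as the original script does
```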
f711e544a0dc6217467d5a0efe20891c8d308cd5
2,929
py
Python
tkinter/studenttracking_gui.py
blulady/python
65d8e99f6411cf79be0353abc99a2677dfeebe11
[ "bzip2-1.0.6" ]
null
null
null
tkinter/studenttracking_gui.py
blulady/python
65d8e99f6411cf79be0353abc99a2677dfeebe11
[ "bzip2-1.0.6" ]
null
null
null
tkinter/studenttracking_gui.py
blulady/python
65d8e99f6411cf79be0353abc99a2677dfeebe11
[ "bzip2-1.0.6" ]
1
2020-09-11T16:05:46.000Z
2020-09-11T16:05:46.000Z
from tkinter import * import tkinter as tk import studenttracking_main import studenttracking_fnct def load_gui(self): self.lbl_subform = tk.Label(self.master,text='Submission Form') self.lbl_subform.grid(row=0,column=1,padx=(27,0),pady=(10,0),sticky=N+W) self.lbl_fname = tk.Label(self.master,text='First Name:') self.lbl_fname.grid(row=2,column=0,padx=(27,0),pady=(10,0),sticky=N+W) self.lbl_lname = tk.Label(self.master,text='Last Name:') self.lbl_lname.grid(row=3,column=0,padx=(27,0),pady=(10,0),sticky=N+W) self.lbl_phone = tk.Label(self.master,text='Phone:') self.lbl_phone.grid(row=4,column=0,padx=(27,0),pady=(10,0),sticky=N+W) self.lbl_email = tk.Label(self.master,text='Email:') self.lbl_email.grid(row=5,column=0,padx=(27,0),pady=(10,0),sticky=N+W) self.lbl_course = tk.Label(self.master,text='Course:') self.lbl_course.grid(row=7,column=0,padx=(27,0),pady=(10,0),sticky=N+W) self.lbl_info = tk.Label(self.master,text='Information') self.lbl_info.grid(row=0,column=4,padx=(27,0),pady=(10,0),sticky=N+W) self.txt_fname = tk.Entry(self.master,text='') self.txt_fname.grid(row=2,column=1,rowspan=1,columnspan=2,padx=(30,40),pady=(10,0),sticky=N+E+W) self.txt_lname = tk.Entry(self.master,text='') self.txt_lname.grid(row=3,column=1,rowspan=1,columnspan=2,padx=(30,40),pady=(10,0),sticky=N+E+W) self.txt_phone = tk.Entry(self.master,text='') self.txt_phone.grid(row=4,column=1,rowspan=1,columnspan=2,padx=(30,40),pady=(10,0),sticky=N+E+W) self.txt_email = tk.Entry(self.master,text='') self.txt_email.grid(row=5,column=1,rowspan=1,columnspan=2,padx=(30,40),pady=(10,0),sticky=N+E+W) self.txt_course = tk.Entry(self.master,text='') self.txt_course.grid(row=7,column=1,rowspan=1,columnspan=2,padx=(30,40),pady=(10,0),sticky=N+E+W) self.scrollbar1 = Scrollbar(self.master,orient=VERTICAL) self.lstList1 = Listbox(self.master,exportselection=0,yscrollcommand=self.scrollbar1.set) self.lstList1.bind('<<ListboxSelect>>',lambda event: studenttracking_fnct.onSelect(self,event)) self.scrollbar1.config(command=self.lstList1.yview) self.scrollbar1.grid(row=1,column=7,rowspan=8,columnspan=1,padx=(0,0),pady=(0,0),sticky=N+E+S) self.lstList1.grid(row=2,column=3,rowspan=7,columnspan=4,padx=(0,0),pady=(0,0),sticky=N+E+S+W) self.btn_submit = tk.Button(self.master,width=12,height=2,text='Submit',command=lambda: studenttracking_fnct.submit(self)) self.btn_submit.grid(row=7,column=0,padx=(25,0),pady=(45,10),sticky=W) self.btn_delete = tk.Button(self.master,width=12,height=2,text='Delete',command=lambda: studenttracking_fnct.onDelete(self)) self.btn_delete.grid(row=7,column=1,padx=(25,0),pady=(45,10),sticky=W) studenttracking_fnct.create_db(self) studenttracking_fnct.onRefresh(self) if __name__ == "__main__": pass
54.240741
129
0.699556
from tkinter import * import tkinter as tk import studenttracking_main import studenttracking_fnct def load_gui(self): self.lbl_subform = tk.Label(self.master,text='Submission Form') self.lbl_subform.grid(row=0,column=1,padx=(27,0),pady=(10,0),sticky=N+W) self.lbl_fname = tk.Label(self.master,text='First Name:') self.lbl_fname.grid(row=2,column=0,padx=(27,0),pady=(10,0),sticky=N+W) self.lbl_lname = tk.Label(self.master,text='Last Name:') self.lbl_lname.grid(row=3,column=0,padx=(27,0),pady=(10,0),sticky=N+W) self.lbl_phone = tk.Label(self.master,text='Phone:') self.lbl_phone.grid(row=4,column=0,padx=(27,0),pady=(10,0),sticky=N+W) self.lbl_email = tk.Label(self.master,text='Email:') self.lbl_email.grid(row=5,column=0,padx=(27,0),pady=(10,0),sticky=N+W) self.lbl_course = tk.Label(self.master,text='Course:') self.lbl_course.grid(row=7,column=0,padx=(27,0),pady=(10,0),sticky=N+W) self.lbl_info = tk.Label(self.master,text='Information') self.lbl_info.grid(row=0,column=4,padx=(27,0),pady=(10,0),sticky=N+W) self.txt_fname = tk.Entry(self.master,text='') self.txt_fname.grid(row=2,column=1,rowspan=1,columnspan=2,padx=(30,40),pady=(10,0),sticky=N+E+W) self.txt_lname = tk.Entry(self.master,text='') self.txt_lname.grid(row=3,column=1,rowspan=1,columnspan=2,padx=(30,40),pady=(10,0),sticky=N+E+W) self.txt_phone = tk.Entry(self.master,text='') self.txt_phone.grid(row=4,column=1,rowspan=1,columnspan=2,padx=(30,40),pady=(10,0),sticky=N+E+W) self.txt_email = tk.Entry(self.master,text='') self.txt_email.grid(row=5,column=1,rowspan=1,columnspan=2,padx=(30,40),pady=(10,0),sticky=N+E+W) self.txt_course = tk.Entry(self.master,text='') self.txt_course.grid(row=7,column=1,rowspan=1,columnspan=2,padx=(30,40),pady=(10,0),sticky=N+E+W) self.scrollbar1 = Scrollbar(self.master,orient=VERTICAL) self.lstList1 = Listbox(self.master,exportselection=0,yscrollcommand=self.scrollbar1.set) self.lstList1.bind('<<ListboxSelect>>',lambda event: studenttracking_fnct.onSelect(self,event)) self.scrollbar1.config(command=self.lstList1.yview) self.scrollbar1.grid(row=1,column=7,rowspan=8,columnspan=1,padx=(0,0),pady=(0,0),sticky=N+E+S) self.lstList1.grid(row=2,column=3,rowspan=7,columnspan=4,padx=(0,0),pady=(0,0),sticky=N+E+S+W) self.btn_submit = tk.Button(self.master,width=12,height=2,text='Submit',command=lambda: studenttracking_fnct.submit(self)) self.btn_submit.grid(row=7,column=0,padx=(25,0),pady=(45,10),sticky=W) self.btn_delete = tk.Button(self.master,width=12,height=2,text='Delete',command=lambda: studenttracking_fnct.onDelete(self)) self.btn_delete.grid(row=7,column=1,padx=(25,0),pady=(45,10),sticky=W) studenttracking_fnct.create_db(self) studenttracking_fnct.onRefresh(self) if __name__ == "__main__": pass
true
true
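The load_gui(self) function in the record above expects a parent object exposing a master Tk window, and it calls into studenttracking_main and studenttracking_fnct, which are not part of this record. The sketch below is a minimal, hypothetical harness showing how such a function is typically wired to a Tk root; the class name and the single placeholder label are assumptions, not code from the project.

```python
# Minimal, hypothetical harness for a load_gui(self)-style builder; nothing here
# is taken from the studenttracking_* modules.
import tkinter as tk

class ParentWindow:
    def __init__(self, master):
        self.master = master
        self.master.title("Student Tracking")
        self.load_gui()

    def load_gui(self):
        # The real module lays out labels, entries, a listbox and buttons with
        # grid(); this single label only stands in for that layout.
        self.lbl_subform = tk.Label(self.master, text="Submission Form")
        self.lbl_subform.grid(row=0, column=1, padx=(27, 0), pady=(10, 0), sticky=tk.NW)

if __name__ == "__main__":
    root = tk.Tk()
    ParentWindow(root)
    root.mainloop()
```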
f711e63ba9ef37c10f831ed1e5a1dff0fdbe22b9
22,083
py
Python
TWLight/emails/tests.py
soumyaa1804/TWLight
25b58c9e4657919f08351f2578f5221f2e0c8e83
[ "MIT" ]
null
null
null
TWLight/emails/tests.py
soumyaa1804/TWLight
25b58c9e4657919f08351f2578f5221f2e0c8e83
[ "MIT" ]
null
null
null
TWLight/emails/tests.py
soumyaa1804/TWLight
25b58c9e4657919f08351f2578f5221f2e0c8e83
[ "MIT" ]
null
null
null
from datetime import datetime, timedelta from djmail.template_mail import MagicMailBuilder, InlineCSSTemplateMail from unittest.mock import patch from django_comments import get_form_target from django_comments.models import Comment from django_comments.signals import comment_was_posted from django.contrib.contenttypes.models import ContentType from django.contrib.sites.models import Site from django.core import mail from django.core.management import call_command from django.core.urlresolvers import reverse from django.test import TestCase, RequestFactory from TWLight.applications.factories import ApplicationFactory from TWLight.applications.models import Application from TWLight.resources.factories import PartnerFactory from TWLight.resources.models import Partner from TWLight.resources.tests import EditorCraftRoom from TWLight.users.factories import EditorFactory, UserFactory from TWLight.users.groups import get_coordinators from TWLight.users.models import Authorization # We need to import these in order to register the signal handlers; if we don't, # when we test that those handler functions have been called, we will get # False even when they work in real life. from .tasks import ( send_comment_notification_emails, send_approval_notification_email, send_rejection_notification_email, send_user_renewal_notice_emails, send_proxy_bundle_launch_notice, contact_us_emails, ) class ApplicationCommentTest(TestCase): def setUp(self): super(ApplicationCommentTest, self).setUp() self.editor = EditorFactory(user__email="editor@example.com").user coordinators = get_coordinators() self.coordinator1 = EditorFactory( user__email="c1@example.com", user__username="c1" ).user self.coordinator2 = EditorFactory( user__email="c2@example.com", user__username="c2" ).user coordinators.user_set.add(self.coordinator1) coordinators.user_set.add(self.coordinator2) self.partner = PartnerFactory() def _create_comment(self, app, user): CT = ContentType.objects.get_for_model comm = Comment.objects.create( content_type=CT(Application), object_pk=app.pk, user=user, user_name=user.username, comment="Content!", site=Site.objects.get_current(), ) comm.save() return comm def _set_up_email_test_objects(self): app = ApplicationFactory(editor=self.editor.editor, partner=self.partner) factory = RequestFactory() request = factory.post(get_form_target()) return app, request def test_comment_email_sending_1(self): """ A coordinator posts a comment to an Editor's application and an email is send to that Editor. An email is not sent to the coordinator. """ app, request = self._set_up_email_test_objects() request.user = UserFactory() self.assertEqual(len(mail.outbox), 0) comment1 = self._create_comment(app, self.coordinator1) comment_was_posted.send(sender=Comment, comment=comment1, request=request) self.assertEqual(len(mail.outbox), 1) self.assertEqual(mail.outbox[0].to, [self.editor.email]) def test_comment_email_sending_2(self): """ After a coordinator posts a comment, the Editor posts an additional comment. An email is sent to the coordinator who posted the earlier comment. An email is not sent to the editor. 
""" app, request = self._set_up_email_test_objects() request.user = UserFactory() self.assertEqual(len(mail.outbox), 0) _ = self._create_comment(app, self.coordinator1) comment2 = self._create_comment(app, self.editor) comment_was_posted.send(sender=Comment, comment=comment2, request=request) self.assertEqual(len(mail.outbox), 1) self.assertEqual(mail.outbox[0].to, [self.coordinator1.email]) def test_comment_email_sending_3(self): """ After the editor and coordinator post a comment, an additional coordinator posts a comment. One email is sent to the first coordinator, and a distinct email is sent to the editor. """ app, request = self._set_up_email_test_objects() request.user = UserFactory() self.assertEqual(len(mail.outbox), 0) _ = self._create_comment(app, self.coordinator1) _ = self._create_comment(app, self.editor) comment3 = self._create_comment(app, self.coordinator2) comment_was_posted.send(sender=Comment, comment=comment3, request=request) self.assertEqual(len(mail.outbox), 2) # Either order of email sending is fine. try: self.assertEqual(mail.outbox[0].to, [self.coordinator1.email]) self.assertEqual(mail.outbox[1].to, [self.editor.email]) except AssertionError: self.assertEqual(mail.outbox[1].to, [self.coordinator1.email]) self.assertEqual(mail.outbox[0].to, [self.editor.email]) def test_comment_email_sending_4(self): """ A comment made on an application that's any further along the process than PENDING (i.e. a coordinator has taken some action on it) should fire an email to the coordinator who took the last action on it. """ app, request = self._set_up_email_test_objects() request.user = UserFactory() self.assertEqual(len(mail.outbox), 0) # Create a coordinator with a test client session coordinator = EditorCraftRoom(self, Terms=True, Coordinator=True) self.partner.coordinator = coordinator.user self.partner.save() # Approve the application url = reverse("applications:evaluate", kwargs={"pk": app.pk}) response = self.client.post( url, data={"status": Application.QUESTION}, follow=True ) comment4 = self._create_comment(app, self.editor) comment_was_posted.send(sender=Comment, comment=comment4, request=request) self.assertEqual(len(mail.outbox), 1) self.assertEqual(mail.outbox[0].to, [coordinator.user.email]) def test_comment_email_sending_5(self): """ A comment from the applying editor made on an application that has had no actions taken on it and no existing comments should not fire an email to anyone. """ app, request = self._set_up_email_test_objects() request.user = UserFactory() self.assertEqual(len(mail.outbox), 0) comment5 = self._create_comment(app, self.editor) comment_was_posted.send(sender=Comment, comment=comment5, request=request) self.assertEqual(len(mail.outbox), 0) # We'd like to mock out send_comment_notification_emails and test that # it is called when comment_was_posted is fired, but we can't; the signal # handler is attached to the real send_comment_notification_emails, not # the mocked one. class ApplicationStatusTest(TestCase): @patch("TWLight.emails.tasks.send_approval_notification_email") def test_approval_calls_email_function(self, mock_email): app = ApplicationFactory(status=Application.PENDING) app.status = Application.APPROVED app.save() self.assertTrue(mock_email.called) @patch("TWLight.emails.tasks.send_approval_notification_email") def test_reapproval_does_not_call_email_function(self, mock_email): """ Saving an Application with APPROVED status, when it already had an APPROVED status, should not re-send the email. 
""" app = ApplicationFactory(status=Application.PENDING) app.status = Application.APPROVED app.save() app.save() self.assertEqual(mock_email.call_count, 1) @patch("TWLight.emails.tasks.send_rejection_notification_email") def test_rejection_calls_email_function(self, mock_email): app = ApplicationFactory(status=Application.PENDING) app.status = Application.NOT_APPROVED app.save() self.assertTrue(mock_email.called) @patch("TWLight.emails.tasks.send_rejection_notification_email") def test_rerejection_does_not_call_email_function(self, mock_email): app = ApplicationFactory(status=Application.PENDING) app.status = Application.NOT_APPROVED app.save() app.save() self.assertEqual(mock_email.call_count, 1) def test_pending_does_not_call_email_function(self): """ Applications saved with a PENDING status should not generate email. """ orig_outbox = len(mail.outbox) _ = ApplicationFactory(status=Application.PENDING) self.assertEqual(len(mail.outbox), orig_outbox) def test_question_does_not_call_email_function(self): """ Applications saved with a QUESTION status should not generate email. """ orig_outbox = len(mail.outbox) _ = ApplicationFactory(status=Application.QUESTION) self.assertEqual(len(mail.outbox), orig_outbox) def test_sent_does_not_call_email_function(self): """ Applications saved with a SENT status should not generate email. """ orig_outbox = len(mail.outbox) _ = ApplicationFactory(status=Application.SENT) self.assertEqual(len(mail.outbox), orig_outbox) @patch("TWLight.emails.tasks.send_waitlist_notification_email") def test_waitlist_calls_email_function(self, mock_email): partner = PartnerFactory(status=Partner.WAITLIST) app = ApplicationFactory(status=Application.PENDING, partner=partner) self.assertTrue(mock_email.called) partner.delete() app.delete() @patch("TWLight.emails.tasks.send_waitlist_notification_email") def test_nonwaitlist_does_not_call_email_function(self, mock_email): partner = PartnerFactory(status=Partner.AVAILABLE) app = ApplicationFactory(status=Application.PENDING, partner=partner) self.assertFalse(mock_email.called) partner.delete() app.delete() partner = PartnerFactory(status=Partner.NOT_AVAILABLE) app = ApplicationFactory(status=Application.PENDING, partner=partner) self.assertFalse(mock_email.called) partner.delete() app.delete() @patch("TWLight.emails.tasks.send_waitlist_notification_email") def test_waitlisting_partner_calls_email_function(self, mock_email): """ Switching a Partner to WAITLIST status should call the email function for apps to that partner with open statuses. """ partner = PartnerFactory(status=Partner.AVAILABLE) app = ApplicationFactory(status=Application.PENDING, partner=partner) self.assertFalse(mock_email.called) partner.status = Partner.WAITLIST partner.save() self.assertTrue(mock_email.called) mock_email.assert_called_with(app) @patch("TWLight.emails.tasks.send_waitlist_notification_email") def test_waitlisting_partner_does_not_call_email_function(self, mock_email): """ Switching a Partner to WAITLIST status should NOT call the email function for apps to that partner with closed statuses. 
""" partner = PartnerFactory(status=Partner.AVAILABLE) app = ApplicationFactory(status=Application.APPROVED, partner=partner) app = ApplicationFactory(status=Application.NOT_APPROVED, partner=partner) app = ApplicationFactory(status=Application.SENT, partner=partner) self.assertFalse(mock_email.called) partner.status = Partner.WAITLIST partner.save() self.assertFalse(mock_email.called) class ContactUsTest(TestCase): def setUp(self): super(ContactUsTest, self).setUp() self.editor = EditorFactory(user__email="editor@example.com").user @patch("TWLight.emails.tasks.contact_us_emails") def test_contact_us_emails(self, mock_email): factory = RequestFactory() request = factory.post(get_form_target()) request.user = UserFactory() editor = EditorFactory() reply_to = ["editor@example.com"] cc = ["editor@example.com"] self.assertEqual(len(mail.outbox), 0) mail_instance = MagicMailBuilder(template_mail_cls=InlineCSSTemplateMail) email = mail_instance.contact_us_email( "wikipedialibrary@wikimedia.org", {"editor_wp_username": editor.wp_username, "body": "This is a test email"}, ) email.extra_headers["Reply-To"] = ", ".join(reply_to) email.extra_headers["Cc"] = ", ".join(cc) email.send() self.assertEqual(len(mail.outbox), 1) def test_user_submit_contact_us_emails(self): EditorCraftRoom(self, Terms=True, Coordinator=False) self.assertEqual(len(mail.outbox), 0) contact_us_url = reverse("contact") contact_us = self.client.get(contact_us_url, follow=True) contact_us_form = contact_us.context["form"] data = contact_us_form.initial data["email"] = "editor@example.com" data["message"] = "This is a test" data["cc"] = True data["submit"] = True self.client.post(contact_us_url, data) self.assertEqual(len(mail.outbox), 1) def test_not_logged_in_user_submit_contact_us_emails(self): self.assertEqual(len(mail.outbox), 0) contact_us_url = reverse("contact") contact_us = self.client.get(contact_us_url, follow=True) contact_us_form = contact_us.context["form"] data = contact_us_form.initial data["email"] = "editor@example.com" data["message"] = "This is a test" data["submit"] = True data["cc"] = True self.client.post(contact_us_url, data) self.assertEqual(len(mail.outbox), 0) class UserRenewalNoticeTest(TestCase): def setUp(self): super(UserRenewalNoticeTest, self).setUp() editor = EditorFactory(user__email="editor@example.com") self.user = editor.user self.coordinator = EditorFactory().user coordinators = get_coordinators() coordinators.user_set.add(self.coordinator) self.partner = PartnerFactory() self.authorization = Authorization() self.authorization.user = self.user self.authorization.authorizer = self.coordinator self.authorization.partner = self.partner self.authorization.date_expires = datetime.today() + timedelta(weeks=2) self.authorization.save() def test_single_user_renewal_notice(self): """ Given one authorization that expires in two weeks, ensure that our email task sends an email to that user. """ call_command("user_renewal_notice") self.assertEqual(len(mail.outbox), 1) self.assertEqual(mail.outbox[0].to, [self.user.email]) def test_user_renewal_notice_disabled(self): """ Users have the option to disable renewal notices. If users have disabled emails, we shouldn't send them one. """ self.user.userprofile.send_renewal_notices = False self.user.userprofile.save() call_command("user_renewal_notice") self.assertEqual(len(mail.outbox), 0) def test_user_renewal_notice_doesnt_duplicate(self): """ If we run the command a second time, the same user shouldn't receive a second email. 
""" call_command("user_renewal_notice") self.assertEqual(len(mail.outbox), 1) call_command("user_renewal_notice") self.assertEqual(len(mail.outbox), 1) def test_user_renewal_notice_past_date(self): """ If the authorization expired before today, the user shouldn't receive a notice. """ self.authorization.date_expires = datetime.today() - timedelta(weeks=1) self.authorization.save() call_command("user_renewal_notice") self.assertEqual(len(mail.outbox), 0) def test_user_renewal_notice_future_date(self): """ If the authorization doesn't expire for months, the user shouldn't receive a notice. """ self.authorization.date_expires = datetime.today() + timedelta(weeks=8) self.authorization.save() call_command("user_renewal_notice") self.assertEqual(len(mail.outbox), 0) def test_user_renewal_notice_future_date_1(self): """ If we have multiple authorizations to send emails for, let's make sure we send distinct emails to the right places. """ editor2 = EditorFactory(user__email="editor2@example.com") authorization2 = Authorization() authorization2.user = editor2.user authorization2.authorizer = self.coordinator authorization2.partner = self.partner authorization2.date_expires = datetime.today() + timedelta(weeks=1) authorization2.save() call_command("user_renewal_notice") self.assertEqual(len(mail.outbox), 2) # Make sure that the two emails went to the two expected # email addresses. # This looks a little complicated because mail.outbox[0].to is a # (one element) list, and we need to compare sets to ensure we've # got 1 of each email. self.assertEqual( {mail.outbox[0].to[0], mail.outbox[1].to[0]}, {"editor@example.com", "editor2@example.com"}, ) class CoordinatorReminderEmailTest(TestCase): def setUp(self): super(CoordinatorReminderEmailTest, self).setUp() editor = EditorFactory() self.user = editor.user editor2 = EditorFactory() self.user2 = editor2.user self.coordinator = EditorFactory(user__email="editor@example.com").user coordinators = get_coordinators() coordinators.user_set.add(self.coordinator) self.partner = PartnerFactory(coordinator=self.coordinator) self.partner2 = PartnerFactory(coordinator=self.coordinator) def test_send_coordinator_reminder_email(self): ApplicationFactory( partner=self.partner, status=Application.PENDING, editor=self.user.editor ) # Coordinator only wants reminders for apps under discussion self.coordinator.userprofile.pending_app_reminders = False self.coordinator.userprofile.approved_app_reminders = False self.coordinator.userprofile.save() call_command("send_coordinator_reminders") self.assertEqual(len(mail.outbox), 0) ApplicationFactory( partner=self.partner2, status=Application.QUESTION, editor=self.user2.editor ) call_command("send_coordinator_reminders") self.assertEqual(len(mail.outbox), 1) # We include the count for all waiting (PENDING, QUESTION, # APPROVED) apps whenever we send an email, but trigger # emails only based on preferences i.e. if a coordinator # has enabled reminders only for QUESTION, we send a # reminder only when we have an app of status: QUESTION, # but include info on all apps in the email. 
self.assertNotIn("One pending application", mail.outbox[0].body) self.assertIn("One under discussion application", mail.outbox[0].body) self.assertNotIn("One approved application", mail.outbox[0].body) ApplicationFactory( partner=self.partner, status=Application.APPROVED, editor=self.user2.editor ) ApplicationFactory( partner=self.partner2, status=Application.SENT, editor=self.user.editor ) # Clear mail outbox since approvals send emails mail.outbox = [] # Coordinator only wants reminders for apps under discussion self.coordinator.userprofile.pending_app_reminders = True self.coordinator.userprofile.approved_app_reminders = True self.coordinator.userprofile.save() call_command("send_coordinator_reminders") self.assertEqual(len(mail.outbox), 1) self.assertIn("One pending application", mail.outbox[0].body) self.assertIn("One under discussion application", mail.outbox[0].body) self.assertIn("One approved application", mail.outbox[0].body) class ProxyBundleLaunchTest(TestCase): def setUp(self): super(ProxyBundleLaunchTest, self).setUp() editor = EditorFactory(user__email="editor@example.com") self.user = editor.user def test_proxy_bundle_launch_email_1(self): """ With one user, calling the proxy/bundle launch command should send a single email, to that user. """ call_command("proxy_bundle_launch") self.assertEqual(len(mail.outbox), 1) self.assertEqual(mail.outbox[0].to, [self.user.email]) def test_proxy_bundle_launch_email_2(self): """ Adding another user should result in two sent emails. """ _ = EditorFactory(user__email="editor@example.com") call_command("proxy_bundle_launch") self.assertEqual(len(mail.outbox), 2) def test_proxy_bundle_launch_email_3(self): """ The proxy/bundle launch command should record the email was sent """ self.assertFalse(self.user.userprofile.proxy_notification_sent) call_command("proxy_bundle_launch") self.user.userprofile.refresh_from_db() self.assertTrue(self.user.userprofile.proxy_notification_sent) def test_proxy_bundle_launch_email_4(self): """ The proxy/bundle launch command should not send to a user we recorded as having received the email already. """ self.user.userprofile.proxy_notification_sent = True self.user.userprofile.save() call_command("proxy_bundle_launch") self.assertEqual(len(mail.outbox), 0)
38.074138
88
0.683059
from datetime import datetime, timedelta from djmail.template_mail import MagicMailBuilder, InlineCSSTemplateMail from unittest.mock import patch from django_comments import get_form_target from django_comments.models import Comment from django_comments.signals import comment_was_posted from django.contrib.contenttypes.models import ContentType from django.contrib.sites.models import Site from django.core import mail from django.core.management import call_command from django.core.urlresolvers import reverse from django.test import TestCase, RequestFactory from TWLight.applications.factories import ApplicationFactory from TWLight.applications.models import Application from TWLight.resources.factories import PartnerFactory from TWLight.resources.models import Partner from TWLight.resources.tests import EditorCraftRoom from TWLight.users.factories import EditorFactory, UserFactory from TWLight.users.groups import get_coordinators from TWLight.users.models import Authorization # when we test that those handler functions have been called, we will get # False even when they work in real life. from .tasks import ( send_comment_notification_emails, send_approval_notification_email, send_rejection_notification_email, send_user_renewal_notice_emails, send_proxy_bundle_launch_notice, contact_us_emails, ) class ApplicationCommentTest(TestCase): def setUp(self): super(ApplicationCommentTest, self).setUp() self.editor = EditorFactory(user__email="editor@example.com").user coordinators = get_coordinators() self.coordinator1 = EditorFactory( user__email="c1@example.com", user__username="c1" ).user self.coordinator2 = EditorFactory( user__email="c2@example.com", user__username="c2" ).user coordinators.user_set.add(self.coordinator1) coordinators.user_set.add(self.coordinator2) self.partner = PartnerFactory() def _create_comment(self, app, user): CT = ContentType.objects.get_for_model comm = Comment.objects.create( content_type=CT(Application), object_pk=app.pk, user=user, user_name=user.username, comment="Content!", site=Site.objects.get_current(), ) comm.save() return comm def _set_up_email_test_objects(self): app = ApplicationFactory(editor=self.editor.editor, partner=self.partner) factory = RequestFactory() request = factory.post(get_form_target()) return app, request def test_comment_email_sending_1(self): app, request = self._set_up_email_test_objects() request.user = UserFactory() self.assertEqual(len(mail.outbox), 0) comment1 = self._create_comment(app, self.coordinator1) comment_was_posted.send(sender=Comment, comment=comment1, request=request) self.assertEqual(len(mail.outbox), 1) self.assertEqual(mail.outbox[0].to, [self.editor.email]) def test_comment_email_sending_2(self): app, request = self._set_up_email_test_objects() request.user = UserFactory() self.assertEqual(len(mail.outbox), 0) _ = self._create_comment(app, self.coordinator1) comment2 = self._create_comment(app, self.editor) comment_was_posted.send(sender=Comment, comment=comment2, request=request) self.assertEqual(len(mail.outbox), 1) self.assertEqual(mail.outbox[0].to, [self.coordinator1.email]) def test_comment_email_sending_3(self): app, request = self._set_up_email_test_objects() request.user = UserFactory() self.assertEqual(len(mail.outbox), 0) _ = self._create_comment(app, self.coordinator1) _ = self._create_comment(app, self.editor) comment3 = self._create_comment(app, self.coordinator2) comment_was_posted.send(sender=Comment, comment=comment3, request=request) self.assertEqual(len(mail.outbox), 2) # Either order of email 
sending is fine. try: self.assertEqual(mail.outbox[0].to, [self.coordinator1.email]) self.assertEqual(mail.outbox[1].to, [self.editor.email]) except AssertionError: self.assertEqual(mail.outbox[1].to, [self.coordinator1.email]) self.assertEqual(mail.outbox[0].to, [self.editor.email]) def test_comment_email_sending_4(self): app, request = self._set_up_email_test_objects() request.user = UserFactory() self.assertEqual(len(mail.outbox), 0) # Create a coordinator with a test client session coordinator = EditorCraftRoom(self, Terms=True, Coordinator=True) self.partner.coordinator = coordinator.user self.partner.save() # Approve the application url = reverse("applications:evaluate", kwargs={"pk": app.pk}) response = self.client.post( url, data={"status": Application.QUESTION}, follow=True ) comment4 = self._create_comment(app, self.editor) comment_was_posted.send(sender=Comment, comment=comment4, request=request) self.assertEqual(len(mail.outbox), 1) self.assertEqual(mail.outbox[0].to, [coordinator.user.email]) def test_comment_email_sending_5(self): app, request = self._set_up_email_test_objects() request.user = UserFactory() self.assertEqual(len(mail.outbox), 0) comment5 = self._create_comment(app, self.editor) comment_was_posted.send(sender=Comment, comment=comment5, request=request) self.assertEqual(len(mail.outbox), 0) # We'd like to mock out send_comment_notification_emails and test that # handler is attached to the real send_comment_notification_emails, not # the mocked one. class ApplicationStatusTest(TestCase): @patch("TWLight.emails.tasks.send_approval_notification_email") def test_approval_calls_email_function(self, mock_email): app = ApplicationFactory(status=Application.PENDING) app.status = Application.APPROVED app.save() self.assertTrue(mock_email.called) @patch("TWLight.emails.tasks.send_approval_notification_email") def test_reapproval_does_not_call_email_function(self, mock_email): app = ApplicationFactory(status=Application.PENDING) app.status = Application.APPROVED app.save() app.save() self.assertEqual(mock_email.call_count, 1) @patch("TWLight.emails.tasks.send_rejection_notification_email") def test_rejection_calls_email_function(self, mock_email): app = ApplicationFactory(status=Application.PENDING) app.status = Application.NOT_APPROVED app.save() self.assertTrue(mock_email.called) @patch("TWLight.emails.tasks.send_rejection_notification_email") def test_rerejection_does_not_call_email_function(self, mock_email): app = ApplicationFactory(status=Application.PENDING) app.status = Application.NOT_APPROVED app.save() app.save() self.assertEqual(mock_email.call_count, 1) def test_pending_does_not_call_email_function(self): orig_outbox = len(mail.outbox) _ = ApplicationFactory(status=Application.PENDING) self.assertEqual(len(mail.outbox), orig_outbox) def test_question_does_not_call_email_function(self): orig_outbox = len(mail.outbox) _ = ApplicationFactory(status=Application.QUESTION) self.assertEqual(len(mail.outbox), orig_outbox) def test_sent_does_not_call_email_function(self): orig_outbox = len(mail.outbox) _ = ApplicationFactory(status=Application.SENT) self.assertEqual(len(mail.outbox), orig_outbox) @patch("TWLight.emails.tasks.send_waitlist_notification_email") def test_waitlist_calls_email_function(self, mock_email): partner = PartnerFactory(status=Partner.WAITLIST) app = ApplicationFactory(status=Application.PENDING, partner=partner) self.assertTrue(mock_email.called) partner.delete() app.delete() @patch("TWLight.emails.tasks.send_waitlist_notification_email") def 
test_nonwaitlist_does_not_call_email_function(self, mock_email): partner = PartnerFactory(status=Partner.AVAILABLE) app = ApplicationFactory(status=Application.PENDING, partner=partner) self.assertFalse(mock_email.called) partner.delete() app.delete() partner = PartnerFactory(status=Partner.NOT_AVAILABLE) app = ApplicationFactory(status=Application.PENDING, partner=partner) self.assertFalse(mock_email.called) partner.delete() app.delete() @patch("TWLight.emails.tasks.send_waitlist_notification_email") def test_waitlisting_partner_calls_email_function(self, mock_email): partner = PartnerFactory(status=Partner.AVAILABLE) app = ApplicationFactory(status=Application.PENDING, partner=partner) self.assertFalse(mock_email.called) partner.status = Partner.WAITLIST partner.save() self.assertTrue(mock_email.called) mock_email.assert_called_with(app) @patch("TWLight.emails.tasks.send_waitlist_notification_email") def test_waitlisting_partner_does_not_call_email_function(self, mock_email): partner = PartnerFactory(status=Partner.AVAILABLE) app = ApplicationFactory(status=Application.APPROVED, partner=partner) app = ApplicationFactory(status=Application.NOT_APPROVED, partner=partner) app = ApplicationFactory(status=Application.SENT, partner=partner) self.assertFalse(mock_email.called) partner.status = Partner.WAITLIST partner.save() self.assertFalse(mock_email.called) class ContactUsTest(TestCase): def setUp(self): super(ContactUsTest, self).setUp() self.editor = EditorFactory(user__email="editor@example.com").user @patch("TWLight.emails.tasks.contact_us_emails") def test_contact_us_emails(self, mock_email): factory = RequestFactory() request = factory.post(get_form_target()) request.user = UserFactory() editor = EditorFactory() reply_to = ["editor@example.com"] cc = ["editor@example.com"] self.assertEqual(len(mail.outbox), 0) mail_instance = MagicMailBuilder(template_mail_cls=InlineCSSTemplateMail) email = mail_instance.contact_us_email( "wikipedialibrary@wikimedia.org", {"editor_wp_username": editor.wp_username, "body": "This is a test email"}, ) email.extra_headers["Reply-To"] = ", ".join(reply_to) email.extra_headers["Cc"] = ", ".join(cc) email.send() self.assertEqual(len(mail.outbox), 1) def test_user_submit_contact_us_emails(self): EditorCraftRoom(self, Terms=True, Coordinator=False) self.assertEqual(len(mail.outbox), 0) contact_us_url = reverse("contact") contact_us = self.client.get(contact_us_url, follow=True) contact_us_form = contact_us.context["form"] data = contact_us_form.initial data["email"] = "editor@example.com" data["message"] = "This is a test" data["cc"] = True data["submit"] = True self.client.post(contact_us_url, data) self.assertEqual(len(mail.outbox), 1) def test_not_logged_in_user_submit_contact_us_emails(self): self.assertEqual(len(mail.outbox), 0) contact_us_url = reverse("contact") contact_us = self.client.get(contact_us_url, follow=True) contact_us_form = contact_us.context["form"] data = contact_us_form.initial data["email"] = "editor@example.com" data["message"] = "This is a test" data["submit"] = True data["cc"] = True self.client.post(contact_us_url, data) self.assertEqual(len(mail.outbox), 0) class UserRenewalNoticeTest(TestCase): def setUp(self): super(UserRenewalNoticeTest, self).setUp() editor = EditorFactory(user__email="editor@example.com") self.user = editor.user self.coordinator = EditorFactory().user coordinators = get_coordinators() coordinators.user_set.add(self.coordinator) self.partner = PartnerFactory() self.authorization = Authorization() 
self.authorization.user = self.user self.authorization.authorizer = self.coordinator self.authorization.partner = self.partner self.authorization.date_expires = datetime.today() + timedelta(weeks=2) self.authorization.save() def test_single_user_renewal_notice(self): call_command("user_renewal_notice") self.assertEqual(len(mail.outbox), 1) self.assertEqual(mail.outbox[0].to, [self.user.email]) def test_user_renewal_notice_disabled(self): self.user.userprofile.send_renewal_notices = False self.user.userprofile.save() call_command("user_renewal_notice") self.assertEqual(len(mail.outbox), 0) def test_user_renewal_notice_doesnt_duplicate(self): call_command("user_renewal_notice") self.assertEqual(len(mail.outbox), 1) call_command("user_renewal_notice") self.assertEqual(len(mail.outbox), 1) def test_user_renewal_notice_past_date(self): self.authorization.date_expires = datetime.today() - timedelta(weeks=1) self.authorization.save() call_command("user_renewal_notice") self.assertEqual(len(mail.outbox), 0) def test_user_renewal_notice_future_date(self): self.authorization.date_expires = datetime.today() + timedelta(weeks=8) self.authorization.save() call_command("user_renewal_notice") self.assertEqual(len(mail.outbox), 0) def test_user_renewal_notice_future_date_1(self): editor2 = EditorFactory(user__email="editor2@example.com") authorization2 = Authorization() authorization2.user = editor2.user authorization2.authorizer = self.coordinator authorization2.partner = self.partner authorization2.date_expires = datetime.today() + timedelta(weeks=1) authorization2.save() call_command("user_renewal_notice") self.assertEqual(len(mail.outbox), 2) # Make sure that the two emails went to the two expected # email addresses. # This looks a little complicated because mail.outbox[0].to is a # (one element) list, and we need to compare sets to ensure we've self.assertEqual( {mail.outbox[0].to[0], mail.outbox[1].to[0]}, {"editor@example.com", "editor2@example.com"}, ) class CoordinatorReminderEmailTest(TestCase): def setUp(self): super(CoordinatorReminderEmailTest, self).setUp() editor = EditorFactory() self.user = editor.user editor2 = EditorFactory() self.user2 = editor2.user self.coordinator = EditorFactory(user__email="editor@example.com").user coordinators = get_coordinators() coordinators.user_set.add(self.coordinator) self.partner = PartnerFactory(coordinator=self.coordinator) self.partner2 = PartnerFactory(coordinator=self.coordinator) def test_send_coordinator_reminder_email(self): ApplicationFactory( partner=self.partner, status=Application.PENDING, editor=self.user.editor ) self.coordinator.userprofile.pending_app_reminders = False self.coordinator.userprofile.approved_app_reminders = False self.coordinator.userprofile.save() call_command("send_coordinator_reminders") self.assertEqual(len(mail.outbox), 0) ApplicationFactory( partner=self.partner2, status=Application.QUESTION, editor=self.user2.editor ) call_command("send_coordinator_reminders") self.assertEqual(len(mail.outbox), 1) self.assertNotIn("One pending application", mail.outbox[0].body) self.assertIn("One under discussion application", mail.outbox[0].body) self.assertNotIn("One approved application", mail.outbox[0].body) ApplicationFactory( partner=self.partner, status=Application.APPROVED, editor=self.user2.editor ) ApplicationFactory( partner=self.partner2, status=Application.SENT, editor=self.user.editor ) mail.outbox = [] self.coordinator.userprofile.pending_app_reminders = True self.coordinator.userprofile.approved_app_reminders = True 
self.coordinator.userprofile.save() call_command("send_coordinator_reminders") self.assertEqual(len(mail.outbox), 1) self.assertIn("One pending application", mail.outbox[0].body) self.assertIn("One under discussion application", mail.outbox[0].body) self.assertIn("One approved application", mail.outbox[0].body) class ProxyBundleLaunchTest(TestCase): def setUp(self): super(ProxyBundleLaunchTest, self).setUp() editor = EditorFactory(user__email="editor@example.com") self.user = editor.user def test_proxy_bundle_launch_email_1(self): call_command("proxy_bundle_launch") self.assertEqual(len(mail.outbox), 1) self.assertEqual(mail.outbox[0].to, [self.user.email]) def test_proxy_bundle_launch_email_2(self): _ = EditorFactory(user__email="editor@example.com") call_command("proxy_bundle_launch") self.assertEqual(len(mail.outbox), 2) def test_proxy_bundle_launch_email_3(self): self.assertFalse(self.user.userprofile.proxy_notification_sent) call_command("proxy_bundle_launch") self.user.userprofile.refresh_from_db() self.assertTrue(self.user.userprofile.proxy_notification_sent) def test_proxy_bundle_launch_email_4(self): self.user.userprofile.proxy_notification_sent = True self.user.userprofile.save() call_command("proxy_bundle_launch") self.assertEqual(len(mail.outbox), 0)
true
true
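The ApplicationStatusTest cases in the record above patch the email tasks and assert on call counts instead of inspecting the outbox. The sketch below is a self-contained illustration of that patch-and-count pattern; send_notification and approve are hypothetical stand-ins, not TWLight code.

```python
# Self-contained illustration of the patch-and-count pattern used by
# ApplicationStatusTest above; send_notification and approve are hypothetical.
from unittest import TestCase, main
from unittest.mock import patch

def send_notification(obj):
    raise RuntimeError("the real task should be mocked in tests")

def approve(obj):
    obj["status"] = "APPROVED"
    send_notification(obj)  # looked up on the module, so patch() can replace it

class ApproveTest(TestCase):
    @patch(__name__ + ".send_notification")
    def test_approve_calls_task_once(self, mock_task):
        approve({"status": "PENDING"})
        self.assertEqual(mock_task.call_count, 1)

if __name__ == "__main__":
    main()
```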
f711e6ecb38eb136dcbc93753ad3f1ee797ef1ce
26,461
py
Python
app/main/routes_zseries_api.py
zuiko42/picobrew_pico
77d39fa4fe3f0361cca34bdf33eadf461fdd5ca7
[ "MIT" ]
null
null
null
app/main/routes_zseries_api.py
zuiko42/picobrew_pico
77d39fa4fe3f0361cca34bdf33eadf461fdd5ca7
[ "MIT" ]
null
null
null
app/main/routes_zseries_api.py
zuiko42/picobrew_pico
77d39fa4fe3f0361cca34bdf33eadf461fdd5ca7
[ "MIT" ]
1
2020-09-16T19:39:59.000Z
2020-09-16T19:39:59.000Z
import json import uuid import os from datetime import datetime from flask import current_app, request, Response, abort, send_from_directory from webargs import fields from webargs.flaskparser import use_args, FlaskParser from enum import Enum from random import seed, randint from .. import socketio from . import main from .config import MachineType, brew_active_sessions_path, zseries_firmware_path from .firmware import firmware_filename, firmware_upgrade_required, minimum_firmware from .model import PicoBrewSession from .routes_frontend import get_zseries_recipes, parse_brew_session, list_brew_session_files, load_brew_sessions from .session_parser import active_brew_sessions from .units import convert_temp arg_parser = FlaskParser() seed(1) events = {} class SessionType(int, Enum): RINSE = 0 CLEAN = 1 DRAIN = 2 RACK_BEER = 3 CIRCULATE = 4 SOUS_VIDE = 5 BEER = 6 STILL = 11 COFFEE = 12 CHILL = 13 MANUAL = 14 class ZProgramId(int, Enum): RINSE = 1 DRAIN = 2 RACK_BEER = 3 CIRRCULATE = 4 SOUS_VIDE = 6 CLEAN = 12 BEER_OR_COFFEE = 24 STILL = 26 CHILL = 27 # Get Firmware: /firmware/zseries/<version> # Response: RAW Bin File @main.route('/firmware/zseries/<file>', methods=['GET']) def process_zseries_firmware(file): current_app.logger.debug('DEBUG: ZSeries fetch firmware file={}'.format(file)) return send_from_directory(zseries_firmware_path(), file) # ZState: PUT /Vendors/input.cshtml?type=ZState&token={} # Response: Machine State Response (firmware, boilertype, session stats, reg token) zseries_query_args = { 'type': fields.Str(required=False), # API request type identifier 'token': fields.Str(required=True), # alpha-numeric unique identifier for Z 'id': fields.Str(required=False), # alpha-numeric unique identifier for Z session/recipe 'ctl': fields.Str(required=False) # recipe list request doesn't use `type` param } # ZFetchRecipeSummary: POST /Vendors/input.cshtml?ctl=RecipeRefListController&token={} # Response: Recipes Response # ZSessionUpdate: POST /Vendors/input.cshtml?type=ZSessionLog&token={} # Response: Echo Session Log Request with ID and Date # ZSession: POST /Vendors/input.cshtml?type=ZSession&token={}&id={} // id == session_id is only present on "complete session" request # Response: Machine State Response (firmware, boilertype, session stats, reg token) # StillRequest: POST /Vendors/input.cshtml?type=StillRequest&token={} # Response: Still Machine State Response (clean acknowlegement, current firmware, update firmware, user registration) @main.route('/Vendors/input.cshtml', methods=['POST']) @use_args(zseries_query_args, location='querystring') def process_zseries_post_request(args): type = request.args.get('type') controller = request.args.get('ctl') # current_app.logger.debug('DEBUG: ZSeries POST request args = {}; request = {}'.format(args, request.json)) if controller == 'RecipeRefListController': body = request.json ret = { "Kind": body['Kind'], "MaxCount": body['MaxCount'], "Offset": body['Offset'], "Recipes": get_zseries_recipe_metadata_list() } return Response(json.dumps(ret), mimetype='application/json') elif type == 'ZSessionLog': return update_session_log(request.args.get('token'), request.json) elif type == 'ZSession': return create_or_close_session(request) elif type == 'StillRequest': return register_picostill(request.json) else: abort(404) # ZState: PUT /Vendors/input.cshtml?type=ZState&token={} # Response: Machine State Response (firmware, boilertype, session stats, reg token) # ZSession: PUT /Vendors/input.cshtml?type=ZSession&token={}&id={} // id == session_id is 
only present on "complete session" request # Response: Machine State Response (firmware, boilertype, session stats, reg token) @main.route('/Vendors/input.cshtml', methods=['PUT']) @use_args(zseries_query_args, location='querystring') def process_zseries_put_request(args): type = request.args.get('type') if type == 'ZState' and request.json['CurrentFirmware']: return process_zstate(request) elif type == 'ZSession': return create_or_close_session(request) else: abort(404) # ZRecipeDetails: GET /Vendors/input.cshtml?type=Recipe&token={}&id={} // id == recipe_id # Response: Remaining Recipe Steps (based on last session update from machine) # ZResumeSession: GET /Vendors/input.cshtml?type=ResumableSession&token={}&id={} // id == session_id # Response: Remaining Recipe Steps (based on last session update from machine) @main.route('/Vendors/input.cshtml', methods=['GET']) @use_args(zseries_query_args, location='querystring') def process_zseries_get_request(args): type = request.args.get('type') identifier = request.args.get('id') # current_app.logger.debug('DEBUG: ZSeries GET request args = {};'.format(args)) if type == 'Recipe' and identifier is not None: return process_recipe_request(identifier) elif type == 'ResumableSession' and identifier is not None: return process_recover_session(request.args.get('token'), identifier) else: abort(404) # GET /Vendors/input.cshtml?type=Recipe&token={}&id={} // id == recipe_id def process_recipe_request(recipe_id): recipe = get_recipe_by_id(recipe_id) return recipe.serialize() # Request: /Vendors/input.cshtml?type=ZState&token=<token> # { "BoilerType": 1|2, "CurrentFirmware": "1.2.3" } # Response (example): # { # "Alias": "ZSeries", # "BoilerType": 1, # "CurrentFirmware": "0.0.116", # "IsRegistered": true, # "IsUpdated": true, # "ProgramUri": null, # "RegistrationToken": "-1", # "SessionStats": { # "DirtySessionsSinceClean": 1, # "LastSessionType": 5, # "ResumableSessionID": -1 # }, # "TokenExpired": false, # "UpdateAddress": "-1", # "UpdateToFirmware": null, # "ZBackendError": 0 # } def process_zstate(args): uid = request.args['token'] if uid not in active_brew_sessions: active_brew_sessions[uid] = PicoBrewSession(MachineType.ZSERIES) json = request.json update_required = firmware_upgrade_required(MachineType.ZSERIES, json['CurrentFirmware']) firmware_source = "https://picobrew.com/firmware/zseries/{}".format(firmware_filename(MachineType.ZSERIES, minimum_firmware(MachineType.ZSERIES))) returnVal = { "Alias": zseries_alias(uid), "BoilerType": json.get('BoilerType', None), # TODO sometimes machine loses boilertype, need to resync with known state "IsRegistered": True, # likely we don't care about registration with BYOS "IsUpdated": False if update_required else True, "ProgramUri": None, # what is this? 
"RegistrationToken": -1, "SessionStats": { "DirtySessionsSinceClean": dirty_sessions_since_clean(uid), "LastSessionType": last_session_type(uid), "ResumableSessionID": resumable_session_id(uid) }, "UpdateAddress": firmware_source if update_required else "-1", "UpdateToFirmware": None, "ZBackendError": 0 } return returnVal def dirty_sessions_since_clean(uid): brew_session_files = list_brew_session_files(uid) post_clean_sessions = [] clean_found = False for s in brew_session_files: session_type = SessionType(session_type_from_filename(s)) if (session_type == SessionType.CLEAN): clean_found = True if (not clean_found and session_type in [SessionType.BEER.value, SessionType.COFFEE.value, SessionType.SOUS_VIDE.value]): post_clean_sessions.append(s) return len(post_clean_sessions) def last_session_type(uid): brew_sessions = list_brew_session_files(uid) if len(brew_sessions) == 0: return SessionType.CLEAN else: return SessionType(session_type_from_filename(brew_sessions[0])) or SessionType.RINSE def resumable_session_id(uid): if uid not in active_brew_sessions: return -1 return active_brew_sessions[uid].id def zseries_alias(uid): if uid not in active_brew_sessions: return "ZSeries" return active_brew_sessions[uid].alias or "ZSeries" def create_or_close_session(args): session_id = request.args.get('id') if session_id: return close_session(request.args.get('token'), session_id, request.json) else: return create_session(request.args.get('token'), request.json) # Request: /Vendor/input.cshtml?type=StillRequest&token=<> # { "HasCleanedAck": false, "MachineType": 2, "MachineUID": "240ac41d9ae4", "PicoStillUID": "30aea46e6a40" } # Response: # { # "HasCleanedAck": false, # "MachineType": 2, # "MachineUID": "240ac41d9ae4", # "PicoStill": { # "CleanedAckDate": null, # "CreationDate": "2018-07-06T15:05:56.57", # "CurrentFirmware": "0.0.30", # "FactoryFlashVersion": null, # "ID": 638, # "LastCommunication": "2020-07-11T00:09:51.17", # "Notes": null, # "ProfileID": 28341, # "SerialNumber": "ST180706080552", # "UID": "30aea46e6a40", # "UpdateToFirmware": null # }, # "PicoStillUID": "30aea46e6a40" # } def register_picostill(args): return { "HasCleanedAck": args.get('HasCleanedAck'), "MachineType": args.get('MachineType'), "MachineUID": args.get('MachineUID'), "PicoStill": { "CleanedAckDate": datetime.utcnow().isoformat() if args.get('HasCleanedAck') else None, # date of last cleaning "CreationDate": "2018-07-06T15:05:56.57", # date of manufacturing? (never sent to server) "CurrentFirmware": "0.0.30", "FactoryFlashVersion": None, "ID": 1234, # auto incremented picostill number? (never sent to server) "LastCommunication": datetime.utcnow().isoformat(), "Notes": None, "ProfileID": 28341, # how to get the userId? 
"SerialNumber": "ST123456780123", # device serial number (never sent to server) "UID": args.get('PicoStillUID'), "UpdateToFirmware": None }, "PicoStillUID": args.get('PicoStillUID'), } # Request: /Vendors/input.cshtml?type=ZSession&token=<token>&id=<session_id> # (example - beer session): # { # "DurationSec": 11251, # "FirmwareVersion": "0.0.119", # "GroupSession": true, # "MaxTemp": 98.22592515, # "MaxTempAddedSec": 0, # "Name": "All Good Things", # "PressurePa": 101975.6172, # "ProgramParams": { # "Abv": -1, # not a customization feature on the Z # "Duration": 0, # "Ibu": -1, # "Intensity": 0, # "Temperature": 0, # "Water": 13.1 # }, # "RecipeID": "150xxx", # "SessionType": 6, # see options in SessionType # "ZProgramId": 24 # see options in ZProgram # } # Response (example - begin session): # { # "Active": false, # "CityLat": xx.xxxxxx, # "CityLng": -yyy.yyyyyy, # "ClosingDate": "2020-05-04T19:54:58.74", # "CreationDate": "2020-05-04T19:46:04.153", # "Deleted": false, # "DurationSec": 578, # "FirmwareVersion": "0.0.119", # "GUID": "<all upper case machine guid>", # "GroupSession": false, # "ID": <session-id>, # "LastLogID": 11407561, # "Lat": xx.xxxxxx, # "Lng": -yyy.yyyyyy, # "MaxTemp": 98.24455443, # "MaxTempAddedSec": 0, # "Name": "RINSE", # "Notes": null, # "Pressure": 0, # "ProfileID": zzzz, # "ProgramParams": { # "Abv": null, # not a customization feature on the Z # "Duration": 0.0, # "Ibu": null, # "Intensity": 0.0, # "Temperature": 0.0, # "Water": 0.0 # }, # "RecipeGuid": null, # "RecipeID": null, # "SecondsRemaining": 0, # "SessionLogs": [], # "SessionType": 0, # "StillUID": null, # "StillVer": null, # "ZProgramId": 1, # "ZSeriesID": www # } # Request (start still session) # { # "DurationSec": 14, # "FirmwareVersion": "0.0.119", # "GroupSession": true, # "MaxTemp": 97.71027899, # "MaxTempAddedSec": 0, # "Name": "PICOSTILL", # "PressurePa": 100490.6641, # "ProgramParams": { # "Abv": -1, # "Duration": 0, # "Ibu": -1, # "Intensity": 0, # "Temperature": 0, # "Water": 0 # }, # "RecipeID": -1, # "SessionType": 11, # "StillUID": "30aea46e6a40", # "StillVer": "0.0.30", # "ZProgramId": 26 # } # def create_session(token, body): uid = token # token uniquely identifies the machine still_uid = body.get('StillUID') # token uniquely identifying the still (req: for still sessions) recipe = get_recipe_by_name(body['Name']) # error out if recipe isn't known where session is beer type (ie 6) # due to rinse, rack beer, clean, coffee, sous vide, etc not having server known "recipes" if recipe is None and body['SessionType'] == SessionType.BEER: error = { 'error': 'recipe \'{}\' not found - unable to start session'.format(body['Name']) } return Response(json.dumps(error), status=404, mimetype='application/json') elif recipe: current_app.logger.debug('recipe for session: {}'.format(recipe.serialize())) if uid not in active_brew_sessions: active_brew_sessions[uid] = PicoBrewSession(MachineType.ZSERIES) session_guid = uuid.uuid4().hex[:32] session_id = increment_session_id(uid) active_session = active_brew_sessions[uid] active_session.session = session_guid active_session.id = session_id active_session.created_at = datetime.utcnow() active_session.name = recipe.name if recipe else body['Name'] active_session.type = body['SessionType'] # replace spaces and '#' with other character sequences encoded_recipe = active_brew_sessions[uid].name.replace(' ', '_').replace("#", "%23") filename = '{0}#{1}#{2}#{3}#{4}.json'.format( datetime.now().strftime('%Y%m%d_%H%M%S'), uid, active_session.session, 
encoded_recipe, active_session.type) active_session.filepath = brew_active_sessions_path().joinpath(filename) current_app.logger.debug('ZSeries - session file created {}'.format(active_session.filepath)) if session_id not in events: events[session_id] = [] active_session.file = open(active_session.filepath, 'w') active_session.file.write('[') active_session.file.flush() ret = { "Active": False, "ClosingDate": None, "CreationDate": active_session.created_at.isoformat(), "Deleted": False, "DurationSec": body['DurationSec'], "FirmwareVersion": body['FirmwareVersion'], "GroupSession": body['GroupSession'] or False, # Z2 or Z+PicoStill Session "GUID": active_session.session, "ID": active_session.id, "LastLogID": active_session.id, "MaxTemp": body['MaxTemp'], "MaxTempAddedSec": body['MaxTempAddedSec'], "Name": active_session.name, "Notes": None, "Pressure": body['PressurePa'], # related to an attached picostill "ProfileID": 28341, # how to get the userId "SecondsRemaining": 0, "SessionLogs": [], "SessionType": active_session.type, "StillUID": still_uid, "StillVer": body.get('StillVer'), "ZProgramId": body['ZProgramId'], "ZSeriesID": uid } if 'ProgramParams' in body: ret.update({ "ProgramParams": body['ProgramParams'] }) if 'RecipeID' in body: ret.update({ "RecipeGuid": None, "RecipeID": body['RecipeID'] }) return ret def update_session_log(token, body): session_id = body['ZSessionID'] active_session = active_brew_sessions[token] if active_session.id == -1: # update reference to corrupted active_session # upon file load with -1 (assume this is the right session to log with) active_session.id = session_id elif active_session.id != session_id: # session_id is hex string; session.id is number current_app.logger.warn('WARN: ZSeries reported session_id not active session') error = { 'error': 'matching server log identifier {} does not match requested session_id {}'.format(active_session.id, session_id) } return Response(json.dumps(error), status=400, mimetype='application/json') if active_session not in events: events[active_session] = [] if active_session.recovery != body['StepName']: events[active_session].append(body['StepName']) active_session.step = body['StepName'] log_time = datetime.utcnow() session_data = { 'time': ((log_time - datetime(1970, 1, 1)).total_seconds() * 1000), 'timeStr': log_time.isoformat(), 'timeLeft': body['SecondsRemaining'], 'step': body['StepName'], # temperatures from Z are in celsius vs prior device series 'target': convert_temp(body['TargetTemp'], 'F'), 'ambient': convert_temp(body['AmbientTemp'], 'F'), 'drain': convert_temp(body['DrainTemp'], 'F'), 'wort': convert_temp(body['WortTemp'], 'F'), 'therm': convert_temp(body['ThermoBlockTemp'], 'F'), 'recovery': body['StepName'], 'position': body['ValvePosition'] } event = None if active_session in events and len(events[active_session]) > 0: if len(events[active_session]) > 1: current_app.logger.debug('DEBUG: ZSeries events > 1 - size = {}'.format(len(events[active_session]))) event = events[active_session].pop(0) session_data.update({'event': event}) active_session.data.append(session_data) active_session.recovery = body['StepName'] active_session.remaining_time = body['SecondsRemaining'] # for Z graphs we have more data available: wort, hex/therm, target, drain, ambient graph_update = json.dumps({'time': session_data['time'], 'data': [session_data['target'], session_data['wort'], session_data['therm'], session_data['drain'], session_data['ambient']], 'session': active_session.name, 'step': active_session.step, 'event': 
event, }) socketio.emit('brew_session_update|{}'.format(token), graph_update) active_session.file.write('\n\t{},'.format(json.dumps(session_data))) active_session.file.flush() ret = { "ID": randint(0, 10000), "LogDate": session_data['timeStr'], } ret.update(body) return ret def close_session(uid, session_id, body): active_session = active_brew_sessions[uid] ret = { "Active": False, "ClosingDate": datetime.utcnow().isoformat(), "CreationDate": active_session.created_at.isoformat(), "Deleted": False, "DurationSec": body['DurationSec'], "FirmwareVersion": body['FirmwareVersion'], "GUID": active_session.session, "ID": active_session.id, "LastLogID": active_session.id, "MaxTemp": body['MaxTemp'], "MaxTempAddedSec": body['MaxTempAddedSec'], "Name": active_session.name, "Notes": None, "Pressure": 0, # is this related to an attached picostill? "ProfileID": 28341, # how to get the userId "SecondsRemaining": 0, "SessionLogs": [], "SessionType": body['SessionType'], "StillUID": body.get('StillUID'), "StillVer": body.get('StillVer'), "ZProgramId": body['ZProgramId'], "ZSeriesID": uid } if 'ProgramParams' in body: ret.update({ "ProgramParams": body['ProgramParams'] }) if 'RecipeID' in body: ret.update({ "RecipeGuid": None, "RecipeID": body['RecipeID'] }) active_session.file.seek(0, os.SEEK_END) active_session.file.seek(active_session.file.tell() - 1, os.SEEK_SET) # Remove trailing , from last data set active_session.file.write('\n]\n') active_session.cleanup() return ret # GET /Vendors/input.cshtml?type=ResumableSession&token=<token>&id=<session_id> HTTP/1.1 def process_recover_session(token, session_id): # TODO can one recover a RINSE / CLEAN or otherwise non-BEER or COFFEE session? uid = get_machine_by_session(session_id) if uid is None: error = { 'error': 'session_id {} not found to be active - unable to resume session'.format(session_id) } return Response(json.dumps(error), status=400, mimetype='application/json') active_session = active_brew_sessions[uid] if active_session.id == session_id: # session_id is hex string; session.id is number recipe = get_recipe_by_name(active_session.name) current_step = active_session.recovery remaining_time = active_session.remaining_time steps = [] step_found = False for s in recipe.steps: if (s.name == current_step): step_found = True if (step_found): steps.append(s) if (not step_found or len(steps) == 0): current_app.logger.warn("most recently logged step not found in linked recipe steps") error = { 'error': 'active brew session\'s most recently logged step not found in linked recipe' } return Response(json.dumps(error), status=400, mimetype='application/json') if (len(steps) >= 1): current_app.logger.debug("ZSeries step_count={}, active_step={}, time_remaining={}".format(len(steps), current_step, remaining_time)) # modify runtime of the first step (most recently active) steps[0].step_time = remaining_time recipe.steps = steps ret = { "Recipe": json.loads(recipe.serialize()), "SessionID": active_session.id, "SessionType": active_session.type, "ZPicoRecipe": None # does this identity the Z pak recipe? 
} return ret else: error = { 'error': 'matching server log identifier {} does not match requested session_id {}'.format(active_session.id, session_id) } return Response(json.dumps(error), status=400, mimetype='application/json') # -------- Utility -------- def get_zseries_recipe_list(): recipe_list = [] for r in get_zseries_recipes(): recipe_list.append(r) return recipe_list def get_zseries_recipe_metadata_list(): recipe_metadata = [] for r in get_zseries_recipes(): meta = { "ID": r.id, "Name": r.name, "Kind": r.kind_code, "Uri": None, "Abv": -1, "Ibu": -1 } recipe_metadata.append(meta) return recipe_metadata def get_recipe_by_id(recipe_id): recipe = next((r for r in get_zseries_recipes() if str(r.id) == str(recipe_id)), None) return recipe def get_recipe_by_name(recipe_name): recipe = next((r for r in get_zseries_recipes() if r.name == recipe_name), None) return recipe def increment_session_id(uid): return len(list_brew_session_files(uid)) + (1 if active_brew_sessions[uid].session != '' else 1) def get_machine_by_session(session_id): return next((uid for uid in active_brew_sessions if active_brew_sessions[uid].session == session_id or active_brew_sessions[uid].id == int(session_id) or active_brew_sessions[uid].id == -1), None) def get_archived_sessions_by_machine(uid): brew_sessions = load_brew_sessions(uid=uid) return brew_sessions def session_type_from_filename(filename): info = filename.stem.split('#') session_type = SessionType.BEER try: if len(info) > 4: session_type = int(info[4]) except Exception as error: current_app.logger.warn("error occurred reading {}".format(filename),) return session_type
38.79912
200
0.586713
import json import uuid import os from datetime import datetime from flask import current_app, request, Response, abort, send_from_directory from webargs import fields from webargs.flaskparser import use_args, FlaskParser from enum import Enum from random import seed, randint from .. import socketio from . import main from .config import MachineType, brew_active_sessions_path, zseries_firmware_path from .firmware import firmware_filename, firmware_upgrade_required, minimum_firmware from .model import PicoBrewSession from .routes_frontend import get_zseries_recipes, parse_brew_session, list_brew_session_files, load_brew_sessions from .session_parser import active_brew_sessions from .units import convert_temp arg_parser = FlaskParser() seed(1) events = {} class SessionType(int, Enum): RINSE = 0 CLEAN = 1 DRAIN = 2 RACK_BEER = 3 CIRCULATE = 4 SOUS_VIDE = 5 BEER = 6 STILL = 11 COFFEE = 12 CHILL = 13 MANUAL = 14 class ZProgramId(int, Enum): RINSE = 1 DRAIN = 2 RACK_BEER = 3 CIRRCULATE = 4 SOUS_VIDE = 6 CLEAN = 12 BEER_OR_COFFEE = 24 STILL = 26 CHILL = 27 @main.route('/firmware/zseries/<file>', methods=['GET']) def process_zseries_firmware(file): current_app.logger.debug('DEBUG: ZSeries fetch firmware file={}'.format(file)) return send_from_directory(zseries_firmware_path(), file) zseries_query_args = { 'type': fields.Str(required=False), 'token': fields.Str(required=True), 'id': fields.Str(required=False), 'ctl': fields.Str(required=False) } # ZFetchRecipeSummary: POST /Vendors/input.cshtml?ctl=RecipeRefListController&token={} # Response: Recipes Response # ZSessionUpdate: POST /Vendors/input.cshtml?type=ZSessionLog&token={} # Response: Echo Session Log Request with ID and Date # ZSession: POST /Vendors/input.cshtml?type=ZSession&token={}&id={} // id == session_id is only present on "complete session" request # Response: Machine State Response (firmware, boilertype, session stats, reg token) # StillRequest: POST /Vendors/input.cshtml?type=StillRequest&token={} # Response: Still Machine State Response (clean acknowlegement, current firmware, update firmware, user registration) @main.route('/Vendors/input.cshtml', methods=['POST']) @use_args(zseries_query_args, location='querystring') def process_zseries_post_request(args): type = request.args.get('type') controller = request.args.get('ctl') # current_app.logger.debug('DEBUG: ZSeries POST request args = {}; request = {}'.format(args, request.json)) if controller == 'RecipeRefListController': body = request.json ret = { "Kind": body['Kind'], "MaxCount": body['MaxCount'], "Offset": body['Offset'], "Recipes": get_zseries_recipe_metadata_list() } return Response(json.dumps(ret), mimetype='application/json') elif type == 'ZSessionLog': return update_session_log(request.args.get('token'), request.json) elif type == 'ZSession': return create_or_close_session(request) elif type == 'StillRequest': return register_picostill(request.json) else: abort(404) # ZState: PUT /Vendors/input.cshtml?type=ZState&token={} # Response: Machine State Response (firmware, boilertype, session stats, reg token) # ZSession: PUT /Vendors/input.cshtml?type=ZSession&token={}&id={} // id == session_id is only present on "complete session" request # Response: Machine State Response (firmware, boilertype, session stats, reg token) @main.route('/Vendors/input.cshtml', methods=['PUT']) @use_args(zseries_query_args, location='querystring') def process_zseries_put_request(args): type = request.args.get('type') if type == 'ZState' and request.json['CurrentFirmware']: return 
process_zstate(request) elif type == 'ZSession': return create_or_close_session(request) else: abort(404) # ZRecipeDetails: GET /Vendors/input.cshtml?type=Recipe&token={}&id={} // id == recipe_id # Response: Remaining Recipe Steps (based on last session update from machine) # ZResumeSession: GET /Vendors/input.cshtml?type=ResumableSession&token={}&id={} // id == session_id # Response: Remaining Recipe Steps (based on last session update from machine) @main.route('/Vendors/input.cshtml', methods=['GET']) @use_args(zseries_query_args, location='querystring') def process_zseries_get_request(args): type = request.args.get('type') identifier = request.args.get('id') # current_app.logger.debug('DEBUG: ZSeries GET request args = {};'.format(args)) if type == 'Recipe' and identifier is not None: return process_recipe_request(identifier) elif type == 'ResumableSession' and identifier is not None: return process_recover_session(request.args.get('token'), identifier) else: abort(404) # GET /Vendors/input.cshtml?type=Recipe&token={}&id={} // id == recipe_id def process_recipe_request(recipe_id): recipe = get_recipe_by_id(recipe_id) return recipe.serialize() # Request: /Vendors/input.cshtml?type=ZState&token=<token> # { "BoilerType": 1|2, "CurrentFirmware": "1.2.3" } # Response (example): # { # "Alias": "ZSeries", # "BoilerType": 1, # "CurrentFirmware": "0.0.116", # "IsRegistered": true, # "IsUpdated": true, # "ProgramUri": null, # "RegistrationToken": "-1", # "SessionStats": { # "DirtySessionsSinceClean": 1, # "LastSessionType": 5, # "ResumableSessionID": -1 # }, # "TokenExpired": false, # "UpdateAddress": "-1", # "UpdateToFirmware": null, # "ZBackendError": 0 # } def process_zstate(args): uid = request.args['token'] if uid not in active_brew_sessions: active_brew_sessions[uid] = PicoBrewSession(MachineType.ZSERIES) json = request.json update_required = firmware_upgrade_required(MachineType.ZSERIES, json['CurrentFirmware']) firmware_source = "https://picobrew.com/firmware/zseries/{}".format(firmware_filename(MachineType.ZSERIES, minimum_firmware(MachineType.ZSERIES))) returnVal = { "Alias": zseries_alias(uid), "BoilerType": json.get('BoilerType', None), # TODO sometimes machine loses boilertype, need to resync with known state "IsRegistered": True, # likely we don't care about registration with BYOS "IsUpdated": False if update_required else True, "ProgramUri": None, "RegistrationToken": -1, "SessionStats": { "DirtySessionsSinceClean": dirty_sessions_since_clean(uid), "LastSessionType": last_session_type(uid), "ResumableSessionID": resumable_session_id(uid) }, "UpdateAddress": firmware_source if update_required else "-1", "UpdateToFirmware": None, "ZBackendError": 0 } return returnVal def dirty_sessions_since_clean(uid): brew_session_files = list_brew_session_files(uid) post_clean_sessions = [] clean_found = False for s in brew_session_files: session_type = SessionType(session_type_from_filename(s)) if (session_type == SessionType.CLEAN): clean_found = True if (not clean_found and session_type in [SessionType.BEER.value, SessionType.COFFEE.value, SessionType.SOUS_VIDE.value]): post_clean_sessions.append(s) return len(post_clean_sessions) def last_session_type(uid): brew_sessions = list_brew_session_files(uid) if len(brew_sessions) == 0: return SessionType.CLEAN else: return SessionType(session_type_from_filename(brew_sessions[0])) or SessionType.RINSE def resumable_session_id(uid): if uid not in active_brew_sessions: return -1 return active_brew_sessions[uid].id def zseries_alias(uid): if uid not in 
active_brew_sessions: return "ZSeries" return active_brew_sessions[uid].alias or "ZSeries" def create_or_close_session(args): session_id = request.args.get('id') if session_id: return close_session(request.args.get('token'), session_id, request.json) else: return create_session(request.args.get('token'), request.json) def register_picostill(args): return { "HasCleanedAck": args.get('HasCleanedAck'), "MachineType": args.get('MachineType'), "MachineUID": args.get('MachineUID'), "PicoStill": { "CleanedAckDate": datetime.utcnow().isoformat() if args.get('HasCleanedAck') else None, "CreationDate": "2018-07-06T15:05:56.57", "CurrentFirmware": "0.0.30", "FactoryFlashVersion": None, "ID": 1234, "LastCommunication": datetime.utcnow().isoformat(), "Notes": None, "ProfileID": 28341, "SerialNumber": "ST123456780123", "UID": args.get('PicoStillUID'), "UpdateToFirmware": None }, "PicoStillUID": args.get('PicoStillUID'), } still_uid = body.get('StillUID') recipe = get_recipe_by_name(body['Name']) # due to rinse, rack beer, clean, coffee, sous vide, etc not having server known "recipes" if recipe is None and body['SessionType'] == SessionType.BEER: error = { 'error': 'recipe \'{}\' not found - unable to start session'.format(body['Name']) } return Response(json.dumps(error), status=404, mimetype='application/json') elif recipe: current_app.logger.debug('recipe for session: {}'.format(recipe.serialize())) if uid not in active_brew_sessions: active_brew_sessions[uid] = PicoBrewSession(MachineType.ZSERIES) session_guid = uuid.uuid4().hex[:32] session_id = increment_session_id(uid) active_session = active_brew_sessions[uid] active_session.session = session_guid active_session.id = session_id active_session.created_at = datetime.utcnow() active_session.name = recipe.name if recipe else body['Name'] active_session.type = body['SessionType'] # replace spaces and ' encoded_recipe = active_brew_sessions[uid].name.replace(' ', '_').replace("#", "%23") filename = '{0} active_session.session, encoded_recipe, active_session.type) active_session.filepath = brew_active_sessions_path().joinpath(filename) current_app.logger.debug('ZSeries - session file created {}'.format(active_session.filepath)) if session_id not in events: events[session_id] = [] active_session.file = open(active_session.filepath, 'w') active_session.file.write('[') active_session.file.flush() ret = { "Active": False, "ClosingDate": None, "CreationDate": active_session.created_at.isoformat(), "Deleted": False, "DurationSec": body['DurationSec'], "FirmwareVersion": body['FirmwareVersion'], "GroupSession": body['GroupSession'] or False, # Z2 or Z+PicoStill Session "GUID": active_session.session, "ID": active_session.id, "LastLogID": active_session.id, "MaxTemp": body['MaxTemp'], "MaxTempAddedSec": body['MaxTempAddedSec'], "Name": active_session.name, "Notes": None, "Pressure": body['PressurePa'], # related to an attached picostill "ProfileID": 28341, # how to get the userId "SecondsRemaining": 0, "SessionLogs": [], "SessionType": active_session.type, "StillUID": still_uid, "StillVer": body.get('StillVer'), "ZProgramId": body['ZProgramId'], "ZSeriesID": uid } if 'ProgramParams' in body: ret.update({ "ProgramParams": body['ProgramParams'] }) if 'RecipeID' in body: ret.update({ "RecipeGuid": None, "RecipeID": body['RecipeID'] }) return ret def update_session_log(token, body): session_id = body['ZSessionID'] active_session = active_brew_sessions[token] if active_session.id == -1: # update reference to corrupted active_session # upon file load with -1 (assume 
this is the right session to log with) active_session.id = session_id elif active_session.id != session_id: # session_id is hex string; session.id is number current_app.logger.warn('WARN: ZSeries reported session_id not active session') error = { 'error': 'matching server log identifier {} does not match requested session_id {}'.format(active_session.id, session_id) } return Response(json.dumps(error), status=400, mimetype='application/json') if active_session not in events: events[active_session] = [] if active_session.recovery != body['StepName']: events[active_session].append(body['StepName']) active_session.step = body['StepName'] log_time = datetime.utcnow() session_data = { 'time': ((log_time - datetime(1970, 1, 1)).total_seconds() * 1000), 'timeStr': log_time.isoformat(), 'timeLeft': body['SecondsRemaining'], 'step': body['StepName'], # temperatures from Z are in celsius vs prior device series 'target': convert_temp(body['TargetTemp'], 'F'), 'ambient': convert_temp(body['AmbientTemp'], 'F'), 'drain': convert_temp(body['DrainTemp'], 'F'), 'wort': convert_temp(body['WortTemp'], 'F'), 'therm': convert_temp(body['ThermoBlockTemp'], 'F'), 'recovery': body['StepName'], 'position': body['ValvePosition'] } event = None if active_session in events and len(events[active_session]) > 0: if len(events[active_session]) > 1: current_app.logger.debug('DEBUG: ZSeries events > 1 - size = {}'.format(len(events[active_session]))) event = events[active_session].pop(0) session_data.update({'event': event}) active_session.data.append(session_data) active_session.recovery = body['StepName'] active_session.remaining_time = body['SecondsRemaining'] # for Z graphs we have more data available: wort, hex/therm, target, drain, ambient graph_update = json.dumps({'time': session_data['time'], 'data': [session_data['target'], session_data['wort'], session_data['therm'], session_data['drain'], session_data['ambient']], 'session': active_session.name, 'step': active_session.step, 'event': event, }) socketio.emit('brew_session_update|{}'.format(token), graph_update) active_session.file.write('\n\t{},'.format(json.dumps(session_data))) active_session.file.flush() ret = { "ID": randint(0, 10000), "LogDate": session_data['timeStr'], } ret.update(body) return ret def close_session(uid, session_id, body): active_session = active_brew_sessions[uid] ret = { "Active": False, "ClosingDate": datetime.utcnow().isoformat(), "CreationDate": active_session.created_at.isoformat(), "Deleted": False, "DurationSec": body['DurationSec'], "FirmwareVersion": body['FirmwareVersion'], "GUID": active_session.session, "ID": active_session.id, "LastLogID": active_session.id, "MaxTemp": body['MaxTemp'], "MaxTempAddedSec": body['MaxTempAddedSec'], "Name": active_session.name, "Notes": None, "Pressure": 0, # is this related to an attached picostill? 
"ProfileID": 28341, # how to get the userId "SecondsRemaining": 0, "SessionLogs": [], "SessionType": body['SessionType'], "StillUID": body.get('StillUID'), "StillVer": body.get('StillVer'), "ZProgramId": body['ZProgramId'], "ZSeriesID": uid } if 'ProgramParams' in body: ret.update({ "ProgramParams": body['ProgramParams'] }) if 'RecipeID' in body: ret.update({ "RecipeGuid": None, "RecipeID": body['RecipeID'] }) active_session.file.seek(0, os.SEEK_END) active_session.file.seek(active_session.file.tell() - 1, os.SEEK_SET) # Remove trailing , from last data set active_session.file.write('\n]\n') active_session.cleanup() return ret # GET /Vendors/input.cshtml?type=ResumableSession&token=<token>&id=<session_id> HTTP/1.1 def process_recover_session(token, session_id): # TODO can one recover a RINSE / CLEAN or otherwise non-BEER or COFFEE session? uid = get_machine_by_session(session_id) if uid is None: error = { 'error': 'session_id {} not found to be active - unable to resume session'.format(session_id) } return Response(json.dumps(error), status=400, mimetype='application/json') active_session = active_brew_sessions[uid] if active_session.id == session_id: # session_id is hex string; session.id is number recipe = get_recipe_by_name(active_session.name) current_step = active_session.recovery remaining_time = active_session.remaining_time steps = [] step_found = False for s in recipe.steps: if (s.name == current_step): step_found = True if (step_found): steps.append(s) if (not step_found or len(steps) == 0): current_app.logger.warn("most recently logged step not found in linked recipe steps") error = { 'error': 'active brew session\'s most recently logged step not found in linked recipe' } return Response(json.dumps(error), status=400, mimetype='application/json') if (len(steps) >= 1): current_app.logger.debug("ZSeries step_count={}, active_step={}, time_remaining={}".format(len(steps), current_step, remaining_time)) steps[0].step_time = remaining_time recipe.steps = steps ret = { "Recipe": json.loads(recipe.serialize()), "SessionID": active_session.id, "SessionType": active_session.type, "ZPicoRecipe": None } return ret else: error = { 'error': 'matching server log identifier {} does not match requested session_id {}'.format(active_session.id, session_id) } return Response(json.dumps(error), status=400, mimetype='application/json') def get_zseries_recipe_list(): recipe_list = [] for r in get_zseries_recipes(): recipe_list.append(r) return recipe_list def get_zseries_recipe_metadata_list(): recipe_metadata = [] for r in get_zseries_recipes(): meta = { "ID": r.id, "Name": r.name, "Kind": r.kind_code, "Uri": None, "Abv": -1, "Ibu": -1 } recipe_metadata.append(meta) return recipe_metadata def get_recipe_by_id(recipe_id): recipe = next((r for r in get_zseries_recipes() if str(r.id) == str(recipe_id)), None) return recipe def get_recipe_by_name(recipe_name): recipe = next((r for r in get_zseries_recipes() if r.name == recipe_name), None) return recipe def increment_session_id(uid): return len(list_brew_session_files(uid)) + (1 if active_brew_sessions[uid].session != '' else 1) def get_machine_by_session(session_id): return next((uid for uid in active_brew_sessions if active_brew_sessions[uid].session == session_id or active_brew_sessions[uid].id == int(session_id) or active_brew_sessions[uid].id == -1), None) def get_archived_sessions_by_machine(uid): brew_sessions = load_brew_sessions(uid=uid) return brew_sessions def session_type_from_filename(filename): info = filename.stem.split('#') session_type 
= SessionType.BEER try: if len(info) > 4: session_type = int(info[4]) except Exception as error: current_app.logger.warn("error occurred reading {}".format(filename),) return session_type
true
true
f711e80286662591c48c44238a56a9bddebb300e
520
py
Python
pygeppetto/model/utils/bytesuri.py
openworm/pygeppetto
949ab700bed7d1f5e5481898f717a15bd9d00002
[ "MIT" ]
10
2017-04-04T13:24:39.000Z
2021-11-16T11:50:13.000Z
pygeppetto/model/utils/bytesuri.py
openworm/org.geppetto.python
949ab700bed7d1f5e5481898f717a15bd9d00002
[ "MIT" ]
237
2021-03-17T09:40:17.000Z
2022-03-30T11:09:44.000Z
pygeppetto/model/utils/bytesuri.py
openworm/org.geppetto.python
949ab700bed7d1f5e5481898f717a15bd9d00002
[ "MIT" ]
5
2017-03-28T16:15:07.000Z
2020-11-25T03:07:07.000Z
try:
    from BytesIO import BytesIO
except ImportError:
    from io import BytesIO

from pyecore.resources import URI


class BytesURI(URI):

    def __init__(self, uri, text=None):
        super(BytesURI, self).__init__(uri)
        if text is not None:
            self.__stream = BytesIO(text)

    def getvalue(self):
        return self.__stream.getvalue()

    def create_instream(self):
        return self.__stream

    def create_outstream(self):
        self.__stream = BytesIO()
        return self.__stream
20.8
43
0.655769
try:
    from BytesIO import BytesIO
except ImportError:
    from io import BytesIO

from pyecore.resources import URI


class BytesURI(URI):

    def __init__(self, uri, text=None):
        super(BytesURI, self).__init__(uri)
        if text is not None:
            self.__stream = BytesIO(text)

    def getvalue(self):
        return self.__stream.getvalue()

    def create_instream(self):
        return self.__stream

    def create_outstream(self):
        self.__stream = BytesIO()
        return self.__stream
true
true
f711e98997d8eda653460add38d93b304d01864a
12,564
py
Python
src/ddog.py
srubenacker/DeepDog
ce6613e01c04a14f62a2d6f6cd1c60f97efa790a
[ "Unlicense" ]
null
null
null
src/ddog.py
srubenacker/DeepDog
ce6613e01c04a14f62a2d6f6cd1c60f97efa790a
[ "Unlicense" ]
1
2018-10-02T18:33:42.000Z
2018-10-04T01:15:29.000Z
src/ddog.py
srubenacker/DeepDog
ce6613e01c04a14f62a2d6f6cd1c60f97efa790a
[ "Unlicense" ]
null
null
null
import util import json import numpy as np import random import tensorflow as tf class DeepDog: """ The DeepDog class loads the training and test set images from disk into RAM, and provides functions to get the test set and mini batches of the training set. """ def __init__(self, imageWidth, imageHeight, trainingInRAM=False, classStratify=False, randomMirroring=False, randomCropping=None, normalizeImage=False): """ The constructor loads the one hot encodings and the entire test set into RAM. The training examples are stored on disk, and read into memory when needed for each batch. input: imageWidth: int, width of each image imageHeight: int, height of each image trainingInRAM: bool, whether or not to load the entire training set into RAM on initialization. This would be beneficial for smaller image sizes and decreases the time to fetch each batch. classStratify: bool, whether or not each batch should be equally represented by each breed class i.e. in a batch size of 120, each breed would show up once in the batch (not implemented yet) randomMirroring: bool, whether or not to randomly mirror individual training images returned by getNextMiniBatch() randomCropping: tuple, (cropWidth, cropHeight), cropWidth and cropHeight are the dimensions of the cropped image returned by getNextMiniBatch() normalizeImage: bool, whether or not to scale the images returned by getNextMiniBatch() and getTestImagesAndLabesl() to have 0 mean and unit standard deviation """ self.MIRROR_PROBABILITY = 0.5 self.randomMirroring = randomMirroring self.randomCropping = randomCropping if self.randomCropping is not None: self.cropWidth = self.randomCropping[0] self.cropHeight = self.randomCropping[1] self.normalizeImage = normalizeImage self.image_width = imageWidth self.image_height = imageHeight self.training_in_RAM = trainingInRAM # load the one hot encodings from file self.one_hot_encodings = {} self.loadOneHotEncodings() self.numberBreeds = float(len(self.one_hot_encodings.keys())) # load the test set from file self.test_set_images, self.test_set_labels = [], [] self.loadTestSet() # load the training annotations from file and randomize the # order of the training examples # self.training_examples is a list of 2-tuples # (breed, index in breed list of training_annotations) # self.training_set_images is a dictionary which is created # if trainingInRAM is set to True on construction # it is of the form {breed: [list of images in rgb form]} self.training_annotations = {} self.training_set_images = {} self.training_examples = [] self.training_set_size = 0 self.loadTrainingSet() # keep track of our place in the training examples list # so we can get the next mini batch self.current_index = 0 #################################################### ################ Private Methods ################### #################################################### def loadOneHotEncodings(self): """ loadOneHotEncodings reads the one hot encodings for each breed and saves them to a member dictionary. input: none output: (doesn't return, saves to member variable) self.one_hot_encodings: dictionary, {'breed': [1, 0, 0]} """ with open('one_hot_encodings.json', 'r') as data_file: self.one_hot_encodings = json.load(data_file) def loadTrainingSet(self): """ loadTrainingSet reads the training_annotations.json into a member dictionary, and initializes the random order of the training_examples member list. 
input: none output: (doesn't return, saves to member variables) self.training_annotations: dictionary, {'breed': [list of annotations]} self.training_examples: list of 2-tuples [(breed, index into list of self.training_annotations), ...] """ print("Initializing training set order...\n") # load the training_annotations with open('training_annotations.json', 'r') as data_file: self.training_annotations = json.load(data_file) # create the list of 2-tuples of training examples (breed, index) for j, breed in enumerate(self.training_annotations.keys()): if self.training_in_RAM: print(str(round(j / self.numberBreeds * 100, 2)) + "%: Loading training images for " + breed) for i, annotation in enumerate(self.training_annotations[breed]): self.training_examples.append((breed, i)) # if training_in_RAM is True, load the image from disk if self.training_in_RAM: currentImage = util.getResizedImageData(annotation, self.image_width, self.image_height) if breed not in self.training_set_images: self.training_set_images[breed] = [currentImage] else: self.training_set_images[breed].append(currentImage) self.training_set_size = len(self.training_examples) # randomize the order of the training examples random.shuffle(self.training_examples) print("Finished initializing training set order...\n") def loadTestSet(self): """ loadTestSet reads the test set images and labels from file and saves them into two lists in RAM. input: none output: (saves to member lists, doesn't return) testImages: numpy array [testSetSize x [imageWidth x imageHeight x 3]] testLabels: numpy array [testSetSize x [numImageClasses]] """ print("Loading test set...\n") testing_breeds = {} with open('testing_annotations.json', 'r') as data_file: testing_breeds = json.load(data_file) for i, breed in enumerate(testing_breeds.keys()): print(str(round(i / self.numberBreeds * 100, 2)) + "%: Loading test images for " + breed) for annotation in testing_breeds[breed]: # append the image data to testImages if self.randomCropping is None: self.test_set_images.append(util.getResizedImageData(annotation, self.image_width, self.image_height)) else: self.test_set_images.append(util.getResizedImageData(annotation, self.cropWidth, self.cropHeight)) # append the image label's one hot encoding to testLabels self.test_set_labels.append(self.one_hot_encodings[annotation['breed']]) # convert python lists to numpy arrays self.test_set_images = np.array(self.test_set_images) if self.normalizeImage: print("Normalizing test images...") self.test_set_images = tf.map_fn(tf.image.per_image_standardization, self.test_set_images) self.test_set_labels = np.array(self.test_set_labels) print("Finished loading test set.....\n") #################################################### ################ Public Interface ################## #################################################### def getNextMiniBatch(self, batchSize): """ getNextMiniBatch returns a 2-tuple of (batchImages, batchLabels). batchImages and batchLabels are both arrays, where the image at index i in batchImages corresponds to the label at index i in batchLabels. The batch images and labels are from the training set. 
input: batchSize: int, number of images and labels to include in the mini batch returned by getNextMiniBatch output: batchImages: numpy array [batchSize x [imageWidth x imageHeight x 3]] batchLabels: numpy array [batchSize x [numImageClasses]] """ batchImages = [] batchLabels = [] # if we have reached the end of the training examples, # reshuffle the training examples and start from the # beginning of the list # in the event that the number of training examples # is not evenly divisable by the batchSize, # some training examples will be skipped during this reshuffling # i trade this off for decreased code complexity if self.current_index + batchSize > self.training_set_size: self.current_index = 0 random.shuffle(self.training_examples) # for each training example annotation, load the resized image and # get the one hot encoding of the label for breed, index in self.training_examples[self.current_index:self.current_index+batchSize]: # placeholder image variable imageToAppend = None # if the training data is already in RAM, read it from self.training_set_images # otherwise, fetch the image from disk if self.training_in_RAM: imageToAppend = self.training_set_images[breed][index] else: annotation = self.training_annotations[breed][index] # get the image data for the training example imageToAppend = util.getResizedImageData(annotation, self.image_width, self.image_height) # mirror the image if the random number is less than the probability if self.randomMirroring and random.random() < self.MIRROR_PROBABILITY: imageToAppend = np.fliplr(imageToAppend) # randomly crop the image if self.randomCropping is not None: widthDiff = self.image_width - self.cropWidth heightDiff = self.image_height - self.cropHeight widthOffset = int(random.random() * widthDiff) heightOffset = int(random.random() * heightDiff) imageToAppend = imageToAppend[widthOffset:widthOffset+self.cropWidth, heightOffset:heightOffset+self.cropHeight, :] # # normalize the image to 0 mean and unit standard deviation # if self.normalizeImage: # imageToAppend = tf.image.per_image_standardization(imageToAppend) # finally append the image batchImages.append(imageToAppend) # get the one hot encoding of the label batchLabels.append(self.one_hot_encodings[breed]) self.current_index += batchSize if self.normalizeImage: batchImages = tf.map_fn(tf.image.per_image_standardization, batchImages) return batchImages, np.array(batchLabels) return np.array(batchImages), np.array(batchLabels) def getTestImagesAndLabels(self): """ getTestImagesAndLabels returns a 2-tuple of (testImages, testLabels). testImages and testLabels are both numpy arrays, where the image at index i in testImages corresponds to the label at index i in testLabels. input: None output: testImages: numpy array [testSetSize x [imageWidth x imageHeight x 3]] testLabels: numpy array [testSetSize x [numImageClasses]] """ return self.test_set_images, self.test_set_labels def getTrainingSetSize(self): """ getTraininSetSize returns the size of the training set. This function is useful when computing the progress inside an epoch. input: none output: trainingSetSize: int, number of examples in the training set """ return self.training_set_size def main(): dd = DeepDog(64, 64) im, la = dd.getNextMiniBatch(100) print(im.shape, la.shape) print(im) print(la) if __name__ == "__main__": main()
40.012739
109
0.610156
import util import json import numpy as np import random import tensorflow as tf class DeepDog: def __init__(self, imageWidth, imageHeight, trainingInRAM=False, classStratify=False, randomMirroring=False, randomCropping=None, normalizeImage=False): self.MIRROR_PROBABILITY = 0.5 self.randomMirroring = randomMirroring self.randomCropping = randomCropping if self.randomCropping is not None: self.cropWidth = self.randomCropping[0] self.cropHeight = self.randomCropping[1] self.normalizeImage = normalizeImage self.image_width = imageWidth self.image_height = imageHeight self.training_in_RAM = trainingInRAM self.one_hot_encodings = {} self.loadOneHotEncodings() self.numberBreeds = float(len(self.one_hot_encodings.keys())) self.test_set_images, self.test_set_labels = [], [] self.loadTestSet() self.training_annotations = {} self.training_set_images = {} self.training_examples = [] self.training_set_size = 0 self.loadTrainingSet() self.current_index = 0 # for each training example annotation, load the resized image and # get the one hot encoding of the label for breed, index in self.training_examples[self.current_index:self.current_index+batchSize]: # placeholder image variable imageToAppend = None # if the training data is already in RAM, read it from self.training_set_images # otherwise, fetch the image from disk if self.training_in_RAM: imageToAppend = self.training_set_images[breed][index] else: annotation = self.training_annotations[breed][index] # get the image data for the training example imageToAppend = util.getResizedImageData(annotation, self.image_width, self.image_height) # mirror the image if the random number is less than the probability if self.randomMirroring and random.random() < self.MIRROR_PROBABILITY: imageToAppend = np.fliplr(imageToAppend) # randomly crop the image if self.randomCropping is not None: widthDiff = self.image_width - self.cropWidth heightDiff = self.image_height - self.cropHeight widthOffset = int(random.random() * widthDiff) heightOffset = int(random.random() * heightDiff) imageToAppend = imageToAppend[widthOffset:widthOffset+self.cropWidth, heightOffset:heightOffset+self.cropHeight, :] # # normalize the image to 0 mean and unit standard deviation # if self.normalizeImage: # imageToAppend = tf.image.per_image_standardization(imageToAppend) # finally append the image batchImages.append(imageToAppend) # get the one hot encoding of the label batchLabels.append(self.one_hot_encodings[breed]) self.current_index += batchSize if self.normalizeImage: batchImages = tf.map_fn(tf.image.per_image_standardization, batchImages) return batchImages, np.array(batchLabels) return np.array(batchImages), np.array(batchLabels) def getTestImagesAndLabels(self): return self.test_set_images, self.test_set_labels def getTrainingSetSize(self): return self.training_set_size def main(): dd = DeepDog(64, 64) im, la = dd.getNextMiniBatch(100) print(im.shape, la.shape) print(im) print(la) if __name__ == "__main__": main()
true
true
f711ebb8335bcacaf06e71865a713f96fcade64c
193
py
Python
profiles_api/serializers.py
guicfernandes/profiles-rest-api
77229aa608eea654fe61479f49ba5e14aba41237
[ "MIT" ]
null
null
null
profiles_api/serializers.py
guicfernandes/profiles-rest-api
77229aa608eea654fe61479f49ba5e14aba41237
[ "MIT" ]
null
null
null
profiles_api/serializers.py
guicfernandes/profiles-rest-api
77229aa608eea654fe61479f49ba5e14aba41237
[ "MIT" ]
null
null
null
from rest_framework import serializers


class HelloSerializer(serializers.Serializer):
    """Serializes a name field for testing our APIView"""
    name = serializers.CharField(max_length=10)
32.166667
57
0.787565
from rest_framework import serializers


class HelloSerializer(serializers.Serializer):
    name = serializers.CharField(max_length=10)
true
true
f711ebea4ca8f3cc0b5fab941ae10170f6556214
5,814
py
Python
haco/DIDrive_core/demo/cilrs/cilrs_collect_data.py
decisionforce/HACO
ebd1dc49598e6ae2704e58c053cc35f2d9e28429
[ "Apache-2.0" ]
21
2022-02-15T10:11:54.000Z
2022-03-24T17:44:29.000Z
haco/DIDrive_core/demo/cilrs/cilrs_collect_data.py
decisionforce/HACO
ebd1dc49598e6ae2704e58c053cc35f2d9e28429
[ "Apache-2.0" ]
null
null
null
haco/DIDrive_core/demo/cilrs/cilrs_collect_data.py
decisionforce/HACO
ebd1dc49598e6ae2704e58c053cc35f2d9e28429
[ "Apache-2.0" ]
3
2022-02-22T11:11:43.000Z
2022-03-17T17:58:44.000Z
import os from functools import partial import PIL import lmdb import numpy as np from ding.envs import SyncSubprocessEnvManager from ding.utils.default_helper import deep_merge_dicts from easydict import EasyDict from tqdm import tqdm from haco.DIDrive_core.data import CarlaBenchmarkCollector, BenchmarkDatasetSaver from haco.DIDrive_core.envs import SimpleCarlaEnv, CarlaEnvWrapper from haco.DIDrive_core.policy import AutoPIDPolicy from haco.DIDrive_core.utils.others.tcp_helper import parse_carla_tcp config = dict( env=dict( env_num=5, simulator=dict( disable_two_wheels=True, planner=dict( type='behavior', resolution=1, ), obs=( dict( name='rgb', type='rgb', size=[400, 300], position=[1.3, 0.0, 2.3], fov=100, ), ), verbose=True, ), col_is_failure=True, stuck_is_failure=True, ran_light_is_failure=True, manager=dict( auto_reset=False, shared_memory=False, context='spawn', max_retry=1, ), wrapper=dict( speed_factor=25., scale=1, crop=256, ), ), server=[ dict(carla_host='localhost', carla_ports=[9000, 9010, 2]), ], policy=dict( target_speed=25, tl_threshold=13, noise=True, noise_kwargs=dict(), collect=dict( n_episode=100, dir_path='./datasets_train/cilrs_datasets_train', preloads_name='cilrs_datasets_train.npy', collector=dict( suite='FullTown01-v1', nocrash=True, ), ) ), ) main_config = EasyDict(config) def cilrs_postprocess(observasion, scale=1, crop=256): rgb = observasion['rgb'].copy() im = PIL.Image.fromarray(rgb) (width, height) = (int(im.width // scale), int(im.height // scale)) rgb = im.resize((width, height)) rgb = np.asarray(rgb) start_x = height // 2 - crop // 2 start_y = width // 2 - crop // 2 rgb = rgb[start_x:start_x + crop, start_y:start_y + crop] sensor_data = {'rgb': rgb} others = {} return sensor_data, others def wrapped_env(env_cfg, wrapper_cfg, host, port, tm_port=None): return CarlaEnvWrapper(SimpleCarlaEnv(env_cfg, host, port, tm_port), wrapper_cfg) def post_process(config): epi_folder = [x for x in os.listdir(config.policy.collect.dir_path) if x.startswith('epi')] all_img_list = [] all_mea_list = [] for item in tqdm(epi_folder): lmdb_file = lmdb.open(os.path.join(config.policy.collect.dir_path, item, 'measurements.lmdb')).begin(write=False) png_files = [ x for x in os.listdir(os.path.join(config.policy.collect.dir_path, item)) if (x.endswith('png') and x.startswith('rgb')) ] png_files.sort() for png_file in png_files: index = png_file.split('_')[1].split('.')[0] measurements = np.frombuffer(lmdb_file.get(('measurements_%05d' % int(index)).encode()), np.float32) data = {} data['control'] = np.array([measurements[15], measurements[16], measurements[17]]).astype(np.float32) data['speed'] = measurements[10] / config.env.wrapper.speed_factor data['command'] = float(measurements[11]) new_dict = {} new_dict['brake'] = data['control'][2] new_dict['steer'] = (data['control'][0] + 1) / 2 new_dict['throttle'] = data['control'][1] new_dict['speed'] = data['speed'] new_dict['command'] = data['command'] all_img_list.append(os.path.join(item, png_file)) all_mea_list.append(new_dict) if not os.path.exists('_preloads'): os.mkdir('_preloads') np.save('_preloads/{}'.format(config.policy.collect.preloads_name), [all_img_list, all_mea_list]) def main(cfg, seed=0): cfg.env.manager = deep_merge_dicts(SyncSubprocessEnvManager.default_config(), cfg.env.manager) tcp_list = parse_carla_tcp(cfg.server) env_num = cfg.env.env_num assert len(tcp_list) >= env_num, \ "Carla server not enough! 
Need {} servers but only found {}.".format(env_num, len(tcp_list)) collector_env = SyncSubprocessEnvManager( env_fn=[partial(wrapped_env, cfg.env, cfg.env.wrapper, *tcp_list[i]) for i in range(env_num)], cfg=cfg.env.manager, ) policy = AutoPIDPolicy(cfg.policy) collector = CarlaBenchmarkCollector(cfg.policy.collect.collector, collector_env, policy.collect_mode) if not os.path.exists(cfg.policy.collect.dir_path): os.makedirs(cfg.policy.collect.dir_path) collected_episodes = 0 data_postprocess = lambda x: cilrs_postprocess(x, scale=cfg.env.wrapper.scale, crop=cfg.env.wrapper.crop) saver = BenchmarkDatasetSaver(cfg.policy.collect.dir_path, cfg.env.simulator.obs, data_postprocess) print('[MAIN] Start collecting data') saver.make_dataset_path(cfg.policy.collect) while collected_episodes < cfg.policy.collect.n_episode: # Sampling data from environments n_episode = min(cfg.policy.collect.n_episode - collected_episodes, env_num * 2) new_data = collector.collect(n_episode=n_episode) saver.save_episodes_data(new_data, start_episode=collected_episodes) collected_episodes += n_episode print('[MAIN] Current collected: ', collected_episodes, '/', cfg.policy.collect.n_episode) collector_env.close() saver.make_index() print('[MAIN] Making preloads') post_process(cfg) if __name__ == '__main__': main(main_config)
35.024096
132
0.626419
import os from functools import partial import PIL import lmdb import numpy as np from ding.envs import SyncSubprocessEnvManager from ding.utils.default_helper import deep_merge_dicts from easydict import EasyDict from tqdm import tqdm from haco.DIDrive_core.data import CarlaBenchmarkCollector, BenchmarkDatasetSaver from haco.DIDrive_core.envs import SimpleCarlaEnv, CarlaEnvWrapper from haco.DIDrive_core.policy import AutoPIDPolicy from haco.DIDrive_core.utils.others.tcp_helper import parse_carla_tcp config = dict( env=dict( env_num=5, simulator=dict( disable_two_wheels=True, planner=dict( type='behavior', resolution=1, ), obs=( dict( name='rgb', type='rgb', size=[400, 300], position=[1.3, 0.0, 2.3], fov=100, ), ), verbose=True, ), col_is_failure=True, stuck_is_failure=True, ran_light_is_failure=True, manager=dict( auto_reset=False, shared_memory=False, context='spawn', max_retry=1, ), wrapper=dict( speed_factor=25., scale=1, crop=256, ), ), server=[ dict(carla_host='localhost', carla_ports=[9000, 9010, 2]), ], policy=dict( target_speed=25, tl_threshold=13, noise=True, noise_kwargs=dict(), collect=dict( n_episode=100, dir_path='./datasets_train/cilrs_datasets_train', preloads_name='cilrs_datasets_train.npy', collector=dict( suite='FullTown01-v1', nocrash=True, ), ) ), ) main_config = EasyDict(config) def cilrs_postprocess(observasion, scale=1, crop=256): rgb = observasion['rgb'].copy() im = PIL.Image.fromarray(rgb) (width, height) = (int(im.width // scale), int(im.height // scale)) rgb = im.resize((width, height)) rgb = np.asarray(rgb) start_x = height // 2 - crop // 2 start_y = width // 2 - crop // 2 rgb = rgb[start_x:start_x + crop, start_y:start_y + crop] sensor_data = {'rgb': rgb} others = {} return sensor_data, others def wrapped_env(env_cfg, wrapper_cfg, host, port, tm_port=None): return CarlaEnvWrapper(SimpleCarlaEnv(env_cfg, host, port, tm_port), wrapper_cfg) def post_process(config): epi_folder = [x for x in os.listdir(config.policy.collect.dir_path) if x.startswith('epi')] all_img_list = [] all_mea_list = [] for item in tqdm(epi_folder): lmdb_file = lmdb.open(os.path.join(config.policy.collect.dir_path, item, 'measurements.lmdb')).begin(write=False) png_files = [ x for x in os.listdir(os.path.join(config.policy.collect.dir_path, item)) if (x.endswith('png') and x.startswith('rgb')) ] png_files.sort() for png_file in png_files: index = png_file.split('_')[1].split('.')[0] measurements = np.frombuffer(lmdb_file.get(('measurements_%05d' % int(index)).encode()), np.float32) data = {} data['control'] = np.array([measurements[15], measurements[16], measurements[17]]).astype(np.float32) data['speed'] = measurements[10] / config.env.wrapper.speed_factor data['command'] = float(measurements[11]) new_dict = {} new_dict['brake'] = data['control'][2] new_dict['steer'] = (data['control'][0] + 1) / 2 new_dict['throttle'] = data['control'][1] new_dict['speed'] = data['speed'] new_dict['command'] = data['command'] all_img_list.append(os.path.join(item, png_file)) all_mea_list.append(new_dict) if not os.path.exists('_preloads'): os.mkdir('_preloads') np.save('_preloads/{}'.format(config.policy.collect.preloads_name), [all_img_list, all_mea_list]) def main(cfg, seed=0): cfg.env.manager = deep_merge_dicts(SyncSubprocessEnvManager.default_config(), cfg.env.manager) tcp_list = parse_carla_tcp(cfg.server) env_num = cfg.env.env_num assert len(tcp_list) >= env_num, \ "Carla server not enough! 
Need {} servers but only found {}.".format(env_num, len(tcp_list)) collector_env = SyncSubprocessEnvManager( env_fn=[partial(wrapped_env, cfg.env, cfg.env.wrapper, *tcp_list[i]) for i in range(env_num)], cfg=cfg.env.manager, ) policy = AutoPIDPolicy(cfg.policy) collector = CarlaBenchmarkCollector(cfg.policy.collect.collector, collector_env, policy.collect_mode) if not os.path.exists(cfg.policy.collect.dir_path): os.makedirs(cfg.policy.collect.dir_path) collected_episodes = 0 data_postprocess = lambda x: cilrs_postprocess(x, scale=cfg.env.wrapper.scale, crop=cfg.env.wrapper.crop) saver = BenchmarkDatasetSaver(cfg.policy.collect.dir_path, cfg.env.simulator.obs, data_postprocess) print('[MAIN] Start collecting data') saver.make_dataset_path(cfg.policy.collect) while collected_episodes < cfg.policy.collect.n_episode: n_episode = min(cfg.policy.collect.n_episode - collected_episodes, env_num * 2) new_data = collector.collect(n_episode=n_episode) saver.save_episodes_data(new_data, start_episode=collected_episodes) collected_episodes += n_episode print('[MAIN] Current collected: ', collected_episodes, '/', cfg.policy.collect.n_episode) collector_env.close() saver.make_index() print('[MAIN] Making preloads') post_process(cfg) if __name__ == '__main__': main(main_config)
true
true
f711ed3c2a7d9bd9e45ef8312a2b448a64bf6a14
3,741
py
Python
game/test/test_ai.py
NejcZupec/tictactoe
408fe11a0b9159a0f06110a98b94042e38676d86
[ "Apache-2.0" ]
1
2016-04-15T14:18:22.000Z
2016-04-15T14:18:22.000Z
game/test/test_ai.py
NejcZupec/tictactoe
408fe11a0b9159a0f06110a98b94042e38676d86
[ "Apache-2.0" ]
3
2016-04-14T19:21:22.000Z
2016-04-15T12:47:29.000Z
game/test/test_ai.py
NejcZupec/tictactoe
408fe11a0b9159a0f06110a98b94042e38676d86
[ "Apache-2.0" ]
1
2020-09-26T00:00:01.000Z
2020-09-26T00:00:01.000Z
from django.test import TestCase from game.ai import TicTacToeAI class TicTacToeAITest(TestCase): def setUp(self): board_state = [['o', ' ', 'x'], ['x', ' ', ' '], ['x', 'o', 'o']] self.g = TicTacToeAI(board_state) def test_possible_moves(self): self.assertEqual(self.g.possible_moves(), [(0, 1), (1, 1), (1, 2)]) def test_winner_o(self): self.assertEqual(self.g.board_status(), None) self.g.move('o', 1, 1) self.assertEqual(self.g.board_status(), 'o') def test_winner_x(self): self.assertEqual(self.g.board_status(), None) self.g.move('x', 1, 1) self.assertEqual(self.g.board_status(), 'x') def test_draw_board_status(self): draw_board_state = [['o', 'o', 'x'], ['x', 'x', 'o'], ['o', 'x', 'o']] g = TicTacToeAI(draw_board_state) self.assertEqual(g.board_status(), 'draw') def test_score_possible_moves(self): self.assertEqual(self.g.score_possible_moves(), [None, 1, None]) class TestSpecificGameStates(TestCase): def test_state_1(self): bs = [['o', 'x', 'x'], ['x', ' ', 'o'], ['x', 'o', 'o']] g = TicTacToeAI(bs, 'x') self.assertEqual(g.get_next_move(), (1, 1)) g = TicTacToeAI(bs, 'o') self.assertEqual(g.get_next_move(), (1, 1)) def test_state_2(self): bs = [['o', ' ', 'x'], ['x', ' ', ' '], ['x', 'o', 'o']] g = TicTacToeAI(bs, 'x') self.assertEqual(g.get_next_move(), (1, 1)) g = TicTacToeAI(bs, 'o') self.assertEqual(g.get_next_move(), (1, 1)) def test_state_3(self): bs = [['o', 'x', 'x'], ['x', ' ', ' '], ['x', 'o', 'o']] g = TicTacToeAI(bs, 'x') self.assertEqual(g.get_next_move(), (1, 1)) g = TicTacToeAI(bs, 'o') self.assertEqual(g.get_next_move(), (1, 1)) def test_state_4(self): bs = [['x', ' ', ' '], [' ', ' ', ' '], [' ', ' ', ' ']] g = TicTacToeAI(bs, 'o') self.assertIn(g.get_next_move(), [(0, 1), (0, 1), (0, 2), (1, 0), (1, 1), (1, 2), (2, 0), (2, 1), (2, 2)]) def test_state_5(self): # loose position bs = [['x', ' ', 'x'], [' ', 'o', 'x'], ['o', ' ', ' ']] g = TicTacToeAI(bs, 'o') self.assertIn(g.get_next_move(), [(0, 1), (1, 0), (2, 1), (2, 2)]) def test_state_6(self): bs = [['x', ' ', ' '], [' ', 'o', ' '], [' ', ' ', 'x']] g = TicTacToeAI(bs, 'o') for _ in range(10): self.assertIn(g.get_next_move(), [(0, 1), (1, 0), (1, 2), (2, 1)]) def test_state_7(self): # draw state bs = [['x', 'x', 'o'], ['o', 'o', 'x'], ['x', ' ', ' ']] g = TicTacToeAI(bs, 'o') self.assertIn(g.get_next_move(), [(2, 1), (2, 2)]) def test_state_8(self): # draw state bs = [[' ', ' ', ' '], [' ', 'o', ' '], ['x', 'o', 'x']] g = TicTacToeAI(bs, 'x') self.assertEqual(g.get_next_move(), (0, 1)) def test_state_9(self): bs = [['x', 'o', 'x'], [' ', 'x', 'o'], ['o', ' ', ' ']] g = TicTacToeAI(bs, 'x') for _ in range(10): self.assertEqual(g.get_next_move(), (2, 2)) def test_state_10(self): bs = [['x', 'o', 'x'], ['o', 'x', 'o'], ['o', ' ', ' ']] g = TicTacToeAI(bs, 'x') self.assertEqual(g.get_next_move(), (2, 2))
28.557252
114
0.42101
from django.test import TestCase from game.ai import TicTacToeAI class TicTacToeAITest(TestCase): def setUp(self): board_state = [['o', ' ', 'x'], ['x', ' ', ' '], ['x', 'o', 'o']] self.g = TicTacToeAI(board_state) def test_possible_moves(self): self.assertEqual(self.g.possible_moves(), [(0, 1), (1, 1), (1, 2)]) def test_winner_o(self): self.assertEqual(self.g.board_status(), None) self.g.move('o', 1, 1) self.assertEqual(self.g.board_status(), 'o') def test_winner_x(self): self.assertEqual(self.g.board_status(), None) self.g.move('x', 1, 1) self.assertEqual(self.g.board_status(), 'x') def test_draw_board_status(self): draw_board_state = [['o', 'o', 'x'], ['x', 'x', 'o'], ['o', 'x', 'o']] g = TicTacToeAI(draw_board_state) self.assertEqual(g.board_status(), 'draw') def test_score_possible_moves(self): self.assertEqual(self.g.score_possible_moves(), [None, 1, None]) class TestSpecificGameStates(TestCase): def test_state_1(self): bs = [['o', 'x', 'x'], ['x', ' ', 'o'], ['x', 'o', 'o']] g = TicTacToeAI(bs, 'x') self.assertEqual(g.get_next_move(), (1, 1)) g = TicTacToeAI(bs, 'o') self.assertEqual(g.get_next_move(), (1, 1)) def test_state_2(self): bs = [['o', ' ', 'x'], ['x', ' ', ' '], ['x', 'o', 'o']] g = TicTacToeAI(bs, 'x') self.assertEqual(g.get_next_move(), (1, 1)) g = TicTacToeAI(bs, 'o') self.assertEqual(g.get_next_move(), (1, 1)) def test_state_3(self): bs = [['o', 'x', 'x'], ['x', ' ', ' '], ['x', 'o', 'o']] g = TicTacToeAI(bs, 'x') self.assertEqual(g.get_next_move(), (1, 1)) g = TicTacToeAI(bs, 'o') self.assertEqual(g.get_next_move(), (1, 1)) def test_state_4(self): bs = [['x', ' ', ' '], [' ', ' ', ' '], [' ', ' ', ' ']] g = TicTacToeAI(bs, 'o') self.assertIn(g.get_next_move(), [(0, 1), (0, 1), (0, 2), (1, 0), (1, 1), (1, 2), (2, 0), (2, 1), (2, 2)]) def test_state_5(self): bs = [['x', ' ', 'x'], [' ', 'o', 'x'], ['o', ' ', ' ']] g = TicTacToeAI(bs, 'o') self.assertIn(g.get_next_move(), [(0, 1), (1, 0), (2, 1), (2, 2)]) def test_state_6(self): bs = [['x', ' ', ' '], [' ', 'o', ' '], [' ', ' ', 'x']] g = TicTacToeAI(bs, 'o') for _ in range(10): self.assertIn(g.get_next_move(), [(0, 1), (1, 0), (1, 2), (2, 1)]) def test_state_7(self): bs = [['x', 'x', 'o'], ['o', 'o', 'x'], ['x', ' ', ' ']] g = TicTacToeAI(bs, 'o') self.assertIn(g.get_next_move(), [(2, 1), (2, 2)]) def test_state_8(self): bs = [[' ', ' ', ' '], [' ', 'o', ' '], ['x', 'o', 'x']] g = TicTacToeAI(bs, 'x') self.assertEqual(g.get_next_move(), (0, 1)) def test_state_9(self): bs = [['x', 'o', 'x'], [' ', 'x', 'o'], ['o', ' ', ' ']] g = TicTacToeAI(bs, 'x') for _ in range(10): self.assertEqual(g.get_next_move(), (2, 2)) def test_state_10(self): bs = [['x', 'o', 'x'], ['o', 'x', 'o'], ['o', ' ', ' ']] g = TicTacToeAI(bs, 'x') self.assertEqual(g.get_next_move(), (2, 2))
true
true
f711edb0afff634303521d7f3bc0c72083fd877a
656
py
Python
core/recc/container/struct/port_binding_guest.py
bogonets/answer
57f892a9841980bcbc35fa1e27521b34cd94bc25
[ "MIT" ]
3
2021-06-20T02:24:10.000Z
2022-01-26T23:55:33.000Z
core/recc/container/struct/port_binding_guest.py
bogonets/answer
57f892a9841980bcbc35fa1e27521b34cd94bc25
[ "MIT" ]
null
null
null
core/recc/container/struct/port_binding_guest.py
bogonets/answer
57f892a9841980bcbc35fa1e27521b34cd94bc25
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- from typing import Union class PortBindingGuest: __slots__ = ("port", "protocol") port: int protocol: str def __init__(self, port: Union[int, str], protocol: str): if isinstance(port, int): self.port = port else: self.port = int(port) self.protocol = protocol def __str__(self) -> str: return f"{self.port}/{self.protocol}" def __repr__(self): return f"PortBindingGuest<{self.__str__()}>" def __hash__(self) -> int: return hash(self.__str__()) def __eq__(self, other) -> bool: return hash(self) == hash(other)
21.16129
61
0.577744
from typing import Union class PortBindingGuest: __slots__ = ("port", "protocol") port: int protocol: str def __init__(self, port: Union[int, str], protocol: str): if isinstance(port, int): self.port = port else: self.port = int(port) self.protocol = protocol def __str__(self) -> str: return f"{self.port}/{self.protocol}" def __repr__(self): return f"PortBindingGuest<{self.__str__()}>" def __hash__(self) -> int: return hash(self.__str__()) def __eq__(self, other) -> bool: return hash(self) == hash(other)
true
true
f711edf882ba9f6ec3210c7053a35e58218d777c
698
py
Python
pyzoo/zoo/xshard/pandas/__init__.py
yushan111/analytics-zoo
cf63e52e1dc2969a10fce56740a1fecb510a46d2
[ "Apache-2.0" ]
null
null
null
pyzoo/zoo/xshard/pandas/__init__.py
yushan111/analytics-zoo
cf63e52e1dc2969a10fce56740a1fecb510a46d2
[ "Apache-2.0" ]
2
2018-10-31T01:20:05.000Z
2018-11-02T06:06:35.000Z
pyzoo/zoo/xshard/pandas/__init__.py
yushan111/analytics-zoo
cf63e52e1dc2969a10fce56740a1fecb510a46d2
[ "Apache-2.0" ]
4
2019-02-25T03:26:56.000Z
2019-03-06T04:41:31.000Z
# # Copyright 2018 Analytics Zoo Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from zoo.xshard.pandas.preprocessing import read_csv from zoo.xshard.pandas.preprocessing import read_json
36.736842
74
0.775072
from zoo.xshard.pandas.preprocessing import read_csv from zoo.xshard.pandas.preprocessing import read_json
true
true
f711eeb2544235dce29805e572a07830e76565f5
17,435
py
Python
veroviz/_queryPgRouting.py
INFORMSJoC/2020.0340
9536a35b9607266ad95799cbb7e59c9451aaa6ea
[ "MIT" ]
1
2022-03-28T09:56:53.000Z
2022-03-28T09:56:53.000Z
veroviz/_queryPgRouting.py
INFORMSJoC/2020.0340
9536a35b9607266ad95799cbb7e59c9451aaa6ea
[ "MIT" ]
null
null
null
veroviz/_queryPgRouting.py
INFORMSJoC/2020.0340
9536a35b9607266ad95799cbb7e59c9451aaa6ea
[ "MIT" ]
1
2021-10-30T05:01:49.000Z
2021-10-30T05:01:49.000Z
# Copyright (c) 2021 Lan Peng and Chase Murray # Licensed under the MIT License. See LICENSING for details. from veroviz._common import * from veroviz._internal import locs2Dict from veroviz._internal import loc2Dict from veroviz._geometry import geoDistance2D def pgrGetSnapToRoadLatLon(gid, loc, databaseName): """ A function to get snapped latlng for one coordinate using pgRouting Parameters ---------- gid: int The gid of the street in pgRouting database loc: list The location to be snapped to road databaseName: string, Require If you are hosting a data provider on your local machine (e.g., pgRouting), you'll need to specify the name of the local database. Returns ------- list A snapped locations in the format of [lat, lon], notice that this function will lost the info of altitude of the location. """ conn = psycopg2.connect("dbname='%s' user='%s' host='%s' password='%s'" % ( databaseName, config['VRV_SETTING_PGROUTING_USERNAME'], config['VRV_SETTING_PGROUTING_HOST'], config['VRV_SETTING_PGROUTING_PASSWORD'])) cur = conn.cursor() # For maintainability dicLoc = loc2Dict(loc) sqlCommand = " select ST_X(point), ST_Y(point)" sqlCommand += " from (" sqlCommand += " select ST_ClosestPoint(" sqlCommand += " ST_GeomFromEWKT(CONCAT('SRID=4326; LINESTRING(',x1,' ',y1,', ',x2,' ',y2,')'))," sqlCommand += " ST_GeomFromEWKT('SRID=4326;POINT(%s %s)')) as point" % (dicLoc['lon'], dicLoc['lat']) # Be very careful about lon and lat sqlCommand += " from ways" sqlCommand += " where gid=%s" % (gid) sqlCommand += " ) a;" cur.execute(sqlCommand) row = cur.fetchone() snapLoc = [row[1], row[0]] conn.close() return snapLoc def pgrGetNearestStreet(loc, databaseName): """ A function to return the details of the nearest street given a known coordinate Parameters ---------- loc: list The locationi that trying to find the nearest street of databaseName: string, Require If you are hosting a data provider on your local machine (e.g., pgRouting), you'll need to specify the name of the local database. 
Returns ------- gid: int gid from Ways table, identifier for street sourceVid: int sourceVid from Ways table, identifier for source vertice targetVid: int targetVid from Ways table, identifier for target vertice sourceLat: int sourceLat from Ways table, latitude for source vertice sourceLon: int sourceLon from Ways table, longitude for source vertice targetLat: int targetLat from Ways table, latitude for target vertice targetLon: int targetLon from Ways table, longitude for target vertice cost_s: int cost_s from Ways table, time needs from source to target reverse_cost_s: int reverse_cost_s from Ways table, time needs from target to source one_way: int one_way from Ways table, indicate if it is one way street """ conn = psycopg2.connect("dbname='%s' user='%s' host='%s' password='%s'" % ( databaseName, config['VRV_SETTING_PGROUTING_USERNAME'], config['VRV_SETTING_PGROUTING_HOST'], config['VRV_SETTING_PGROUTING_PASSWORD'])) cur = conn.cursor() # For maintainability dicLoc = loc2Dict(loc) try: sqlCommand = " select gid, source, target, y1, x1, y2, x2, cost_s, reverse_cost_s, one_way" sqlCommand += " from " sqlCommand += " ways" sqlCommand += " where" sqlCommand += " x1 >= %s - 0.01 and x1 <= %s + 0.01" % (dicLoc['lon'], dicLoc['lon']) # Eliminate most of the ways there sqlCommand += " order by" sqlCommand += " ST_Distance(" sqlCommand += " ST_GeogFromText('SRID=4326; POINT(%s %s)')," % (dicLoc['lon'], dicLoc['lat']) # Be very careful about lon and lat sqlCommand += " ST_GeogFromText(CONCAT('SRID=4326; LINESTRING(',x1,' ',y1,', ',x2,' ',y2,')')))" sqlCommand += " limit 1;" cur.execute(sqlCommand) row = cur.fetchone() street = { "gid" : int(row[0]), "source" : int(row[1]), "target" : int(row[2]), "sourceLoc" : [row[3], row[4]], "targetLoc" : [row[5], row[6]], "cost_s" : row[7], "reverse_cost_s" : row[8], "one_way" : row[9] } except: sqlCommand = " select gid, source, target, y1, x1, y2, x2, length_m, cost_s, reverse_cost_s, one_way" sqlCommand += " from " sqlCommand += " ways" sqlCommand += " order by" sqlCommand += " ST_Distance(" sqlCommand += " ST_GeogFromText('SRID=4326; POINT(%s %s)')," % (dicLoc['lon'], dicLoc['lat']) # Be very careful about lon and lat sqlCommand += " ST_GeogFromText(CONCAT('SRID=4326; LINESTRING(',x1,' ',y1,', ',x2,' ',y2,')')))" sqlCommand += " limit 1;" cur.execute(sqlCommand) row = cur.fetchone() street = { "gid" : int(row[0]), "source" : int(row[1]), "target" : int(row[2]), "sourceLoc" : [row[3], row[4]], "targetLoc" : [row[5], row[6]], "cost_s" : row[7], "reverse_cost_s" : row[8], "one_way" : row[9] } conn.close() return street def pgrGetShapepointsTimeDist(startLoc, endLoc, databaseName): """ A function to get a list of shapepoints from start coordinate to end coordinate. Parameters ---------- startLoc: list Start location, the format is [lat, lon] (altitude, above sea level, set to be 0) or [lat, lon, alt] endLat: float Required, latitude of end coordinate endLon: float Required, longitude of end coordinate databaseName: string, Require If you are hosting a data provider on your local machine (e.g., pgRouting), you'll need to specify the name of the local database. 
Returns ------- path: list of lists A list of coordinates in sequence that shape the route from startLoc to endLoc timeSecs: list time between current shapepoint and previous shapepoint, the first element should be 0 distMeters: list distance between current shapepoint and previous shapepoint, the first element should be 0 """ conn = psycopg2.connect("dbname='%s' user='%s' host='%s' password='%s'" % ( databaseName, config['VRV_SETTING_PGROUTING_USERNAME'], config['VRV_SETTING_PGROUTING_HOST'], config['VRV_SETTING_PGROUTING_PASSWORD'])) conn.autocommit = True cur = conn.cursor() # Calculate the distance between snapped location and source/target of closest street for the START coordinate startStreet = pgrGetNearestStreet(startLoc, databaseName) snapStartLoc = pgrGetSnapToRoadLatLon(startStreet['gid'], startLoc, databaseName) dicSnapStartLoc = loc2Dict(snapStartLoc) distSnapStart2Source = geoDistance2D(snapStartLoc, startStreet['sourceLoc']) distSnapStart2Target = geoDistance2D(snapStartLoc, startStreet['targetLoc']) # Calculate the distance between snapped location and source/target of closest street for the END coordinate endStreet = pgrGetNearestStreet(endLoc, databaseName) snapEndLoc = pgrGetSnapToRoadLatLon(endStreet['gid'], endLoc, databaseName) dicSnapEndLoc = loc2Dict(snapEndLoc) distSnapEnd2Source = geoDistance2D(snapEndLoc, endStreet['sourceLoc']) distSnapEnd2Target = geoDistance2D(snapEndLoc, endStreet['targetLoc']) # Find the number of vertices in the pgRouting database sqlCommand = " select count(*) from ways_vertices_pgr;" cur.execute(sqlCommand) row = cur.fetchone() newlyInsertVidNum = int(row[0]) + 1 # Testify and find a dummyClassID to put temp vertices and segments dummyClassID = 821 # Hard-coded number, no specific meaning # FIXME! 
For database security reason, we need to testify if class_id = 821 is not used in the original database # insert the snapped location for START coordinate, and two segments from the coordinate to source/target of the closest street sqlCommand = " insert into ways_vertices_pgr (id, lon, lat) values (%s, %s, %s);" % ( newlyInsertVidNum, dicSnapStartLoc['lon'], dicSnapStartLoc['lat']) sqlCommand += " insert into ways (class_id, source, target, length_m, x1, y1, x2, y2, cost_s, reverse_cost_s) values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s);" % ( dummyClassID, newlyInsertVidNum, startStreet['target'], distSnapStart2Target, dicSnapStartLoc['lon'], dicSnapStartLoc['lat'], startStreet['targetLoc'][1], startStreet['targetLoc'][0], startStreet['cost_s'] * distSnapStart2Target / (distSnapStart2Target + distSnapStart2Source), startStreet['reverse_cost_s'] * distSnapStart2Target / (distSnapStart2Target + distSnapStart2Source)) sqlCommand += " insert into ways (class_id, source, target, length_m, x1, y1, x2, y2, cost_s, reverse_cost_s) values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s);" % ( dummyClassID, startStreet['source'], newlyInsertVidNum, distSnapStart2Source, startStreet['sourceLoc'][1], startStreet['sourceLoc'][0], dicSnapStartLoc['lon'], dicSnapStartLoc['lat'], startStreet['cost_s'] * distSnapStart2Source / (distSnapStart2Target + distSnapStart2Source), startStreet['reverse_cost_s'] * distSnapStart2Source / (distSnapStart2Target + distSnapStart2Source)) # insert the snapped location for END coordinate, and two segments from the coordinate to source/target of the closest street sqlCommand += " insert into ways_vertices_pgr (id, lon, lat) values (%s, %s, %s);" % ( newlyInsertVidNum + 1, dicSnapEndLoc['lon'], dicSnapEndLoc['lat']) sqlCommand += " insert into ways (class_id, source, target, length_m, x1, y1, x2, y2, cost_s, reverse_cost_s) values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s);" % ( dummyClassID, newlyInsertVidNum + 1, endStreet['target'], distSnapEnd2Target, dicSnapEndLoc['lon'], dicSnapEndLoc['lat'], endStreet['targetLoc'][1], endStreet['targetLoc'][0], endStreet['cost_s'] * distSnapEnd2Target / (distSnapEnd2Target + distSnapEnd2Source), endStreet['reverse_cost_s'] * distSnapEnd2Target / (distSnapEnd2Target + distSnapEnd2Source)) sqlCommand += " insert into ways (class_id, source, target, length_m, x1, y1, x2, y2, cost_s, reverse_cost_s) values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s);" % ( dummyClassID, endStreet['source'], newlyInsertVidNum + 1, distSnapEnd2Source, endStreet['sourceLoc'][1], endStreet['sourceLoc'][0], dicSnapEndLoc['lon'], dicSnapEndLoc['lat'], endStreet['cost_s'] * distSnapEnd2Source / (distSnapEnd2Target + distSnapEnd2Source), endStreet['reverse_cost_s'] * distSnapEnd2Source / (distSnapEnd2Target + distSnapEnd2Source)) # Do dijstra algorithm to find shortest path sqlCommand += " select b.gid as gid, b.y1 as lats1, b.x1 as lons1, b.y2 as lats2, b.x2 as lons2, a.cost as secs, b.length_m as dist " sqlCommand += " from " sqlCommand += " pgr_dijkstra(" sqlCommand += " 'select gid as id, source, target, cost_s as cost, reverse_cost_s as reverse_cost from ways'," sqlCommand += " %s," % (newlyInsertVidNum) sqlCommand += " %s," % (newlyInsertVidNum + 1) sqlCommand += " directed := true" sqlCommand += " ) a" sqlCommand += " left join" sqlCommand += " ways b" sqlCommand += " on a.edge = b.gid" sqlCommand += " order by a.path_seq" # Return the shapepoint result from dijstra algorithm cur.execute(sqlCommand) row = cur.fetchall() summary = pd.DataFrame(row, columns=['gid', 
'lats1', 'lons1', 'lats2', 'lons2', 'secs', 'dist']) # Delete the temp data sqlCommand = " delete from ways_vertices_pgr where id = (%s);" % (newlyInsertVidNum) sqlCommand += " delete from ways_vertices_pgr where id = (%s);" % (newlyInsertVidNum + 1) sqlCommand += " delete from ways where class_id = %s;" % (dummyClassID) cur.execute(sqlCommand) # The last row is junk info, drop it summary.drop(summary.index[len(summary) - 1], inplace = True) # Sorting the coordinates so that they can be linked to each other lats1 = summary['lats1'].tolist() lons1 = summary['lons1'].tolist() lats2 = summary['lats2'].tolist() lons2 = summary['lons2'].tolist() path = [] path.append(startLoc) for i in range(1, len(lats1)): if (lats1[i] != lats1[i - 1] and lats1[i] != lats2[i - 1]): path.append([lats1[i], lons1[i]]) else: path.append([lats2[i], lons2[i]]) timeSecs = summary['secs'].tolist() distMeters = summary['dist'].tolist() conn.close() return [path, timeSecs, distMeters] def pgrGetTimeDist(fromLocs, toLocs, databaseName): """ This function generated time and distance matrix using pgRouting Parameters ---------- fromLoc: list, Conditional Used in 'one2many' mode. To state the coordinate of the starting node locs: list of lists Used in 'all2all', 'one2many', 'many2one' modes. A list of coordinates, in the format of [[lat1, lon1], [lat2, lon2], ...] toLoc: list, Conditional Used in 'many2one' mode. To state the coordinate of the ending node databaseName: string If you are hosting a data provider on your local machine (e.g., pgRouting), you'll need to specify the name of the local database. Returns ------- timeSecs: dictionary The key of each item in this dictionary is in (coordID1, coordID2) format, the travelling time from first entry to second entry, the units are seconds distMeters: dictionary The key of each item in this dictionary is in (coordID1, coordID2) format, the travelling distance from first entry to second entry, the units are meters """ conn = psycopg2.connect("dbname='%s' user='%s' host='%s' password='%s'" % ( databaseName, config['VRV_SETTING_PGROUTING_USERNAME'], config['VRV_SETTING_PGROUTING_HOST'], config['VRV_SETTING_PGROUTING_PASSWORD'])) conn.autocommit = True cur = conn.cursor() dummyClassID = 821 # Hard-coded number, no specific meaning # FIXME! 
For database security reason, we need to testify if class_id = 821 is not used in the original database sqlCommand = " select max(id) from ways_vertices_pgr;" cur.execute(sqlCommand) row = cur.fetchone() newlyInsertVidNum = int(row[0]) + 1 locs = fromLocs.copy() for i in range(len(toLocs)): try: locs.index(toLocs[i]) except ValueError: locs.append(toLocs[i]) startVidList = [] endVidList = [] for i in range(len(fromLocs)): startVidList.append(newlyInsertVidNum + locs.index(fromLocs[i])) for i in range(len(toLocs)): endVidList.append(newlyInsertVidNum + locs.index(toLocs[i])) for i in range(len(locs)): # Add dummy vertices street = pgrGetNearestStreet(locs[i], databaseName) snapLoc = pgrGetSnapToRoadLatLon(street['gid'], locs[i], databaseName) dicSnapLoc = loc2Dict(snapLoc) sqlCommand = " insert into ways_vertices_pgr (id, lon, lat) values (%s, %s, %s);" % ( newlyInsertVidNum + locs.index(locs[i]), dicSnapLoc['lon'], dicSnapLoc['lat']) cur.execute(sqlCommand) # Add four two road segments distSource2Snapped = geoDistance2D(street['sourceLoc'], snapLoc) distSnapped2Target = geoDistance2D(snapLoc, street['targetLoc']) ratio = distSource2Snapped / (distSource2Snapped + distSnapped2Target) dicSourceLoc = loc2Dict(street['sourceLoc']) dicTargetLoc = loc2Dict(street['targetLoc']) sqlCommand = " insert into ways (class_id, source, target, length_m, x1, y1, x2, y2, cost_s, reverse_cost_s) values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s);" % ( dummyClassID, street['source'], newlyInsertVidNum + locs.index(locs[i]), distSource2Snapped, dicSourceLoc['lon'], dicSourceLoc['lat'], dicSnapLoc['lon'], dicSnapLoc['lat'], street['cost_s'] * ratio, street['reverse_cost_s'] * ratio) cur.execute(sqlCommand) sqlCommand = " insert into ways (class_id, source, target, length_m, x1, y1, x2, y2, cost_s, reverse_cost_s) values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s);" % ( dummyClassID, newlyInsertVidNum + locs.index(locs[i]), street['target'], distSnapped2Target, dicSnapLoc['lon'], dicSnapLoc['lat'], dicTargetLoc['lon'], dicTargetLoc['lat'], street['cost_s'] * (1 - ratio), street['reverse_cost_s'] * (1 - ratio)) cur.execute(sqlCommand) sqlCommand = " select " sqlCommand += " start_vid as start_node, " sqlCommand += " end_vid as end_node, " sqlCommand += " sum(cost) as time, " sqlCommand += " sum(length_m) as distance " sqlCommand += " from (" sqlCommand += " select " sqlCommand += " a.*, " sqlCommand += " b.length_m" sqlCommand += " from pgr_dijkstra(" sqlCommand += " 'select gid as id, source, target, cost_s as cost, reverse_cost_s as reverse_cost from ways', " sqlCommand += " ARRAY%s, " % (startVidList) sqlCommand += " ARRAY%s, " % (endVidList) sqlCommand += " directed := true) a " sqlCommand += " left join " sqlCommand += " ways b " sqlCommand += " on " sqlCommand += " a.edge = b.gid " sqlCommand += " order by " sqlCommand += " a.path_seq" sqlCommand += " ) x " sqlCommand += " group by " sqlCommand += " start_vid, " sqlCommand += " end_vid;" cur.execute(sqlCommand) row = cur.fetchall() for i in range(len(startVidList)): sqlCommand = " delete from ways_vertices_pgr where id = %s;" % (startVidList[i]) cur.execute(sqlCommand) for i in range(len(endVidList)): sqlCommand = " delete from ways_vertices_pgr where id = %s;" % (endVidList[i]) cur.execute(sqlCommand) sqlCommand = " delete from ways where class_id = %s;" % (dummyClassID) cur.execute(sqlCommand) conn.close() rawDist = {} rawTime = {} for i in range(len(row)): rawTime[row[i][0], row[i][1]] = row[i][2] rawDist[row[i][0], row[i][1]] = row[i][3] distMeters = {} 
timeSecs = {} for i in range(len(fromLocs)): for j in range(len(toLocs)): try: distMeters[i, j] = rawDist[startVidList[i], endVidList[j]] except: distMeters[i, j] = 0 try: timeSecs[i, j] = rawTime[startVidList[i], endVidList[j]] except: timeSecs[i, j] = 0 return [timeSecs, distMeters]
37.902174
165
0.693031
from veroviz._common import * from veroviz._internal import locs2Dict from veroviz._internal import loc2Dict from veroviz._geometry import geoDistance2D def pgrGetSnapToRoadLatLon(gid, loc, databaseName): conn = psycopg2.connect("dbname='%s' user='%s' host='%s' password='%s'" % ( databaseName, config['VRV_SETTING_PGROUTING_USERNAME'], config['VRV_SETTING_PGROUTING_HOST'], config['VRV_SETTING_PGROUTING_PASSWORD'])) cur = conn.cursor() dicLoc = loc2Dict(loc) sqlCommand = " select ST_X(point), ST_Y(point)" sqlCommand += " from (" sqlCommand += " select ST_ClosestPoint(" sqlCommand += " ST_GeomFromEWKT(CONCAT('SRID=4326; LINESTRING(',x1,' ',y1,', ',x2,' ',y2,')'))," sqlCommand += " ST_GeomFromEWKT('SRID=4326;POINT(%s %s)')) as point" % (dicLoc['lon'], dicLoc['lat']) sqlCommand += " from ways" sqlCommand += " where gid=%s" % (gid) sqlCommand += " ) a;" cur.execute(sqlCommand) row = cur.fetchone() snapLoc = [row[1], row[0]] conn.close() return snapLoc def pgrGetNearestStreet(loc, databaseName): conn = psycopg2.connect("dbname='%s' user='%s' host='%s' password='%s'" % ( databaseName, config['VRV_SETTING_PGROUTING_USERNAME'], config['VRV_SETTING_PGROUTING_HOST'], config['VRV_SETTING_PGROUTING_PASSWORD'])) cur = conn.cursor() dicLoc = loc2Dict(loc) try: sqlCommand = " select gid, source, target, y1, x1, y2, x2, cost_s, reverse_cost_s, one_way" sqlCommand += " from " sqlCommand += " ways" sqlCommand += " where" sqlCommand += " x1 >= %s - 0.01 and x1 <= %s + 0.01" % (dicLoc['lon'], dicLoc['lon']) sqlCommand += " order by" sqlCommand += " ST_Distance(" sqlCommand += " ST_GeogFromText('SRID=4326; POINT(%s %s)')," % (dicLoc['lon'], dicLoc['lat']) sqlCommand += " ST_GeogFromText(CONCAT('SRID=4326; LINESTRING(',x1,' ',y1,', ',x2,' ',y2,')')))" sqlCommand += " limit 1;" cur.execute(sqlCommand) row = cur.fetchone() street = { "gid" : int(row[0]), "source" : int(row[1]), "target" : int(row[2]), "sourceLoc" : [row[3], row[4]], "targetLoc" : [row[5], row[6]], "cost_s" : row[7], "reverse_cost_s" : row[8], "one_way" : row[9] } except: sqlCommand = " select gid, source, target, y1, x1, y2, x2, length_m, cost_s, reverse_cost_s, one_way" sqlCommand += " from " sqlCommand += " ways" sqlCommand += " order by" sqlCommand += " ST_Distance(" sqlCommand += " ST_GeogFromText('SRID=4326; POINT(%s %s)')," % (dicLoc['lon'], dicLoc['lat']) sqlCommand += " ST_GeogFromText(CONCAT('SRID=4326; LINESTRING(',x1,' ',y1,', ',x2,' ',y2,')')))" sqlCommand += " limit 1;" cur.execute(sqlCommand) row = cur.fetchone() street = { "gid" : int(row[0]), "source" : int(row[1]), "target" : int(row[2]), "sourceLoc" : [row[3], row[4]], "targetLoc" : [row[5], row[6]], "cost_s" : row[7], "reverse_cost_s" : row[8], "one_way" : row[9] } conn.close() return street def pgrGetShapepointsTimeDist(startLoc, endLoc, databaseName): conn = psycopg2.connect("dbname='%s' user='%s' host='%s' password='%s'" % ( databaseName, config['VRV_SETTING_PGROUTING_USERNAME'], config['VRV_SETTING_PGROUTING_HOST'], config['VRV_SETTING_PGROUTING_PASSWORD'])) conn.autocommit = True cur = conn.cursor() startStreet = pgrGetNearestStreet(startLoc, databaseName) snapStartLoc = pgrGetSnapToRoadLatLon(startStreet['gid'], startLoc, databaseName) dicSnapStartLoc = loc2Dict(snapStartLoc) distSnapStart2Source = geoDistance2D(snapStartLoc, startStreet['sourceLoc']) distSnapStart2Target = geoDistance2D(snapStartLoc, startStreet['targetLoc']) endStreet = pgrGetNearestStreet(endLoc, databaseName) snapEndLoc = pgrGetSnapToRoadLatLon(endStreet['gid'], endLoc, databaseName) dicSnapEndLoc = 
loc2Dict(snapEndLoc) distSnapEnd2Source = geoDistance2D(snapEndLoc, endStreet['sourceLoc']) distSnapEnd2Target = geoDistance2D(snapEndLoc, endStreet['targetLoc']) sqlCommand = " select count(*) from ways_vertices_pgr;" cur.execute(sqlCommand) row = cur.fetchone() newlyInsertVidNum = int(row[0]) + 1 dummyClassID = 821 sqlCommand = " insert into ways_vertices_pgr (id, lon, lat) values (%s, %s, %s);" % ( newlyInsertVidNum, dicSnapStartLoc['lon'], dicSnapStartLoc['lat']) sqlCommand += " insert into ways (class_id, source, target, length_m, x1, y1, x2, y2, cost_s, reverse_cost_s) values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s);" % ( dummyClassID, newlyInsertVidNum, startStreet['target'], distSnapStart2Target, dicSnapStartLoc['lon'], dicSnapStartLoc['lat'], startStreet['targetLoc'][1], startStreet['targetLoc'][0], startStreet['cost_s'] * distSnapStart2Target / (distSnapStart2Target + distSnapStart2Source), startStreet['reverse_cost_s'] * distSnapStart2Target / (distSnapStart2Target + distSnapStart2Source)) sqlCommand += " insert into ways (class_id, source, target, length_m, x1, y1, x2, y2, cost_s, reverse_cost_s) values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s);" % ( dummyClassID, startStreet['source'], newlyInsertVidNum, distSnapStart2Source, startStreet['sourceLoc'][1], startStreet['sourceLoc'][0], dicSnapStartLoc['lon'], dicSnapStartLoc['lat'], startStreet['cost_s'] * distSnapStart2Source / (distSnapStart2Target + distSnapStart2Source), startStreet['reverse_cost_s'] * distSnapStart2Source / (distSnapStart2Target + distSnapStart2Source)) sqlCommand += " insert into ways_vertices_pgr (id, lon, lat) values (%s, %s, %s);" % ( newlyInsertVidNum + 1, dicSnapEndLoc['lon'], dicSnapEndLoc['lat']) sqlCommand += " insert into ways (class_id, source, target, length_m, x1, y1, x2, y2, cost_s, reverse_cost_s) values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s);" % ( dummyClassID, newlyInsertVidNum + 1, endStreet['target'], distSnapEnd2Target, dicSnapEndLoc['lon'], dicSnapEndLoc['lat'], endStreet['targetLoc'][1], endStreet['targetLoc'][0], endStreet['cost_s'] * distSnapEnd2Target / (distSnapEnd2Target + distSnapEnd2Source), endStreet['reverse_cost_s'] * distSnapEnd2Target / (distSnapEnd2Target + distSnapEnd2Source)) sqlCommand += " insert into ways (class_id, source, target, length_m, x1, y1, x2, y2, cost_s, reverse_cost_s) values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s);" % ( dummyClassID, endStreet['source'], newlyInsertVidNum + 1, distSnapEnd2Source, endStreet['sourceLoc'][1], endStreet['sourceLoc'][0], dicSnapEndLoc['lon'], dicSnapEndLoc['lat'], endStreet['cost_s'] * distSnapEnd2Source / (distSnapEnd2Target + distSnapEnd2Source), endStreet['reverse_cost_s'] * distSnapEnd2Source / (distSnapEnd2Target + distSnapEnd2Source)) sqlCommand += " select b.gid as gid, b.y1 as lats1, b.x1 as lons1, b.y2 as lats2, b.x2 as lons2, a.cost as secs, b.length_m as dist " sqlCommand += " from " sqlCommand += " pgr_dijkstra(" sqlCommand += " 'select gid as id, source, target, cost_s as cost, reverse_cost_s as reverse_cost from ways'," sqlCommand += " %s," % (newlyInsertVidNum) sqlCommand += " %s," % (newlyInsertVidNum + 1) sqlCommand += " directed := true" sqlCommand += " ) a" sqlCommand += " left join" sqlCommand += " ways b" sqlCommand += " on a.edge = b.gid" sqlCommand += " order by a.path_seq" cur.execute(sqlCommand) row = cur.fetchall() summary = pd.DataFrame(row, columns=['gid', 'lats1', 'lons1', 'lats2', 'lons2', 'secs', 'dist']) sqlCommand = " delete from ways_vertices_pgr where id = (%s);" % (newlyInsertVidNum) 
sqlCommand += " delete from ways_vertices_pgr where id = (%s);" % (newlyInsertVidNum + 1) sqlCommand += " delete from ways where class_id = %s;" % (dummyClassID) cur.execute(sqlCommand) summary.drop(summary.index[len(summary) - 1], inplace = True) lats1 = summary['lats1'].tolist() lons1 = summary['lons1'].tolist() lats2 = summary['lats2'].tolist() lons2 = summary['lons2'].tolist() path = [] path.append(startLoc) for i in range(1, len(lats1)): if (lats1[i] != lats1[i - 1] and lats1[i] != lats2[i - 1]): path.append([lats1[i], lons1[i]]) else: path.append([lats2[i], lons2[i]]) timeSecs = summary['secs'].tolist() distMeters = summary['dist'].tolist() conn.close() return [path, timeSecs, distMeters] def pgrGetTimeDist(fromLocs, toLocs, databaseName): conn = psycopg2.connect("dbname='%s' user='%s' host='%s' password='%s'" % ( databaseName, config['VRV_SETTING_PGROUTING_USERNAME'], config['VRV_SETTING_PGROUTING_HOST'], config['VRV_SETTING_PGROUTING_PASSWORD'])) conn.autocommit = True cur = conn.cursor() dummyClassID = 821 sqlCommand = " select max(id) from ways_vertices_pgr;" cur.execute(sqlCommand) row = cur.fetchone() newlyInsertVidNum = int(row[0]) + 1 locs = fromLocs.copy() for i in range(len(toLocs)): try: locs.index(toLocs[i]) except ValueError: locs.append(toLocs[i]) startVidList = [] endVidList = [] for i in range(len(fromLocs)): startVidList.append(newlyInsertVidNum + locs.index(fromLocs[i])) for i in range(len(toLocs)): endVidList.append(newlyInsertVidNum + locs.index(toLocs[i])) for i in range(len(locs)): street = pgrGetNearestStreet(locs[i], databaseName) snapLoc = pgrGetSnapToRoadLatLon(street['gid'], locs[i], databaseName) dicSnapLoc = loc2Dict(snapLoc) sqlCommand = " insert into ways_vertices_pgr (id, lon, lat) values (%s, %s, %s);" % ( newlyInsertVidNum + locs.index(locs[i]), dicSnapLoc['lon'], dicSnapLoc['lat']) cur.execute(sqlCommand) distSource2Snapped = geoDistance2D(street['sourceLoc'], snapLoc) distSnapped2Target = geoDistance2D(snapLoc, street['targetLoc']) ratio = distSource2Snapped / (distSource2Snapped + distSnapped2Target) dicSourceLoc = loc2Dict(street['sourceLoc']) dicTargetLoc = loc2Dict(street['targetLoc']) sqlCommand = " insert into ways (class_id, source, target, length_m, x1, y1, x2, y2, cost_s, reverse_cost_s) values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s);" % ( dummyClassID, street['source'], newlyInsertVidNum + locs.index(locs[i]), distSource2Snapped, dicSourceLoc['lon'], dicSourceLoc['lat'], dicSnapLoc['lon'], dicSnapLoc['lat'], street['cost_s'] * ratio, street['reverse_cost_s'] * ratio) cur.execute(sqlCommand) sqlCommand = " insert into ways (class_id, source, target, length_m, x1, y1, x2, y2, cost_s, reverse_cost_s) values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s);" % ( dummyClassID, newlyInsertVidNum + locs.index(locs[i]), street['target'], distSnapped2Target, dicSnapLoc['lon'], dicSnapLoc['lat'], dicTargetLoc['lon'], dicTargetLoc['lat'], street['cost_s'] * (1 - ratio), street['reverse_cost_s'] * (1 - ratio)) cur.execute(sqlCommand) sqlCommand = " select " sqlCommand += " start_vid as start_node, " sqlCommand += " end_vid as end_node, " sqlCommand += " sum(cost) as time, " sqlCommand += " sum(length_m) as distance " sqlCommand += " from (" sqlCommand += " select " sqlCommand += " a.*, " sqlCommand += " b.length_m" sqlCommand += " from pgr_dijkstra(" sqlCommand += " 'select gid as id, source, target, cost_s as cost, reverse_cost_s as reverse_cost from ways', " sqlCommand += " ARRAY%s, " % (startVidList) sqlCommand += " ARRAY%s, " % (endVidList) sqlCommand 
+= " directed := true) a " sqlCommand += " left join " sqlCommand += " ways b " sqlCommand += " on " sqlCommand += " a.edge = b.gid " sqlCommand += " order by " sqlCommand += " a.path_seq" sqlCommand += " ) x " sqlCommand += " group by " sqlCommand += " start_vid, " sqlCommand += " end_vid;" cur.execute(sqlCommand) row = cur.fetchall() for i in range(len(startVidList)): sqlCommand = " delete from ways_vertices_pgr where id = %s;" % (startVidList[i]) cur.execute(sqlCommand) for i in range(len(endVidList)): sqlCommand = " delete from ways_vertices_pgr where id = %s;" % (endVidList[i]) cur.execute(sqlCommand) sqlCommand = " delete from ways where class_id = %s;" % (dummyClassID) cur.execute(sqlCommand) conn.close() rawDist = {} rawTime = {} for i in range(len(row)): rawTime[row[i][0], row[i][1]] = row[i][2] rawDist[row[i][0], row[i][1]] = row[i][3] distMeters = {} timeSecs = {} for i in range(len(fromLocs)): for j in range(len(toLocs)): try: distMeters[i, j] = rawDist[startVidList[i], endVidList[j]] except: distMeters[i, j] = 0 try: timeSecs[i, j] = rawTime[startVidList[i], endVidList[j]] except: timeSecs[i, j] = 0 return [timeSecs, distMeters]
true
true
f711eeeea65596f32f53745ed391001983450cc7
5,830
py
Python
imcsdk/mometa/pci/PciEquipSlot.py
ragupta-git/ImcSdk
2e41f2ffe5282d38de85bc4739fa53dd2f0c9bb4
[ "Apache-2.0" ]
null
null
null
imcsdk/mometa/pci/PciEquipSlot.py
ragupta-git/ImcSdk
2e41f2ffe5282d38de85bc4739fa53dd2f0c9bb4
[ "Apache-2.0" ]
null
null
null
imcsdk/mometa/pci/PciEquipSlot.py
ragupta-git/ImcSdk
2e41f2ffe5282d38de85bc4739fa53dd2f0c9bb4
[ "Apache-2.0" ]
3
2018-11-14T13:02:40.000Z
2018-11-14T13:49:38.000Z
"""This module contains the general information for PciEquipSlot ManagedObject.""" from ...imcmo import ManagedObject from ...imccoremeta import MoPropertyMeta, MoMeta from ...imcmeta import VersionMeta class PciEquipSlotConsts: pass class PciEquipSlot(ManagedObject): """This is PciEquipSlot class.""" consts = PciEquipSlotConsts() naming_props = set([u'id']) mo_meta = { "classic": MoMeta("PciEquipSlot", "pciEquipSlot", "equipped-slot-[id]", VersionMeta.Version151f, "OutputOnly", 0xf, [], ["admin", "read-only", "user"], [u'computeRackUnit'], [u'faultInst', u'gpuInventory'], ["Get"]), "modular": MoMeta("PciEquipSlot", "pciEquipSlot", "equipped-slot-[id]", VersionMeta.Version2013e, "OutputOnly", 0xf, [], ["admin", "read-only", "user"], [u'computeServerNode'], [u'faultInst'], ["Get"]) } prop_meta = { "classic": { "child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version151f, MoPropertyMeta.INTERNAL, None, None, None, None, [], []), "controller_reported": MoPropertyMeta("controller_reported", "controllerReported", "string", VersionMeta.Version151f, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []), "dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version151f, MoPropertyMeta.READ_ONLY, 0x2, 0, 255, None, [], []), "id": MoPropertyMeta("id", "id", "string", VersionMeta.Version151f, MoPropertyMeta.NAMING, None, None, None, None, [], []), "model": MoPropertyMeta("model", "model", "string", VersionMeta.Version151f, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []), "option_rom_status": MoPropertyMeta("option_rom_status", "optionROMStatus", "string", VersionMeta.Version303a, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []), "rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version151f, MoPropertyMeta.READ_ONLY, 0x4, 0, 255, None, [], []), "smbios_id": MoPropertyMeta("smbios_id", "smbiosId", "string", VersionMeta.Version151f, MoPropertyMeta.READ_ONLY, None, None, None, None, [], []), "status": MoPropertyMeta("status", "status", "string", VersionMeta.Version151f, MoPropertyMeta.READ_ONLY, 0x8, None, None, r"""((removed|created|modified|deleted),){0,3}(removed|created|modified|deleted){0,1}""", [], []), "vendor": MoPropertyMeta("vendor", "vendor", "string", VersionMeta.Version151f, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []), "version": MoPropertyMeta("version", "version", "string", VersionMeta.Version201a, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []), }, "modular": { "child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version2013e, MoPropertyMeta.INTERNAL, None, None, None, None, [], []), "controller_reported": MoPropertyMeta("controller_reported", "controllerReported", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []), "dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, 0x2, 0, 255, None, [], []), "id": MoPropertyMeta("id", "id", "string", VersionMeta.Version2013e, MoPropertyMeta.NAMING, None, None, None, None, [], []), "model": MoPropertyMeta("model", "model", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []), "option_rom_status": MoPropertyMeta("option_rom_status", "optionROMStatus", "string", VersionMeta.Version303a, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []), "rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, 0x4, 0, 255, None, [], []), "smbios_id": MoPropertyMeta("smbios_id", "smbiosId", "string", 
VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, None, None, None, [], []), "status": MoPropertyMeta("status", "status", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, 0x8, None, None, r"""((removed|created|modified|deleted),){0,3}(removed|created|modified|deleted){0,1}""", [], []), "vendor": MoPropertyMeta("vendor", "vendor", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []), "version": MoPropertyMeta("version", "version", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []), }, } prop_map = { "classic": { "childAction": "child_action", "controllerReported": "controller_reported", "dn": "dn", "id": "id", "model": "model", "optionROMStatus": "option_rom_status", "rn": "rn", "smbiosId": "smbios_id", "status": "status", "vendor": "vendor", "version": "version", }, "modular": { "childAction": "child_action", "controllerReported": "controller_reported", "dn": "dn", "id": "id", "model": "model", "optionROMStatus": "option_rom_status", "rn": "rn", "smbiosId": "smbios_id", "status": "status", "vendor": "vendor", "version": "version", }, } def __init__(self, parent_mo_or_dn, id, **kwargs): self._dirty_mask = 0 self.id = id self.child_action = None self.controller_reported = None self.model = None self.option_rom_status = None self.smbios_id = None self.status = None self.vendor = None self.version = None ManagedObject.__init__(self, "PciEquipSlot", parent_mo_or_dn, **kwargs)
57.156863
235
0.612864
from ...imcmo import ManagedObject from ...imccoremeta import MoPropertyMeta, MoMeta from ...imcmeta import VersionMeta class PciEquipSlotConsts: pass class PciEquipSlot(ManagedObject): consts = PciEquipSlotConsts() naming_props = set([u'id']) mo_meta = { "classic": MoMeta("PciEquipSlot", "pciEquipSlot", "equipped-slot-[id]", VersionMeta.Version151f, "OutputOnly", 0xf, [], ["admin", "read-only", "user"], [u'computeRackUnit'], [u'faultInst', u'gpuInventory'], ["Get"]), "modular": MoMeta("PciEquipSlot", "pciEquipSlot", "equipped-slot-[id]", VersionMeta.Version2013e, "OutputOnly", 0xf, [], ["admin", "read-only", "user"], [u'computeServerNode'], [u'faultInst'], ["Get"]) } prop_meta = { "classic": { "child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version151f, MoPropertyMeta.INTERNAL, None, None, None, None, [], []), "controller_reported": MoPropertyMeta("controller_reported", "controllerReported", "string", VersionMeta.Version151f, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []), "dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version151f, MoPropertyMeta.READ_ONLY, 0x2, 0, 255, None, [], []), "id": MoPropertyMeta("id", "id", "string", VersionMeta.Version151f, MoPropertyMeta.NAMING, None, None, None, None, [], []), "model": MoPropertyMeta("model", "model", "string", VersionMeta.Version151f, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []), "option_rom_status": MoPropertyMeta("option_rom_status", "optionROMStatus", "string", VersionMeta.Version303a, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []), "rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version151f, MoPropertyMeta.READ_ONLY, 0x4, 0, 255, None, [], []), "smbios_id": MoPropertyMeta("smbios_id", "smbiosId", "string", VersionMeta.Version151f, MoPropertyMeta.READ_ONLY, None, None, None, None, [], []), "status": MoPropertyMeta("status", "status", "string", VersionMeta.Version151f, MoPropertyMeta.READ_ONLY, 0x8, None, None, r"""((removed|created|modified|deleted),){0,3}(removed|created|modified|deleted){0,1}""", [], []), "vendor": MoPropertyMeta("vendor", "vendor", "string", VersionMeta.Version151f, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []), "version": MoPropertyMeta("version", "version", "string", VersionMeta.Version201a, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []), }, "modular": { "child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version2013e, MoPropertyMeta.INTERNAL, None, None, None, None, [], []), "controller_reported": MoPropertyMeta("controller_reported", "controllerReported", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []), "dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, 0x2, 0, 255, None, [], []), "id": MoPropertyMeta("id", "id", "string", VersionMeta.Version2013e, MoPropertyMeta.NAMING, None, None, None, None, [], []), "model": MoPropertyMeta("model", "model", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []), "option_rom_status": MoPropertyMeta("option_rom_status", "optionROMStatus", "string", VersionMeta.Version303a, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []), "rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, 0x4, 0, 255, None, [], []), "smbios_id": MoPropertyMeta("smbios_id", "smbiosId", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, None, None, None, [], []), "status": MoPropertyMeta("status", "status", 
"string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, 0x8, None, None, r"""((removed|created|modified|deleted),){0,3}(removed|created|modified|deleted){0,1}""", [], []), "vendor": MoPropertyMeta("vendor", "vendor", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []), "version": MoPropertyMeta("version", "version", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []), }, } prop_map = { "classic": { "childAction": "child_action", "controllerReported": "controller_reported", "dn": "dn", "id": "id", "model": "model", "optionROMStatus": "option_rom_status", "rn": "rn", "smbiosId": "smbios_id", "status": "status", "vendor": "vendor", "version": "version", }, "modular": { "childAction": "child_action", "controllerReported": "controller_reported", "dn": "dn", "id": "id", "model": "model", "optionROMStatus": "option_rom_status", "rn": "rn", "smbiosId": "smbios_id", "status": "status", "vendor": "vendor", "version": "version", }, } def __init__(self, parent_mo_or_dn, id, **kwargs): self._dirty_mask = 0 self.id = id self.child_action = None self.controller_reported = None self.model = None self.option_rom_status = None self.smbios_id = None self.status = None self.vendor = None self.version = None ManagedObject.__init__(self, "PciEquipSlot", parent_mo_or_dn, **kwargs)
true
true
f711ef4632a8ebbc51846a8b824d23dfbc6b810f
1,030
py
Python
old_code/pandas_order.py
yuguiyang/python_demo
1be2406bfc920e22a0f92bf10d9a3665984067ba
[ "Apache-2.0" ]
null
null
null
old_code/pandas_order.py
yuguiyang/python_demo
1be2406bfc920e22a0f92bf10d9a3665984067ba
[ "Apache-2.0" ]
null
null
null
old_code/pandas_order.py
yuguiyang/python_demo
1be2406bfc920e22a0f92bf10d9a3665984067ba
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*- """ Created on Fri Jul 28 13:42:30 2017 @author: hexo """ import numpy as np import pandas as pd # Read the first sheet df = pd.read_excel('D:\Tableau_data\示例 - 超市.xls',sheetname=0) print(type(df)) # Data type of each column print(df.dtypes) # Count of each dtype print(df.get_dtype_counts()) # Still not sure what ftype is actually for: sparse|dense -- what does it indicate? print(df.ftypes) print(df.get_ftype_counts()) top_10_data=df.head(10) #print(top_10_data) print('----------------------------') # axis=0 is the vertical axis, axis=1 is the horizontal axis # Mean of each column print(top_10_data.mean(axis=0)) print('----------------------------') # Mean of each row print(top_10_data.mean(axis=1)) print('----------------------------') #sort_index # Tricky -- what exactly does this axis do? (ok) # Still haven't figured out what level is for # Sort by the first column in descending order #print(top_10_data.sort_index(axis=0,level=0,ascending=True)) #print(top_10_data.sort_index(axis=0,level=1,ascending=True)) print(top_10_data) print('----------------------------') # Finally managed to sort by 订单日期 (order date) in descending order!!! # When sorting by multiple columns here, it seems only one sort order can be applied -- all descending print(top_10_data.sort_values(by=['订单日期','行 ID'] , ascending=False).head(2))
18.727273
76
0.659223
import numpy as np import pandas as pd df = pd.read_excel('D:\Tableau_data\示例 - 超市.xls',sheetname=0) print(type(df)) print(df.dtypes) print(df.get_dtype_counts()) print(df.ftypes) print(df.get_ftype_counts()) top_10_data=df.head(10) print('----------------------------') print(top_10_data.mean(axis=0)) print('----------------------------') print(top_10_data.mean(axis=1)) print('----------------------------') print(top_10_data) print('----------------------------') print(top_10_data.sort_values(by=['订单日期','行 ID'] , ascending=False).head(2))
true
true
f711ef49feb10977288d0f681356ef6504cb4062
488
py
Python
blog/migrations/0002_post_subtitle.py
EmmanuelPerezP/MyBlog
247fb638561df264701ebda91347c28136d12061
[ "MIT" ]
null
null
null
blog/migrations/0002_post_subtitle.py
EmmanuelPerezP/MyBlog
247fb638561df264701ebda91347c28136d12061
[ "MIT" ]
null
null
null
blog/migrations/0002_post_subtitle.py
EmmanuelPerezP/MyBlog
247fb638561df264701ebda91347c28136d12061
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- # Generated by Django 1.11.3 on 2017-08-01 03:00 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('blog', '0001_initial'), ] operations = [ migrations.AddField( model_name='post', name='subtitle', field=models.CharField(default='test', max_length=255), preserve_default=False, ), ]
22.181818
67
0.604508
from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('blog', '0001_initial'), ] operations = [ migrations.AddField( model_name='post', name='subtitle', field=models.CharField(default='test', max_length=255), preserve_default=False, ), ]
true
true
f711f1b1b52a7f3dc945583c49c1999f046af643
747
py
Python
user/views.py
prajilmv/recipe-app-api
f469168922e520d423d7207f69290cac172fc32e
[ "MIT" ]
null
null
null
user/views.py
prajilmv/recipe-app-api
f469168922e520d423d7207f69290cac172fc32e
[ "MIT" ]
null
null
null
user/views.py
prajilmv/recipe-app-api
f469168922e520d423d7207f69290cac172fc32e
[ "MIT" ]
null
null
null
from rest_framework import generics, authentication, permissions from rest_framework.authtoken.views import ObtainAuthToken from rest_framework.settings import api_settings from user.serializers import UserSerailizer, AuthTokenSerializer class CreateUserView(generics.CreateAPIView): serializer_class = UserSerailizer class CreateToeknView(ObtainAuthToken): serializer_class = AuthTokenSerializer renderer_classes = api_settings.DEFAULT_RENDERER_CLASSES class ManageUserView(generics.RetrieveUpdateAPIView): serializer_class = UserSerailizer authentication_classes = (authentication.TokenAuthentication,) permission_classes = (permissions.IsAuthenticated,) def get_object(self): return self.request.user
32.478261
66
0.828648
from rest_framework import generics, authentication, permissions from rest_framework.authtoken.views import ObtainAuthToken from rest_framework.settings import api_settings from user.serializers import UserSerailizer, AuthTokenSerializer class CreateUserView(generics.CreateAPIView): serializer_class = UserSerailizer class CreateToeknView(ObtainAuthToken): serializer_class = AuthTokenSerializer renderer_classes = api_settings.DEFAULT_RENDERER_CLASSES class ManageUserView(generics.RetrieveUpdateAPIView): serializer_class = UserSerailizer authentication_classes = (authentication.TokenAuthentication,) permission_classes = (permissions.IsAuthenticated,) def get_object(self): return self.request.user
true
true
f711f2f36decd5351d75ec27de13bd15b0cad0af
17,654
py
Python
tempest/common/isolated_creds.py
Mirantis/tempest
ae7e033fef80f2a4728a13bba18123f6fe32839a
[ "Apache-2.0" ]
3
2015-03-03T15:43:06.000Z
2016-10-24T06:12:40.000Z
tempest/common/isolated_creds.py
Mirantis/tempest
ae7e033fef80f2a4728a13bba18123f6fe32839a
[ "Apache-2.0" ]
null
null
null
tempest/common/isolated_creds.py
Mirantis/tempest
ae7e033fef80f2a4728a13bba18123f6fe32839a
[ "Apache-2.0" ]
null
null
null
# Copyright 2013 IBM Corp. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import netaddr from tempest import auth from tempest import clients from tempest.common import cred_provider from tempest.common.utils import data_utils from tempest import config from tempest import exceptions from tempest.openstack.common import log as logging CONF = config.CONF LOG = logging.getLogger(__name__) class IsolatedCreds(cred_provider.CredentialProvider): def __init__(self, name, tempest_client=True, interface='json', password='pass', network_resources=None): super(IsolatedCreds, self).__init__(name, tempest_client, interface, password, network_resources) self.network_resources = network_resources self.isolated_creds = {} self.isolated_net_resources = {} self.ports = [] self.tempest_client = tempest_client self.interface = interface self.password = password self.identity_admin_client, self.network_admin_client = ( self._get_admin_clients()) def _get_admin_clients(self): """ Returns a tuple with instances of the following admin clients (in this order): identity network """ if self.tempest_client: os = clients.AdminManager(interface=self.interface) else: os = clients.OfficialClientManager( auth.get_default_credentials('identity_admin') ) return os.identity_client, os.network_client def _create_tenant(self, name, description): if self.tempest_client: _, tenant = self.identity_admin_client.create_tenant( name=name, description=description) else: tenant = self.identity_admin_client.tenants.create( name, description=description) return tenant def _get_tenant_by_name(self, name): if self.tempest_client: _, tenant = self.identity_admin_client.get_tenant_by_name(name) else: tenants = self.identity_admin_client.tenants.list() for ten in tenants: if ten['name'] == name: tenant = ten break else: raise exceptions.NotFound('No such tenant') return tenant def _create_user(self, username, password, tenant, email): if self.tempest_client: _, user = self.identity_admin_client.create_user(username, password, tenant['id'], email) else: user = self.identity_admin_client.users.create(username, password, email, tenant_id=tenant.id) return user def _get_user(self, tenant, username): if self.tempest_client: _, user = self.identity_admin_client.get_user_by_username( tenant['id'], username) else: user = self.identity_admin_client.users.get(username) return user def _list_roles(self): if self.tempest_client: _, roles = self.identity_admin_client.list_roles() else: roles = self.identity_admin_client.roles.list() return roles def _assign_user_role(self, tenant, user, role_name): role = None try: roles = self._list_roles() if self.tempest_client: role = next(r for r in roles if r['name'] == role_name) else: role = next(r for r in roles if r.name == role_name) except StopIteration: msg = 'No "%s" role found' % role_name raise exceptions.NotFound(msg) if self.tempest_client: self.identity_admin_client.assign_user_role(tenant['id'], user['id'], role['id']) else: self.identity_admin_client.roles.add_user_role(user.id, role.id, tenant.id) def 
_delete_user(self, user): if self.tempest_client: self.identity_admin_client.delete_user(user) else: self.identity_admin_client.users.delete(user) def _delete_tenant(self, tenant): if self.tempest_client: self.identity_admin_client.delete_tenant(tenant) else: self.identity_admin_client.tenants.delete(tenant) def _create_creds(self, suffix="", admin=False): """Create random credentials under the following schema. If the name contains a '.' is the full class path of something, and we don't really care. If it isn't, it's probably a meaningful name, so use it. For logging purposes, -user and -tenant are long and redundant, don't use them. The user# will be sufficient to figure it out. """ if '.' in self.name: root = "" else: root = self.name tenant_name = data_utils.rand_name(root) + suffix tenant_desc = tenant_name + "-desc" tenant = self._create_tenant(name=tenant_name, description=tenant_desc) username = data_utils.rand_name(root) + suffix email = data_utils.rand_name(root) + suffix + "@example.com" user = self._create_user(username, self.password, tenant, email) # NOTE(andrey-mp): user needs this role to create containers in swift swift_operator_role = CONF.object_storage.operator_role self._assign_user_role(tenant, user, swift_operator_role) if admin: self._assign_user_role(tenant, user, CONF.identity.admin_role) return self._get_credentials(user, tenant) def _get_credentials(self, user, tenant): if self.tempest_client: user_get = user.get tenant_get = tenant.get else: user_get = user.__dict__.get tenant_get = tenant.__dict__.get return auth.get_credentials( username=user_get('name'), user_id=user_get('id'), tenant_name=tenant_get('name'), tenant_id=tenant_get('id'), password=self.password) def _create_network_resources(self, tenant_id): network = None subnet = None router = None # Make sure settings if self.network_resources: if self.network_resources['router']: if (not self.network_resources['subnet'] or not self.network_resources['network']): raise exceptions.InvalidConfiguration( 'A router requires a subnet and network') elif self.network_resources['subnet']: if not self.network_resources['network']: raise exceptions.InvalidConfiguration( 'A subnet requires a network') elif self.network_resources['dhcp']: raise exceptions.InvalidConfiguration('DHCP requires a subnet') data_utils.rand_name_root = data_utils.rand_name(self.name) if not self.network_resources or self.network_resources['network']: network_name = data_utils.rand_name_root + "-network" network = self._create_network(network_name, tenant_id) try: if not self.network_resources or self.network_resources['subnet']: subnet_name = data_utils.rand_name_root + "-subnet" subnet = self._create_subnet(subnet_name, tenant_id, network['id']) if not self.network_resources or self.network_resources['router']: router_name = data_utils.rand_name_root + "-router" router = self._create_router(router_name, tenant_id) self._add_router_interface(router['id'], subnet['id']) except Exception: if router: self._clear_isolated_router(router['id'], router['name']) if subnet: self._clear_isolated_subnet(subnet['id'], subnet['name']) if network: self._clear_isolated_network(network['id'], network['name']) raise return network, subnet, router def _create_network(self, name, tenant_id): if self.tempest_client: resp, resp_body = self.network_admin_client.create_network( name=name, tenant_id=tenant_id) else: body = {'network': {'tenant_id': tenant_id, 'name': name}} resp_body = self.network_admin_client.create_network(body) return resp_body['network'] def 
_create_subnet(self, subnet_name, tenant_id, network_id): if not self.tempest_client: body = {'subnet': {'name': subnet_name, 'tenant_id': tenant_id, 'network_id': network_id, 'ip_version': 4}} if self.network_resources: body['enable_dhcp'] = self.network_resources['dhcp'] base_cidr = netaddr.IPNetwork(CONF.network.tenant_network_cidr) mask_bits = CONF.network.tenant_network_mask_bits for subnet_cidr in base_cidr.subnet(mask_bits): try: if self.tempest_client: if self.network_resources: resp, resp_body = self.network_admin_client.\ create_subnet( network_id=network_id, cidr=str(subnet_cidr), name=subnet_name, tenant_id=tenant_id, enable_dhcp=self.network_resources['dhcp'], ip_version=4) else: resp, resp_body = self.network_admin_client.\ create_subnet(network_id=network_id, cidr=str(subnet_cidr), name=subnet_name, tenant_id=tenant_id, ip_version=4) else: body['subnet']['cidr'] = str(subnet_cidr) resp_body = self.network_admin_client.create_subnet(body) break except exceptions.BadRequest as e: if 'overlaps with another subnet' not in str(e): raise else: e = exceptions.BuildErrorException() e.message = 'Available CIDR for subnet creation could not be found' raise e return resp_body['subnet'] def _create_router(self, router_name, tenant_id): external_net_id = dict( network_id=CONF.network.public_network_id) if self.tempest_client: resp, resp_body = self.network_admin_client.create_router( router_name, external_gateway_info=external_net_id, tenant_id=tenant_id) else: body = {'router': {'name': router_name, 'tenant_id': tenant_id, 'external_gateway_info': external_net_id, 'admin_state_up': True}} resp_body = self.network_admin_client.create_router(body) return resp_body['router'] def _add_router_interface(self, router_id, subnet_id): if self.tempest_client: self.network_admin_client.add_router_interface_with_subnet_id( router_id, subnet_id) else: body = {'subnet_id': subnet_id} self.network_admin_client.add_interface_router(router_id, body) def get_primary_network(self): return self.isolated_net_resources.get('primary')[0] def get_primary_subnet(self): return self.isolated_net_resources.get('primary')[1] def get_primary_router(self): return self.isolated_net_resources.get('primary')[2] def get_admin_network(self): return self.isolated_net_resources.get('admin')[0] def get_admin_subnet(self): return self.isolated_net_resources.get('admin')[1] def get_admin_router(self): return self.isolated_net_resources.get('admin')[2] def get_alt_network(self): return self.isolated_net_resources.get('alt')[0] def get_alt_subnet(self): return self.isolated_net_resources.get('alt')[1] def get_alt_router(self): return self.isolated_net_resources.get('alt')[2] def get_credentials(self, credential_type): if self.isolated_creds.get(credential_type): credentials = self.isolated_creds[credential_type] else: is_admin = (credential_type == 'admin') credentials = self._create_creds(admin=is_admin) self.isolated_creds[credential_type] = credentials # Maintained until tests are ported LOG.info("Acquired isolated creds:\n credentials: %s" % credentials) if (CONF.service_available.neutron and not CONF.baremetal.driver_enabled): network, subnet, router = self._create_network_resources( credentials.tenant_id) self.isolated_net_resources[credential_type] = ( network, subnet, router,) LOG.info("Created isolated network resources for : \n" + " credentials: %s" % credentials) return credentials def get_primary_creds(self): return self.get_credentials('primary') def get_admin_creds(self): return self.get_credentials('admin') def 
get_alt_creds(self): return self.get_credentials('alt') def _clear_isolated_router(self, router_id, router_name): net_client = self.network_admin_client try: net_client.delete_router(router_id) except exceptions.NotFound: LOG.warn('router with name: %s not found for delete' % router_name) def _clear_isolated_subnet(self, subnet_id, subnet_name): net_client = self.network_admin_client try: net_client.delete_subnet(subnet_id) except exceptions.NotFound: LOG.warn('subnet with name: %s not found for delete' % subnet_name) def _clear_isolated_network(self, network_id, network_name): net_client = self.network_admin_client try: net_client.delete_network(network_id) except exceptions.NotFound: LOG.warn('network with name: %s not found for delete' % network_name) def _clear_isolated_net_resources(self): net_client = self.network_admin_client for cred in self.isolated_net_resources: network, subnet, router = self.isolated_net_resources.get(cred) LOG.debug("Clearing network: %(network)s, " "subnet: %(subnet)s, router: %(router)s", {'network': network, 'subnet': subnet, 'router': router}) if (not self.network_resources or self.network_resources.get('router')): try: if self.tempest_client: net_client.remove_router_interface_with_subnet_id( router['id'], subnet['id']) else: body = {'subnet_id': subnet['id']} net_client.remove_interface_router(router['id'], body) except exceptions.NotFound: LOG.warn('router with name: %s not found for delete' % router['name']) self._clear_isolated_router(router['id'], router['name']) if (not self.network_resources or self.network_resources.get('subnet')): self._clear_isolated_subnet(subnet['id'], subnet['name']) if (not self.network_resources or self.network_resources.get('network')): self._clear_isolated_network(network['id'], network['name']) def clear_isolated_creds(self): if not self.isolated_creds: return self._clear_isolated_net_resources() for creds in self.isolated_creds.itervalues(): try: self._delete_user(creds.user_id) except exceptions.NotFound: LOG.warn("user with name: %s not found for delete" % creds.username) try: self._delete_tenant(creds.tenant_id) except exceptions.NotFound: LOG.warn("tenant with name: %s not found for delete" % creds.tenant_name)
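A minimal usage sketch of the credential provider defined above may help; it is not part of the original module, the import path is assumed from the usual tempest layout, and the test name is illustrative only.

# Hypothetical usage sketch (assumed import path and test name; requires a configured tempest deployment).
from tempest.common.isolated_creds import IsolatedCreds

provider = IsolatedCreds("SampleSmokeTest", password="pass")
primary = provider.get_primary_creds()           # lazily creates tenant, user and role assignments
print(primary.username, primary.tenant_name)
# Network, subnet and router are only created when Neutron is available and the driver is not baremetal.
if provider.isolated_net_resources.get("primary"):
    print(provider.get_primary_network()["id"])
provider.clear_isolated_creds()                  # deletes users, tenants and any isolated network resources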
42.23445
79
0.582588
import netaddr from tempest import auth from tempest import clients from tempest.common import cred_provider from tempest.common.utils import data_utils from tempest import config from tempest import exceptions from tempest.openstack.common import log as logging CONF = config.CONF LOG = logging.getLogger(__name__) class IsolatedCreds(cred_provider.CredentialProvider): def __init__(self, name, tempest_client=True, interface='json', password='pass', network_resources=None): super(IsolatedCreds, self).__init__(name, tempest_client, interface, password, network_resources) self.network_resources = network_resources self.isolated_creds = {} self.isolated_net_resources = {} self.ports = [] self.tempest_client = tempest_client self.interface = interface self.password = password self.identity_admin_client, self.network_admin_client = ( self._get_admin_clients()) def _get_admin_clients(self): if self.tempest_client: os = clients.AdminManager(interface=self.interface) else: os = clients.OfficialClientManager( auth.get_default_credentials('identity_admin') ) return os.identity_client, os.network_client def _create_tenant(self, name, description): if self.tempest_client: _, tenant = self.identity_admin_client.create_tenant( name=name, description=description) else: tenant = self.identity_admin_client.tenants.create( name, description=description) return tenant def _get_tenant_by_name(self, name): if self.tempest_client: _, tenant = self.identity_admin_client.get_tenant_by_name(name) else: tenants = self.identity_admin_client.tenants.list() for ten in tenants: if ten['name'] == name: tenant = ten break else: raise exceptions.NotFound('No such tenant') return tenant def _create_user(self, username, password, tenant, email): if self.tempest_client: _, user = self.identity_admin_client.create_user(username, password, tenant['id'], email) else: user = self.identity_admin_client.users.create(username, password, email, tenant_id=tenant.id) return user def _get_user(self, tenant, username): if self.tempest_client: _, user = self.identity_admin_client.get_user_by_username( tenant['id'], username) else: user = self.identity_admin_client.users.get(username) return user def _list_roles(self): if self.tempest_client: _, roles = self.identity_admin_client.list_roles() else: roles = self.identity_admin_client.roles.list() return roles def _assign_user_role(self, tenant, user, role_name): role = None try: roles = self._list_roles() if self.tempest_client: role = next(r for r in roles if r['name'] == role_name) else: role = next(r for r in roles if r.name == role_name) except StopIteration: msg = 'No "%s" role found' % role_name raise exceptions.NotFound(msg) if self.tempest_client: self.identity_admin_client.assign_user_role(tenant['id'], user['id'], role['id']) else: self.identity_admin_client.roles.add_user_role(user.id, role.id, tenant.id) def _delete_user(self, user): if self.tempest_client: self.identity_admin_client.delete_user(user) else: self.identity_admin_client.users.delete(user) def _delete_tenant(self, tenant): if self.tempest_client: self.identity_admin_client.delete_tenant(tenant) else: self.identity_admin_client.tenants.delete(tenant) def _create_creds(self, suffix="", admin=False): if '.' 
in self.name: root = "" else: root = self.name tenant_name = data_utils.rand_name(root) + suffix tenant_desc = tenant_name + "-desc" tenant = self._create_tenant(name=tenant_name, description=tenant_desc) username = data_utils.rand_name(root) + suffix email = data_utils.rand_name(root) + suffix + "@example.com" user = self._create_user(username, self.password, tenant, email) swift_operator_role = CONF.object_storage.operator_role self._assign_user_role(tenant, user, swift_operator_role) if admin: self._assign_user_role(tenant, user, CONF.identity.admin_role) return self._get_credentials(user, tenant) def _get_credentials(self, user, tenant): if self.tempest_client: user_get = user.get tenant_get = tenant.get else: user_get = user.__dict__.get tenant_get = tenant.__dict__.get return auth.get_credentials( username=user_get('name'), user_id=user_get('id'), tenant_name=tenant_get('name'), tenant_id=tenant_get('id'), password=self.password) def _create_network_resources(self, tenant_id): network = None subnet = None router = None if self.network_resources: if self.network_resources['router']: if (not self.network_resources['subnet'] or not self.network_resources['network']): raise exceptions.InvalidConfiguration( 'A router requires a subnet and network') elif self.network_resources['subnet']: if not self.network_resources['network']: raise exceptions.InvalidConfiguration( 'A subnet requires a network') elif self.network_resources['dhcp']: raise exceptions.InvalidConfiguration('DHCP requires a subnet') data_utils.rand_name_root = data_utils.rand_name(self.name) if not self.network_resources or self.network_resources['network']: network_name = data_utils.rand_name_root + "-network" network = self._create_network(network_name, tenant_id) try: if not self.network_resources or self.network_resources['subnet']: subnet_name = data_utils.rand_name_root + "-subnet" subnet = self._create_subnet(subnet_name, tenant_id, network['id']) if not self.network_resources or self.network_resources['router']: router_name = data_utils.rand_name_root + "-router" router = self._create_router(router_name, tenant_id) self._add_router_interface(router['id'], subnet['id']) except Exception: if router: self._clear_isolated_router(router['id'], router['name']) if subnet: self._clear_isolated_subnet(subnet['id'], subnet['name']) if network: self._clear_isolated_network(network['id'], network['name']) raise return network, subnet, router def _create_network(self, name, tenant_id): if self.tempest_client: resp, resp_body = self.network_admin_client.create_network( name=name, tenant_id=tenant_id) else: body = {'network': {'tenant_id': tenant_id, 'name': name}} resp_body = self.network_admin_client.create_network(body) return resp_body['network'] def _create_subnet(self, subnet_name, tenant_id, network_id): if not self.tempest_client: body = {'subnet': {'name': subnet_name, 'tenant_id': tenant_id, 'network_id': network_id, 'ip_version': 4}} if self.network_resources: body['enable_dhcp'] = self.network_resources['dhcp'] base_cidr = netaddr.IPNetwork(CONF.network.tenant_network_cidr) mask_bits = CONF.network.tenant_network_mask_bits for subnet_cidr in base_cidr.subnet(mask_bits): try: if self.tempest_client: if self.network_resources: resp, resp_body = self.network_admin_client.\ create_subnet( network_id=network_id, cidr=str(subnet_cidr), name=subnet_name, tenant_id=tenant_id, enable_dhcp=self.network_resources['dhcp'], ip_version=4) else: resp, resp_body = self.network_admin_client.\ create_subnet(network_id=network_id, 
cidr=str(subnet_cidr), name=subnet_name, tenant_id=tenant_id, ip_version=4) else: body['subnet']['cidr'] = str(subnet_cidr) resp_body = self.network_admin_client.create_subnet(body) break except exceptions.BadRequest as e: if 'overlaps with another subnet' not in str(e): raise else: e = exceptions.BuildErrorException() e.message = 'Available CIDR for subnet creation could not be found' raise e return resp_body['subnet'] def _create_router(self, router_name, tenant_id): external_net_id = dict( network_id=CONF.network.public_network_id) if self.tempest_client: resp, resp_body = self.network_admin_client.create_router( router_name, external_gateway_info=external_net_id, tenant_id=tenant_id) else: body = {'router': {'name': router_name, 'tenant_id': tenant_id, 'external_gateway_info': external_net_id, 'admin_state_up': True}} resp_body = self.network_admin_client.create_router(body) return resp_body['router'] def _add_router_interface(self, router_id, subnet_id): if self.tempest_client: self.network_admin_client.add_router_interface_with_subnet_id( router_id, subnet_id) else: body = {'subnet_id': subnet_id} self.network_admin_client.add_interface_router(router_id, body) def get_primary_network(self): return self.isolated_net_resources.get('primary')[0] def get_primary_subnet(self): return self.isolated_net_resources.get('primary')[1] def get_primary_router(self): return self.isolated_net_resources.get('primary')[2] def get_admin_network(self): return self.isolated_net_resources.get('admin')[0] def get_admin_subnet(self): return self.isolated_net_resources.get('admin')[1] def get_admin_router(self): return self.isolated_net_resources.get('admin')[2] def get_alt_network(self): return self.isolated_net_resources.get('alt')[0] def get_alt_subnet(self): return self.isolated_net_resources.get('alt')[1] def get_alt_router(self): return self.isolated_net_resources.get('alt')[2] def get_credentials(self, credential_type): if self.isolated_creds.get(credential_type): credentials = self.isolated_creds[credential_type] else: is_admin = (credential_type == 'admin') credentials = self._create_creds(admin=is_admin) self.isolated_creds[credential_type] = credentials LOG.info("Acquired isolated creds:\n credentials: %s" % credentials) if (CONF.service_available.neutron and not CONF.baremetal.driver_enabled): network, subnet, router = self._create_network_resources( credentials.tenant_id) self.isolated_net_resources[credential_type] = ( network, subnet, router,) LOG.info("Created isolated network resources for : \n" + " credentials: %s" % credentials) return credentials def get_primary_creds(self): return self.get_credentials('primary') def get_admin_creds(self): return self.get_credentials('admin') def get_alt_creds(self): return self.get_credentials('alt') def _clear_isolated_router(self, router_id, router_name): net_client = self.network_admin_client try: net_client.delete_router(router_id) except exceptions.NotFound: LOG.warn('router with name: %s not found for delete' % router_name) def _clear_isolated_subnet(self, subnet_id, subnet_name): net_client = self.network_admin_client try: net_client.delete_subnet(subnet_id) except exceptions.NotFound: LOG.warn('subnet with name: %s not found for delete' % subnet_name) def _clear_isolated_network(self, network_id, network_name): net_client = self.network_admin_client try: net_client.delete_network(network_id) except exceptions.NotFound: LOG.warn('network with name: %s not found for delete' % network_name) def _clear_isolated_net_resources(self): net_client = 
self.network_admin_client for cred in self.isolated_net_resources: network, subnet, router = self.isolated_net_resources.get(cred) LOG.debug("Clearing network: %(network)s, " "subnet: %(subnet)s, router: %(router)s", {'network': network, 'subnet': subnet, 'router': router}) if (not self.network_resources or self.network_resources.get('router')): try: if self.tempest_client: net_client.remove_router_interface_with_subnet_id( router['id'], subnet['id']) else: body = {'subnet_id': subnet['id']} net_client.remove_interface_router(router['id'], body) except exceptions.NotFound: LOG.warn('router with name: %s not found for delete' % router['name']) self._clear_isolated_router(router['id'], router['name']) if (not self.network_resources or self.network_resources.get('subnet')): self._clear_isolated_subnet(subnet['id'], subnet['name']) if (not self.network_resources or self.network_resources.get('network')): self._clear_isolated_network(network['id'], network['name']) def clear_isolated_creds(self): if not self.isolated_creds: return self._clear_isolated_net_resources() for creds in self.isolated_creds.itervalues(): try: self._delete_user(creds.user_id) except exceptions.NotFound: LOG.warn("user with name: %s not found for delete" % creds.username) try: self._delete_tenant(creds.tenant_id) except exceptions.NotFound: LOG.warn("tenant with name: %s not found for delete" % creds.tenant_name)
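The subnet-selection loop in _create_subnet above carves candidate CIDRs out of the configured tenant network range and offers them to Neutron until one does not overlap an existing subnet. A standalone sketch of that enumeration, using an illustrative base CIDR and mask in place of the CONF values, is:

# Illustrative values; tempest reads these from CONF.network.tenant_network_cidr
# and CONF.network.tenant_network_mask_bits.
import netaddr

base_cidr = netaddr.IPNetwork("10.100.0.0/16")
mask_bits = 28
for i, subnet_cidr in enumerate(base_cidr.subnet(mask_bits)):
    # The real code tries to create each candidate subnet and breaks on the first
    # one Neutron accepts; here we only show the candidates being generated.
    print(str(subnet_cidr))   # 10.100.0.0/28, 10.100.0.16/28, 10.100.0.32/28, ...
    if i == 2:
        break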
true
true
f711f325d775ad9950782daf6d73cd89a5bd7e18
94,303
py
Python
custom-components/soco/core.py
JackJPowell/sensor.sonos_upcoming_media
d796ed5e26dfe49250b23a8632a5975df754866e
[ "Apache-2.0" ]
null
null
null
custom-components/soco/core.py
JackJPowell/sensor.sonos_upcoming_media
d796ed5e26dfe49250b23a8632a5975df754866e
[ "Apache-2.0" ]
null
null
null
custom-components/soco/core.py
JackJPowell/sensor.sonos_upcoming_media
d796ed5e26dfe49250b23a8632a5975df754866e
[ "Apache-2.0" ]
null
null
null
# pylint: disable=fixme, protected-access """The core module contains the SoCo class that implements the main entry to the SoCo functionality """ import datetime import logging import re import socket from functools import wraps from xml.sax.saxutils import escape from xml.parsers.expat import ExpatError import warnings import xmltodict import requests from requests.exceptions import ConnectionError as RequestsConnectionError from requests.exceptions import ConnectTimeout, ReadTimeout from . import config from .data_structures import ( DidlObject, DidlPlaylistContainer, DidlResource, Queue, to_didl_string, ) from .cache import Cache from .data_structures_entry import from_didl_string from .exceptions import ( SoCoSlaveException, SoCoUPnPException, NotSupportedException, SoCoNotVisibleException, ) from .groups import ZoneGroup from .music_library import MusicLibrary from .services import ( DeviceProperties, ContentDirectory, RenderingControl, AVTransport, ZoneGroupTopology, AlarmClock, SystemProperties, MusicServices, AudioIn, GroupRenderingControl, ) from .utils import really_utf8, camel_to_underscore, deprecated from .xml import XML _LOG = logging.getLogger(__name__) class _ArgsSingleton(type): """A metaclass which permits only a single instance of each derived class sharing the same `_class_group` class attribute to exist for any given set of positional arguments. Attempts to instantiate a second instance of a derived class, or another class with the same `_class_group`, with the same args will return the existing instance. For example: >>> class ArgsSingletonBase(object): ... __metaclass__ = _ArgsSingleton ... >>> class First(ArgsSingletonBase): ... _class_group = "greeting" ... def __init__(self, param): ... pass ... >>> class Second(ArgsSingletonBase): ... _class_group = "greeting" ... def __init__(self, param): ... pass >>> assert First('hi') is First('hi') >>> assert First('hi') is First('bye') AssertionError >>> assert First('hi') is Second('hi') """ _instances = {} def __call__(cls, *args, **kwargs): key = cls._class_group if hasattr(cls, "_class_group") else cls if key not in cls._instances: cls._instances[key] = {} if args not in cls._instances[key]: cls._instances[key][args] = super().__call__(*args, **kwargs) return cls._instances[key][args] class _SocoSingletonBase( # pylint: disable=too-few-public-methods,no-init _ArgsSingleton("ArgsSingletonMeta", (object,), {}) ): """The base class for the SoCo class. Uses a Python 2 and 3 compatible method of declaring a metaclass. See, eg, here: http://www.artima.com/weblogs/viewpost.jsp?thread=236234 and here: http://mikewatkins.ca/2008/11/29/python-2-and-3-metaclasses/ """ def only_on_master(function): """Decorator that raises SoCoSlaveException on master call on slave.""" @wraps(function) def inner_function(self, *args, **kwargs): """Master checking inner function.""" if not self.is_coordinator: message = ( 'The method or property "{}" can only be called/used ' "on the coordinator in a group".format(function.__name__) ) raise SoCoSlaveException(message) return function(self, *args, **kwargs) return inner_function # pylint: disable=R0904,too-many-instance-attributes class SoCo(_SocoSingletonBase): """A simple class for controlling a Sonos speaker. For any given set of arguments to __init__, only one instance of this class may be created. Subsequent attempts to create an instance with the same arguments will return the previously created instance. 
This means that all SoCo instances created with the same ip address are in fact the *same* SoCo instance, reflecting the real world position. .. rubric:: Basic Methods .. autosummary:: play_from_queue play play_uri pause stop end_direct_control_session seek next previous mute volume play_mode shuffle repeat cross_fade ramp_to_volume set_relative_volume get_current_track_info get_current_media_info get_speaker_info get_current_transport_info .. rubric:: Queue Management .. autosummary:: get_queue queue_size add_to_queue add_uri_to_queue add_multiple_to_queue remove_from_queue clear_queue .. rubric:: Group Management .. autosummary:: group partymode join unjoin all_groups all_zones visible_zones .. rubric:: Player Identity and Settings .. autosummary:: player_name uid household_id is_visible is_bridge is_coordinator is_soundbar bass treble loudness balance night_mode dialog_mode supports_fixed_volume fixed_volume trueplay status_light buttons_enabled .. rubric:: Playlists and Favorites .. autosummary:: get_sonos_playlists create_sonos_playlist create_sonos_playlist_from_queue remove_sonos_playlist add_item_to_sonos_playlist reorder_sonos_playlist clear_sonos_playlist move_in_sonos_playlist remove_from_sonos_playlist get_sonos_playlist_by_attr get_favorite_radio_shows get_favorite_radio_stations get_sonos_favorites .. rubric:: Miscellaneous .. autosummary:: music_source music_source_from_uri is_playing_radio is_playing_tv is_playing_line_in switch_to_line_in switch_to_tv available_actions set_sleep_timer get_sleep_timer create_stereo_pair separate_stereo_pair get_battery_info .. warning:: Properties on this object are not generally cached and may obtain information over the network, so may take longer than expected to set or return a value. It may be a good idea for you to cache the value in your own code. .. note:: Since all methods/properties on this object will result in an UPnP request, they might result in an exception without it being mentioned in the Raises section. In most cases, the exception will be a :class:`soco.exceptions.SoCoUPnPException` (if the player returns an UPnP error code), but in special cases it might also be another :class:`soco.exceptions.SoCoException` or even a `requests` exception. """ _class_group = "SoCo" # pylint: disable=super-on-old-class def __init__(self, ip_address): # Note: Creation of a SoCo instance should be as cheap and quick as # possible. Do not make any network calls here super().__init__() # Check if ip_address is a valid IPv4 representation. 
# Sonos does not (yet) support IPv6 try: socket.inet_aton(ip_address) except OSError as error: raise ValueError("Not a valid IP address string") from error #: The speaker's ip address self.ip_address = ip_address self.speaker_info = {} # Stores information about the current speaker # The services which we use # pylint: disable=invalid-name self.avTransport = AVTransport(self) self.contentDirectory = ContentDirectory(self) self.deviceProperties = DeviceProperties(self) self.renderingControl = RenderingControl(self) self.groupRenderingControl = GroupRenderingControl(self) self.zoneGroupTopology = ZoneGroupTopology(self) self.alarmClock = AlarmClock(self) self.systemProperties = SystemProperties(self) self.musicServices = MusicServices(self) self.audioIn = AudioIn(self) self.music_library = MusicLibrary(self) # Some private attributes self._all_zones = set() self._groups = set() self._is_bridge = None self._is_coordinator = False self._is_soundbar = None self._player_name = None self._uid = None self._household_id = None self._visible_zones = set() self._zgs_cache = Cache(default_timeout=5) self._zgs_result = None _LOG.debug("Created SoCo instance for ip: %s", ip_address) def __str__(self): return "<{} object at ip {}>".format(self.__class__.__name__, self.ip_address) def __repr__(self): return '{}("{}")'.format(self.__class__.__name__, self.ip_address) @property def player_name(self): """str: The speaker's name.""" # We could get the name like this: # result = self.deviceProperties.GetZoneAttributes() # return result["CurrentZoneName"] # but it is probably quicker to get it from the group topology # and take advantage of any caching self._parse_zone_group_state() return self._player_name @player_name.setter def player_name(self, playername): """Set the speaker's name.""" self.deviceProperties.SetZoneAttributes( [ ("DesiredZoneName", playername), ("DesiredIcon", ""), ("DesiredConfiguration", ""), ] ) @property def uid(self): """str: A unique identifier. Looks like: ``'RINCON_000XXXXXXXXXX1400'`` """ # Since this does not change over time (?) check whether we already # know the answer. If so, there is no need to go further if self._uid is not None: return self._uid # if not, we have to get it from the zone topology, which # is probably quicker than any alternative, since the zgt is probably # cached. This will set self._uid for us for next time, so we won't # have to do this again self._parse_zone_group_state() return self._uid # An alternative way of getting the uid is as follows: # self.device_description_url = \ # 'http://{0}:1400/xml/device_description.xml'.format( # self.ip_address) # response = requests.get(self.device_description_url).text # tree = XML.fromstring(response.encode('utf-8')) # udn = tree.findtext('.//{urn:schemas-upnp-org:device-1-0}UDN') # # the udn has a "uuid:" prefix before the uid, so we need to strip it # self._uid = uid = udn[5:] # return uid @property def household_id(self): """str: A unique identifier for all players in a household. Looks like: ``'Sonos_asahHKgjgJGjgjGjggjJgjJG34'`` """ # Since this does not change over time (?) check whether we already # know the answer. If so, return the cached version if self._household_id is None: self._household_id = self.deviceProperties.GetHouseholdID()[ "CurrentHouseholdID" ] return self._household_id @property def is_visible(self): """bool: Is this zone visible? A zone might be invisible if, for example, it is a bridge, or the slave part of stereo pair. 
""" # We could do this: # invisible = self.deviceProperties.GetInvisible()['CurrentInvisible'] # but it is better to do it in the following way, which uses the # zone group topology, to capitalise on any caching. return self in self.visible_zones @property def is_bridge(self): """bool: Is this zone a bridge?""" # Since this does not change over time (?) check whether we already # know the answer. If so, there is no need to go further if self._is_bridge is not None: return self._is_bridge # if not, we have to get it from the zone topology. This will set # self._is_bridge for us for next time, so we won't have to do this # again self._parse_zone_group_state() return self._is_bridge @property def is_coordinator(self): """bool: Is this zone a group coordinator?""" # We could do this: # invisible = self.deviceProperties.GetInvisible()['CurrentInvisible'] # but it is better to do it in the following way, which uses the # zone group topology, to capitalise on any caching. self._parse_zone_group_state() return self._is_coordinator @property def is_soundbar(self): """bool: Is this zone a soundbar (i.e. has night mode etc.)?""" if self._is_soundbar is None: if not self.speaker_info: self.get_speaker_info() model_name = self.speaker_info["model_name"].lower() self._is_soundbar = any(model_name.endswith(s) for s in SOUNDBARS) return self._is_soundbar @property def play_mode(self): """str: The queue's play mode. Case-insensitive options are: * ``'NORMAL'`` -- Turns off shuffle and repeat. * ``'REPEAT_ALL'`` -- Turns on repeat and turns off shuffle. * ``'SHUFFLE'`` -- Turns on shuffle *and* repeat. (It's strange, I know.) * ``'SHUFFLE_NOREPEAT'`` -- Turns on shuffle and turns off repeat. * ``'REPEAT_ONE'`` -- Turns on repeat one and turns off shuffle. * ``'SHUFFLE_REPEAT_ONE'`` -- Turns on shuffle *and* repeat one. (It's strange, I know.) """ result = self.avTransport.GetTransportSettings( [ ("InstanceID", 0), ] ) return result["PlayMode"] @play_mode.setter def play_mode(self, playmode): """Set the speaker's mode.""" playmode = playmode.upper() if playmode not in PLAY_MODES.keys(): raise KeyError("'%s' is not a valid play mode" % playmode) self.avTransport.SetPlayMode([("InstanceID", 0), ("NewPlayMode", playmode)]) @property def shuffle(self): """bool: The queue's shuffle option. True if enabled, False otherwise. """ return PLAY_MODES[self.play_mode][0] @shuffle.setter def shuffle(self, shuffle): """Set the queue's shuffle option.""" repeat = self.repeat self.play_mode = PLAY_MODE_BY_MEANING[(shuffle, repeat)] @property def repeat(self): """bool: The queue's repeat option. True if enabled, False otherwise. Can also be the string ``'ONE'`` for play mode ``'REPEAT_ONE'``. """ return PLAY_MODES[self.play_mode][1] @repeat.setter def repeat(self, repeat): """Set the queue's repeat option""" shuffle = self.shuffle self.play_mode = PLAY_MODE_BY_MEANING[(shuffle, repeat)] @property @only_on_master # Only for symmetry with the setter def cross_fade(self): """bool: The speaker's cross fade state. 
True if enabled, False otherwise """ response = self.avTransport.GetCrossfadeMode( [ ("InstanceID", 0), ] ) cross_fade_state = response["CrossfadeMode"] return bool(int(cross_fade_state)) @cross_fade.setter @only_on_master def cross_fade(self, crossfade): """Set the speaker's cross fade state.""" crossfade_value = "1" if crossfade else "0" self.avTransport.SetCrossfadeMode( [("InstanceID", 0), ("CrossfadeMode", crossfade_value)] ) def ramp_to_volume(self, volume, ramp_type="SLEEP_TIMER_RAMP_TYPE"): """Smoothly change the volume. There are three ramp types available: * ``'SLEEP_TIMER_RAMP_TYPE'`` (default): Linear ramp from the current volume up or down to the new volume. The ramp rate is 1.25 steps per second. For example: To change from volume 50 to volume 30 would take 16 seconds. * ``'ALARM_RAMP_TYPE'``: Resets the volume to zero, waits for about 30 seconds, and then ramps the volume up to the desired value at a rate of 2.5 steps per second. For example: Volume 30 would take 12 seconds for the ramp up (not considering the wait time). * ``'AUTOPLAY_RAMP_TYPE'``: Resets the volume to zero and then quickly ramps up at a rate of 50 steps per second. For example: Volume 30 will take only 0.6 seconds. The ramp rate is selected by Sonos based on the chosen ramp type and the resulting transition time returned. This method is non blocking and has no network overhead once sent. Args: volume (int): The new volume. ramp_type (str, optional): The desired ramp type, as described above. Returns: int: The ramp time in seconds, rounded down. Note that this does not include the wait time. """ response = self.renderingControl.RampToVolume( [ ("InstanceID", 0), ("Channel", "Master"), ("RampType", ramp_type), ("DesiredVolume", volume), ("ResetVolumeAfter", False), ("ProgramURI", ""), ] ) return int(response["RampTime"]) def set_relative_volume(self, relative_volume): """Adjust the volume up or down by a relative amount. If the adjustment causes the volume to overshoot the maximum value of 100, the volume will be set to 100. If the adjustment causes the volume to undershoot the minimum value of 0, the volume will be set to 0. Note that this method is an alternative to using addition and subtraction assignment operators (+=, -=) on the `volume` property of a `SoCo` instance. These operators perform the same function as `set_relative_volume` but require two network calls per operation instead of one. Args: relative_volume (int): The relative volume adjustment. Can be positive or negative. Returns: int: The new volume setting. Raises: ValueError: If ``relative_volume`` cannot be cast as an integer. """ relative_volume = int(relative_volume) # Sonos will automatically handle out-of-range adjustments response = self.renderingControl.SetRelativeVolume( [("InstanceID", 0), ("Channel", "Master"), ("Adjustment", relative_volume)] ) return int(response["NewVolume"]) @only_on_master def play_from_queue(self, index, start=True): """Play a track from the queue by index. The index number is required as an argument, where the first index is 0. Args: index (int): 0-based index of the track to play start (bool): If the item that has been set should start playing """ # Grab the speaker's information if we haven't already since we'll need # it in the next step. 
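# Sonos addresses the queue through an "x-rincon-queue:<UID>#0" URI built from
# this zone's UID, and the Seek target is 1-based, which is why the code below
# seeks to index + 1.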
if not self.speaker_info: self.get_speaker_info() # first, set the queue itself as the source URI uri = "x-rincon-queue:{}#0".format(self.uid) self.avTransport.SetAVTransportURI( [("InstanceID", 0), ("CurrentURI", uri), ("CurrentURIMetaData", "")] ) # second, set the track number with a seek command self.avTransport.Seek( [("InstanceID", 0), ("Unit", "TRACK_NR"), ("Target", index + 1)] ) # finally, just play what's set if needed if start: self.play() @only_on_master def play(self): """Play the currently selected track.""" self.avTransport.Play([("InstanceID", 0), ("Speed", 1)]) @only_on_master # pylint: disable=too-many-arguments def play_uri(self, uri="", meta="", title="", start=True, force_radio=False): """Play a URI. Playing a URI will replace what was playing with the stream given by the URI. For some streams at least a title is required as metadata. This can be provided using the ``meta`` argument or the ``title`` argument. If the ``title`` argument is provided minimal metadata will be generated. If ``meta`` argument is provided the ``title`` argument is ignored. Args: uri (str): URI of the stream to be played. meta (str): The metadata to show in the player, DIDL format. title (str): The title to show in the player (if no meta). start (bool): If the URI that has been set should start playing. force_radio (bool): forces a uri to play as a radio stream. On a Sonos controller music is shown with one of the following display formats and controls: * Radio format: Shows the name of the radio station and other available data. No seek, next, previous, or voting capability. Examples: TuneIn, radioPup * Smart Radio: Shows track name, artist, and album. Limited seek, next and sometimes voting capability depending on the Music Service. Examples: Amazon Prime Stations, Pandora Radio Stations. * Track format: Shows track name, artist, and album the same as when playing from a queue. Full seek, next and previous capabilities. Examples: Spotify, Napster, Rhapsody. How it is displayed is determined by the URI prefix: ``x-sonosapi-stream:``, ``x-sonosapi-radio:``, ``x-rincon-mp3radio:``, ``hls-radio:`` default to radio or smart radio format depending on the stream. Others default to track format: ``x-file-cifs:``, ``aac:``, ``http:``, ``https:``, ``x-sonos-spotify:`` (used by Spotify), ``x-sonosapi-hls-static:`` (Amazon Prime), ``x-sonos-http:`` (Google Play & Napster). Some URIs that default to track format could be radio streams, typically ``http:``, ``https:`` or ``aac:``. To force display and controls to Radio format set ``force_radio=True`` .. note:: Other URI prefixes exist but are less common. If you have information on these please add to this doc string. .. note:: A change in Sonos® (as of at least version 6.4.2) means that the devices no longer accepts ordinary ``http:`` and ``https:`` URIs for radio stations. This method has the option to replaces these prefixes with the one that Sonos® expects: ``x-rincon-mp3radio:`` by using the "force_radio=True" parameter. A few streams may fail if not forced to to Radio format. 
""" if meta == "" and title != "": meta_template = ( '<DIDL-Lite xmlns:dc="http://purl.org/dc/elements' '/1.1/" xmlns:upnp="urn:schemas-upnp-org:metadata-1-0/upnp/" ' 'xmlns:r="urn:schemas-rinconnetworks-com:metadata-1-0/" ' 'xmlns="urn:schemas-upnp-org:metadata-1-0/DIDL-Lite/">' '<item id="R:0/0/0" parentID="R:0/0" restricted="true">' "<dc:title>{title}</dc:title><upnp:class>" "object.item.audioItem.audioBroadcast</upnp:class><desc " 'id="cdudn" nameSpace="urn:schemas-rinconnetworks-com:' 'metadata-1-0/">{service}</desc></item></DIDL-Lite>' ) tunein_service = "SA_RINCON65031_" # Radio stations need to have at least a title to play meta = meta_template.format(title=escape(title), service=tunein_service) # change uri prefix to force radio style display and commands if force_radio: colon = uri.find(":") if colon > 0: uri = "x-rincon-mp3radio{}".format(uri[colon:]) self.avTransport.SetAVTransportURI( [("InstanceID", 0), ("CurrentURI", uri), ("CurrentURIMetaData", meta)] ) # The track is enqueued, now play it if needed if start: return self.play() return False @only_on_master def pause(self): """Pause the currently playing track.""" self.avTransport.Pause([("InstanceID", 0), ("Speed", 1)]) @only_on_master def stop(self): """Stop the currently playing track.""" self.avTransport.Stop([("InstanceID", 0), ("Speed", 1)]) @only_on_master def end_direct_control_session(self): """Ends all third-party controlled streaming sessions.""" self.avTransport.EndDirectControlSession([("InstanceID", 0)]) @only_on_master def seek(self, position=None, track=None): """Seek to a given position. You can seek both a relative position in the current track and a track number in the queue. It is even possible to seek to a tuple or dict containing the absolute position (relative pos. and track nr.):: t = ('0:00:00', 0) player.seek(*t) d = {'position': '0:00:00', 'track': 0} player.seek(**d) Args: position (str): The desired timestamp in the current track, specified in the format of HH:MM:SS or H:MM:SS track (int): The (zero-based) track index in the queue Raises: ValueError: If neither position nor track are specified. SoCoUPnPException: UPnP Error 701 if seeking is not supported, UPnP Error 711 if the target is invalid. Note: The 'track' parameter can only be used if the queue is currently playing. If not, use :py:meth:`play_from_queue`. This is currently faster than :py:meth:`play_from_queue` if already using the queue, as it does not reinstate the queue. If speaker is already playing it will continue to play after seek. If paused it will remain paused. """ if track is None and position is None: raise ValueError("No position or track information given") if track is not None: self.avTransport.Seek( [("InstanceID", 0), ("Unit", "TRACK_NR"), ("Target", track + 1)] ) if position is not None: if not re.match(r"^[0-9][0-9]?:[0-9][0-9]:[0-9][0-9]$", position): raise ValueError("invalid timestamp, use HH:MM:SS format") self.avTransport.Seek( [("InstanceID", 0), ("Unit", "REL_TIME"), ("Target", position)] ) @only_on_master def next(self): """Go to the next track. Keep in mind that next() can return errors for a variety of reasons. For example, if the Sonos is streaming Pandora and you call next() several times in quick succession an error code will likely be returned (since Pandora has limits on how many songs can be skipped). """ self.avTransport.Next([("InstanceID", 0), ("Speed", 1)]) @only_on_master def previous(self): """Go back to the previously played track. 
Keep in mind that previous() can return errors for a variety of reasons. For example, previous() will return an error code (error code 701) if the Sonos is streaming Pandora since you can't go back on tracks. """ self.avTransport.Previous([("InstanceID", 0), ("Speed", 1)]) @property def mute(self): """bool: The speaker's mute state. True if muted, False otherwise. """ response = self.renderingControl.GetMute( [("InstanceID", 0), ("Channel", "Master")] ) mute_state = response["CurrentMute"] return bool(int(mute_state)) @mute.setter def mute(self, mute): """Mute (or unmute) the speaker.""" mute_value = "1" if mute else "0" self.renderingControl.SetMute( [("InstanceID", 0), ("Channel", "Master"), ("DesiredMute", mute_value)] ) @property def volume(self): """int: The speaker's volume. An integer between 0 and 100. """ response = self.renderingControl.GetVolume( [ ("InstanceID", 0), ("Channel", "Master"), ] ) volume = response["CurrentVolume"] return int(volume) @volume.setter def volume(self, volume): """Set the speaker's volume.""" volume = int(volume) volume = max(0, min(volume, 100)) # Coerce in range self.renderingControl.SetVolume( [("InstanceID", 0), ("Channel", "Master"), ("DesiredVolume", volume)] ) @property def bass(self): """int: The speaker's bass EQ. An integer between -10 and 10. """ response = self.renderingControl.GetBass( [ ("InstanceID", 0), ("Channel", "Master"), ] ) bass = response["CurrentBass"] return int(bass) @bass.setter def bass(self, bass): """Set the speaker's bass.""" bass = int(bass) bass = max(-10, min(bass, 10)) # Coerce in range self.renderingControl.SetBass([("InstanceID", 0), ("DesiredBass", bass)]) @property def treble(self): """int: The speaker's treble EQ. An integer between -10 and 10. """ response = self.renderingControl.GetTreble( [ ("InstanceID", 0), ("Channel", "Master"), ] ) treble = response["CurrentTreble"] return int(treble) @treble.setter def treble(self, treble): """Set the speaker's treble.""" treble = int(treble) treble = max(-10, min(treble, 10)) # Coerce in range self.renderingControl.SetTreble([("InstanceID", 0), ("DesiredTreble", treble)]) @property def loudness(self): """bool: The speaker's loudness compensation. True if on, False otherwise. Loudness is a complicated topic. You can read about it on Wikipedia: https://en.wikipedia.org/wiki/Loudness """ response = self.renderingControl.GetLoudness( [ ("InstanceID", 0), ("Channel", "Master"), ] ) loudness = response["CurrentLoudness"] return bool(int(loudness)) @loudness.setter def loudness(self, loudness): """Switch on/off the speaker's loudness compensation.""" loudness_value = "1" if loudness else "0" self.renderingControl.SetLoudness( [ ("InstanceID", 0), ("Channel", "Master"), ("DesiredLoudness", loudness_value), ] ) @property def balance(self): """The left/right balance for the speaker(s). Returns: tuple: A 2-tuple (left_channel, right_channel) of integers between 0 and 100, representing the volume of each channel. E.g., (100, 100) represents full volume to both channels, whereas (100, 0) represents left channel at full volume, right channel at zero volume. 
""" response_lf = self.renderingControl.GetVolume( [ ("InstanceID", 0), ("Channel", "LF"), ] ) response_rf = self.renderingControl.GetVolume( [ ("InstanceID", 0), ("Channel", "RF"), ] ) volume_lf = response_lf["CurrentVolume"] volume_rf = response_rf["CurrentVolume"] return int(volume_lf), int(volume_rf) @balance.setter def balance(self, left_right_tuple): """Set the left/right balance for the speaker(s).""" left, right = left_right_tuple left = int(left) right = int(right) left = max(0, min(left, 100)) # Coerce in range right = max(0, min(right, 100)) # Coerce in range self.renderingControl.SetVolume( [("InstanceID", 0), ("Channel", "LF"), ("DesiredVolume", left)] ) self.renderingControl.SetVolume( [("InstanceID", 0), ("Channel", "RF"), ("DesiredVolume", right)] ) @property def night_mode(self): """bool: The speaker's night mode. True if on, False if off, None if not supported. """ if not self.is_soundbar: return None response = self.renderingControl.GetEQ( [("InstanceID", 0), ("EQType", "NightMode")] ) return bool(int(response["CurrentValue"])) @night_mode.setter def night_mode(self, night_mode): """Switch on/off the speaker's night mode. :param night_mode: Enable or disable night mode :type night_mode: bool :raises NotSupportedException: If the device does not support night mode. """ if not self.is_soundbar: message = "This device does not support night mode" raise NotSupportedException(message) self.renderingControl.SetEQ( [ ("InstanceID", 0), ("EQType", "NightMode"), ("DesiredValue", int(night_mode)), ] ) @property def dialog_mode(self): """bool: The speaker's dialog mode. True if on, False if off, None if not supported. """ if not self.is_soundbar: return None response = self.renderingControl.GetEQ( [("InstanceID", 0), ("EQType", "DialogLevel")] ) return bool(int(response["CurrentValue"])) @dialog_mode.setter def dialog_mode(self, dialog_mode): """Switch on/off the speaker's dialog mode. :param dialog_mode: Enable or disable dialog mode :type dialog_mode: bool :raises NotSupportedException: If the device does not support dialog mode. """ if not self.is_soundbar: message = "This device does not support dialog mode" raise NotSupportedException(message) self.renderingControl.SetEQ( [ ("InstanceID", 0), ("EQType", "DialogLevel"), ("DesiredValue", int(dialog_mode)), ] ) @property def trueplay(self): """bool: Whether Trueplay is enabled on this device. True if on, False if off. Devices that do not support Trueplay, or which do not have a current Trueplay calibration, will return `None` on getting the property, and raise a `NotSupportedException` when setting the property. Can only be set on visible devices. Attempting to set on non-visible devices will raise a `SoCoNotVisibleException`. """ response = self.renderingControl.GetRoomCalibrationStatus([("InstanceID", 0)]) if response["RoomCalibrationAvailable"] == "0": return None else: return response["RoomCalibrationEnabled"] == "1" @trueplay.setter def trueplay(self, trueplay): """Toggle the device's TruePlay setting. Only available to Sonos speakers, not the Connect, Amp, etc., and only available to speakers that have a current Trueplay calibration. :param trueplay: Enable or disable Trueplay. :type trueplay: bool :raises NotSupportedException: If the device does not support Trueplay or doesn't have a current calibration. :raises SoCoNotVisibleException: If the device is not visible. 
""" response = self.renderingControl.GetRoomCalibrationStatus([("InstanceID", 0)]) if response["RoomCalibrationAvailable"] == "0": raise NotSupportedException if not self.is_visible: raise SoCoNotVisibleException trueplay_value = "1" if trueplay else "0" self.renderingControl.SetRoomCalibrationStatus( [ ("InstanceID", 0), ("RoomCalibrationEnabled", trueplay_value), ] ) @property def supports_fixed_volume(self): """bool: Whether the device supports fixed volume output.""" response = self.renderingControl.GetSupportsOutputFixed([("InstanceID", 0)]) return response["CurrentSupportsFixed"] == "1" @property def fixed_volume(self): """bool: The device's fixed volume output setting. True if on, False if off. Only applicable to certain Sonos devices (Connect and Port at the time of writing). All other devices always return False. Attempting to set this property for a non-applicable device will raise a `NotSupportedException`. """ response = self.renderingControl.GetOutputFixed([("InstanceID", 0)]) return response["CurrentFixed"] == "1" @fixed_volume.setter def fixed_volume(self, fixed_volume): """Switch on/off the device's fixed volume output setting. Only applicable to certain Sonos devices. :param fixed_volume: Enable or disable fixed volume output mode. :type fixed_volume: bool :raises NotSupportedException: If the device does not support fixed volume output mode. """ try: self.renderingControl.SetOutputFixed( [ ("InstanceID", 0), ("DesiredFixed", "1" if fixed_volume else "0"), ] ) except SoCoUPnPException as error: raise NotSupportedException from error def _parse_zone_group_state(self): """The Zone Group State contains a lot of useful information. Retrieve and parse it, and populate the relevant properties. """ # zoneGroupTopology.GetZoneGroupState()['ZoneGroupState'] returns XML like # this: # # <ZoneGroups> # <ZoneGroup Coordinator="RINCON_000XXX1400" ID="RINCON_000XXXX1400:0"> # <ZoneGroupMember # BootSeq="33" # Configuration="1" # Icon="x-rincon-roomicon:zoneextender" # Invisible="1" # IsZoneBridge="1" # Location="http://192.168.1.100:1400/xml/device_description.xml" # MinCompatibleVersion="22.0-00000" # SoftwareVersion="24.1-74200" # UUID="RINCON_000ZZZ1400" # ZoneName="BRIDGE"/> # </ZoneGroup> # <ZoneGroup Coordinator="RINCON_000XXX1400" ID="RINCON_000XXX1400:46"> # <ZoneGroupMember # BootSeq="44" # Configuration="1" # Icon="x-rincon-roomicon:living" # Location="http://192.168.1.101:1400/xml/device_description.xml" # MinCompatibleVersion="22.0-00000" # SoftwareVersion="24.1-74200" # UUID="RINCON_000XXX1400" # ZoneName="Living Room"/> # <ZoneGroupMember # BootSeq="52" # Configuration="1" # Icon="x-rincon-roomicon:kitchen" # Location="http://192.168.1.102:1400/xml/device_description.xml" # MinCompatibleVersion="22.0-00000" # SoftwareVersion="24.1-74200" # UUID="RINCON_000YYY1400" # ZoneName="Kitchen"/> # </ZoneGroup> # </ZoneGroups> # def parse_zone_group_member(member_element): """Parse a ZoneGroupMember or Satellite element from Zone Group State, create a SoCo instance for the member, set basic attributes and return it.""" # Create a SoCo instance for each member. Because SoCo # instances are singletons, this is cheap if they have already # been created, and useful if they haven't. We can then # update various properties for that instance. 
member_attribs = member_element.attrib ip_addr = member_attribs["Location"].split("//")[1].split(":")[0] zone = config.SOCO_CLASS(ip_addr) # share our cache zone._zgs_cache = self._zgs_cache # uid doesn't change, but it's not harmful to (re)set it, in case # the zone is as yet unseen. zone._uid = member_attribs["UUID"] zone._player_name = member_attribs["ZoneName"] # add the zone to the set of all members, and to the set # of visible members if appropriate is_visible = member_attribs.get("Invisible") != "1" if is_visible: self._visible_zones.add(zone) self._all_zones.add(zone) return zone # This is called quite frequently, so it is worth optimising it. # Maintain a private cache. If the zgt has not changed, there is no # need to repeat all the XML parsing. In addition, switch on network # caching for a short interval (5 secs). zgs = self.zoneGroupTopology.GetZoneGroupState(cache=self._zgs_cache)[ "ZoneGroupState" ] if zgs == self._zgs_result: return self._zgs_result = zgs tree = XML.fromstring(zgs.encode("utf-8")) # Empty the set of all zone_groups self._groups.clear() # and the set of all members self._all_zones.clear() self._visible_zones.clear() # Loop over each ZoneGroup Element for group_element in tree.find("ZoneGroups").findall("ZoneGroup"): coordinator_uid = group_element.attrib["Coordinator"] group_uid = group_element.attrib["ID"] group_coordinator = None members = set() for member_element in group_element.findall("ZoneGroupMember"): zone = parse_zone_group_member(member_element) # Perform extra processing relevant to direct zone group # members # # If this element has the same UUID as the coordinator, it is # the coordinator if zone._uid == coordinator_uid: group_coordinator = zone zone._is_coordinator = True else: zone._is_coordinator = False # is_bridge doesn't change, but it does no real harm to # set/reset it here, just in case the zone has not been seen # before zone._is_bridge = member_element.attrib.get("IsZoneBridge") == "1" # add the zone to the members for this group members.add(zone) # Loop over Satellite elements if present, and process as for # ZoneGroup elements for satellite_element in member_element.findall("Satellite"): zone = parse_zone_group_member(satellite_element) # Assume a satellite can't be a bridge or coordinator, so # no need to check. # # Add the zone to the members for this group. members.add(zone) # Now create a ZoneGroup with this info and add it to the list # of groups self._groups.add(ZoneGroup(group_uid, group_coordinator, members)) @property def all_groups(self): """set of :class:`soco.groups.ZoneGroup`: All available groups.""" self._parse_zone_group_state() return self._groups.copy() @property def group(self): """:class:`soco.groups.ZoneGroup`: The Zone Group of which this device is a member. None if this zone is a slave in a stereo pair. 
""" for group in self.all_groups: if self in group: return group return None # To get the group directly from the network, try the code below # though it is probably slower than that above # current_group_id = self.zoneGroupTopology.GetZoneGroupAttributes()[ # 'CurrentZoneGroupID'] # if current_group_id: # for group in self.all_groups: # if group.uid == current_group_id: # return group # else: # return None @property def all_zones(self): """set of :class:`soco.groups.ZoneGroup`: All available zones.""" self._parse_zone_group_state() return self._all_zones.copy() @property def visible_zones(self): """set of :class:`soco.groups.ZoneGroup`: All visible zones.""" self._parse_zone_group_state() return self._visible_zones.copy() def partymode(self): """Put all the speakers in the network in the same group, a.k.a Party Mode. This blog shows the initial research responsible for this: http://blog.travelmarx.com/2010/06/exploring-sonos-via-upnp.html The trick seems to be (only tested on a two-speaker setup) to tell each speaker which to join. There's probably a bit more to it if multiple groups have been defined. """ # Tell every other visible zone to join this one # pylint: disable = expression-not-assigned [zone.join(self) for zone in self.visible_zones if zone is not self] def join(self, master): """Join this speaker to another "master" speaker.""" self.avTransport.SetAVTransportURI( [ ("InstanceID", 0), ("CurrentURI", "x-rincon:{}".format(master.uid)), ("CurrentURIMetaData", ""), ] ) self._zgs_cache.clear() def unjoin(self): """Remove this speaker from a group. Seems to work ok even if you remove what was previously the group master from it's own group. If the speaker was not in a group also returns ok. """ self.avTransport.BecomeCoordinatorOfStandaloneGroup([("InstanceID", 0)]) self._zgs_cache.clear() def create_stereo_pair(self, rh_slave_speaker): """Create a stereo pair. This speaker becomes the master, left-hand speaker of the stereo pair. The ``rh_slave_speaker`` becomes the right-hand speaker. Note that this operation will succeed on dissimilar speakers, unlike when using the official Sonos apps. Args: rh_slave_speaker (SoCo): The speaker that will be added as the right-hand, slave speaker of the stereo pair. Raises: SoCoUPnPException: if either speaker is already part of a stereo pair. """ # The pairing operation must be applied to the speaker that will # become the master (the left-hand speaker of the pair). # Note that if either speaker is part of a group, the call will # succeed. param = self.uid + ":LF,LF;" + rh_slave_speaker.uid + ":RF,RF" self.deviceProperties.AddBondedZones([("ChannelMapSet", param)]) def separate_stereo_pair(self): """Separate a stereo pair. This can be called on either the master (left-hand) speaker, or on the slave (right-hand) speaker, to create two independent zones. Raises: SoCoUPnPException: if the speaker is not a member of a stereo pair. """ self.deviceProperties.RemoveBondedZones( [("ChannelMapSet", ""), ("KeepGrouped", "0")] ) def switch_to_line_in(self, source=None): """Switch the speaker's input to line-in. Args: source (SoCo): The speaker whose line-in should be played. Default is line-in from the speaker itself. 
""" if source: uid = source.uid else: uid = self.uid self.avTransport.SetAVTransportURI( [ ("InstanceID", 0), ("CurrentURI", "x-rincon-stream:{}".format(uid)), ("CurrentURIMetaData", ""), ] ) @property def is_playing_radio(self): """bool: Is the speaker playing radio?""" return self.music_source == MUSIC_SRC_RADIO @property def is_playing_line_in(self): """bool: Is the speaker playing line-in?""" return self.music_source == MUSIC_SRC_LINE_IN @property def is_playing_tv(self): """bool: Is the playbar speaker input from TV?""" return self.music_source == MUSIC_SRC_TV @staticmethod def music_source_from_uri(uri): """Determine a music source from a URI. Arguments: uri (str) : The URI representing the music source Returns: str: The current source of music. Possible return values are: * ``'NONE'`` -- speaker has no music to play. * ``'LIBRARY'`` -- speaker is playing queued titles from the music library. * ``'RADIO'`` -- speaker is playing radio. * ``'WEB_FILE'`` -- speaker is playing a music file via http/https. * ``'LINE_IN'`` -- speaker is playing music from line-in. * ``'TV'`` -- speaker is playing input from TV. * ``'AIRPLAY'`` -- speaker is playing from AirPlay. * ``'UNKNOWN'`` -- any other input. The strings above can be imported as ``MUSIC_SRC_LIBRARY``, ``MUSIC_SRC_RADIO``, etc. """ for regex, source in SOURCES.items(): if re.match(regex, uri) is not None: return source return MUSIC_SRC_UNKNOWN @property def music_source(self): """str: The current music source (radio, TV, line-in, etc.). Possible return values are the same as used in `music_source_from_uri()`. """ response = self.avTransport.GetPositionInfo( [("InstanceID", 0), ("Channel", "Master")] ) return self.music_source_from_uri(response["TrackURI"]) def switch_to_tv(self): """Switch the playbar speaker's input to TV.""" self.avTransport.SetAVTransportURI( [ ("InstanceID", 0), ("CurrentURI", "x-sonos-htastream:{}:spdif".format(self.uid)), ("CurrentURIMetaData", ""), ] ) @property def status_light(self): """bool: The white Sonos status light between the mute button and the volume up button on the speaker. True if on, otherwise False. """ result = self.deviceProperties.GetLEDState() LEDState = result["CurrentLEDState"] # pylint: disable=invalid-name return LEDState == "On" @status_light.setter def status_light(self, led_on): """Switch on/off the speaker's status light.""" led_state = "On" if led_on else "Off" self.deviceProperties.SetLEDState( [ ("DesiredLEDState", led_state), ] ) @property def buttons_enabled(self): """bool: Whether the control buttons on the device are enabled. `True` if the control buttons are enabled, `False` if disabled. This property can only be set on visible speakers, and will enable or disable the buttons for all speakers in any bonded set (e.g., a stereo pair). Attempting to set it on invisible speakers (e.g., the RH speaker of a stereo pair) will raise a `SoCoNotVisibleException`. """ lock_state = self.deviceProperties.GetButtonLockState()[ "CurrentButtonLockState" ] return lock_state == "Off" @buttons_enabled.setter def buttons_enabled(self, enabled): """Enable or disable the device's control buttons. Args: bool: True to enable the buttons, False to disable. Raises: SoCoNotVisibleException: If the speaker is not visible. """ if not self.is_visible: raise SoCoNotVisibleException lock_state = "Off" if enabled else "On" self.deviceProperties.SetButtonLockState( [ ("DesiredButtonLockState", lock_state), ] ) def get_current_track_info(self): """Get information about the currently playing track. 
Returns: dict: A dictionary containing information about the currently playing track: playlist_position, duration, title, artist, album, position and an album_art link. If we're unable to return data for a field, we'll return an empty string. This can happen for all kinds of reasons so be sure to check values. For example, a track may not have complete metadata and be missing an album name. In this case track['album'] will be an empty string. .. note:: Calling this method on a slave in a group will not return the track the group is playing, but the last track this speaker was playing. """ response = self.avTransport.GetPositionInfo( [("InstanceID", 0), ("Channel", "Master")] ) track = { "title": "", "artist": "", "album": "", "album_art": "", "position": "", } track["playlist_position"] = response["Track"] track["duration"] = response["TrackDuration"] track["uri"] = response["TrackURI"] track["position"] = response["RelTime"] metadata = response["TrackMetaData"] # Store the entire Metadata entry in the track, this can then be # used if needed by the client to restart a given URI track["metadata"] = metadata def _parse_radio_metadata(metadata): """Try to parse trackinfo from radio metadata.""" radio_track = {} trackinfo = ( metadata.findtext( ".//{urn:schemas-rinconnetworks-com:" "metadata-1-0/}streamContent" ) or "" ) index = trackinfo.find(" - ") if index > -1: radio_track["artist"] = trackinfo[:index] radio_track["title"] = trackinfo[index + 3 :] elif "TYPE=SNG|" in trackinfo: # Examples from services: # Apple Music radio: # "TYPE=SNG|TITLE Couleurs|ARTIST M83|ALBUM Saturdays = Youth" # SiriusXM: # "BR P|TYPE=SNG|TITLE 7.15.17 LA|ARTIST Eagles|ALBUM " tags = dict([p.split(" ", 1) for p in trackinfo.split("|") if " " in p]) if tags.get("TITLE"): radio_track["title"] = tags["TITLE"] if tags.get("ARTIST"): radio_track["artist"] = tags["ARTIST"] if tags.get("ALBUM"): radio_track["album"] = tags["ALBUM"] else: # Might find some kind of title anyway in metadata radio_track["title"] = metadata.findtext( ".//{http://purl.org/dc/" "elements/1.1/}title" ) if not radio_track["title"]: radio_track["title"] = trackinfo return radio_track # Duration seems to be '0:00:00' when listening to radio if metadata != "" and track["duration"] == "0:00:00": metadata = XML.fromstring(really_utf8(metadata)) track.update(_parse_radio_metadata(metadata)) # If the speaker is playing from the line-in source, querying for track # metadata will return "NOT_IMPLEMENTED". elif metadata not in ("", "NOT_IMPLEMENTED", None): # Track metadata is returned in DIDL-Lite format metadata = XML.fromstring(really_utf8(metadata)) md_title = metadata.findtext(".//{http://purl.org/dc/elements/1.1/}title") md_artist = metadata.findtext( ".//{http://purl.org/dc/elements/1.1/}creator" ) md_album = metadata.findtext( ".//{urn:schemas-upnp-org:metadata-1-0/upnp/}album" ) track["title"] = "" if md_title: track["title"] = md_title track["artist"] = "" if md_artist: track["artist"] = md_artist track["album"] = "" if md_album: track["album"] = md_album album_art_url = metadata.findtext( ".//{urn:schemas-upnp-org:metadata-1-0/upnp/}albumArtURI" ) if album_art_url is not None: track["album_art"] = self.music_library.build_album_art_full_uri( album_art_url ) return track def get_current_media_info(self): """Get information about the currently playing media. Returns: dict: A dictionary containing information about the currently playing media: uri, channel. If we're unable to return data for a field, we'll return an empty string. 
""" response = self.avTransport.GetMediaInfo([("InstanceID", 0)]) media = {"uri": "", "channel": ""} media["uri"] = response["CurrentURI"] metadata = response.get("CurrentURIMetaData") if metadata: metadata = XML.fromstring(really_utf8(metadata)) md_title = metadata.findtext(".//{http://purl.org/dc/elements/1.1/}title") if md_title: media["channel"] = md_title return media def get_speaker_info(self, refresh=False, timeout=None): """Get information about the Sonos speaker. Arguments: refresh(bool): Refresh the speaker info cache. timeout: How long to wait for the server to send data before giving up, as a float, or a ``(connect timeout, read timeout)`` tuple e.g. (3, 5). Default is no timeout. Returns: dict: Information about the Sonos speaker, such as the UID, MAC Address, and Zone Name. """ if self.speaker_info and refresh is False: return self.speaker_info else: response = requests.get( "http://" + self.ip_address + ":1400/xml/device_description.xml", timeout=timeout, ) dom = XML.fromstring(response.content) device = dom.find("{urn:schemas-upnp-org:device-1-0}device") if device is not None: self.speaker_info["zone_name"] = device.findtext( "{urn:schemas-upnp-org:device-1-0}roomName" ) # no zone icon in device_description.xml -> player icon self.speaker_info["player_icon"] = device.findtext( "{urn:schemas-upnp-org:device-1-0}iconList/" "{urn:schemas-upnp-org:device-1-0}icon/" "{urn:schemas-upnp-org:device-1-0}url" ) self.speaker_info["uid"] = self.uid self.speaker_info["serial_number"] = device.findtext( "{urn:schemas-upnp-org:device-1-0}serialNum" ) self.speaker_info["software_version"] = device.findtext( "{urn:schemas-upnp-org:device-1-0}softwareVersion" ) self.speaker_info["hardware_version"] = device.findtext( "{urn:schemas-upnp-org:device-1-0}hardwareVersion" ) self.speaker_info["model_number"] = device.findtext( "{urn:schemas-upnp-org:device-1-0}modelNumber" ) self.speaker_info["model_name"] = device.findtext( "{urn:schemas-upnp-org:device-1-0}modelName" ) self.speaker_info["display_version"] = device.findtext( "{urn:schemas-upnp-org:device-1-0}displayVersion" ) # no mac address - extract from serial number mac = self.speaker_info["serial_number"].split(":")[0] self.speaker_info["mac_address"] = mac return self.speaker_info return None def get_current_transport_info(self): """Get the current playback state. Returns: dict: The following information about the speaker's playing state: * current_transport_state (``PLAYING``, ``TRANSITIONING``, ``PAUSED_PLAYBACK``, ``STOPPED``) * current_transport_status (OK, ?) * current_speed(1, ?) This allows us to know if speaker is playing or not. Don't know other states of CurrentTransportStatus and CurrentSpeed. """ response = self.avTransport.GetTransportInfo( [ ("InstanceID", 0), ] ) playstate = { "current_transport_status": "", "current_transport_state": "", "current_transport_speed": "", } playstate["current_transport_state"] = response["CurrentTransportState"] playstate["current_transport_status"] = response["CurrentTransportStatus"] playstate["current_transport_speed"] = response["CurrentSpeed"] return playstate @property @only_on_master def available_actions(self): """The transport actions that are currently available on the speaker. :returns: list: A list of strings representing the available actions, such as ['Set', 'Stop', 'Play']. Possible list items are: 'Set', 'Stop', 'Pause', 'Play', 'Next', 'Previous', 'SeekTime', 'SeekTrackNr'. 
""" result = self.avTransport.GetCurrentTransportActions([("InstanceID", 0)]) actions = result["Actions"] # The actions might look like 'X_DLNA_SeekTime', but we only want the # last part return [action.split("_")[-1] for action in actions.split(", ")] def get_queue(self, start=0, max_items=100, full_album_art_uri=False): """Get information about the queue. :param start: Starting number of returned matches :param max_items: Maximum number of returned matches :param full_album_art_uri: If the album art URI should include the IP address :returns: A :py:class:`~.soco.data_structures.Queue` object This method is heavily based on Sam Soffes (aka soffes) ruby implementation """ queue = [] response = self.contentDirectory.Browse( [ ("ObjectID", "Q:0"), ("BrowseFlag", "BrowseDirectChildren"), ("Filter", "*"), ("StartingIndex", start), ("RequestedCount", max_items), ("SortCriteria", ""), ] ) result = response["Result"] metadata = {} for tag in ["NumberReturned", "TotalMatches", "UpdateID"]: metadata[camel_to_underscore(tag)] = int(response[tag]) # I'm not sure this necessary (any more). Even with an empty queue, # there is still a result object. This shoud be investigated. if not result: # pylint: disable=star-args return Queue(queue, **metadata) items = from_didl_string(result) for item in items: # Check if the album art URI should be fully qualified if full_album_art_uri: self.music_library._update_album_art_to_full_uri(item) queue.append(item) # pylint: disable=star-args return Queue(queue, **metadata) @property def queue_size(self): """int: Size of the queue.""" response = self.contentDirectory.Browse( [ ("ObjectID", "Q:0"), ("BrowseFlag", "BrowseMetadata"), ("Filter", "*"), ("StartingIndex", 0), ("RequestedCount", 1), ("SortCriteria", ""), ] ) dom = XML.fromstring(really_utf8(response["Result"])) queue_size = None container = dom.find("{urn:schemas-upnp-org:metadata-1-0/DIDL-Lite/}container") if container is not None: child_count = container.get("childCount") if child_count is not None: queue_size = int(child_count) return queue_size def get_sonos_playlists(self, *args, **kwargs): """Convenience method for calling ``soco.music_library.get_music_library_information('sonos_playlists')`` Refer to the docstring for that method: `get_music_library_information` """ args = tuple(["sonos_playlists"] + list(args)) return self.music_library.get_music_library_information(*args, **kwargs) @only_on_master def add_uri_to_queue(self, uri, position=0, as_next=False): """Add the URI to the queue. For arguments and return value see `add_to_queue`. """ # FIXME: The res.protocol_info should probably represent the mime type # etc of the uri. But this seems OK. res = [DidlResource(uri=uri, protocol_info="x-rincon-playlist:*:*:*")] item = DidlObject(resources=res, title="", parent_id="", item_id="") return self.add_to_queue(item, position, as_next) @only_on_master def add_to_queue(self, queueable_item, position=0, as_next=False): """Add a queueable item to the queue. Args: queueable_item (DidlObject or MusicServiceItem): The item to be added to the queue position (int): The index (1-based) at which the URI should be added. Default is 0 (add URI at the end of the queue). as_next (bool): Whether this URI should be played as the next track in shuffle mode. This only works if ``play_mode=SHUFFLE``. Returns: int: The index of the new item in the queue. 
""" metadata = to_didl_string(queueable_item) response = self.avTransport.AddURIToQueue( [ ("InstanceID", 0), ("EnqueuedURI", queueable_item.resources[0].uri), ("EnqueuedURIMetaData", metadata), ("DesiredFirstTrackNumberEnqueued", position), ("EnqueueAsNext", int(as_next)), ] ) qnumber = response["FirstTrackNumberEnqueued"] return int(qnumber) def add_multiple_to_queue(self, items, container=None): """Add a sequence of items to the queue. Args: items (list): A sequence of items to the be added to the queue container (DidlObject, optional): A container object which includes the items. """ if container is not None: container_uri = container.resources[0].uri container_metadata = to_didl_string(container) else: container_uri = "" # Sonos seems to accept this as well container_metadata = "" # pylint: disable=redefined-variable-type chunk_size = 16 # With each request, we can only add 16 items item_list = list(items) # List for slicing for index in range(0, len(item_list), chunk_size): chunk = item_list[index : index + chunk_size] uris = " ".join([item.resources[0].uri for item in chunk]) uri_metadata = " ".join([to_didl_string(item) for item in chunk]) self.avTransport.AddMultipleURIsToQueue( [ ("InstanceID", 0), ("UpdateID", 0), ("NumberOfURIs", len(chunk)), ("EnqueuedURIs", uris), ("EnqueuedURIsMetaData", uri_metadata), ("ContainerURI", container_uri), ("ContainerMetaData", container_metadata), ("DesiredFirstTrackNumberEnqueued", 0), ("EnqueueAsNext", 0), ] ) @only_on_master def remove_from_queue(self, index): """Remove a track from the queue by index. The index number is required as an argument, where the first index is 0. Args: index (int): The (0-based) index of the track to remove """ # TODO: what do these parameters actually do? updid = "0" objid = "Q:0/" + str(index + 1) self.avTransport.RemoveTrackFromQueue( [ ("InstanceID", 0), ("ObjectID", objid), ("UpdateID", updid), ] ) @only_on_master def clear_queue(self): """Remove all tracks from the queue.""" self.avTransport.RemoveAllTracksFromQueue( [ ("InstanceID", 0), ] ) @deprecated("0.13", "soco.music_library.get_favorite_radio_shows", "0.15", True) def get_favorite_radio_shows(self, start=0, max_items=100): """Get favorite radio shows from Sonos' Radio app. Returns: dict: A dictionary containing the total number of favorites, the number of favorites returned, and the actual list of favorite radio shows, represented as a dictionary with ``'title'`` and ``'uri'`` keys. Depending on what you're building, you'll want to check to see if the total number of favorites is greater than the amount you requested (``max_items``), if it is, use ``start`` to page through and get the entire list of favorites. """ message = ( "The output type of this method will probably change in " "the future to use SoCo data structures" ) warnings.warn(message, stacklevel=2) return self.__get_favorites(RADIO_SHOWS, start, max_items) @deprecated("0.13", "soco.music_library.get_favorite_radio_stations", "0.15", True) def get_favorite_radio_stations(self, start=0, max_items=100): """Get favorite radio stations from Sonos' Radio app. See :meth:`get_favorite_radio_shows` for return type and remarks. """ message = ( "The output type of this method will probably change in " "the future to use SoCo data structures" ) warnings.warn(message, stacklevel=2) return self.__get_favorites(RADIO_STATIONS, start, max_items) @deprecated("0.13", "soco.music_library.get_sonos_favorites", "0.15", True) def get_sonos_favorites(self, start=0, max_items=100): """Get Sonos favorites. 
See :meth:`get_favorite_radio_shows` for return type and remarks. """ message = ( "The output type of this method will probably change in " "the future to use SoCo data structures" ) warnings.warn(message, stacklevel=2) return self.__get_favorites(SONOS_FAVORITES, start, max_items) def __get_favorites(self, favorite_type, start=0, max_items=100): """Helper method for `get_favorite_radio_*` methods. Args: favorite_type (str): Specify either `RADIO_STATIONS` or `RADIO_SHOWS`. start (int): Which number to start the retrieval from. Used for paging. max_items (int): The total number of results to return. """ if favorite_type not in (RADIO_SHOWS, RADIO_STATIONS): favorite_type = SONOS_FAVORITES response = self.contentDirectory.Browse( [ ( "ObjectID", "FV:2" if favorite_type is SONOS_FAVORITES else "R:0/{}".format(favorite_type), ), ("BrowseFlag", "BrowseDirectChildren"), ("Filter", "*"), ("StartingIndex", start), ("RequestedCount", max_items), ("SortCriteria", ""), ] ) result = {} favorites = [] results_xml = response["Result"] if results_xml != "": # Favorites are returned in DIDL-Lite format metadata = XML.fromstring(really_utf8(results_xml)) for item in metadata.findall( "{urn:schemas-upnp-org:metadata-1-0/DIDL-Lite/}container" if favorite_type == RADIO_SHOWS else "{urn:schemas-upnp-org:metadata-1-0/DIDL-Lite/}item" ): favorite = {} favorite["title"] = item.findtext( "{http://purl.org/dc/elements/1.1/}title" ) favorite["uri"] = item.findtext( "{urn:schemas-upnp-org:metadata-1-0/DIDL-Lite/}res" ) if favorite_type == SONOS_FAVORITES: favorite["meta"] = item.findtext( "{urn:schemas-rinconnetworks-com:metadata-1-0/}resMD" ) favorites.append(favorite) result["total"] = response["TotalMatches"] result["returned"] = len(favorites) result["favorites"] = favorites return result def create_sonos_playlist(self, title): """Create a new empty Sonos playlist. Args: title: Name of the playlist :rtype: :py:class:`~.soco.data_structures.DidlPlaylistContainer` """ response = self.avTransport.CreateSavedQueue( [ ("InstanceID", 0), ("Title", title), ("EnqueuedURI", ""), ("EnqueuedURIMetaData", ""), ] ) item_id = response["AssignedObjectID"] obj_id = item_id.split(":", 2)[1] uri = "file:///jffs/settings/savedqueues.rsq#{}".format(obj_id) res = [DidlResource(uri=uri, protocol_info="x-rincon-playlist:*:*:*")] return DidlPlaylistContainer( resources=res, title=title, parent_id="SQ:", item_id=item_id ) @only_on_master # pylint: disable=invalid-name def create_sonos_playlist_from_queue(self, title): """Create a new Sonos playlist from the current queue. Args: title: Name of the playlist :rtype: :py:class:`~.soco.data_structures.DidlPlaylistContainer` """ # Note: probably same as Queue service method SaveAsSonosPlaylist # but this has not been tested. This method is what the # controller uses. response = self.avTransport.SaveQueue( [("InstanceID", 0), ("Title", title), ("ObjectID", "")] ) item_id = response["AssignedObjectID"] obj_id = item_id.split(":", 2)[1] uri = "file:///jffs/settings/savedqueues.rsq#{}".format(obj_id) res = [DidlResource(uri=uri, protocol_info="x-rincon-playlist:*:*:*")] return DidlPlaylistContainer( resources=res, title=title, parent_id="SQ:", item_id=item_id ) @only_on_master def remove_sonos_playlist(self, sonos_playlist): """Remove a Sonos playlist. Args: sonos_playlist (DidlPlaylistContainer): Sonos playlist to remove or the item_id (str). Returns: bool: True if succesful, False otherwise Raises: SoCoUPnPException: If sonos_playlist does not point to a valid object. 
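
        Example (illustrative; assumes at least one Sonos playlist exists)::

            playlist = device.get_sonos_playlists()[0]
            device.remove_sonos_playlist(playlist)
            # or, equivalently, by item_id
            device.remove_sonos_playlist('SQ:7')
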
""" object_id = getattr(sonos_playlist, "item_id", sonos_playlist) return self.contentDirectory.DestroyObject([("ObjectID", object_id)]) def add_item_to_sonos_playlist(self, queueable_item, sonos_playlist): """Adds a queueable item to a Sonos' playlist. Args: queueable_item (DidlObject): the item to add to the Sonos' playlist sonos_playlist (DidlPlaylistContainer): the Sonos' playlist to which the item should be added """ # Get the update_id for the playlist response, _ = self.music_library._music_lib_search(sonos_playlist.item_id, 0, 1) update_id = response["UpdateID"] # Form the metadata for queueable_item metadata = to_didl_string(queueable_item) # Make the request self.avTransport.AddURIToSavedQueue( [ ("InstanceID", 0), ("UpdateID", update_id), ("ObjectID", sonos_playlist.item_id), ("EnqueuedURI", queueable_item.resources[0].uri), ("EnqueuedURIMetaData", metadata), # 2 ** 32 - 1 = 4294967295, this field has always this value. Most # likely, playlist positions are represented as a 32 bit uint and # this is therefore the largest index possible. Asking to add at # this index therefore probably amounts to adding it "at the end" ("AddAtIndex", 4294967295), ] ) @only_on_master def set_sleep_timer(self, sleep_time_seconds): """Sets the sleep timer. Args: sleep_time_seconds (int or NoneType): How long to wait before turning off speaker in seconds, None to cancel a sleep timer. Maximum value of 86399 Raises: SoCoException: Upon errors interacting with Sonos controller ValueError: Argument/Syntax errors """ # Note: A value of None for sleep_time_seconds is valid, and needs to # be preserved distinctly separate from 0. 0 means go to sleep now, # which will immediately start the sound tappering, and could be a # useful feature, while None means cancel the current timer try: if sleep_time_seconds is None: sleep_time = "" else: sleep_time = format(datetime.timedelta(seconds=int(sleep_time_seconds))) self.avTransport.ConfigureSleepTimer( [ ("InstanceID", 0), ("NewSleepTimerDuration", sleep_time), ] ) except SoCoUPnPException as err: if "Error 402 received" in str(err): raise ValueError( "invalid sleep_time_seconds, must be integer \ value between 0 and 86399 inclusive or None" ) from err raise except ValueError as error: raise ValueError( "invalid sleep_time_seconds, must be integer \ value between 0 and 86399 inclusive or None" ) from error @only_on_master def get_sleep_timer(self): """Retrieves remaining sleep time, if any Returns: int or NoneType: Number of seconds left in timer. If there is no sleep timer currently set it will return None. """ resp = self.avTransport.GetRemainingSleepTimerDuration( [ ("InstanceID", 0), ] ) if resp["RemainingSleepTimerDuration"]: times = resp["RemainingSleepTimerDuration"].split(":") return int(times[0]) * 3600 + int(times[1]) * 60 + int(times[2]) else: return None @only_on_master def reorder_sonos_playlist(self, sonos_playlist, tracks, new_pos, update_id=0): """Reorder and/or Remove tracks in a Sonos playlist. The underlying call is quite complex as it can both move a track within the list or delete a track from the playlist. All of this depends on what tracks and new_pos specify. If a list is specified for tracks, then a list must be used for new_pos. Each list element is a discrete modification and the next list operation must anticipate the new state of the playlist. If a comma formatted string to tracks is specified, then use a similiar string to specify new_pos. 
Those operations should be ordered from the end of the list to the beginning See the helper methods :py:meth:`clear_sonos_playlist`, :py:meth:`move_in_sonos_playlist`, :py:meth:`remove_from_sonos_playlist` for simplified usage. update_id - If you have a series of operations, tracking the update_id and setting it, will save a lookup operation. Examples: To reorder the first two tracks:: # sonos_playlist specified by the DidlPlaylistContainer object sonos_playlist = device.get_sonos_playlists()[0] device.reorder_sonos_playlist(sonos_playlist, tracks=[0, ], new_pos=[1, ]) # OR specified by the item_id device.reorder_sonos_playlist('SQ:0', tracks=[0, ], new_pos=[1, ]) To delete the second track:: # tracks/new_pos are a list of int device.reorder_sonos_playlist(sonos_playlist, tracks=[1, ], new_pos=[None, ]) # OR tracks/new_pos are a list of int-like device.reorder_sonos_playlist(sonos_playlist, tracks=['1', ], new_pos=['', ]) # OR tracks/new_pos are strings - no transform is done device.reorder_sonos_playlist(sonos_playlist, tracks='1', new_pos='') To reverse the order of a playlist with 4 items:: device.reorder_sonos_playlist(sonos_playlist, tracks='3,2,1,0', new_pos='0,1,2,3') Args: sonos_playlist (:py:class:`~.soco.data_structures.DidlPlaylistContainer`): The Sonos playlist object or the item_id (str) of the Sonos playlist. tracks: (list): list of track indices(int) to reorder. May also be a list of int like things. i.e. ``['0', '1',]`` OR it may be a str of comma separated int like things. ``"0,1"``. Tracks are **0**-based. Meaning the first track is track 0, just like indexing into a Python list. new_pos (list): list of new positions (int|None) corresponding to track_list. MUST be the same type as ``tracks``. **0**-based, see tracks above. ``None`` is the indicator to remove the track. If using a list of strings, then a remove is indicated by an empty string. update_id (int): operation id (default: 0) If set to 0, a lookup is done to find the correct value. Returns: dict: Which contains 3 elements: change, length and update_id. Change in size between original playlist and the resulting playlist, the length of resulting playlist, and the new update_id. Raises: SoCoUPnPException: If playlist does not exist or if your tracks and/or new_pos arguments are invalid. """ # allow either a string 'SQ:10' or an object with item_id attribute. 
object_id = getattr(sonos_playlist, "item_id", sonos_playlist) if isinstance(tracks, str): track_list = [ tracks, ] position_list = [ new_pos, ] elif isinstance(tracks, int): track_list = [ tracks, ] if new_pos is None: new_pos = "" position_list = [ new_pos, ] else: track_list = [str(x) for x in tracks] position_list = [str(x) if x is not None else "" for x in new_pos] # track_list = ','.join(track_list) # position_list = ','.join(position_list) if update_id == 0: # retrieve the update id for the object response, _ = self.music_library._music_lib_search(object_id, 0, 1) update_id = response["UpdateID"] change = 0 for track, position in zip(track_list, position_list): if track == position: # there is no move, a no-op continue response = self.avTransport.ReorderTracksInSavedQueue( [ ("InstanceID", 0), ("ObjectID", object_id), ("UpdateID", update_id), ("TrackList", track), ("NewPositionList", position), ] ) change += int(response["QueueLengthChange"]) update_id = int(response["NewUpdateID"]) length = int(response["NewQueueLength"]) response = {"change": change, "update_id": update_id, "length": length} return response @only_on_master def clear_sonos_playlist(self, sonos_playlist, update_id=0): """Clear all tracks from a Sonos playlist. This is a convenience method for :py:meth:`reorder_sonos_playlist`. Example:: device.clear_sonos_playlist(sonos_playlist) Args: sonos_playlist (:py:class:`~.soco.data_structures.DidlPlaylistContainer`): Sonos playlist object or the item_id (str) of the Sonos playlist. update_id (int): Optional update counter for the object. If left at the default of 0, it will be looked up. Returns: dict: See :py:meth:`reorder_sonos_playlist` Raises: ValueError: If sonos_playlist specified by string and is not found. SoCoUPnPException: See :py:meth:`reorder_sonos_playlist` """ if not isinstance(sonos_playlist, DidlPlaylistContainer): sonos_playlist = self.get_sonos_playlist_by_attr("item_id", sonos_playlist) count = self.music_library.browse(ml_item=sonos_playlist).total_matches tracks = ",".join([str(x) for x in range(count)]) if tracks: return self.reorder_sonos_playlist( sonos_playlist, tracks=tracks, new_pos="", update_id=update_id ) else: return {"change": 0, "update_id": update_id, "length": count} @only_on_master def move_in_sonos_playlist(self, sonos_playlist, track, new_pos, update_id=0): """Move a track to a new position within a Sonos Playlist. This is a convenience method for :py:meth:`reorder_sonos_playlist`. Example:: device.move_in_sonos_playlist(sonos_playlist, track=0, new_pos=1) Args: sonos_playlist (:py:class:`~.soco.data_structures.DidlPlaylistContainer`): Sonos playlist object or the item_id (str) of the Sonos playlist. track (int): **0**-based position of the track to move. The first track is track 0, just like indexing into a Python list. new_pos (int): **0**-based location to move the track. update_id (int): Optional update counter for the object. If left at the default of 0, it will be looked up. Returns: dict: See :py:meth:`reorder_sonos_playlist` Raises: SoCoUPnPException: See :py:meth:`reorder_sonos_playlist` """ return self.reorder_sonos_playlist( sonos_playlist, int(track), int(new_pos), update_id ) @only_on_master def remove_from_sonos_playlist(self, sonos_playlist, track, update_id=0): """Remove a track from a Sonos Playlist. This is a convenience method for :py:meth:`reorder_sonos_playlist`. 
Example:: device.remove_from_sonos_playlist(sonos_playlist, track=0) Args: sonos_playlist (:py:class:`~.soco.data_structures.DidlPlaylistContainer`): Sonos playlist object or the item_id (str) of the Sonos playlist. track (int): *0**-based position of the track to move. The first track is track 0, just like indexing into a Python list. update_id (int): Optional update counter for the object. If left at the default of 0, it will be looked up. Returns: dict: See :py:meth:`reorder_sonos_playlist` Raises: SoCoUPnPException: See :py:meth:`reorder_sonos_playlist` """ return self.reorder_sonos_playlist(sonos_playlist, int(track), None, update_id) @only_on_master def get_sonos_playlist_by_attr(self, attr_name, match): """Return the first Sonos Playlist DidlPlaylistContainer that matches the attribute specified. Args: attr_name (str): DidlPlaylistContainer attribute to compare. The most useful being: 'title' and 'item_id'. match (str): Value to match. Returns: (:class:`~.soco.data_structures.DidlPlaylistContainer`): The first matching playlist object. Raises: (AttributeError): If indicated attribute name does not exist. (ValueError): If a match can not be found. Example:: device.get_sonos_playlist_by_attr('title', 'Foo') device.get_sonos_playlist_by_attr('item_id', 'SQ:3') """ for sonos_playlist in self.get_sonos_playlists(): if getattr(sonos_playlist, attr_name) == match: return sonos_playlist raise ValueError('No match on "{}" for value "{}"'.format(attr_name, match)) def get_battery_info(self, timeout=3.0): """Get battery information for a Sonos speaker. Obtains battery information for Sonos speakers that report it. This only applies to Sonos Move speakers at the time of writing. This method may only work on Sonos 'S2' systems. Args: timeout (float, optional): The timeout to use when making the HTTP request. Returns: dict: A `dict` containing battery status data. Example return value:: {'Health': 'GREEN', 'Level': 100, 'Temperature': 'NORMAL', 'PowerSource': 'SONOS_CHARGING_RING'} Raises: NotSupportedException: If the speaker does not report battery information. ConnectionError: If the HTTP connection failed, or returned an unsuccessful status code. TimeoutError: If making the HTTP connection, or reading the response, timed out. 
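
        A usage sketch, assuming ``device`` refers to a speaker model that
        reports battery status (other models raise `NotSupportedException`)::

            try:
                battery = device.get_battery_info()
                print(battery['Level'], battery['PowerSource'])
            except NotSupportedException:
                print('This speaker does not report battery status')
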
""" # Retrieve information from the speaker's status URL try: response = requests.get( "http://" + self.ip_address + ":1400/status/batterystatus", timeout=timeout, ) except (ConnectTimeout, ReadTimeout) as error: raise TimeoutError from error except RequestsConnectionError as error: raise ConnectionError from error if response.status_code != 200: raise ConnectionError # Convert the XML response and traverse to obtain the battery information battery_info = {} try: zp_info = xmltodict.parse(response.text)["ZPSupportInfo"] for info_item in zp_info["LocalBatteryStatus"]["Data"]: battery_info[info_item["@name"]] = info_item["#text"] try: battery_info["Level"] = int(battery_info["Level"]) except (KeyError, ValueError): pass except (KeyError, ExpatError, TypeError) as error: # Battery information not supported raise NotSupportedException from error return battery_info # definition section RADIO_STATIONS = 0 RADIO_SHOWS = 1 SONOS_FAVORITES = 2 NS = { "dc": "{http://purl.org/dc/elements/1.1/}", "upnp": "{urn:schemas-upnp-org:metadata-1-0/upnp/}", "": "{urn:schemas-upnp-org:metadata-1-0/DIDL-Lite/}", } # Valid play modes and their meanings as (shuffle, repeat) tuples PLAY_MODES = { "NORMAL": (False, False), "SHUFFLE_NOREPEAT": (True, False), "SHUFFLE": (True, True), "REPEAT_ALL": (False, True), "SHUFFLE_REPEAT_ONE": (True, "ONE"), "REPEAT_ONE": (False, "ONE"), } # Inverse mapping of PLAY_MODES PLAY_MODE_BY_MEANING = {meaning: mode for mode, meaning in PLAY_MODES.items()} # Music source names MUSIC_SRC_LIBRARY = "LIBRARY" MUSIC_SRC_RADIO = "RADIO" MUSIC_SRC_WEB_FILE = "WEB_FILE" MUSIC_SRC_LINE_IN = "LINE_IN" MUSIC_SRC_TV = "TV" MUSIC_SRC_AIRPLAY = "AIRPLAY" MUSIC_SRC_UNKNOWN = "UNKNOWN" MUSIC_SRC_NONE = "NONE" # URI prefixes for music sources SOURCES = { r"^$": MUSIC_SRC_NONE, r"^x-file-cifs:": MUSIC_SRC_LIBRARY, r"^x-rincon-mp3radio:": MUSIC_SRC_RADIO, r"^x-sonosapi-stream:": MUSIC_SRC_RADIO, r"^x-sonosapi-radio:": MUSIC_SRC_RADIO, r"^x-sonosapi-hls:": MUSIC_SRC_RADIO, r"^aac:": MUSIC_SRC_RADIO, r"^hls-radio:": MUSIC_SRC_RADIO, r"^https?:": MUSIC_SRC_WEB_FILE, r"^x-rincon-stream:": MUSIC_SRC_LINE_IN, r"^x-sonos-htastream:": MUSIC_SRC_TV, r"^x-sonos-vli:.*,airplay:": MUSIC_SRC_AIRPLAY, } # Soundbar product names SOUNDBARS = ("playbase", "playbar", "beam", "sonos amp", "arc", "arc sl") if config.SOCO_CLASS is None: config.SOCO_CLASS = SoCo
import datetime import logging import re import socket from functools import wraps from xml.sax.saxutils import escape from xml.parsers.expat import ExpatError import warnings import xmltodict import requests from requests.exceptions import ConnectionError as RequestsConnectionError from requests.exceptions import ConnectTimeout, ReadTimeout from . import config from .data_structures import ( DidlObject, DidlPlaylistContainer, DidlResource, Queue, to_didl_string, ) from .cache import Cache from .data_structures_entry import from_didl_string from .exceptions import ( SoCoSlaveException, SoCoUPnPException, NotSupportedException, SoCoNotVisibleException, ) from .groups import ZoneGroup from .music_library import MusicLibrary from .services import ( DeviceProperties, ContentDirectory, RenderingControl, AVTransport, ZoneGroupTopology, AlarmClock, SystemProperties, MusicServices, AudioIn, GroupRenderingControl, ) from .utils import really_utf8, camel_to_underscore, deprecated from .xml import XML _LOG = logging.getLogger(__name__) class _ArgsSingleton(type): _instances = {} def __call__(cls, *args, **kwargs): key = cls._class_group if hasattr(cls, "_class_group") else cls if key not in cls._instances: cls._instances[key] = {} if args not in cls._instances[key]: cls._instances[key][args] = super().__call__(*args, **kwargs) return cls._instances[key][args] class _SocoSingletonBase( _ArgsSingleton("ArgsSingletonMeta", (object,), {}) ): def only_on_master(function): @wraps(function) def inner_function(self, *args, **kwargs): if not self.is_coordinator: message = ( 'The method or property "{}" can only be called/used ' "on the coordinator in a group".format(function.__name__) ) raise SoCoSlaveException(message) return function(self, *args, **kwargs) return inner_function class SoCo(_SocoSingletonBase): _class_group = "SoCo" def __init__(self, ip_address): super().__init__() try: socket.inet_aton(ip_address) except OSError as error: raise ValueError("Not a valid IP address string") from error self.ip_address = ip_address self.speaker_info = {} # Stores information about the current speaker # The services which we use # pylint: disable=invalid-name self.avTransport = AVTransport(self) self.contentDirectory = ContentDirectory(self) self.deviceProperties = DeviceProperties(self) self.renderingControl = RenderingControl(self) self.groupRenderingControl = GroupRenderingControl(self) self.zoneGroupTopology = ZoneGroupTopology(self) self.alarmClock = AlarmClock(self) self.systemProperties = SystemProperties(self) self.musicServices = MusicServices(self) self.audioIn = AudioIn(self) self.music_library = MusicLibrary(self) # Some private attributes self._all_zones = set() self._groups = set() self._is_bridge = None self._is_coordinator = False self._is_soundbar = None self._player_name = None self._uid = None self._household_id = None self._visible_zones = set() self._zgs_cache = Cache(default_timeout=5) self._zgs_result = None _LOG.debug("Created SoCo instance for ip: %s", ip_address) def __str__(self): return "<{} object at ip {}>".format(self.__class__.__name__, self.ip_address) def __repr__(self): return '{}("{}")'.format(self.__class__.__name__, self.ip_address) @property def player_name(self): # We could get the name like this: # result = self.deviceProperties.GetZoneAttributes() # return result["CurrentZoneName"] # but it is probably quicker to get it from the group topology # and take advantage of any caching self._parse_zone_group_state() return self._player_name @player_name.setter def 
player_name(self, playername): self.deviceProperties.SetZoneAttributes( [ ("DesiredZoneName", playername), ("DesiredIcon", ""), ("DesiredConfiguration", ""), ] ) @property def uid(self): # Since this does not change over time (?) check whether we already # know the answer. If so, there is no need to go further if self._uid is not None: return self._uid # if not, we have to get it from the zone topology, which # is probably quicker than any alternative, since the zgt is probably # cached. This will set self._uid for us for next time, so we won't self._parse_zone_group_state() return self._uid if self._household_id is None: self._household_id = self.deviceProperties.GetHouseholdID()[ "CurrentHouseholdID" ] return self._household_id @property def is_visible(self): return self in self.visible_zones @property def is_bridge(self): if self._is_bridge is not None: return self._is_bridge # again self._parse_zone_group_state() return self._is_bridge @property def is_coordinator(self): # We could do this: # invisible = self.deviceProperties.GetInvisible()['CurrentInvisible'] # but it is better to do it in the following way, which uses the # zone group topology, to capitalise on any caching. self._parse_zone_group_state() return self._is_coordinator @property def is_soundbar(self): if self._is_soundbar is None: if not self.speaker_info: self.get_speaker_info() model_name = self.speaker_info["model_name"].lower() self._is_soundbar = any(model_name.endswith(s) for s in SOUNDBARS) return self._is_soundbar @property def play_mode(self): result = self.avTransport.GetTransportSettings( [ ("InstanceID", 0), ] ) return result["PlayMode"] @play_mode.setter def play_mode(self, playmode): playmode = playmode.upper() if playmode not in PLAY_MODES.keys(): raise KeyError("'%s' is not a valid play mode" % playmode) self.avTransport.SetPlayMode([("InstanceID", 0), ("NewPlayMode", playmode)]) @property def shuffle(self): return PLAY_MODES[self.play_mode][0] @shuffle.setter def shuffle(self, shuffle): repeat = self.repeat self.play_mode = PLAY_MODE_BY_MEANING[(shuffle, repeat)] @property def repeat(self): return PLAY_MODES[self.play_mode][1] @repeat.setter def repeat(self, repeat): shuffle = self.shuffle self.play_mode = PLAY_MODE_BY_MEANING[(shuffle, repeat)] @property @only_on_master # Only for symmetry with the setter def cross_fade(self): response = self.avTransport.GetCrossfadeMode( [ ("InstanceID", 0), ] ) cross_fade_state = response["CrossfadeMode"] return bool(int(cross_fade_state)) @cross_fade.setter @only_on_master def cross_fade(self, crossfade): crossfade_value = "1" if crossfade else "0" self.avTransport.SetCrossfadeMode( [("InstanceID", 0), ("CrossfadeMode", crossfade_value)] ) def ramp_to_volume(self, volume, ramp_type="SLEEP_TIMER_RAMP_TYPE"): response = self.renderingControl.RampToVolume( [ ("InstanceID", 0), ("Channel", "Master"), ("RampType", ramp_type), ("DesiredVolume", volume), ("ResetVolumeAfter", False), ("ProgramURI", ""), ] ) return int(response["RampTime"]) def set_relative_volume(self, relative_volume): relative_volume = int(relative_volume) # Sonos will automatically handle out-of-range adjustments response = self.renderingControl.SetRelativeVolume( [("InstanceID", 0), ("Channel", "Master"), ("Adjustment", relative_volume)] ) return int(response["NewVolume"]) @only_on_master def play_from_queue(self, index, start=True): # Grab the speaker's information if we haven't already since we'll need if not self.speaker_info: self.get_speaker_info() uri = "x-rincon-queue:{}#0".format(self.uid) 
self.avTransport.SetAVTransportURI( [("InstanceID", 0), ("CurrentURI", uri), ("CurrentURIMetaData", "")] ) self.avTransport.Seek( [("InstanceID", 0), ("Unit", "TRACK_NR"), ("Target", index + 1)] ) if start: self.play() @only_on_master def play(self): self.avTransport.Play([("InstanceID", 0), ("Speed", 1)]) @only_on_master # pylint: disable=too-many-arguments def play_uri(self, uri="", meta="", title="", start=True, force_radio=False): if meta == "" and title != "": meta_template = ( '<DIDL-Lite xmlns:dc="http://purl.org/dc/elements' '/1.1/" xmlns:upnp="urn:schemas-upnp-org:metadata-1-0/upnp/" ' 'xmlns:r="urn:schemas-rinconnetworks-com:metadata-1-0/" ' 'xmlns="urn:schemas-upnp-org:metadata-1-0/DIDL-Lite/">' '<item id="R:0/0/0" parentID="R:0/0" restricted="true">' "<dc:title>{title}</dc:title><upnp:class>" "object.item.audioItem.audioBroadcast</upnp:class><desc " 'id="cdudn" nameSpace="urn:schemas-rinconnetworks-com:' 'metadata-1-0/">{service}</desc></item></DIDL-Lite>' ) tunein_service = "SA_RINCON65031_" # Radio stations need to have at least a title to play meta = meta_template.format(title=escape(title), service=tunein_service) # change uri prefix to force radio style display and commands if force_radio: colon = uri.find(":") if colon > 0: uri = "x-rincon-mp3radio{}".format(uri[colon:]) self.avTransport.SetAVTransportURI( [("InstanceID", 0), ("CurrentURI", uri), ("CurrentURIMetaData", meta)] ) # The track is enqueued, now play it if needed if start: return self.play() return False @only_on_master def pause(self): self.avTransport.Pause([("InstanceID", 0), ("Speed", 1)]) @only_on_master def stop(self): self.avTransport.Stop([("InstanceID", 0), ("Speed", 1)]) @only_on_master def end_direct_control_session(self): self.avTransport.EndDirectControlSession([("InstanceID", 0)]) @only_on_master def seek(self, position=None, track=None): if track is None and position is None: raise ValueError("No position or track information given") if track is not None: self.avTransport.Seek( [("InstanceID", 0), ("Unit", "TRACK_NR"), ("Target", track + 1)] ) if position is not None: if not re.match(r"^[0-9][0-9]?:[0-9][0-9]:[0-9][0-9]$", position): raise ValueError("invalid timestamp, use HH:MM:SS format") self.avTransport.Seek( [("InstanceID", 0), ("Unit", "REL_TIME"), ("Target", position)] ) @only_on_master def next(self): self.avTransport.Next([("InstanceID", 0), ("Speed", 1)]) @only_on_master def previous(self): self.avTransport.Previous([("InstanceID", 0), ("Speed", 1)]) @property def mute(self): response = self.renderingControl.GetMute( [("InstanceID", 0), ("Channel", "Master")] ) mute_state = response["CurrentMute"] return bool(int(mute_state)) @mute.setter def mute(self, mute): mute_value = "1" if mute else "0" self.renderingControl.SetMute( [("InstanceID", 0), ("Channel", "Master"), ("DesiredMute", mute_value)] ) @property def volume(self): response = self.renderingControl.GetVolume( [ ("InstanceID", 0), ("Channel", "Master"), ] ) volume = response["CurrentVolume"] return int(volume) @volume.setter def volume(self, volume): volume = int(volume) volume = max(0, min(volume, 100)) # Coerce in range self.renderingControl.SetVolume( [("InstanceID", 0), ("Channel", "Master"), ("DesiredVolume", volume)] ) @property def bass(self): response = self.renderingControl.GetBass( [ ("InstanceID", 0), ("Channel", "Master"), ] ) bass = response["CurrentBass"] return int(bass) @bass.setter def bass(self, bass): bass = int(bass) bass = max(-10, min(bass, 10)) # Coerce in range 
self.renderingControl.SetBass([("InstanceID", 0), ("DesiredBass", bass)]) @property def treble(self): response = self.renderingControl.GetTreble( [ ("InstanceID", 0), ("Channel", "Master"), ] ) treble = response["CurrentTreble"] return int(treble) @treble.setter def treble(self, treble): treble = int(treble) treble = max(-10, min(treble, 10)) # Coerce in range self.renderingControl.SetTreble([("InstanceID", 0), ("DesiredTreble", treble)]) @property def loudness(self): response = self.renderingControl.GetLoudness( [ ("InstanceID", 0), ("Channel", "Master"), ] ) loudness = response["CurrentLoudness"] return bool(int(loudness)) @loudness.setter def loudness(self, loudness): loudness_value = "1" if loudness else "0" self.renderingControl.SetLoudness( [ ("InstanceID", 0), ("Channel", "Master"), ("DesiredLoudness", loudness_value), ] ) @property def balance(self): response_lf = self.renderingControl.GetVolume( [ ("InstanceID", 0), ("Channel", "LF"), ] ) response_rf = self.renderingControl.GetVolume( [ ("InstanceID", 0), ("Channel", "RF"), ] ) volume_lf = response_lf["CurrentVolume"] volume_rf = response_rf["CurrentVolume"] return int(volume_lf), int(volume_rf) @balance.setter def balance(self, left_right_tuple): left, right = left_right_tuple left = int(left) right = int(right) left = max(0, min(left, 100)) # Coerce in range right = max(0, min(right, 100)) # Coerce in range self.renderingControl.SetVolume( [("InstanceID", 0), ("Channel", "LF"), ("DesiredVolume", left)] ) self.renderingControl.SetVolume( [("InstanceID", 0), ("Channel", "RF"), ("DesiredVolume", right)] ) @property def night_mode(self): if not self.is_soundbar: return None response = self.renderingControl.GetEQ( [("InstanceID", 0), ("EQType", "NightMode")] ) return bool(int(response["CurrentValue"])) @night_mode.setter def night_mode(self, night_mode): if not self.is_soundbar: message = "This device does not support night mode" raise NotSupportedException(message) self.renderingControl.SetEQ( [ ("InstanceID", 0), ("EQType", "NightMode"), ("DesiredValue", int(night_mode)), ] ) @property def dialog_mode(self): if not self.is_soundbar: return None response = self.renderingControl.GetEQ( [("InstanceID", 0), ("EQType", "DialogLevel")] ) return bool(int(response["CurrentValue"])) @dialog_mode.setter def dialog_mode(self, dialog_mode): if not self.is_soundbar: message = "This device does not support dialog mode" raise NotSupportedException(message) self.renderingControl.SetEQ( [ ("InstanceID", 0), ("EQType", "DialogLevel"), ("DesiredValue", int(dialog_mode)), ] ) @property def trueplay(self): response = self.renderingControl.GetRoomCalibrationStatus([("InstanceID", 0)]) if response["RoomCalibrationAvailable"] == "0": return None else: return response["RoomCalibrationEnabled"] == "1" @trueplay.setter def trueplay(self, trueplay): response = self.renderingControl.GetRoomCalibrationStatus([("InstanceID", 0)]) if response["RoomCalibrationAvailable"] == "0": raise NotSupportedException if not self.is_visible: raise SoCoNotVisibleException trueplay_value = "1" if trueplay else "0" self.renderingControl.SetRoomCalibrationStatus( [ ("InstanceID", 0), ("RoomCalibrationEnabled", trueplay_value), ] ) @property def supports_fixed_volume(self): response = self.renderingControl.GetSupportsOutputFixed([("InstanceID", 0)]) return response["CurrentSupportsFixed"] == "1" @property def fixed_volume(self): response = self.renderingControl.GetOutputFixed([("InstanceID", 0)]) return response["CurrentFixed"] == "1" @fixed_volume.setter def fixed_volume(self, 
fixed_volume): try: self.renderingControl.SetOutputFixed( [ ("InstanceID", 0), ("DesiredFixed", "1" if fixed_volume else "0"), ] ) except SoCoUPnPException as error: raise NotSupportedException from error def _parse_zone_group_state(self): # zoneGroupTopology.GetZoneGroupState()['ZoneGroupState'] returns XML like # this: # # <ZoneGroups> # <ZoneGroup Coordinator="RINCON_000XXX1400" ID="RINCON_000XXXX1400:0"> # <ZoneGroupMember # BootSeq="33" # Configuration="1" # Icon="x-rincon-roomicon:zoneextender" # Invisible="1" # IsZoneBridge="1" # Location="http://192.168.1.100:1400/xml/device_description.xml" # MinCompatibleVersion="22.0-00000" # SoftwareVersion="24.1-74200" # UUID="RINCON_000ZZZ1400" # ZoneName="BRIDGE"/> # </ZoneGroup> # <ZoneGroup Coordinator="RINCON_000XXX1400" ID="RINCON_000XXX1400:46"> # <ZoneGroupMember # BootSeq="44" # Configuration="1" # Icon="x-rincon-roomicon:living" # Location="http://192.168.1.101:1400/xml/device_description.xml" # MinCompatibleVersion="22.0-00000" # SoftwareVersion="24.1-74200" # UUID="RINCON_000XXX1400" # ZoneName="Living Room"/> # <ZoneGroupMember # BootSeq="52" # Configuration="1" # Icon="x-rincon-roomicon:kitchen" # Location="http://192.168.1.102:1400/xml/device_description.xml" # MinCompatibleVersion="22.0-00000" # SoftwareVersion="24.1-74200" # UUID="RINCON_000YYY1400" # ZoneName="Kitchen"/> # </ZoneGroup> # </ZoneGroups> # def parse_zone_group_member(member_element): # Create a SoCo instance for each member. Because SoCo # instances are singletons, this is cheap if they have already # been created, and useful if they haven't. We can then member_attribs = member_element.attrib ip_addr = member_attribs["Location"].split("//")[1].split(":")[0] zone = config.SOCO_CLASS(ip_addr) zone._zgs_cache = self._zgs_cache zone._uid = member_attribs["UUID"] zone._player_name = member_attribs["ZoneName"] is_visible = member_attribs.get("Invisible") != "1" if is_visible: self._visible_zones.add(zone) self._all_zones.add(zone) return zone zgs = self.zoneGroupTopology.GetZoneGroupState(cache=self._zgs_cache)[ "ZoneGroupState" ] if zgs == self._zgs_result: return self._zgs_result = zgs tree = XML.fromstring(zgs.encode("utf-8")) self._groups.clear() self._all_zones.clear() self._visible_zones.clear() for group_element in tree.find("ZoneGroups").findall("ZoneGroup"): coordinator_uid = group_element.attrib["Coordinator"] group_uid = group_element.attrib["ID"] group_coordinator = None members = set() for member_element in group_element.findall("ZoneGroupMember"): zone = parse_zone_group_member(member_element) if zone._uid == coordinator_uid: group_coordinator = zone zone._is_coordinator = True else: zone._is_coordinator = False # set/reset it here, just in case the zone has not been seen # before zone._is_bridge = member_element.attrib.get("IsZoneBridge") == "1" # add the zone to the members for this group members.add(zone) # Loop over Satellite elements if present, and process as for # ZoneGroup elements for satellite_element in member_element.findall("Satellite"): zone = parse_zone_group_member(satellite_element) # Assume a satellite can't be a bridge or coordinator, so members.add(zone) self._groups.add(ZoneGroup(group_uid, group_coordinator, members)) @property def all_groups(self): self._parse_zone_group_state() return self._groups.copy() @property def group(self): for group in self.all_groups: if self in group: return group return None @property def all_zones(self): self._parse_zone_group_state() return self._all_zones.copy() @property def visible_zones(self): 
self._parse_zone_group_state() return self._visible_zones.copy() def partymode(self): [zone.join(self) for zone in self.visible_zones if zone is not self] def join(self, master): self.avTransport.SetAVTransportURI( [ ("InstanceID", 0), ("CurrentURI", "x-rincon:{}".format(master.uid)), ("CurrentURIMetaData", ""), ] ) self._zgs_cache.clear() def unjoin(self): self.avTransport.BecomeCoordinatorOfStandaloneGroup([("InstanceID", 0)]) self._zgs_cache.clear() def create_stereo_pair(self, rh_slave_speaker): param = self.uid + ":LF,LF;" + rh_slave_speaker.uid + ":RF,RF" self.deviceProperties.AddBondedZones([("ChannelMapSet", param)]) def separate_stereo_pair(self): self.deviceProperties.RemoveBondedZones( [("ChannelMapSet", ""), ("KeepGrouped", "0")] ) def switch_to_line_in(self, source=None): if source: uid = source.uid else: uid = self.uid self.avTransport.SetAVTransportURI( [ ("InstanceID", 0), ("CurrentURI", "x-rincon-stream:{}".format(uid)), ("CurrentURIMetaData", ""), ] ) @property def is_playing_radio(self): return self.music_source == MUSIC_SRC_RADIO @property def is_playing_line_in(self): return self.music_source == MUSIC_SRC_LINE_IN @property def is_playing_tv(self): return self.music_source == MUSIC_SRC_TV @staticmethod def music_source_from_uri(uri): for regex, source in SOURCES.items(): if re.match(regex, uri) is not None: return source return MUSIC_SRC_UNKNOWN @property def music_source(self): response = self.avTransport.GetPositionInfo( [("InstanceID", 0), ("Channel", "Master")] ) return self.music_source_from_uri(response["TrackURI"]) def switch_to_tv(self): self.avTransport.SetAVTransportURI( [ ("InstanceID", 0), ("CurrentURI", "x-sonos-htastream:{}:spdif".format(self.uid)), ("CurrentURIMetaData", ""), ] ) @property def status_light(self): result = self.deviceProperties.GetLEDState() LEDState = result["CurrentLEDState"] return LEDState == "On" @status_light.setter def status_light(self, led_on): led_state = "On" if led_on else "Off" self.deviceProperties.SetLEDState( [ ("DesiredLEDState", led_state), ] ) @property def buttons_enabled(self): lock_state = self.deviceProperties.GetButtonLockState()[ "CurrentButtonLockState" ] return lock_state == "Off" @buttons_enabled.setter def buttons_enabled(self, enabled): if not self.is_visible: raise SoCoNotVisibleException lock_state = "Off" if enabled else "On" self.deviceProperties.SetButtonLockState( [ ("DesiredButtonLockState", lock_state), ] ) def get_current_track_info(self): response = self.avTransport.GetPositionInfo( [("InstanceID", 0), ("Channel", "Master")] ) track = { "title": "", "artist": "", "album": "", "album_art": "", "position": "", } track["playlist_position"] = response["Track"] track["duration"] = response["TrackDuration"] track["uri"] = response["TrackURI"] track["position"] = response["RelTime"] metadata = response["TrackMetaData"] track["metadata"] = metadata def _parse_radio_metadata(metadata): radio_track = {} trackinfo = ( metadata.findtext( ".//{urn:schemas-rinconnetworks-com:" "metadata-1-0/}streamContent" ) or "" ) index = trackinfo.find(" - ") if index > -1: radio_track["artist"] = trackinfo[:index] radio_track["title"] = trackinfo[index + 3 :] elif "TYPE=SNG|" in trackinfo: tags = dict([p.split(" ", 1) for p in trackinfo.split("|") if " " in p]) if tags.get("TITLE"): radio_track["title"] = tags["TITLE"] if tags.get("ARTIST"): radio_track["artist"] = tags["ARTIST"] if tags.get("ALBUM"): radio_track["album"] = tags["ALBUM"] else: radio_track["title"] = metadata.findtext( ".//{http://purl.org/dc/" 
"elements/1.1/}title" ) if not radio_track["title"]: radio_track["title"] = trackinfo return radio_track if metadata != "" and track["duration"] == "0:00:00": metadata = XML.fromstring(really_utf8(metadata)) track.update(_parse_radio_metadata(metadata)) elif metadata not in ("", "NOT_IMPLEMENTED", None): metadata = XML.fromstring(really_utf8(metadata)) md_title = metadata.findtext(".//{http://purl.org/dc/elements/1.1/}title") md_artist = metadata.findtext( ".//{http://purl.org/dc/elements/1.1/}creator" ) md_album = metadata.findtext( ".//{urn:schemas-upnp-org:metadata-1-0/upnp/}album" ) track["title"] = "" if md_title: track["title"] = md_title track["artist"] = "" if md_artist: track["artist"] = md_artist track["album"] = "" if md_album: track["album"] = md_album album_art_url = metadata.findtext( ".//{urn:schemas-upnp-org:metadata-1-0/upnp/}albumArtURI" ) if album_art_url is not None: track["album_art"] = self.music_library.build_album_art_full_uri( album_art_url ) return track def get_current_media_info(self): response = self.avTransport.GetMediaInfo([("InstanceID", 0)]) media = {"uri": "", "channel": ""} media["uri"] = response["CurrentURI"] metadata = response.get("CurrentURIMetaData") if metadata: metadata = XML.fromstring(really_utf8(metadata)) md_title = metadata.findtext(".//{http://purl.org/dc/elements/1.1/}title") if md_title: media["channel"] = md_title return media def get_speaker_info(self, refresh=False, timeout=None): if self.speaker_info and refresh is False: return self.speaker_info else: response = requests.get( "http://" + self.ip_address + ":1400/xml/device_description.xml", timeout=timeout, ) dom = XML.fromstring(response.content) device = dom.find("{urn:schemas-upnp-org:device-1-0}device") if device is not None: self.speaker_info["zone_name"] = device.findtext( "{urn:schemas-upnp-org:device-1-0}roomName" ) self.speaker_info["player_icon"] = device.findtext( "{urn:schemas-upnp-org:device-1-0}iconList/" "{urn:schemas-upnp-org:device-1-0}icon/" "{urn:schemas-upnp-org:device-1-0}url" ) self.speaker_info["uid"] = self.uid self.speaker_info["serial_number"] = device.findtext( "{urn:schemas-upnp-org:device-1-0}serialNum" ) self.speaker_info["software_version"] = device.findtext( "{urn:schemas-upnp-org:device-1-0}softwareVersion" ) self.speaker_info["hardware_version"] = device.findtext( "{urn:schemas-upnp-org:device-1-0}hardwareVersion" ) self.speaker_info["model_number"] = device.findtext( "{urn:schemas-upnp-org:device-1-0}modelNumber" ) self.speaker_info["model_name"] = device.findtext( "{urn:schemas-upnp-org:device-1-0}modelName" ) self.speaker_info["display_version"] = device.findtext( "{urn:schemas-upnp-org:device-1-0}displayVersion" ) mac = self.speaker_info["serial_number"].split(":")[0] self.speaker_info["mac_address"] = mac return self.speaker_info return None def get_current_transport_info(self): response = self.avTransport.GetTransportInfo( [ ("InstanceID", 0), ] ) playstate = { "current_transport_status": "", "current_transport_state": "", "current_transport_speed": "", } playstate["current_transport_state"] = response["CurrentTransportState"] playstate["current_transport_status"] = response["CurrentTransportStatus"] playstate["current_transport_speed"] = response["CurrentSpeed"] return playstate @property @only_on_master def available_actions(self): result = self.avTransport.GetCurrentTransportActions([("InstanceID", 0)]) actions = result["Actions"] return [action.split("_")[-1] for action in actions.split(", ")] def get_queue(self, start=0, max_items=100, 
full_album_art_uri=False): queue = [] response = self.contentDirectory.Browse( [ ("ObjectID", "Q:0"), ("BrowseFlag", "BrowseDirectChildren"), ("Filter", "*"), ("StartingIndex", start), ("RequestedCount", max_items), ("SortCriteria", ""), ] ) result = response["Result"] metadata = {} for tag in ["NumberReturned", "TotalMatches", "UpdateID"]: metadata[camel_to_underscore(tag)] = int(response[tag]) # there is still a result object. This shoud be investigated. if not result: # pylint: disable=star-args return Queue(queue, **metadata) items = from_didl_string(result) for item in items: # Check if the album art URI should be fully qualified if full_album_art_uri: self.music_library._update_album_art_to_full_uri(item) queue.append(item) # pylint: disable=star-args return Queue(queue, **metadata) @property def queue_size(self): response = self.contentDirectory.Browse( [ ("ObjectID", "Q:0"), ("BrowseFlag", "BrowseMetadata"), ("Filter", "*"), ("StartingIndex", 0), ("RequestedCount", 1), ("SortCriteria", ""), ] ) dom = XML.fromstring(really_utf8(response["Result"])) queue_size = None container = dom.find("{urn:schemas-upnp-org:metadata-1-0/DIDL-Lite/}container") if container is not None: child_count = container.get("childCount") if child_count is not None: queue_size = int(child_count) return queue_size def get_sonos_playlists(self, *args, **kwargs): args = tuple(["sonos_playlists"] + list(args)) return self.music_library.get_music_library_information(*args, **kwargs) @only_on_master def add_uri_to_queue(self, uri, position=0, as_next=False): # FIXME: The res.protocol_info should probably represent the mime type # etc of the uri. But this seems OK. res = [DidlResource(uri=uri, protocol_info="x-rincon-playlist:*:*:*")] item = DidlObject(resources=res, title="", parent_id="", item_id="") return self.add_to_queue(item, position, as_next) @only_on_master def add_to_queue(self, queueable_item, position=0, as_next=False): metadata = to_didl_string(queueable_item) response = self.avTransport.AddURIToQueue( [ ("InstanceID", 0), ("EnqueuedURI", queueable_item.resources[0].uri), ("EnqueuedURIMetaData", metadata), ("DesiredFirstTrackNumberEnqueued", position), ("EnqueueAsNext", int(as_next)), ] ) qnumber = response["FirstTrackNumberEnqueued"] return int(qnumber) def add_multiple_to_queue(self, items, container=None): if container is not None: container_uri = container.resources[0].uri container_metadata = to_didl_string(container) else: container_uri = "" # Sonos seems to accept this as well container_metadata = "" # pylint: disable=redefined-variable-type chunk_size = 16 # With each request, we can only add 16 items item_list = list(items) # List for slicing for index in range(0, len(item_list), chunk_size): chunk = item_list[index : index + chunk_size] uris = " ".join([item.resources[0].uri for item in chunk]) uri_metadata = " ".join([to_didl_string(item) for item in chunk]) self.avTransport.AddMultipleURIsToQueue( [ ("InstanceID", 0), ("UpdateID", 0), ("NumberOfURIs", len(chunk)), ("EnqueuedURIs", uris), ("EnqueuedURIsMetaData", uri_metadata), ("ContainerURI", container_uri), ("ContainerMetaData", container_metadata), ("DesiredFirstTrackNumberEnqueued", 0), ("EnqueueAsNext", 0), ] ) @only_on_master def remove_from_queue(self, index): # TODO: what do these parameters actually do? 
updid = "0" objid = "Q:0/" + str(index + 1) self.avTransport.RemoveTrackFromQueue( [ ("InstanceID", 0), ("ObjectID", objid), ("UpdateID", updid), ] ) @only_on_master def clear_queue(self): self.avTransport.RemoveAllTracksFromQueue( [ ("InstanceID", 0), ] ) @deprecated("0.13", "soco.music_library.get_favorite_radio_shows", "0.15", True) def get_favorite_radio_shows(self, start=0, max_items=100): message = ( "The output type of this method will probably change in " "the future to use SoCo data structures" ) warnings.warn(message, stacklevel=2) return self.__get_favorites(RADIO_SHOWS, start, max_items) @deprecated("0.13", "soco.music_library.get_favorite_radio_stations", "0.15", True) def get_favorite_radio_stations(self, start=0, max_items=100): message = ( "The output type of this method will probably change in " "the future to use SoCo data structures" ) warnings.warn(message, stacklevel=2) return self.__get_favorites(RADIO_STATIONS, start, max_items) @deprecated("0.13", "soco.music_library.get_sonos_favorites", "0.15", True) def get_sonos_favorites(self, start=0, max_items=100): message = ( "The output type of this method will probably change in " "the future to use SoCo data structures" ) warnings.warn(message, stacklevel=2) return self.__get_favorites(SONOS_FAVORITES, start, max_items) def __get_favorites(self, favorite_type, start=0, max_items=100): if favorite_type not in (RADIO_SHOWS, RADIO_STATIONS): favorite_type = SONOS_FAVORITES response = self.contentDirectory.Browse( [ ( "ObjectID", "FV:2" if favorite_type is SONOS_FAVORITES else "R:0/{}".format(favorite_type), ), ("BrowseFlag", "BrowseDirectChildren"), ("Filter", "*"), ("StartingIndex", start), ("RequestedCount", max_items), ("SortCriteria", ""), ] ) result = {} favorites = [] results_xml = response["Result"] if results_xml != "": # Favorites are returned in DIDL-Lite format metadata = XML.fromstring(really_utf8(results_xml)) for item in metadata.findall( "{urn:schemas-upnp-org:metadata-1-0/DIDL-Lite/}container" if favorite_type == RADIO_SHOWS else "{urn:schemas-upnp-org:metadata-1-0/DIDL-Lite/}item" ): favorite = {} favorite["title"] = item.findtext( "{http://purl.org/dc/elements/1.1/}title" ) favorite["uri"] = item.findtext( "{urn:schemas-upnp-org:metadata-1-0/DIDL-Lite/}res" ) if favorite_type == SONOS_FAVORITES: favorite["meta"] = item.findtext( "{urn:schemas-rinconnetworks-com:metadata-1-0/}resMD" ) favorites.append(favorite) result["total"] = response["TotalMatches"] result["returned"] = len(favorites) result["favorites"] = favorites return result def create_sonos_playlist(self, title): response = self.avTransport.CreateSavedQueue( [ ("InstanceID", 0), ("Title", title), ("EnqueuedURI", ""), ("EnqueuedURIMetaData", ""), ] ) item_id = response["AssignedObjectID"] obj_id = item_id.split(":", 2)[1] uri = "file:///jffs/settings/savedqueues.rsq#{}".format(obj_id) res = [DidlResource(uri=uri, protocol_info="x-rincon-playlist:*:*:*")] return DidlPlaylistContainer( resources=res, title=title, parent_id="SQ:", item_id=item_id ) @only_on_master # pylint: disable=invalid-name def create_sonos_playlist_from_queue(self, title): # Note: probably same as Queue service method SaveAsSonosPlaylist # but this has not been tested. This method is what the # controller uses. 
        response = self.avTransport.SaveQueue(
            [("InstanceID", 0), ("Title", title), ("ObjectID", "")]
        )
        item_id = response["AssignedObjectID"]
        obj_id = item_id.split(":", 2)[1]
        uri = "file:///jffs/settings/savedqueues.rsq#{}".format(obj_id)
        res = [DidlResource(uri=uri, protocol_info="x-rincon-playlist:*:*:*")]
        return DidlPlaylistContainer(
            resources=res, title=title, parent_id="SQ:", item_id=item_id
        )

    @only_on_master
    def remove_sonos_playlist(self, sonos_playlist):
        object_id = getattr(sonos_playlist, "item_id", sonos_playlist)
        return self.contentDirectory.DestroyObject([("ObjectID", object_id)])

    def add_item_to_sonos_playlist(self, queueable_item, sonos_playlist):
        # Get the update_id for the playlist
        response, _ = self.music_library._music_lib_search(sonos_playlist.item_id, 0, 1)
        update_id = response["UpdateID"]

        # Form the metadata for queueable_item
        metadata = to_didl_string(queueable_item)

        # Make the request
        self.avTransport.AddURIToSavedQueue(
            [
                ("InstanceID", 0),
                ("UpdateID", update_id),
                ("ObjectID", sonos_playlist.item_id),
                ("EnqueuedURI", queueable_item.resources[0].uri),
                ("EnqueuedURIMetaData", metadata),
                # 2 ** 32 - 1 = 4294967295; this field always has this value.
                # Most likely, playlist positions are represented as a 32 bit
                # uint and this is therefore the largest index possible.
                # Asking to add at this index therefore probably amounts to
                # adding it "at the end".
                ("AddAtIndex", 4294967295),
            ]
        )

    @only_on_master
    def set_sleep_timer(self, sleep_time_seconds):
        # Note: A value of None for sleep_time_seconds is valid, and needs to
        # be kept distinct from 0. 0 means go to sleep now, which immediately
        # starts the sound tapering and could be a useful feature, while None
        # means cancel the current timer.
        try:
            if sleep_time_seconds is None:
                sleep_time = ""
            else:
                sleep_time = format(
                    datetime.timedelta(seconds=int(sleep_time_seconds))
                )
            self.avTransport.ConfigureSleepTimer(
                [
                    ("InstanceID", 0),
                    ("NewSleepTimerDuration", sleep_time),
                ]
            )
        except SoCoUPnPException as err:
            if "Error 402 received" in str(err):
                raise ValueError(
                    "invalid sleep_time_seconds, must be integer value "
                    "between 0 and 86399 inclusive or None"
                ) from err
            raise
        except ValueError as error:
            raise ValueError(
                "invalid sleep_time_seconds, must be integer value "
                "between 0 and 86399 inclusive or None"
            ) from error

    @only_on_master
    def get_sleep_timer(self):
        resp = self.avTransport.GetRemainingSleepTimerDuration(
            [
                ("InstanceID", 0),
            ]
        )
        if resp["RemainingSleepTimerDuration"]:
            times = resp["RemainingSleepTimerDuration"].split(":")
            return int(times[0]) * 3600 + int(times[1]) * 60 + int(times[2])
        else:
            return None

    @only_on_master
    def reorder_sonos_playlist(self, sonos_playlist, tracks, new_pos, update_id=0):
        # allow either a string 'SQ:10' or an object with item_id attribute.
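        # Illustrative call shapes, inferred from the branching below (a
        # hedged sketch, not an exhaustive specification):
        #   reorder_sonos_playlist(playlist, 2, 0)           # move one track
        #   reorder_sonos_playlist(playlist, "0,2", "1,3")   # pre-formatted strings, one request
        #   reorder_sonos_playlist("SQ:10", [0, 2], [1, 3])  # iterables, one request per pair
        # A new position of None (or "") appears to remove the track, which is
        # how remove_from_sonos_playlist() and clear_sonos_playlist() use it.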
object_id = getattr(sonos_playlist, "item_id", sonos_playlist) if isinstance(tracks, str): track_list = [ tracks, ] position_list = [ new_pos, ] elif isinstance(tracks, int): track_list = [ tracks, ] if new_pos is None: new_pos = "" position_list = [ new_pos, ] else: track_list = [str(x) for x in tracks] position_list = [str(x) if x is not None else "" for x in new_pos] # track_list = ','.join(track_list) # position_list = ','.join(position_list) if update_id == 0: # retrieve the update id for the object response, _ = self.music_library._music_lib_search(object_id, 0, 1) update_id = response["UpdateID"] change = 0 for track, position in zip(track_list, position_list): if track == position: # there is no move, a no-op continue response = self.avTransport.ReorderTracksInSavedQueue( [ ("InstanceID", 0), ("ObjectID", object_id), ("UpdateID", update_id), ("TrackList", track), ("NewPositionList", position), ] ) change += int(response["QueueLengthChange"]) update_id = int(response["NewUpdateID"]) length = int(response["NewQueueLength"]) response = {"change": change, "update_id": update_id, "length": length} return response @only_on_master def clear_sonos_playlist(self, sonos_playlist, update_id=0): if not isinstance(sonos_playlist, DidlPlaylistContainer): sonos_playlist = self.get_sonos_playlist_by_attr("item_id", sonos_playlist) count = self.music_library.browse(ml_item=sonos_playlist).total_matches tracks = ",".join([str(x) for x in range(count)]) if tracks: return self.reorder_sonos_playlist( sonos_playlist, tracks=tracks, new_pos="", update_id=update_id ) else: return {"change": 0, "update_id": update_id, "length": count} @only_on_master def move_in_sonos_playlist(self, sonos_playlist, track, new_pos, update_id=0): return self.reorder_sonos_playlist( sonos_playlist, int(track), int(new_pos), update_id ) @only_on_master def remove_from_sonos_playlist(self, sonos_playlist, track, update_id=0): return self.reorder_sonos_playlist(sonos_playlist, int(track), None, update_id) @only_on_master def get_sonos_playlist_by_attr(self, attr_name, match): for sonos_playlist in self.get_sonos_playlists(): if getattr(sonos_playlist, attr_name) == match: return sonos_playlist raise ValueError('No match on "{}" for value "{}"'.format(attr_name, match)) def get_battery_info(self, timeout=3.0): # Retrieve information from the speaker's status URL try: response = requests.get( "http://" + self.ip_address + ":1400/status/batterystatus", timeout=timeout, ) except (ConnectTimeout, ReadTimeout) as error: raise TimeoutError from error except RequestsConnectionError as error: raise ConnectionError from error if response.status_code != 200: raise ConnectionError battery_info = {} try: zp_info = xmltodict.parse(response.text)["ZPSupportInfo"] for info_item in zp_info["LocalBatteryStatus"]["Data"]: battery_info[info_item["@name"]] = info_item["#text"] try: battery_info["Level"] = int(battery_info["Level"]) except (KeyError, ValueError): pass except (KeyError, ExpatError, TypeError) as error: raise NotSupportedException from error return battery_info RADIO_STATIONS = 0 RADIO_SHOWS = 1 SONOS_FAVORITES = 2 NS = { "dc": "{http://purl.org/dc/elements/1.1/}", "upnp": "{urn:schemas-upnp-org:metadata-1-0/upnp/}", "": "{urn:schemas-upnp-org:metadata-1-0/DIDL-Lite/}", } PLAY_MODES = { "NORMAL": (False, False), "SHUFFLE_NOREPEAT": (True, False), "SHUFFLE": (True, True), "REPEAT_ALL": (False, True), "SHUFFLE_REPEAT_ONE": (True, "ONE"), "REPEAT_ONE": (False, "ONE"), } PLAY_MODE_BY_MEANING = {meaning: mode for mode, meaning in 
PLAY_MODES.items()} MUSIC_SRC_LIBRARY = "LIBRARY" MUSIC_SRC_RADIO = "RADIO" MUSIC_SRC_WEB_FILE = "WEB_FILE" MUSIC_SRC_LINE_IN = "LINE_IN" MUSIC_SRC_TV = "TV" MUSIC_SRC_AIRPLAY = "AIRPLAY" MUSIC_SRC_UNKNOWN = "UNKNOWN" MUSIC_SRC_NONE = "NONE" SOURCES = { r"^$": MUSIC_SRC_NONE, r"^x-file-cifs:": MUSIC_SRC_LIBRARY, r"^x-rincon-mp3radio:": MUSIC_SRC_RADIO, r"^x-sonosapi-stream:": MUSIC_SRC_RADIO, r"^x-sonosapi-radio:": MUSIC_SRC_RADIO, r"^x-sonosapi-hls:": MUSIC_SRC_RADIO, r"^aac:": MUSIC_SRC_RADIO, r"^hls-radio:": MUSIC_SRC_RADIO, r"^https?:": MUSIC_SRC_WEB_FILE, r"^x-rincon-stream:": MUSIC_SRC_LINE_IN, r"^x-sonos-htastream:": MUSIC_SRC_TV, r"^x-sonos-vli:.*,airplay:": MUSIC_SRC_AIRPLAY, } SOUNDBARS = ("playbase", "playbar", "beam", "sonos amp", "arc", "arc sl") if config.SOCO_CLASS is None: config.SOCO_CLASS = SoCo
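# Example usage (a minimal, hedged sketch; the IP address below is a
# placeholder and these lines are comments only, so nothing runs on import):
#
#     from soco import SoCo
#     speaker = SoCo("192.168.1.68")      # hypothetical speaker address
#     speaker.add_uri_to_queue("x-file-cifs://nas/music/track.mp3")
#     print(speaker.queue_size)           # items now in the queue
#     speaker.clear_queue()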
true
true